#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
#  mysqlred.py
#
#  Copyright 2015 GOLDERWEB – Jonathan Golder
#
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 3 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  You should have received a copy of the GNU General Public License
#  along with this program; if not, write to the Free Software
#  Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
#  MA 02110-1301, USA.
#
#
"""
Provides interface classes for communication of redundances bot with mysql-db
"""

# Prefer using oursql over MySQLdb
# NOTE(review): the queries below use qmark ('?') placeholders, which oursql
# supports but MySQLdb does not (it uses '%s' format style) — the MySQLdb
# fallback will therefore fail on every parameterized query; confirm whether
# the fallback is still needed and port the placeholders if so.
try:
    import oursql as mysqldb
except ImportError:
    import MySQLdb as mysqldb

from pywikibot import config

import jogobot


class MysqlRed:
    """
    Basic interface class, containing opening of connection

    Specific querys should be defined in descendant classes per data type
    """

    # Save mysqldb-connection as class attribute to use only one
    # in descendant classes.
    # NOTE: __init__ assigns via type(self), so each descendant class ends up
    # with its own `connection` attribute (one connection per subclass).
    connection = False

    # DB credentials, read once from pywikibot's user config
    db_hostname = config.db_hostname
    db_username = config.db_username
    db_password = config.db_password
    db_name = config.db_username + jogobot.db_namesuffix

    # Class variables for storing cached querys
    # (shadowed per descendant class so caches are not shared)
    _cached_update_data = []
    _update_query = ''
    _cached_insert_data = {}
    _insert_query = ''

    def __init__(self):
        """
        Opens a connection to MySQL-DB

        @returns mysql-stream  MySQL Connection
        """
        # Connect to mysqldb only once (per descendant class, see above)
        if not type(self).connection:
            type(self).connection = mysqldb.connect(
                host=type(self).db_hostname,
                user=type(self).db_username,
                passwd=type(self).db_password,
                db=type(self).db_name)

    def __del__(self):
        """
        Before deleting class, close connection to MySQL-DB
        """
        # Guard: `connection` is False when no connection was ever opened;
        # calling .close() on it would raise AttributeError at teardown.
        if type(self).connection:
            type(self).connection.close()

    @classmethod
    def flush(cls):
        """
        Run cached querys and commit the changes
        """
        cursor = cls.connection.cursor()

        try:
            # Remember whether there is anything to write BEFORE the caches
            # are reset below. (Checking the caches *after* the resets —
            # as a previous revision did — always sees them empty, so the
            # commit would never run and all changes were lost.)
            has_changes = bool(cls._cached_insert_data) or \
                bool(cls._cached_update_data)

            # Execute insert query
            if cls._cached_insert_data:
                # cls._cached_insert_data is a dict keyed by row id;
                # only the value tuples are handed to the db driver
                cursor.executemany(cls._insert_query,
                                   cls._cached_insert_data.values())
                # Reset after writing
                cls._cached_insert_data = {}

            # Execute update query
            # Use executemany since update could not be reduced to one query
            if cls._cached_update_data:
                cursor.executemany(cls._update_query,
                                   cls._cached_update_data)
                # Reset after writing
                cls._cached_update_data = []

            # Commit db changes
            if has_changes:
                cls.connection.commit()
        finally:
            cursor.close()


class MysqlRedPage(MysqlRed):
    """
    MySQL-db Interface for handling querys for RedPages
    """

    # Class variables for storing cached querys
    _cached_update_data = []
    _update_query = 'UPDATE `red_pages` \
SET `page_title` = ?, `rev_id` = ?, `status`= ? WHERE `page_id` = ?;'

    _cached_insert_data = {}
    _insert_query = 'INSERT INTO `red_pages` \
( page_id, page_title, rev_id, status ) VALUES ( ?, ?, ?, ? );'

    def __init__(self, page_id):
        """
        Creates a new instance, runs __init__ of parent class

        @param int page_id  MediaWiki page_id of the page to handle
        """
        super().__init__()

        self.__page_id = int(page_id)
        # Row data for this page, or False if the page is not in the db yet
        self.data = self.get_page()

    def __del__(self):
        # Intentionally do NOT close the shared class-level connection here;
        # it stays open for other instances (parent __del__ is overridden).
        pass

    def get_page(self):
        """
        Retrieves a red page row from MySQL-Database for given page_id

        @returns tuple  Tuple with data for given page_id
                 bool   FALSE if none found
        """
        cursor = type(self).connection.cursor(mysqldb.DictCursor)
        try:
            cursor.execute(
                'SELECT * FROM `red_pages` WHERE `page_id` = ?;',
                (self.__page_id, ))
            res = cursor.fetchone()
        finally:
            cursor.close()

        # Preserve historical API: False (not None) signals "not found"
        if res:
            return res
        else:
            return False

    def add_page(self, page_title, rev_id, status=0):
        """
        Inserts a red page row in MySQL-Database for given page_id

        @param str page_title  MediaWiki new page_title
        @param int rev_id      MediaWiki current rev_id
        @param int status      Page parsing status
        """
        # Key by page_id so a repeated add for the same page overwrites the
        # pending insert instead of producing a duplicate row
        insert_data = {
            self.__page_id: (self.__page_id, page_title, rev_id, status)}

        type(self)._cached_insert_data.update(insert_data)

        # Manualy construct self.data dict (mirrors the pending db row)
        self.data = {'page_id': self.__page_id,
                     'rev_id': rev_id,
                     'page_title': page_title,
                     'status': status}

    def update_page(self, rev_id=None, page_title=None, status=0):
        """
        Updates the red page row in MySQL-Database for given page_id

        @param int rev_id      MediaWiki current rev_id
        @param str page_title  MediaWiki new page_title
        @param int status      Page parsing status
        """
        # Fall back to the currently known values when not provided
        if not page_title:
            page_title = self.data['page_title']
        if not rev_id:
            rev_id = self.data['rev_id']

        # Parameter order must match _update_query's placeholders
        type(self)._cached_update_data.append(
            (page_title, rev_id, status, self.__page_id))


class MysqlRedFam(MysqlRed):
    """
    MySQL-db Interface for handling querys for RedFams
    """

    # Class variables for storing cached querys
    _cached_update_data = []
    _update_query = 'UPDATE `red_families` \
SET `red_page_id` = ?, `heading` = ?, `beginning` = ?, `ending` = ?, \
`status`= ? WHERE `fam_hash` = ?;'

    _cached_insert_data = {}
    _insert_query = 'INSERT INTO `red_families` \
( fam_hash, red_page_id, beginning, ending, status, heading, \
article0, article1, article2, article3, article4, article5, article6, \
article7 ) VALUES ( ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ? );'

    def __init__(self):
        """
        Creates a new instance, runs __init__ of parent class
        """
        super().__init__()

    def __del__(self):
        # Keep the shared class-level connection open (see MysqlRedPage)
        pass

    def get_fam(self, fam_hash):
        """
        Retrieves a red family row from MySQL-Database for given fam_hash

        Stores the result in self.data (dict, or None if none found) and
        remembers fam_hash for subsequent add_fam/update_fam calls.

        @param fam_hash  Hash identifying the red family row
        """
        self.__fam_hash = fam_hash

        cursor = type(self).connection.cursor(mysqldb.DictCursor)
        try:
            cursor.execute(
                'SELECT * FROM `red_families` WHERE `fam_hash` = ?;',
                (fam_hash, ))
            self.data = cursor.fetchone()
        finally:
            cursor.close()

    def add_fam(self, articlesList, heading, red_page_id,
                beginning, ending=None, status=0):
        """
        Inserts a red family row in MySQL-Database for the current fam_hash

        NOTE(review): relies on self.__fam_hash having been set by a prior
        get_fam() call — confirm all callers follow that order.

        @param articlesList     Articles of the family (max. 8 used)
        @param str heading      Heading of the redundance section
        @param int red_page_id  MediaWiki page_id of the containing page
        @param beginning        Timestamp of beginning
        @param ending           Timestamp of ending
        @param int status       red_fam status
        """
        data = [self.__fam_hash, red_page_id, beginning, ending, status,
                heading]

        for article in articlesList:
            data.append(str(article))

        # Table has fixed columns article0..article7 — pad with NULLs
        while len(data) < 14:
            data.append(None)

        data = tuple(data)

        # Key by fam_hash so repeated adds for the same family overwrite
        # the pending insert instead of duplicating it
        insert_data = {self.__fam_hash: data}

        type(self)._cached_insert_data.update(insert_data)

        # Manualy construct self.data dict (mirrors the pending db row)
        data_keys = ('fam_hash', 'red_page_id', 'beginning', 'ending',
                     'status', 'heading', 'article0', 'article1', 'article2',
                     'article3', 'article4', 'article5', 'article6',
                     'article7')

        self.data = dict(zip(data_keys, data))

    def update_fam(self, red_page_id, heading, beginning, ending, status):
        """
        Updates the red fam row in MySQL-Database for given fam_hash

        @param int red_page_id  MediaWiki page_id
        @param str heading      Heading of the redundance section
        @param datetime beginning  Timestamp of beginning
        @param datetime ending     Timestamp of ending
        @param int status       red_fam status
        """
        # Parameter order must match _update_query's placeholders
        type(self)._cached_update_data.append(
            (red_page_id, heading, beginning, ending, status,
             self.__fam_hash))

    def get_by_status(self, status):
        """
        Generator witch fetches redFams with given status from DB

        @param status  Status value to filter on
        @yields dict   One red_families row per iteration
        """
        cursor = type(self).connection.cursor(mysqldb.DictCursor)
        try:
            cursor.execute(
                'SELECT * FROM `red_families` WHERE `status` = ?;',
                (status, ))

            # Fetch in chunks to keep memory bounded on large result sets
            while True:
                res = cursor.fetchmany(1000)
                if not res:
                    break
                for row in res:
                    yield row
        finally:
            # Also runs when the consumer abandons the generator early
            cursor.close()