diff --git a/lib/mysqlred.py b/lib/mysqlred.py
index 499816f..f57ae2b 100644
--- a/lib/mysqlred.py
+++ b/lib/mysqlred.py
@@ -175,6 +175,10 @@ SET `page_title` = ?, `rev_id` = ?, `status`= ? WHERE `page_id` = ?;'
         self.data = self.get_page()
 
     def __del__( self ):
+        """
+        Needed to prevent descendant classes of MYSQL_RED from deleting
+        connection to db
+        """
         pass
 
     def get_page( self ):
@@ -246,20 +250,27 @@ class MysqlRedFam( MysqlRed ):
 
     _update_query = 'UPDATE `{prefix}_red_families` \
 SET `red_page_id` = ?, `heading` = ?, `beginning` = ?, `ending` = ?, \
 `status`= ? WHERE `fam_hash` = ?;'
 
+    _cached_insert_data = {}
     _insert_query = 'INSERT INTO `{prefix}_red_families` \
 ( fam_hash, red_page_id, beginning, ending, status, heading, \
 article0, article1, article2, article3, article4, article5, article6, \
 article7 ) VALUES ( ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ? );'
 
-    def __init__( self ):
+    def __init__( self, fam_hash=None ):
         """
         Creates a new instance, runs __init__ of parent class
         """
+        self.__fam_hash = fam_hash
+
         super().__init__( )
 
     def __del__( self ):
+        """
+        Needed to prevent descendant classes of MYSQL_RED from deleting
+        connection to db
+        """
         pass
 
     def get_fam( self, fam_hash ):
@@ -335,6 +346,29 @@ article7 ) VALUES ( ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ? );'
             for row in res:
                 yield row
 
+    def get_by_status_and_ending( self, status, ending ):
+        """
+        Generator which fetches redFams with given status and ending from DB
+        """
+
+        cursor = type( self ).connection.cursor( mysqldb.DictCursor )
+
+        cursor.execute( (
+            'SELECT * ' +
+            'FROM `{prefix}_red_families` `F` ' +
+            'INNER JOIN `{prefix}_red_pages` `P` ' +
+            'ON `F`.`status` = ? ' +
+            'AND `F`.`ending` >= ? ' +
+            'AND `F`.`red_page_id` = `P`.`page_id`;').format(
+            prefix=type( self ).db_table_prefix), ( status, ending ) )
+
+        while True:
+            res = cursor.fetchmany( 1000 )
+            if not res:
+                break
+            for row in res:
+                yield row
+
 
 class MysqlRedError(Exception):
     """
diff --git a/lib/redfam.py b/lib/redfam.py
index 30dd22d..37162c4 100644
--- a/lib/redfam.py
+++ b/lib/redfam.py
@@ -57,6 +57,9 @@ class RedFam:
             @param heading  str  Original heading of RedFam (Link)
         """
 
+        # Database interface
+        self._mysql = MysqlRedFam( fam_hash )
+
         # Initial attribute values
         self._articlesList = articlesList
         self._beginning = beginning
@@ -108,6 +111,28 @@ class RedFam:
         else:
             self._fam_hash = h.hexdigest()
 
+    def changed( self ):
+        """
+        Checks whether anything has changed and maybe triggers db update
+        """
+
+        # On archived red_fams do not delete possibly existing ending
+        if( not self._ending and self._status > 1 and
+            self._mysql.data[ 'ending' ] ):
+
+            self._ending = self._mysql.data[ 'ending' ]
+
+        # Since status change means something has changed, update database
+        if( self._status != self._mysql.data[ 'status' ] or
+            self._beginning != self._mysql.data[ 'beginning' ] or
+            self._ending != self._mysql.data[ 'ending' ] or
+            self._red_page_id != self._mysql.data[ 'red_page_id' ] or
+            self._heading != self._mysql.data[ 'heading' ]):
+
+            self._mysql.update_fam( self._red_page_id, self._heading,
+                                    self._beginning, self._ending,
+                                    self._status )
+
     @classmethod
     def flush_db_cache( cls ):
         """
@@ -194,13 +219,13 @@ class RedFamParser( RedFam ):
         """
 
         # We need a connection to our mysqldb
-        self.__mysql = MysqlRedFam( )
-        self.__mysql.get_fam( self._fam_hash )
+        self._mysql = MysqlRedFam( )
+        self._mysql.get_fam( self._fam_hash )
 
-        if not self.__mysql.data:
-            self.__mysql.add_fam( self._articlesList, self._heading,
-                                  self._red_page_id, self._beginning,
-                                  self._ending )
+        if not self._mysql.data:
+            self._mysql.add_fam( self._articlesList, self._heading,
+                                 self._red_page_id, self._beginning,
+                                 self._ending )
 
     def heading_parser( self, heading ):
         """
@@ -226,6 +251,7 @@ class RedFamParser( RedFam ):
         if len( self._articlesList ) > 8:
             # For repression in output we need to know the fam hash
             self.calc_fam_hash()
+
             jogobot.output( (
                 "\03{{lightred}}" +
                 "Maximum number of articles in red_fam exceeded, " +
@@ -289,7 +315,7 @@ class RedFamParser( RedFam ):
         """
 
         # Do not change stati set by worker script etc.
-        if not self.__mysql.data['status'] > 2:
+        if not self._mysql.data['status'] > 2:
 
             # No ending, discussion is running:
             # Sometimes archived discussions also have no detectable ending
@@ -301,29 +327,8 @@ class RedFamParser( RedFam ):
         else:
-            self._status = self.__mysql.data[ 'status' ]
-
-    def changed( self ):
-        """
-        Checks wether anything has changed and maybe triggers db update
-        """
-        # On archived red_fams do not delete possibly existing ending
-        if( not self._ending and self._status > 1 and
-            self.__mysql.data[ 'ending' ] ):
-
-            self._ending = self.__mysql.data[ 'ending' ]
-
-        # Since status change means something has changed, update database
-        if( self._status != self.__mysql.data[ 'status' ] or
-            self._beginning != self.__mysql.data[ 'beginning' ] or
-            self._ending != self.__mysql.data[ 'ending' ] or
-            self._red_page_id != self.__mysql.data[ 'red_page_id' ] or
-            self._heading != self.__mysql.data[ 'heading' ]):
-
-            self.__mysql.update_fam( self._red_page_id, self._heading,
-                                     self._beginning, self._ending,
-                                     self._status )
+            self._status = self._mysql.data[ 'status' ]
 
     @classmethod
     def is_section_redfam_cb( cls, heading ):
@@ -440,6 +445,23 @@ class RedFamWorker( RedFam ):
                          mysql_data[ 'status' ], mysql_data[ 'fam_hash' ],
                          mysql_data[ 'heading' ] )
 
+        self._mysql.data = mysql_data
+
+        # Get related RedPage-Information
+        self.redpageid = mysql_data[ 'page_id' ]
+        self.redpagetitle = mysql_data[ 'page_title' ]
+
+        # Make sure locale is set to 'de_DE.UTF-8' to prevent problems
+        # with wrong month abbreviations in strptime
+        locale.setlocale(locale.LC_ALL, 'de_DE.UTF-8')
+
+    def update_status( self ):
+        """
+        Sets status to 3 when worked on
+        """
+
+        self._status = 3
+
     @classmethod
     def list_by_status( cls, status ):
         """
@@ -453,6 +475,20 @@ class RedFamWorker( RedFam ):
                 print(fam)
                 raise
 
+    @classmethod
+    def gen_by_status_and_ending( cls, status, ending ):
+        """
+        Yield red_fams stored in db by given status which have an ending after
+        given one
+        """
+        mysql = MysqlRedFam()
+        for fam in mysql.get_by_status_and_ending( status, ending ):
+            try:
+                yield cls( fam )
+            except RedFamHashError:
+                print(fam)
+                raise
+
 
 class RedFamError( Exception ):
     """
diff --git a/lib/redpage.py b/lib/redpage.py
index 176f6bc..ebedaba 100644
--- a/lib/redpage.py
+++ b/lib/redpage.py
@@ -39,19 +39,22 @@ class RedPage:
     Class for handling redundance discussion pages and archives
     """
 
-    def __init__( self, page, archive=False ):
+    def __init__( self, page=None, pageid=None, archive=False ):
         """
         Generate a new RedPage object based on the given pywikibot page object
 
-        @param page page Pywikibot/MediaWiki page object for page
+        @param page Pywikibot/MediaWiki page object for page
+        @type page pywikibot.Page
+        @param pageid MW-Pageid for related page
+        @type pageid int
         """
 
         # Safe the pywikibot page object
         self.page = page
+        self.pageid = pageid
         self._archive = archive
 
         self.__handle_db( )
-        self.is_page_changed()
 
         self._parsed = None
@@ -62,7 +65,16 @@ class RedPage:
         """
 
         # We need a connection to our mysqldb
-        self.__mysql = MysqlRedPage( self.page._pageid )
+        if self.page:
+            self.__mysql = MysqlRedPage( self.page._pageid )
+            self.pageid = self.page._pageid
+        elif self.pageid:
+            self.__mysql = MysqlRedPage( self.pageid )
+            self.page = pywikibot.Page( pywikibot.Site(),
+                                        self.__mysql.data['page_title'] )
+            self.page.exists()
+        else:
+            raise ValueError( "Page NOR pageid provided!" )
 
         if not self.__mysql.data:
            self.__mysql.add_page( self.page.title(), self.page._revid )