Browse Source

Some cleanups

Remove old commented out code from manual mysql solution
develop
Jonathan Golder 8 years ago
parent
commit
467f829af2
  1. 2
      bots/reddiscparser.py
  2. 336
      lib/mysqlred.py
  3. 180
      lib/redfam.py
  4. 97
      lib/redpage.py

2
bots/reddiscparser.py

@ -153,7 +153,7 @@ class DiscussionParserBot(
else:
redpage = RedPage( self.current_page )
#~ # Check whether parsing is needed
# Check whether parsing is needed
if redpage.is_parsing_needed():
# Count families for failure analysis
fam_counter = 0

336
lib/mysqlred.py

@ -67,6 +67,7 @@ session = Session()
family = "dewpbeta"
class Mysql(object):
session = session
@declared_attr
@ -122,6 +123,7 @@ class ColumnList( list, MutableComposite ):
"""
return self
class Status( types.TypeDecorator ):
impl = types.String
@ -157,7 +159,6 @@ class Status( types.TypeDecorator ):
return Status(self.impl.length)
class MysqlRedFam( Mysql, Base ):
famhash = Column( String(64), primary_key=True, unique=True )
@ -243,6 +244,7 @@ class MysqlRedFam( Mysql, Base ):
def articlesStatus(self, articlesStatus):
self.__articlesStatus = ColumnList(articlesStatus)
class MysqlRedPage( Mysql, Base ):
pageid = Column( Integer, unique=True, primary_key=True )
revid = Column( Integer, unique=True, nullable=False )
@ -254,338 +256,8 @@ class MysqlRedPage( Mysql, Base ):
collection_class=attribute_mapped_collection("famhash"))
Base.metadata.create_all(engine)
#~ class MysqlRed:
#~ """
#~ Basic interface class, containing opening of connection
#~ Specific querys should be defined in descendant classes per data type
#~ """
#~ # Save mysqldb-connection as class attribute to use only one
#~ # in descendant classes
#~ connection = False
#~ db_hostname = config.db_hostname
#~ db_port = config.db_port
#~ db_username = config.db_username
#~ db_password = config.db_password
#~ db_name = config.db_username + jogobot.config['db_suffix']
#~ db_table_prefix = False
#~ # Class variables for storing cached querys
#~ _cached_update_data = []
#~ _update_query = ''
#~ _cached_insert_data = {}
#~ _insert_query = ''
#~ def __init__( self ):
#~ """
#~ Opens a connection to MySQL-DB
#~ @returns mysql-stream MySQL Connection
#~ """
#~ # Needs to be generated after Parsing of Args (not at import time)
#~ if not type(self).db_table_prefix:
#~ type(self).db_table_prefix = \
#~ pywikibot.Site().family.dbName(pywikibot.Site().code)
#~ # Now we can setup prepared queries
#~ self._prepare_queries()
#~ # Connect to mysqldb only once
#~ if not type( self ).connection:
#~ type( self ).connection = mysqldb.connect(
#~ host=type( self ).db_hostname,
#~ port=type( self ).db_port,
#~ user=type( self ).db_username,
#~ passwd=type( self ).db_password,
#~ db=type( self ).db_name )
#~ # Register callback for warnig if exit with cached db write querys
#~ atexit.register( type(self).warn_if_not_flushed )
#~ def __del__( self ):
#~ """
#~ Before deleting class, close connection to MySQL-DB
#~ """
#~ type( self ).connection.close()
#~ def _prepare_queries( self ):
#~ """
#~ Used to replace placeholders in prepared queries
#~ """
#~ type(self)._update_query = type(self)._update_query.format(
#~ prefix=type(self).db_table_prefix)
#~ type(self)._insert_query = type(self)._insert_query.format(
#~ prefix=type(self).db_table_prefix)
#~ @classmethod
#~ def flush( cls ):
#~ """
#~ Run cached querys
#~ """
#~ if not cls.connection:
#~ raise MysqlRedConnectionError( "No connection exists!" )
#~ cursor = cls.connection.cursor()
#~ # Execute insert query
#~ if cls._cached_insert_data:
#~ # Since cls._cached_insert_data is a dict, we need to have a custom
#~ # Generator to iterate over it
#~ cursor.executemany( cls._insert_query,
#~ ( cls._cached_insert_data[ key ]
#~ for key in cls._cached_insert_data ) )
#~ # Reset after writing
#~ cls._cached_insert_data = {}
#~ # Execute update query
#~ # Use executemany since update could not be reduced to one query
#~ if cls._cached_update_data:
#~ cursor.executemany( cls._update_query, cls._cached_update_data )
#~ # Reset after writing
#~ cls._cached_update_data = []
#~ # Commit db changes
#~ if cls._cached_insert_data or cls._cached_update_data:
#~ cls.connection.commit()
#~ @classmethod
#~ def warn_if_not_flushed(cls):
#~ """
#~ Outputs a warning if there are db write queries cached and not flushed
#~ before exiting program!
#~ """
#~ if cls._cached_update_data or cls._cached_insert_data:
#~ jogobot.output( "Cached Database write querys not flushed!!! " +
#~ "Data loss is possible!", "WARNING" )
#~ class MysqlRedPage( MysqlRed ):
#~ """
#~ MySQL-db Interface for handling querys for RedPages
#~ """
#~ # Class variables for storing cached querys
#~ # '{prefix}' will be replaced during super().__init__()
#~ _cached_update_data = []
#~ _update_query = 'UPDATE `{prefix}_redpages` \
#~ SET `pagetitle` = ?, `revid` = ?, `status`= ? WHERE `pageid` = ?;'
#~ _cached_insert_data = {}
#~ _insert_query = 'INSERT INTO `{prefix}_redpages` \
#~ ( pageid, pagetitle, revid, status ) VALUES ( ?, ?, ?, ? );'
#~ def __init__( self, pageid ):
#~ """
#~ Creates a new instance, runs __init__ of parent class
#~ """
#~ super().__init__( )
#~ self.__pageid = int( pageid )
#~ self.data = self.get_page()
#~ def __del__( self ):
#~ """
#~ Needed to prevent descendant classes of MYSQL_RED from deleting
#~ connection to db
#~ """
#~ pass
#~ def get_page( self ):
#~ """
#~ Retrieves a red page row from MySQL-Database for given page_id
#~ @param int pageid MediaWiki page_id for page to retrieve
#~ @returns tuple Tuple with data for given page_id
#~ bool FALSE if none found
#~ """
#~ cursor = type( self ).connection.cursor(mysqldb.DictCursor)
#~ cursor.execute(
#~ 'SELECT * FROM `{prefix}_redpages` WHERE `pageid` = ?;'.format(
#~ prefix=type(self).db_table_prefix), ( self.__pageid, ) )
#~ res = cursor.fetchone()
#~ if res:
#~ return res
#~ else:
#~ return False
#~ def add_page( self, pagetitle, revid, status=0 ):
#~ """
#~ Inserts a red page row in MySQL-Database for given pageid
#~ @param int revid MediaWiki current revid
#~ @param str pagetitle MediaWiki new pagetitle
#~ @param int status Page parsing status
#~ """
#~ insert_data = { self.__pageid: ( self.__pageid, pagetitle,
#~ revid, status ) }
#~ type( self )._cached_insert_data.update( insert_data )
#~ # Manually construct self.data dict
#~ self.data = { 'pageid': self.__pageid, 'revid': revid,
#~ 'pagetitle': pagetitle, 'status': status }
#~ def update_page( self, revid=None, pagetitle=None, status=0 ):
#~ """
#~ Updates the red page row in MySQL-Database for given page_id
#~ @param int revid MediaWiki current rev_id
#~ @param str pagetitle MediaWiki new page_title
#~ @param int status Page parsing status
#~ """
#~ if not pagetitle:
#~ pagetitle = self.data[ 'pagetitle' ]
#~ if not revid:
#~ revid = self.data[ 'revid' ]
#~ type( self )._cached_update_data.append( ( pagetitle, revid,
#~ status, self.__pageid ) )
#~ class MysqlRedFam( MysqlRed ):
#~ """
#~ MySQL-db Interface for handling querys for RedFams
#~ """
#~ # Class variables for storing cached querys
#~ _cached_update_data = []
#~ _update_query = 'UPDATE `{prefix}_redfams` \
#~ SET `redpageid` = ?, `heading` = ?, `beginning` = ?, `ending` = ?, \
#~ `status`= ? WHERE `famhash` = ?;'
#~ _cached_insert_data = {}
#~ _insert_query = 'INSERT INTO `{prefix}_redfams` \
#~ ( famhash, redpageid, beginning, ending, status, heading, \
#~ article0, article1, article2, article3, article4, article5, article6, \
#~ article7 ) VALUES ( ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ? );'
#~ def __init__( self, famhash=None ):
#~ """
#~ Creates a new instance, runs __init__ of parent class
#~ """
#~ self.__famhash = famhash
#~ super().__init__( )
#~ def __del__( self ):
#~ """
#~ Needed to prevent descendant classes of MYSQL_RED from deleting
#~ connection to db
#~ """
#~ pass
#~ def get_fam( self, famhash ):
#~ """
#~ Retrieves a red family row from MySQL-Database for given fam_hash
#~ @returns dict Dictionary with data for given fam hash
#~ False if none found
#~ """
#~ self.__famhash = famhash
#~ cursor = type( self ).connection.cursor( mysqldb.DictCursor )
#~ cursor.execute(
#~ 'SELECT * FROM `{prefix}_redfams` WHERE `famhash` = ?;'.
#~ format( prefix=type(self).db_table_prefix), ( famhash, ) )
#~ self.data = cursor.fetchone()
#~ def add_fam( self, articlesList, heading, redpageid,
#~ beginning, ending=None, status=0 ):
#~ data = [ self.__famhash, redpageid, beginning, ending,
#~ status, heading ]
#~ for article in articlesList:
#~ data.append( str( article ) )
#~ while len( data ) < 14:
#~ data.append( None )
#~ data = tuple( data )
#~ insert_data = { self.__famhash: data }
#~ type( self )._cached_insert_data.update( insert_data )
#~ # Manually construct self.data dict
#~ data_keys = ( 'famhash', 'redpageid', 'beginning', 'ending',
#~ 'status', 'heading', 'article0', 'article1', 'article2',
#~ 'article3', 'article4', 'article5', 'article6',
#~ 'article7' )
#~ self.data = dict( zip( data_keys, data ) )
#~ def update_fam( self, redpageid, heading, beginning, ending, status ):
#~ """
#~ Updates the red fam row in MySQL-Database for given fam_hash
#~ @param int redpageid MediaWiki page_id
#~ @param datetime beginning Timestamp of beginning
#~ qparam datetime ending Timestamp of ending of
#~ @param int status red_fam status
#~ """
#~ type( self )._cached_update_data.append( ( redpageid, heading,
#~ beginning, ending, status,
#~ self.__famhash ) )
#~ def get_by_status( self, status ):
#~ """
#~ Generator which fetches redFams with given status from DB
#~ """
#~ cursor = type( self ).connection.cursor( mysqldb.DictCursor )
#~ cursor.execute(
#~ 'SELECT * FROM `{prefix}_redfams` WHERE `status` = LIKE %?%;'.
#~ format( prefix=type( self ).db_table_prefix), ( status, ) )
#~ while True:
#~ res = cursor.fetchmany( 1000 )
#~ if not res:
#~ break
#~ for row in res:
#~ yield row
#~ def get_by_status_and_ending( self, status, ending ):
#~ """
#~ Generator which fetches redFams with given status from DB
#~ """
#~ cursor = type( self ).connection.cursor( mysqldb.DictCursor )
#~ cursor.execute( (
#~ 'SELECT * ' +
#~ 'FROM `{prefix}_redfams` `F` ' +
#~ 'INNER JOIN `{prefix}_redpages` `P` ' +
#~ 'ON `F`.`status` = ? ' +
#~ 'AND `F`.`ending` >= ? ' +
#~ 'AND `F`.`redpageid` = `P`.`pageid`;').format(
#~ prefix=type( self ).db_table_prefix),
#~ ( status, ending ) )
#~ while True:
#~ res = cursor.fetchmany( 1000 )
#~ if not res:
#~ break
#~ for row in res:
#~ yield row
Base.metadata.create_all(engine)
class MysqlRedError(Exception):

180
lib/redfam.py

@ -35,8 +35,7 @@ import pywikibot # noqa
from pywikibot.tools import deprecated # noqa
import jogobot
#~ from lib.mysqlred import Column, Integer, String, Text, DateTime, ForeignKey, ColumnList, Status
from lib.mysqlred import MysqlRedFam, MutableSet, ColumnList #, Mysql, Base, relationship, composite,
from lib.mysqlred import MysqlRedFam
class RedFam( MysqlRedFam ):
@ -45,7 +44,7 @@ class RedFam( MysqlRedFam ):
"""
def __init__( self, articlesList, beginning, ending=None, redpageid=None,
status=MutableSet(), famhash=None, heading=None ):
status=None, famhash=None, heading=None ):
"""
Generates a new RedFam object
@ -61,34 +60,9 @@ class RedFam( MysqlRedFam ):
# Having pywikibot.Site() is a good idea most of the time
self.site = pywikibot.Site()
# Database interface
#self._mysql = MysqlRedFam( famhash )
# Initial attribute values
#~ self.articlesList = articlesList
#~ self.beginning = beginning
#~ self.ending = ending
#~ self.redpageid = redpageid
#~ # self._status = set()
#~ # self._status = self._parse_status(status)
#~ self.famhash = famhash
#~ self.heading = heading
#self.status = status
#articlesStatus = ColumnList([ MutableSet() for x in range(0,8) ])
#~ # Calculates the sha1 hash over self._articlesList to
#~ # rediscover known redundance families
#~ self.calc_famhash()
#~ if not status:
#~ status = MutableSet()
super().__init__( articlesList=articlesList, beginning=beginning, ending=ending, redpageid=redpageid,
famhash=famhash, heading=heading, status=status, articlesStatus=None )
#super().__init__()
def __repr__( self ):
"""
Returns representation str of RedFam object
@ -137,35 +111,12 @@ class RedFam( MysqlRedFam ):
else:
self.famhash = type(self).calc_famhash(self.articlesList)
#~ def changed( self ):
#~ """
#~ Checks whether anything has changed and maybe triggers db update
#~ """
#~ # On archived redfams do not delete possibly existing ending
#~ if( not self.ending and "archived" in self._status and
#~ self._mysql.data[ 'ending' ] ):
#~ self._ending = self._mysql.data[ 'ending' ]
#~ # Since status change means something has changed, update database
#~ if( self._raw_status != self._mysql.data[ 'status' ] or
#~ self._beginning != self._mysql.data[ 'beginning' ] or
#~ self._ending != self._mysql.data[ 'ending' ] or
#~ self._red_page_id != self._mysql.data[ 'redpageid' ] or
#~ self._heading != self._mysql.data[ 'heading' ]):
#~ self._mysql.update_fam( self._redpageid, self._heading,
#~ self._beginning, self._ending,
#~ self._raw_status() )
@classmethod
def flush_db_cache( cls ):
"""
Calls flush method of Mysql Interface class
"""
cls.session.commit()
#~ MysqlRedFam.flush()
def add_status(self, status):
"""
@ -204,24 +155,6 @@ class RedFam( MysqlRedFam ):
else:
return False
#~ def _parse_status(self, raw_status ):
#~ """
#~ Sets status based on comma separated list
#~ @param raw_status Commaseparated string of stati (from DB)
#~ @type raw_status str
#~ """
#~ self._status = set( raw_status.strip().split(","))
#~ def _raw_status( self ):
#~ """
#~ Returns status as commaseparated string (to save in DB)
#~ @returns Raw status string
#~ @rtype str
#~ """
#~ return ",".join( self._status )
def article_add_status(self, status, index=None, title=None ):
"""
Adds a status specified by status, to article (identified by title
@ -292,46 +225,6 @@ class RedFam( MysqlRedFam ):
else:
raise IndexError( "No index given or wrong format!")
def _article_parse_status(self, raw_status, index=None, title=None ):
"""
Sets status based on comma separated list to articles (identified by
title or index in articlesList) status set
@param status Statusstring to set
@type status str
@param index Add to article with index in articlesList
@type index int
@param title Add to article with title in articlesList
@type title str
"""
if title and not index:
index = self._articlesList.index( title )
if isinstance( index, int ) and index < len(self._articlesList):
self._article_status[index] = set( raw_status.strip().split(","))
else:
raise IndexError( "No index given or wrong format!")
def _article_raw_status( self, index=None, title=None ):
"""
Returns status as commaseparated string (to save in DB) of article
(identified by title or index in articlesList) status set
@param index Get from article with index in articlesList
@type index int
@param title Get from article with title in articlesList
@type title str
@returns Raw status string
@rtype str
"""
if title and not index:
index = self._articlesList.index( title )
if isinstance( index, int ) and index < len(self._articlesList):
return ",".join( self._article_status[index] )
else:
raise IndexError( "No index given or wrong format!")
class RedFamParser( RedFam ):
"""
@ -369,54 +262,14 @@ class RedFamParser( RedFam ):
str strptime parseable string
"""
# Parse the provided heading of redundance section
# to set self._articlesList
#~ self.heading = str(heading)
#~ self.articlesList = articlesList
#~ # Catch sections with more then 8 articles, print error
#~ if len( self.articlesList ) > 8:
#~ # For repression in output we need to know the fam hash
#~ self.calc_famhash()
#~ jogobot.output(
#~ ( "\03{{lightred}}" +
#~ "Maximum number of articles in red_fam exceeded, " +
#~ "maximum number is 8, {number:d} were given \n {repress}"
#~ ).format( datetime=datetime.now().strftime(
#~ "%Y-%m-%d %H:%M:%S" ), number=len( self._articlesList ),
#~ repress=repr( self ) ),
#~ "WARNING" )
#~ # Only save the first 8 articles
#~ # self.articlesList = self.articlesList[:8]
# Calculates the sha1 hash over self._articlesList to
# rediscover known redundance families
famhash = type(self).calc_famhash(articlesList)
#~ obj = self.session.query(RedFamParser).filter(RedFamParser.famhash == self.famhash ).one_or_none()
#~ if obj:
#~ self = obj
# Set object attributes:
#~ self.redpageid = redpage._pageid
self._redpagearchive = redpagearchive
# self.famhash = None
# Method self.add_beginning sets self._beginning directly
#~ self.add_beginning( beginning )
#~ # Method self.add_ending sets self._ending directly
#~ if( ending ):
#~ self.add_ending( ending )
#~ else:
#~ # If no ending was provided set to None
#~ self.ending = None
#~ self.status = MutableSet()
# Parse Timestamps
beginning = self.__datetime(beginning)
if ending:
ending = self.__datetime(ending)
@ -429,31 +282,8 @@ class RedFamParser( RedFam ):
self.check_status()
self.session.add(self)
# Open database connection, ask for data if existing,
# otherwise create entry
# self.__handle_db()
# Triggers db update if anything changed
# self.changed()
#~ def __handle_db( self ):
#~ """
#~ Handles opening of db connection
#~ """
#~ # We need a connection to our mysqldb
#~ self._mysql = MysqlRedFam( )
#~ self._mysql.get_fam( self._famhash )
#~ if not self._mysql.data:
#~ self._mysql.add_fam( self._articlesList, self._heading,
#~ self._redpageid, self._beginning,
#~ self._ending )
def update( self, articlesList, heading, redpage, redpagearchive,
beginning, ending=None):
@ -490,8 +320,6 @@ class RedFamParser( RedFam ):
return [ str( link.title ) for link
in heading.ifilter_wikilinks() ]
def add_beginning( self, beginning ):
"""
Adds the beginning date of a redundance diskussion to the object
@ -780,8 +608,6 @@ class RedFamWorker( RedFam ):
self._article_raw_status( index=index )
index += 1
print( repr(self) )
def get_disc_link( self ):
"""
Constructs and returns the link to Redundancy discussion

97
lib/redpage.py

@ -30,8 +30,7 @@ import mwparserfromhell as mwparser
import jogobot # noqa
#~ from lib.mysqlred import Column, Integer, String, Text, DateTime, ForeignKey, ColumnList, Status
from lib.mysqlred import MysqlRedPage, relationship, MutableSet #MysqlRedFam, Base, composite,
from lib.mysqlred import MysqlRedPage, relationship
from lib.redfam import RedFam, RedFamParser
from sqlalchemy.orm.collections import attribute_mapped_collection
@ -60,7 +59,6 @@ class RedPage( MysqlRedPage ):
# Safe the pywikibot page object
if page:
self._page = page
pageid = self._page.pageid
super().__init__(
pageid=pageid,
@ -69,48 +67,15 @@ class RedPage( MysqlRedPage ):
status=MutableSet() ) #TODO EMPTY MutableSet() necessary?
#~ self._status = set()
if archive:
self.status.add("archived")
#~ self._archive = archive
#~ self.pageid = pageid
#~ self.revid = self.page._revid
#~ self.p
#~ self.status = MutableSet()
# self.__handle_db( )
#~ self.is_page_changed()
#~ self._parsed = None
self.is_archive()
self.session.add(self)
#~ def __handle_db( self ):
#~ """
#~ Handles opening of db connection
#~ """
#~ # We need a connection to our mysqldb
#~ if self.page:
#~ self.__mysql = MysqlRedPage( self.page._pageid )
#~ self.pageid = self.page._pageid
#~ elif self.pageid:
#~ self.__mysql = MysqlRedPage( self.pageid )
#~ self.page = pywikibot.Page( pywikibot.Site(),
#~ self.pagetitle )
#~ self.page.exists()
#~ else:
#~ raise ValueError( "Page NOR pagid provided!" )
#~ if not self.__mysql.data:
#~ self.__mysql.add_page( self.page.title(), self.page._revid )
def update( self, page ):
self._page = page
self.revid = page._revid
self.pagetitle = page.title()
self.is_archive()
@property
def page(self):
@ -123,24 +88,10 @@ class RedPage( MysqlRedPage ):
def archive(self):
return self.has_status("archived")
def is_page_changed( self ):
"""
Check whether the page was changed since last run
"""
self._changed = self.changedp()
#~ if( self.__mysql.data != { 'pageid': self.page._pageid,
#~ 'revid': self.page._revid,
#~ 'pagetitle': self.page.title(),
#~ 'status': self.__mysql.data[ 'status' ] } ):
#~ self._changed = True
#~ else:
#~ self._changed = False
def is_archive( self ):
"""
Detects whether current page is an archive of discussions
"""
if( self.archive or ( u"/Archiv" in self.page.title() ) or
( "{{Archiv}}" in self.page.text ) or
( "{{Archiv|" in self.page.text ) ):
@ -153,10 +104,7 @@ class RedPage( MysqlRedPage ):
"""
Decides whether current RedPage needs to be parsed or not
"""
if( self.changedp() or not self.has_status("parsed") ):
return True
else:
return False
return self.changedp() or not self.has_status("parsed")
def parse( self ):
"""
@ -183,24 +131,6 @@ class RedPage( MysqlRedPage ):
else:
self.status.add("parsed")
self._parsed = True
#~ self.__update_db()
#~ def __update_db( self ):
#~ """
#~ Updates the page meta data in mysql db
#~ """
#~ if( self._parsed or not self._changed ):
#~ self.add_status( "open" )
#~ if( self.is_archive() ):
#~ self.remove_status( "open" )
#~ self.add_status( "archived" )
#~ else:
#~ pass
#~ self._status = set()
#~ self.__mysql.update_page( self.page._revid, self.page.title(),
#~ self._raw_status() )
@classmethod
def flush_db_cache( cls ):
@ -208,7 +138,6 @@ class RedPage( MysqlRedPage ):
Calls flush method of Mysql Interface class
"""
cls.session.commit()
#~ MysqlRedPage.flush()
def add_status(self, status):
"""
@ -246,21 +175,3 @@ class RedPage( MysqlRedPage ):
return True
else:
return False
#~ def _parse_status(self, raw_status ):
#~ """
#~ Sets status based on comma separated list
#~ @param raw_status Commaseparated string of stati (from DB)
#~ @type raw_status str
#~ """
#~ self._status = set( raw_status.strip().split(","))
#~ def _raw_status( self ):
#~ """
#~ Returns status as commaseparated string (to save in DB)
#~ @returns Raw status string
#~ @rtype str
#~ """
#~ return ",".join( self._status )

Loading…
Cancel
Save