Introduce new directory structure
To clarify which is a bot and which are helper scripts. Related Task: [https://fs.golderweb.de/index.php?do=details&task_id=74 FS#74]
This commit is contained in:
326
lib/mysqlred.py
Normal file
326
lib/mysqlred.py
Normal file
@@ -0,0 +1,326 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# mysqlred.py
|
||||
#
|
||||
# Copyright 2015 GOLDERWEB – Jonathan Golder <jonathan@golderweb.de>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
|
||||
# MA 02110-1301, USA.
|
||||
#
|
||||
#
|
||||
"""
|
||||
Provides interface classes for communication of redundances bot with mysql-db
|
||||
"""
|
||||
|
||||
# Prefere using oursql then MySQLdb
|
||||
try:
|
||||
import oursql as mysqldb
|
||||
except ImportError:
|
||||
import MySQLdb as mysqldb
|
||||
|
||||
import atexit
|
||||
|
||||
from pywikibot import config
|
||||
|
||||
import jogobot
|
||||
|
||||
|
||||
class MysqlRed:
    """
    Basic interface class, containing opening of connection

    Specific querys should be defined in descendant classes per data type
    """

    # Save mysqldb-connection as class attribute to use only one
    # in descendant classes
    connection = False
    db_hostname = config.db_hostname
    db_port = config.db_port
    db_username = config.db_username
    db_password = config.db_password
    db_name = config.db_username + jogobot.config['db_suffix']

    # Class variables for storing cached querys
    # (descendant classes shadow these with their own caches)
    _cached_update_data = []
    _update_query = ''
    _cached_insert_data = {}
    _insert_query = ''

    def __init__( self ):
        """
        Opens a connection to MySQL-DB

        The connection is shared via a class attribute, so it is only
        opened once per (descendant) class.

        @returns mysql-stream MySQL Connection
        """

        # Connect to mysqldb only once
        if not type( self ).connection:

            type( self ).connection = mysqldb.connect(
                host=type( self ).db_hostname,
                port=type( self ).db_port,
                user=type( self ).db_username,
                passwd=type( self ).db_password,
                db=type( self ).db_name )

            # Register callback for warning if exiting with cached,
            # unflushed db write querys
            atexit.register( type(self).warn_if_not_flushed )

    def __del__( self ):
        """
        Before deleting class, close connection to MySQL-DB
        """

        # Guard: if the connection was never opened (still False) there
        # is nothing to close and calling .close() would raise
        if type( self ).connection:
            type( self ).connection.close()

    @classmethod
    def flush( cls ):
        """
        Run cached querys

        Executes the cached insert and update querys in one batch each,
        then commits the transaction.

        @raises MysqlRedConnectionError If no connection exists
        """
        if not cls.connection:
            raise MysqlRedConnectionError( "No connection exists!" )

        cursor = cls.connection.cursor()

        # BUGFIX: remember whether there is anything to write *before*
        # the caches are cleared below — the original checked the caches
        # after resetting them, so commit() was never reached
        need_commit = bool( cls._cached_insert_data or
                            cls._cached_update_data )

        # Execute insert query
        if cls._cached_insert_data:
            # Since cls._cached_insert_data is a dict, we need to have a
            # custom generator to iterate over it
            cursor.executemany( cls._insert_query,
                                ( cls._cached_insert_data[ key ]
                                  for key in cls._cached_insert_data ) )
            # Reset after writing
            cls._cached_insert_data = {}

        # Execute update query
        # Use executemany since update could not be reduced to one query
        if cls._cached_update_data:
            cursor.executemany( cls._update_query, cls._cached_update_data )
            # Reset after writing
            cls._cached_update_data = []

        # Commit db changes (if anything was executed above)
        if need_commit:
            cls.connection.commit()

    @classmethod
    def warn_if_not_flushed(cls):
        """
        Outputs a warning if there are db write querys cached and not flushed
        before exiting programm!
        """
        if cls._cached_update_data or cls._cached_insert_data:
            jogobot.output( "Cached Database write querys not flushed!!! " +
                            "Data loss is possible!", "WARNING" )
|
||||
|
||||
|
||||
class MysqlRedPage( MysqlRed ):
    """
    MySQL-db Interface for handling querys for RedPages
    """

    # Class variables for storing cached querys
    # (shadow the attributes of MysqlRed so this class has its own cache)
    _cached_update_data = []
    _update_query = 'UPDATE `red_pages` \
SET `page_title` = ?, `rev_id` = ?, `status`= ? WHERE `page_id` = ?;'

    _cached_insert_data = {}
    _insert_query = 'INSERT INTO `red_pages` \
( page_id, page_title, rev_id, status ) VALUES ( ?, ?, ?, ? );'

    def __init__( self, page_id ):
        """
        Creates a new instance, runs __init__ of parent class

        @param int page_id MediaWiki page_id of the page to handle
        """

        super().__init__( )

        # page_id is the key used by all querys of this instance
        self.__page_id = int( page_id )

        # Cached db row for this page (dict), or False if not yet stored
        self.data = self.get_page()

    def __del__( self ):
        # Intentionally empty: must not close the shared class-level
        # connection (MysqlRed.__del__ would) when one page object dies
        pass

    def get_page( self ):
        """
        Retrieves a red page row from MySQL-Database for given page_id

        @param int page_id MediaWiki page_id for page to retrieve

        @returns tuple Tuple with data for given page_id
                 bool FALSE if none found
        """

        # DictCursor returns the row as a dict keyed by column name
        cursor = type( self ).connection.cursor(mysqldb.DictCursor)

        cursor.execute( 'SELECT * FROM `red_pages` WHERE `page_id` = ?;',
                        ( self.__page_id, ) )
        res = cursor.fetchone()

        if res:
            return res
        else:
            return False

    def add_page( self, page_title, rev_id, status=0 ):
        """
        Inserts a red page row in MySQL-Database for given page_id

        @param int rev_id MediaWiki current rev_id
        @param str page_title MediaWiki new page_title
        @param int status Page parsing status
        """

        # Keyed by page_id so repeated adds for the same page overwrite
        # each other in the cache instead of producing duplicate INSERTs
        insert_data = { self.__page_id: ( self.__page_id, page_title,
                                          rev_id, status ) }

        type( self )._cached_insert_data.update( insert_data )

        # Manualy construct self.data dict
        self.data = { 'page_id': self.__page_id, 'rev_id': rev_id,
                      'page_title': page_title, 'status': status }

    def update_page( self, rev_id=None, page_title=None, status=0 ):
        """
        Updates the red page row in MySQL-Database for given page_id

        @param int rev_id MediaWiki current rev_id
        @param str page_title MediaWiki new page_title
        @param int status Page parsing status
        """

        # Fall back to the currently known values when not provided
        if not page_title:
            page_title = self.data[ 'page_title' ]
        if not rev_id:
            rev_id = self.data[ 'rev_id' ]

        # Parameter order must match the placeholders of _update_query
        type( self )._cached_update_data.append( ( page_title, rev_id,
                                                   status, self.__page_id ) )
|
||||
|
||||
|
||||
class MysqlRedFam( MysqlRed ):
    """
    MySQL-db Interface for handling querys for RedFams
    """

    # Class variables for storing cached querys
    # (shadow the attributes of MysqlRed so this class has its own cache)
    _cached_update_data = []
    _update_query = 'UPDATE `red_families` \
SET `red_page_id` = ?, `heading` = ?, `beginning` = ?, `ending` = ?, \
`status`= ? WHERE `fam_hash` = ?;'

    _cached_insert_data = {}
    _insert_query = 'INSERT INTO `red_families` \
( fam_hash, red_page_id, beginning, ending, status, heading, \
article0, article1, article2, article3, article4, article5, article6, \
article7 ) VALUES ( ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ? );'

    def __init__( self ):
        """
        Creates a new instance, runs __init__ of parent class
        """

        super().__init__( )

    def __del__( self ):
        # Intentionally empty: must not close the shared class-level
        # connection (MysqlRed.__del__ would) when one fam object dies
        pass

    def get_fam( self, fam_hash ):
        """
        Retrieves a red family row from MySQL-Database for given fam_hash

        Also stores fam_hash on the instance; add_fam/update_fam rely on
        get_fam having been called first.

        @param str fam_hash SHA1 hash identifying the family

        @returns dict Dictionary with data for given fam hash
                 False if none found
        """
        self.__fam_hash = fam_hash

        cursor = type( self ).connection.cursor( mysqldb.DictCursor )

        cursor.execute( 'SELECT * FROM `red_families` WHERE `fam_hash` = ?;',
                        ( fam_hash, ) )
        # fetchone returns None when no row matches
        self.data = cursor.fetchone()

    def add_fam( self, articlesList, heading, red_page_id,
                 beginning, ending=None, status=0 ):
        """
        Queues an insert of a new red family row for the fam_hash set by
        get_fam and mirrors the values into self.data

        @param list articlesList Articles of the redundance family (max. 8)
        @param str heading Heading of the discussion section
        @param int red_page_id MediaWiki page_id of containing page
        @param datetime beginning Timestamp of beginning
        @param datetime ending Timestamp of ending (None if still open)
        @param int status Family parsing status
        """

        data = [ self.__fam_hash, red_page_id, beginning, ending,
                 status, heading ]

        # Append article titles (columns article0 .. article7)
        for article in articlesList:
            data.append( str( article ) )

        # Pad with None so the tuple always carries the 14 values the
        # insert query expects
        while len( data ) < 14:
            data.append( None )

        data = tuple( data )

        # Keyed by fam_hash so repeated adds overwrite each other in the
        # cache instead of producing duplicate INSERTs
        insert_data = { self.__fam_hash: data }
        type( self )._cached_insert_data.update( insert_data )

        # Manualy construct self.data dict
        data_keys = ( 'fam_hash', 'red_page_id', 'beginning', 'ending',
                      'status', 'heading', 'article0', 'article1', 'article2',
                      'article3', 'article4', 'article5', 'article6',
                      'article7' )
        self.data = dict( zip( data_keys, data ) )

    def update_fam( self, red_page_id, heading, beginning, ending, status ):
        """
        Updates the red fam row in MySQL-Database for given fam_hash

        @param int red_page_id MediaWiki page_id
        @param str heading Heading of the discussion section
        @param datetime beginning Timestamp of beginning
        @param datetime ending Timestamp of ending
        @param int status red_fam status
        """

        # Parameter order must match the placeholders of _update_query
        type( self )._cached_update_data.append( ( red_page_id, heading,
                                                   beginning, ending, status,
                                                   self.__fam_hash ) )

    def get_by_status( self, status ):
        """
        Generator which fetches redFams with given status from DB

        @param int status Status value to filter on
        """

        cursor = type( self ).connection.cursor( mysqldb.DictCursor )

        cursor.execute( 'SELECT * FROM `red_families` WHERE `status` = ?;',
                        ( status, ) )

        # Fetch in chunks of 1000 rows to limit memory usage
        while True:
            res = cursor.fetchmany( 1000 )
            if not res:
                break
            for row in res:
                yield row
|
||||
|
||||
|
||||
class MysqlRedError(Exception):
    """Base exception type for all errors raised by this module."""
|
||||
|
||||
|
||||
class MysqlRedConnectionError(MysqlRedError):
    """Raised when there are errors with MySQL connections."""
|
||||
559
lib/redfam.py
Normal file
559
lib/redfam.py
Normal file
@@ -0,0 +1,559 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# redfam.py
|
||||
#
|
||||
# Copyright 2015 GOLDERWEB – Jonathan Golder <jonathan@golderweb.de>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
|
||||
# MA 02110-1301, USA.
|
||||
#
|
||||
#
|
||||
"""
|
||||
Provides classes for working with RedFams
|
||||
"""
|
||||
|
||||
import hashlib
|
||||
import locale
|
||||
import re
|
||||
from datetime import datetime
|
||||
|
||||
import mwparserfromhell as mwparser # noqa
|
||||
import pywikibot # noqa
|
||||
from pywikibot.tools import deprecated # noqa
|
||||
|
||||
import jogobot
|
||||
from lib.mysqlred import MysqlRedFam
|
||||
|
||||
|
||||
class RedFam:
    """
    Basic class for RedFams, containing the basic data structure
    """

    def __init__( self, articlesList, beginning, ending=None, red_page_id=None,
                  status=0, fam_hash=None, heading=None ):
        """
        Generates a new RedFam object

        @param articlesList list List of articles
        @param beginning datetime Beginning date
        @param ending datetime Ending date
        @param red_page_id int MW pageid of containing RedPage
        @param status int Status of RedFam
        @param fam_hash str SHA1 hash of articlesList
        @param heading str Original heading of RedFam (Link)
        """

        # Store the given values on the instance
        self._articlesList = articlesList
        self._beginning = beginning
        self._ending = ending
        self._red_page_id = red_page_id
        self._status = status
        self._fam_hash = fam_hash
        self._heading = heading

        # Fingerprint the article list so known redundance families
        # can be rediscovered later
        self.calc_fam_hash()

    def __repr__( self ):
        """
        Returns the canonical representation string of the RedFam object

        @returns str repr() string
        """

        template = ( "RedFam( articlesList={!r}, heading={!r}, "
                     "beginning={!r}, ending={!r}, red_page_id={!r}, "
                     "status={!r}, fam_hash={!r} )" )

        return template.format( self._articlesList, self._heading,
                                self._beginning, self._ending,
                                self._red_page_id, self._status,
                                self._fam_hash )

    def calc_fam_hash( self ):
        """
        Calculates the SHA-1 hash for the articlesList of redundance family.
        Since we don't need security SHA-1 is just fine.

        @returns str String with the hexadecimal hash digest
        @raises RedFamHashError If a stored hash does not match the
                                recalculated one
        """

        # Only the first 8 articles take part in the fingerprint
        digest = hashlib.sha1(
            str( self._articlesList[:8] ).encode( 'utf-8' ) ).hexdigest()

        if not self._fam_hash:
            # No hash known yet — remember the freshly calculated one
            self._fam_hash = digest
        elif self._fam_hash != digest:
            # A known hash must agree with the recalculation
            raise RedFamHashError( self._fam_hash, digest )

    @classmethod
    def flush_db_cache( cls ):
        """
        Calls flush method of Mysql Interface class
        """
        MysqlRedFam.flush()
|
||||
|
||||
|
||||
class RedFamParser( RedFam ):
    """
    Provides an interface to RedFam for adding/updating redundance families
    while parsing redundance pages
    """

    # Define the timestamp format
    __timestamp_format = jogobot.config['redundances']['timestamp_format']

    # Define section heading re.pattern
    # (a heading with at least two wikilinks)
    __sectionhead_pat = re.compile( r"^(.*\[\[.+\]\].*\[\[.+\]\].*)" )

    # Define timestamp re.pattern
    __timestamp_pat = re.compile( jogobot.config['redundances']
                                  ['timestamp_regex'] )

    # Textpattern for recognisation of done-notices
    __done_notice = ":<small>Archivierung dieses Abschnittes \
wurde gewünscht von:"
    __done_notice2 = "{{Erledigt|"

    def __init__( self, heading, red_page, red_page_archive,
                  beginning, ending=None ):
        """
        Creates a RedFam object based on data collected while parsing red_pages
        combined with possibly former known data from db

        @param heading str Wikitext heading of section
        @param red_page page Pywikibot.page object
        @param red_page_archive bool Is red_page an archive
        @param beginning datetime Timestamp of beginning
                         str as strptime parseable string
        @param ending datetime Timestamp of ending
                      str strptime parseable string
        """

        # Set object attributes:
        self._red_page_id = red_page._pageid
        self._red_page_archive = red_page_archive
        # Hash is (re)calculated below in calc_fam_hash
        self._fam_hash = None

        # Method self.add_beginning sets self._beginning directly
        self.add_beginning( beginning )

        # Method self.add_ending sets self._ending directly
        if( ending ):
            self.add_ending( ending )
        else:
            # If no ending was provided set to None
            self._ending = None

        # Real status is determined later in self.status()
        self._status = None

        # Parse the provided heading of redundance section
        # to set self._articlesList
        self.heading_parser( heading )

        # Calculates the sha1 hash over self._articlesList to
        # rediscover known redundance families

        self.calc_fam_hash()

        # Open database connection, ask for data if existing,
        # otherwise create entry
        self.__handle_db()

        # Check status changes
        self.status()

        # Triggers db update if anything changed
        self.changed()

    def __handle_db( self ):
        """
        Handles opening of db connection
        """

        # We need a connection to our mysqldb
        self.__mysql = MysqlRedFam( )
        self.__mysql.get_fam( self._fam_hash )

        # Family not yet stored — queue an insert with current data
        if not self.__mysql.data:
            self.__mysql.add_fam( self._articlesList, self._heading,
                                  self._red_page_id, self._beginning,
                                  self._ending )

    def heading_parser( self, heading ):
        """
        Parses given red_fam_heading string and saves articles list

        @param heading Heading of RedFam-Section
        @type heading wikicode or mwparser-parseable
        """

        # Save heading as string
        self._heading = str( heading )

        # Parse string heading with mwparse again everytime
        # In some cases the given wikicode is broken due to syntax errors
        # (Task FS#77)
        heading = mwparser.parse( self._heading )

        # Save destinations of wikilinks in headings
        self._articlesList = [ str( link.title ) for link
                               in heading.ifilter_wikilinks() ]

        # Catch sections with more then 8 articles, print error
        if len( self._articlesList ) > 8:
            # For representation in output we need to know the fam hash
            self.calc_fam_hash()
            jogobot.output(
                ( "\03{{lightred}}" +
                  "Maximum number of articles in red_fam exceeded, " +
                  "maximum number is 8, {number:d} were given \n {repress}"
                  ).format( datetime=datetime.now().strftime(
                      "%Y-%m-%d %H:%M:%S" ), number=len( self._articlesList ),
                      repress=repr( self ) ),
                "WARNING" )

            # Only save the first 8 articles
            self._articlesList = self._articlesList[:8]

    def add_beginning( self, beginning ):
        """
        Adds the beginning date of a redundance diskussion to the object

        @param beginning datetime Beginning date
        """

        self._beginning = self.__datetime( beginning )

    def add_ending( self, ending ):
        """
        Adds the ending date of a redundance diskussion to the object.

        @param ending datetime Ending date
        """

        self._ending = self.__datetime( ending )

    def __datetime( self, timestamp ):
        """
        Decides wether given timestamp is a parseable string or a
        datetime object and returns a datetime object in both cases

        @param datetime timestamp Datetime object
               str timestamp Parseable string with timestamp

        @returns datetime Datetime object
        """

        # Make sure locale is set to 'de_DE.UTF-8' to prevent problems
        # with wrong month abreviations in strptime
        # NOTE(review): setlocale changes process-wide state on every call
        locale.setlocale(locale.LC_ALL, 'de_DE.UTF-8')

        if( isinstance( timestamp, datetime ) ):
            return timestamp
        else:
            result = datetime.strptime( timestamp,
                                        type( self ).__timestamp_format )
            return result

    def status( self ):
        """
        Handles detection of correct status

        There are three possible stati:
        - 0 Discussion running --> no ending, page is not an archive
        - 1 Discussion over --> ending present, page is not an archive
        - 2 Discussion archived --> ending (normaly) present, page is archive
        - 3 and greater status was set by worker script, do not change it
        """

        # Do not change stati set by worker script etc.
        if not self.__mysql.data['status'] > 2:

            # No ending, discussion is running:
            # Sometimes archived discussions also have no detectable ending
            if not self._ending and not self._red_page_archive:
                self._status = 0
            else:
                if not self._red_page_archive:
                    self._status = 1
                else:
                    self._status = 2
        else:
            # Keep the worker-assigned status untouched
            self._status = self.__mysql.data[ 'status' ]

    def changed( self ):
        """
        Checks wether anything has changed and maybe triggers db update
        """

        # On archived red_fams do not delete possibly existing ending
        if( not self._ending and self._status > 1 and
                self.__mysql.data[ 'ending' ] ):

            self._ending = self.__mysql.data[ 'ending' ]

        # Since status change means something has changed, update database
        if( self._status != self.__mysql.data[ 'status' ] or
                self._beginning != self.__mysql.data[ 'beginning' ] or
                self._ending != self.__mysql.data[ 'ending' ] or
                self._red_page_id != self.__mysql.data[ 'red_page_id' ] or
                self._heading != self.__mysql.data[ 'heading' ]):

            self.__mysql.update_fam( self._red_page_id, self._heading,
                                     self._beginning, self._ending,
                                     self._status )

    @classmethod
    @deprecated
    def is_sectionheading( cls, line ):
        """
        Checks wether given line is a red_fam section heading

        @param str line String to check

        @returns bool Returns True if it is a section heading
        """

        if cls.__sectionhead_pat.search( str(line) ):
            return True
        else:
            return False

    @classmethod
    def is_section_redfam_cb( cls, heading ):
        """
        Used as callback for wikicode.get_sections in redpage.parse to
        select sections which are redfams

        @param heading Heading wikicode to inspect
        @returns bool True if the section looks like a redfam
        """
        # Because of strange behavior in some cases, parse heading again
        # (Task FS#77)
        heading = mwparser.parse( str( heading ) )

        # Make sure we have min. two wikilinks in heading to assume a redfam
        if len( heading.filter_wikilinks() ) >= 2:
            return True
        else:
            return False

    @classmethod
    def parser( cls, text, page, isarchive=False ):
        """
        Handles parsing of redfam section

        @param text Text of RedFam-Section
        @type text wikicode or mwparser-parseable
        @param page Pywikibot page object containing the section
        @param isarchive bool Whether page is an archive page
        """

        # Parse heading with mwparse if needed
        if not isinstance( text, mwparser.wikicode.Wikicode ):
            text = mwparser.parse( text )

        # Extract heading text
        heading = next( text.ifilter_headings() ).title

        # Extract beginnig and maybe ending
        (beginning, ending) = RedFamParser.extract_dates( text, isarchive )

        # Missing beginning (Task: FS#76)
        # Use first day of month of reddisc
        if not beginning:
            match = re.search(
                jogobot.config["redundances"]["reddiscs_onlyinclude_re"],
                page.title() )

            if match:
                beginning = datetime.strptime(
                    "01. {month} {year}".format(
                        month=match.group(1), year=match.group(2)),
                    "%d. %B %Y" )

        # Create the RedFam object
        # (its __init__ handles db interaction as a side effect)
        RedFamParser( heading, page, isarchive, beginning, ending )

    @classmethod
    def extract_dates( cls, text, isarchive=False ):
        """
        Returns tuple of the first and maybe last timestamp of a section.
        Last timestamp is only returned if there is a done notice or param
        *isarchiv* is set to 'True'

        @param text Text to search in
        @type text Any Type castable to str
        @param isarchive If true skip searching done notice (on archivepages)
        @type isarchive bool

        @returns Timestamps, otherwise None
        @returntype tuple of strs
        """

        # Match all timestamps
        matches = cls.__timestamp_pat.findall( str( text ) )
        if matches:

            # First one is beginning
            # Since some timestamps are broken we need to reconstruct them
            # by regex match groups
            beginning = ( matches[0][0] + ", " + matches[0][1] + ". " +
                          matches[0][2] + ". " + matches[0][3] )

            # Last one maybe is ending
            # Done notice format 1
            # Done notice format 2
            # Or on archivepages
            if ( cls.__done_notice in text or
                 cls.__done_notice2 in text or
                 isarchive ):

                ending = ( matches[-1][0] + ", " + matches[-1][1] + ". " +
                           matches[-1][2] + ". " + matches[-1][3] )

            else:
                ending = None
        # Missing dates (Task: FS#76)
        else:
            beginning = None
            ending = None

        return (beginning, ending)

    @classmethod
    @deprecated( 'extract_dates' )
    def is_beginning( cls, line ):
        """
        Returns the first timestamp found in line, otherwise None

        @param str line String to search in

        @returns str Timestamp, otherwise None
        """

        return cls.extract_dates( line )[0]

    @classmethod
    @deprecated( 'extract_dates' )
    def is_ending( cls, line, isarchive=False ):
        """
        Returns the timestamp of done notice ( if one ), otherwise None

        @param line String to search in
        @type line str
        @param isarchive If true skip searching done notice (on archivepages)
        @type isarchive bool

        @returns Timestamp, otherwise None
        @returntype str
        """

        return cls.extract_dates( line )[1]

    @classmethod
    @deprecated( 'extract_dates' )
    def is_ending2( cls, line ):
        """
        Returns the last timestamp found in line, otherwise None

        @param str line String to search in

        @returns str Timestamp, otherwise None
        """

        return cls.extract_dates( line, True )[1]
|
||||
|
||||
|
||||
class RedFamWorker( RedFam ):
    """
    Handles working with redundance families stored in database
    where discussion is finished
    """
    def __init__( self, mysql_data ):
        """
        Builds a RedFam from a database row dict.

        @param mysql_data dict Database row of the red family
        """

        # Collect the non-empty articleN columns in sorted key order
        articlesList = [ mysql_data[ column ]
                         for column in sorted( mysql_data.keys() )
                         if 'article' in column and mysql_data[ column ] ]

        super().__init__( articlesList, mysql_data[ 'beginning' ],
                          mysql_data[ 'ending' ], mysql_data[ 'red_page_id' ],
                          mysql_data[ 'status' ], mysql_data[ 'fam_hash' ],
                          mysql_data[ 'heading' ] )

    @classmethod
    def list_by_status( cls, status ):
        """
        Lists red_fams stored in db by given status

        @param status int Status value to filter on
        """
        interface = MysqlRedFam()
        for row in interface.get_by_status( status ):
            try:
                print( cls( row ) )
            except RedFamHashError:
                # Show the offending row before propagating the error
                print(row)
                raise
|
||||
|
||||
|
||||
class RedFamError( Exception ):
    """
    Base class for all Errors of RedFam-Module
    """

    def __init__( self, message=None ):
        """
        Handles Instantiation of RedFamError's

        @param message str Optional error message; a generic one is used
                           when omitted
        """
        # Fall back to a generic message when none was supplied
        self.message = ( message or
                         "An Error occured while executing a RedFam action" )

    def __str__( self ):
        """
        Output of error message
        """
        return self.message
|
||||
|
||||
|
||||
class RedFamHashError( RedFamError ):
    """
    Raised when given RedFamHash does not match with calculated
    """

    def __init__( self, givenHash, calculatedHash ):
        """
        @param givenHash str The hash that was supplied
        @param calculatedHash str The hash recalculated from the data
        """
        # Build the message inline and delegate to the base class
        super().__init__( "Given fam_hash ('{given}') does not match with \
calculated ('{calc}'".format( given=givenHash, calc=calculatedHash ) )
|
||||
|
||||
|
||||
class RedFamHeadingError ( RedFamError ):
    """
    Raised when given RedFamHeading does not match __sectionhead_pat Regex
    """
    def __init__( self, heading ):
        """
        @param heading str The heading that failed to parse
        """
        # Build the message inline and delegate to the base class
        super().__init__( "Error while trying to parse section heading. Given heading \
'{heading}' does not match RegEx".format( heading=heading ) )
|
||||
151
lib/redpage.py
Normal file
151
lib/redpage.py
Normal file
@@ -0,0 +1,151 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# redpage.py
|
||||
#
|
||||
# Copyright 2015 GOLDERWEB – Jonathan Golder <jonathan@golderweb.de>
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
|
||||
# MA 02110-1301, USA.
|
||||
#
|
||||
#
|
||||
"""
|
||||
Provides a class for handling redundance discussion pages and archives
|
||||
"""
|
||||
|
||||
import pywikibot # noqa
|
||||
import mwparserfromhell as mwparser
|
||||
|
||||
import jogobot # noqa
|
||||
|
||||
from lib.mysqlred import MysqlRedPage
|
||||
from lib.redfam import RedFamParser
|
||||
|
||||
|
||||
class RedPage:
    """
    Class for handling redundance discussion pages and archives
    """

    def __init__( self, page, archive=False ):
        """
        Generate a new RedPage object based on the given pywikibot page object

        @param page page Pywikibot/MediaWiki page object for page
        @param archive bool Treat the page as an archive page
        """

        # Safe the pywikibot page object
        self.page = page
        self._archive = archive

        self.__handle_db( )

        # Sets self._changed by comparing page meta data with the db row
        self.is_page_changed()

        # Set to True by parse() once all sections were handled
        self._parsed = None

    def __handle_db( self ):
        """
        Handles opening of db connection
        """

        # We need a connection to our mysqldb
        self.__mysql = MysqlRedPage( self.page._pageid )

        # Unknown page — create the database row now
        if not self.__mysql.data:
            self.__mysql.add_page( self.page.title(), self.page._revid )

    def is_page_changed( self ):
        """
        Check wether the page was changed since last run
        """

        # Compare the stored row with the current page meta data.
        # 'status' is taken from the db row itself so it never differs.
        if( self.__mysql.data != { 'page_id': self.page._pageid,
                                   'rev_id': self.page._revid,
                                   'page_title': self.page.title(),
                                   'status': self.__mysql.data[ 'status' ] } ):
            self._changed = True
        else:
            self._changed = False

    def is_archive( self ):
        """
        Detects wether current page is an archive of discussions
        """

        # Archive flag, "/Archiv" in the title, or an archive template
        # in the page text all mark an archive page
        if( self._archive or ( u"/Archiv" in self.page.title() ) or
                ( "{{Archiv}}" in self.page.text ) or
                ( "{{Archiv|" in self.page.text ) ):

            return True
        else:
            return False

    def is_parsing_needed( self ):
        """
        Decides wether current RedPage needs to be parsed or not
        """

        # Parse when the page changed or its db status is still 0 (open)
        if( self._changed or self.__mysql.data[ 'status' ] == 0 ):
            return True
        else:
            return False

    def parse( self ):
        """
        Handles the parsing process

        Generator which yields the wikicode of each redfam section.
        """

        # Generate Wikicode object
        self.wikicode = mwparser.parse( self.page.text )

        # Select RedFam-sections
        # matches=Regexp or
        #   function( gets heading content as wikicode as param 1)
        # include_lead = if true include first section (intro)
        # include_heading = if true include heading
        fams = self.wikicode.get_sections(
            matches=RedFamParser.is_section_redfam_cb,
            include_lead=False, include_headings=True )

        # Iterate over RedFam
        for fam in fams:

            yield fam

        # NOTE(review): for/else — since the loop contains no break, the
        # else-branch always runs after the last section was yielded and
        # marks the page as fully parsed
        else:
            self._parsed = True
            self.__update_db()

    def __update_db( self ):
        """
        Updates the page meta data in mysql db
        """
        # Parsed (or unchanged) pages get status 1, archives status 2,
        # everything else status 0
        if( self._parsed or not self._changed ):
            status = 1

            if( self.is_archive() ):
                status = 2
        else:
            status = 0

        self.__mysql.update_page( self.page._revid, self.page.title(), status )

    @classmethod
    def flush_db_cache( cls ):
        """
        Calls flush method of Mysql Interface class
        """
        MysqlRedPage.flush()
|
||||
Reference in New Issue
Block a user