jogobot-red/lib/redpage.py
Last commit 3fe47e666f by Jonathan Golder, 2017-03-09 00:12:41 +01:00:
Fix polymorphism problem with relationships

Since we are using subclasses of the ORM mapped classes, disable
typechecks for ORM relations.
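
The typecheck change the commit refers to lives in the ORM layer (lib/mysqlred.py), not in this file. Assuming the ORM is SQLAlchemy, which the session handling below suggests, it corresponds roughly to declaring relationships with enable_typechecks=False so that instances of subclasses of the mapped class are accepted. A hypothetical sketch (class and attribute names are assumptions, not taken from this file):

    from sqlalchemy.orm import relationship

    class MysqlRedPage( Base ):  # hypothetical mapping, shown for illustration only
        # ...
        # Accept subclass instances on this relation without SQLAlchemy's
        # relationship typecheck rejecting them
        redfams = relationship( "MysqlRedFam", enable_typechecks=False )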

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# redpage.py
#
# Copyright 2015 GOLDERWEB Jonathan Golder <jonathan@golderweb.de>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
"""
Provides a class for handling redundancy discussion pages and archives
"""
import pywikibot # noqa
import mwparserfromhell as mwparser
import jogobot # noqa
from lib.mysqlred import MysqlRedPage
from lib.redfam import RedFamParser


class RedPage( MysqlRedPage ):
    """
    Class for handling redundancy discussion pages and archives
    """

    def __init__( self, page=None, pageid=None, archive=False ):
        """
        Generate a new RedPage object based on the given pywikibot page object

        @param page     Pywikibot/MediaWiki page object for page
        @type page      pywikibot.Page
        @param pageid   MW-Pageid for related page
        @type pageid    int
        """
        # Save the pywikibot page object
        if page:
            self._page = page

            super().__init__(
                pageid=self._page.pageid,
                revid=self._page._revid,
                pagetitle=self._page.title(),
                status=None
            )

        self.is_archive()

        self.session.add(self)

    def update( self, page ):

        self._page = page

        self.revid = page._revid
        self.pagetitle = page.title()

        self.is_archive()

    @property
    def page(self):
        if not hasattr(self, "_page"):
            self._page = pywikibot.Page( pywikibot.Site(), self.pagetitle )

        return self._page

    @property
    def archive(self):
        self.is_archive()
        return self.status.has("archive")

    def is_archive( self ):
        """
        Detects whether current page is an archive of discussions
        """

        if( ( u"/Archiv" in self.page.title() ) or
                ( "{{Archiv}}" in self.page.text ) or
                ( "{{Archiv|" in self.page.text ) ):

            self.status.add("archive")
        else:
            self.status.discard("archive")

    def is_parsing_needed( self ):
        """
        Decides whether current RedPage needs to be parsed or not
        """

        return self.changedp() or not self.status.has("parsed")

    def parse( self ):
        """
        Handles the parsing process
        """

        # Generate Wikicode object
        self.wikicode = mwparser.parse( self.page.text )

        # Select RedFam-sections
        # matches = Regexp or function
        #           (gets heading content as wikicode as param 1)
        # include_lead = if true, include first section (intro)
        # include_headings = if true, include headings
        fams = self.wikicode.get_sections(
            matches=RedFamParser.is_section_redfam_cb,
            include_lead=False, include_headings=True )

        # Iterate over RedFam sections and hand them out one by one
        for fam in fams:

            yield fam
        else:
            # for-else: runs once the loop has completed without a break,
            # i.e. after every section has been yielded and consumed
            self.status.add("parsed")
            self._parsed = True

    @classmethod
    def flush_db_cache( cls ):
        """
        Commits the shared session of the Mysql interface class
        """
        cls.session.commit()
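
For orientation, a hypothetical sketch of how the class appears to be driven (the real caller lives elsewhere in the bot; the page title and the consumer of the yielded sections are assumptions):

    site = pywikibot.Site()
    page = pywikibot.Page( site, "Wikipedia:Redundanz/Beispiel" )  # assumed title

    redpage = RedPage( page )

    if redpage.is_parsing_needed():
        # parse() is a generator over the redundancy family sections
        for fam in redpage.parse():
            process_fam( fam, redpage )  # hypothetical consumer of one section

    # Write collected changes through the shared ORM session
    RedPage.flush_db_cache()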