Example no. 1
 def __init__(self, mindelay=None, maxdelay=None, writedelay=None,
              multiplydelay=True, verbosedelay=False, write=False):
     self.lock = threading.RLock()
     self.mysite = None
     self.ctrlfilename = config.datafilepath('pywikibot', 'throttle.ctrl')
     self.mindelay = mindelay
     if self.mindelay is None:
         self.mindelay = config.minthrottle
     self.maxdelay = maxdelay
     if self.maxdelay is None:
         self.maxdelay = config.maxthrottle
     self.writedelay = writedelay
     if self.writedelay is None:
         self.writedelay = config.put_throttle
     self.last_read = 0
     self.last_write = 0
     self.next_multiplicity = 1.0
     self.checkdelay = 120  # Check logfile again after this many seconds
     self.dropdelay = 360   # Ignore processes that have not made
                            # a check in this many seconds
     self.releasepid = 1200 # Free the process id after this many seconds
     self.lastwait = 0.0
     self.delay = 0
     self.checktime = 0
     self.verbosedelay = verbosedelay
     self.multiplydelay = multiplydelay
     if self.multiplydelay:
         self.checkMultiplicity()
     self.setDelay()
     self.write = write
Example no. 2
    def dump(self, filename='category.dump.bz2'):
        '''Saves the contents of the dictionaries superclassDB and catContentDB
        to disk.

        '''
        if not os.path.isabs(filename):
            filename = config.datafilepath(filename)
        if self.catContentDB or self.superclassDB:
            pywikibot.output(u'Dumping to %s, please wait...'
                             % config.shortpath(filename))
            f = bz2.BZ2File(filename, 'w')
            databases = {
                'catContentDB': self.catContentDB,
                'superclassDB': self.superclassDB
            }
            # store dump to disk in binary format
            try:
                pickle.dump(databases, f, protocol=pickle.HIGHEST_PROTOCOL)
            except pickle.PicklingError:
                pass
            f.close()
        else:
            try:
                os.remove(filename)
            except EnvironmentError:
                pass
            else:
                pywikibot.output(u'Database is empty. %s removed'
                                 % config.shortpath(filename))
Example no. 3
 def __init__(self,
              mindelay=None,
              maxdelay=None,
              writedelay=None,
              multiplydelay=True,
              verbosedelay=False,
              write=False):
     self.lock = threading.RLock()
     self.mysite = None
     self.ctrlfilename = config.datafilepath('pywikibot', 'throttle.ctrl')
     self.mindelay = mindelay
     if self.mindelay is None:
         self.mindelay = config.minthrottle
     self.maxdelay = maxdelay
     if self.maxdelay is None:
         self.maxdelay = config.maxthrottle
     self.writedelay = writedelay
     if self.writedelay is None:
         self.writedelay = config.put_throttle
     self.last_read = 0
     self.last_write = 0
     self.next_multiplicity = 1.0
     self.checkdelay = 120  # Check logfile again after this many seconds
     self.dropdelay = 360  # Ignore processes that have not made
     # a check in this many seconds
     self.releasepid = 1200  # Free the process id after this many seconds
     self.lastwait = 0.0
     self.delay = 0
     self.checktime = 0
     self.verbosedelay = verbosedelay
     self.multiplydelay = multiplydelay
     if self.multiplydelay:
         self.checkMultiplicity()
     self.setDelay()
     self.write = write
Example no. 4
    def dump(self, filename='category.dump.bz2'):
        '''Saves the contents of the dictionaries superclassDB and catContentDB
        to disk.

        '''
        if not os.path.isabs(filename):
            filename = config.datafilepath(filename)
        if self.catContentDB or self.superclassDB:
            pywikibot.output(u'Dumping to %s, please wait...' %
                             config.shortpath(filename))
            f = bz2.BZ2File(filename, 'w')
            databases = {
                'catContentDB': self.catContentDB,
                'superclassDB': self.superclassDB
            }
            # store dump to disk in binary format
            try:
                pickle.dump(databases, f, protocol=pickle.HIGHEST_PROTOCOL)
            except pickle.PicklingError:
                pass
            f.close()
        else:
            try:
                os.remove(filename)
            except EnvironmentError:
                pass
            else:
                pywikibot.output(u'Database is empty. %s removed' %
                                 config.shortpath(filename))
Example no. 5
 def __init__(self, catTitle, catDB, filename=None, maxDepth=10):
     self.catTitle = catTitle
     self.catDB = catDB
     if filename and not os.path.isabs(filename):
         filename = config.datafilepath(filename)
     self.filename = filename
     self.maxDepth = maxDepth
     self.site = pywikibot.getSite()
Example no. 6
 def __init__(self, catTitle, catDB, filename=None, maxDepth=10):
     self.catTitle = catTitle
     self.catDB = catDB
     if filename and not os.path.isabs(filename):
         filename = config.datafilepath(filename)
     self.filename = filename
     self.maxDepth = maxDepth
     self.site = pywikibot.getSite()
Example no. 7
 def __init__(self, catTitle, catDB, filename=None, maxDepth=10):
     self.catTitle = catTitle
     self.catDB = catDB
     if filename and not os.path.isabs(filename):
         filename = config.datafilepath(filename)
     self.filename = filename
     # TODO: make maxDepth changeable with a parameter or config file entry
     self.maxDepth = maxDepth
     self.site = pywikibot.getSite()
Example no. 8
def validate_family(value):
    try:
        path.append(config.datafilepath('families'))
        __import__('%s_family' % value)
    except ImportError:
        raise ValidationError(_("Family %(family)s doesn't exist.") %
            {'family': value}
        )
    finally:
        path.pop()
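
A hedged usage sketch of the validator above: it appends the 'families' data directory to the import path and tries to import '<value>_family', raising ValidationError when no such module can be found. The family name below is illustrative only, not taken from the original project.

    # Minimal sketch, assuming validate_family and ValidationError from the
    # example above are importable; 'wikipedia' is just an illustrative name.
    try:
        validate_family('wikipedia')   # succeeds only if wikipedia_family.py exists
    except ValidationError:
        print("family file not found under config.datafilepath('families')")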
Example no. 9
def callme_car_main(io_loop, asyncioloop):
    callme_car.startphonenumber = 16300000000
    callme_car.asyncioloop = asyncioloop
    callme_car.io_loop = io_loop
    callme_car.host = "127.0.0.1"
    callme_car.port = 5050
    callme_car.count = 1
    callme_car.isstart = False
    #ioloop.PeriodicCallback(vloop, callme_car.interval).start()  # problematic: emits multiple entries at once
    callme_car.io_loop.call_later(callme_car.interval / 1000, vloop)

    if (os.path.isfile(config.datafilepath() + "callme_sim_config.json")):
        f = open(config.datafilepath() + "callme_sim_config.json", "r")
        s = f.read()
        f.close()
        o = json.loads(s)
        callme_car.startphonenumber = o["startphonenumber"]
        callme_car.count = o["count"]
        callme_car.mappoints = o["mappoints"]

    return
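
callme_car_main first sets built-in defaults and then overrides them from callme_sim_config.json in the data directory, expecting at least the keys startphonenumber, count and mappoints. A minimal sketch of producing such a file follows; the concrete values and the use of os.path.join are assumptions, and 'config' is taken to be the same module used in the example above.

    import json
    import os

    # Hypothetical config matching the keys read in callme_car_main above;
    # values are illustrative placeholders.
    sim_config = {
        "startphonenumber": 16300000000,
        "count": 1,
        "mappoints": [],
    }
    with open(os.path.join(config.datafilepath(), "callme_sim_config.json"), "w") as f:
        json.dump(sim_config, f)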
Example no. 10
    def __init__(self, data, prefix="", max_size=10, cache_base='cache'):
        self.max_size = max_size
        while True:
            self.cache_path = config.datafilepath(
                cache_base, prefix + ''.join([
                    random.choice('abcdefghijklmnopqrstuvwxyz')
                    for i in xrange(16)
                ]))
            if not os.path.exists(self.cache_path): break
        self.cache_file = open(self.cache_path, 'wb+')

        lookup = [-1] * 36
        data.sort(key=lambda i: i[0])
        for key, value in data:
            if type(key) is unicode:
                key = key.encode('utf-8')
            elif type(key) != str:
                key = str(key)
            key = key.lower()
            index = key[0]
            if not ((index >= 'a' and index <= 'z') or
                    (index >= '0' and index <= '9')) or '\t' in key:
                raise RuntimeError('Only alphabetic keys are supported', key)

            if index < 'a':
                index = ord(index) - 48 + 26  # Numeric
            else:
                index = ord(index) - 97
            if lookup[index] == -1:
                lookup[index] = self.cache_file.tell()

            if type(value) is unicode:
                value = value.encode('utf-8')
            elif type(value) != str:
                value = str(value)

            if len(key) > 0xFF:
                raise RuntimeError('Key length must be smaller than %i' % 0xFF)
            if len(value) > 0xFFFFFF:
                raise RuntimeError('Value length must be smaller than %i' %
                                   0xFFFFFF)

            self.cache_file.write('%02x%s%06x%s' %
                                  (len(key), key, len(value), value))

        self.lookup = lookup

        self.cache_file.close()
        self.cache_file = open(self.cache_path, 'rb')
        self.cache_file.seek(0)
        self.cache = []
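
Each record in the cache file above is serialized as two hex digits of key length, the key, six hex digits of value length, then the value, and lookup keeps the file offset of the first record for each leading character. A minimal sketch of decoding one record at a given offset; the helper is not part of the original class and its name is an assumption.

    def read_record(cache_file, offset):
        # Hedged sketch: parse one '%02x%s%06x%s' record as written above.
        cache_file.seek(offset)
        key_len = int(cache_file.read(2), 16)    # 2 hex digits: key length
        key = cache_file.read(key_len)
        value_len = int(cache_file.read(6), 16)  # 6 hex digits: value length
        value = cache_file.read(value_len)
        return key, value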
Example no. 11
    def __init__(self, data, prefix="", max_size=10, cache_base="cache"):
        self.max_size = max_size
        while True:
            self.cache_path = config.datafilepath(
                cache_base, prefix + "".join([random.choice("abcdefghijklmnopqrstuvwxyz") for i in xrange(16)])
            )
            if not os.path.exists(self.cache_path):
                break
        self.cache_file = open(self.cache_path, "wb+")

        lookup = [-1] * 36
        data.sort(key=lambda i: i[0])
        for key, value in data:
            if type(key) is unicode:
                key = key.encode("utf-8")
            elif type(key) != str:
                key = str(key)
            key = key.lower()
            index = key[0]
            if not ((index >= "a" and index <= "z") or (index >= "0" and index <= "9")) or "\t" in key:
                raise RuntimeError("Only alphabetic keys are supported", key)

            if index < "a":
                index = ord(index) - 48 + 26  # Numeric
            else:
                index = ord(index) - 97
            if lookup[index] == -1:
                lookup[index] = self.cache_file.tell()

            if type(value) is unicode:
                value = value.encode("utf-8")
            elif type(value) != str:
                value = str(value)

            if len(key) > 0xFF:
                raise RuntimeError("Key length must be smaller than %i" % 0xFF)
            if len(value) > 0xFFFFFF:
                raise RuntimeError("Value length must be smaller than %i" % 0xFFFFFF)

            self.cache_file.write("%02x%s%06x%s" % (len(key), key, len(value), value))

        self.lookup = lookup

        self.cache_file.close()
        self.cache_file = open(self.cache_path, "rb")
        self.cache_file.seek(0)
        self.cache = []
Example no. 12
    def __init__(self, data, prefix = "", max_size = 10, cache_base = 'cache'):
        self.max_size = max_size
        while True:
            self.cache_path = config.datafilepath(cache_base, prefix + ''.join(
                [random.choice('abcdefghijklmnopqrstuvwxyz')
                    for i in xrange(16)]))
            if not os.path.exists(self.cache_path): break
        self.cache_file = open(self.cache_path, 'wb+')

        lookup = [-1] * 36
        data.sort(key = lambda i: i[0])
        for key, value in data:
            if type(key) is unicode:
                key = key.encode('utf-8')
            elif type(key) != str:
                key = str(key)
            key = key.lower()
            index = key[0]
            if not ((index >= 'a' and index <= 'z') or (index >= '0' and index <= '9')) or '\t' in key:
                raise RuntimeError('Only alphabetic keys are supported', key)

            if index < 'a':
                index = ord(index) - 48 + 26 # Numeric
            else:
                index = ord(index) - 97
            if lookup[index] == -1:
                lookup[index] = self.cache_file.tell()

            if type(value) is unicode:
                value = value.encode('utf-8')
            elif type(value) != str:
                value = str(value)

            if len(key) > 0xFF:
                raise RuntimeError('Key length must be smaller than %i' % 0xFF)
            if len(value) > 0xFFFFFF:
                raise RuntimeError('Value length must be smaller than %i' % 0xFFFFFF)

            self.cache_file.write('%02x%s%06x%s' % (len(key), key, len(value), value))

        self.lookup = lookup

        self.cache_file.close()
        self.cache_file = open(self.cache_path, 'rb')
        self.cache_file.seek(0)
        self.cache = []
Example no. 13
 def __init__(self, rebuild=False, filename='category.dump.bz2'):
     if rebuild:
         self.rebuild()
     else:
         try:
             if not os.path.isabs(filename):
                 filename = config.datafilepath(filename)
             f = bz2.BZ2File(filename, 'r')
             pywikibot.output(u'Reading dump from %s'
                              % config.shortpath(filename))
             databases = pickle.load(f)
             f.close()
             # keys are categories, values are 2-tuples with lists as
             # entries.
             self.catContentDB = databases['catContentDB']
             # like the above, but for supercategories
             self.superclassDB = databases['superclassDB']
             del databases
         except:
             # If something goes wrong, just rebuild the database
             self.rebuild()
Example no. 14
 def __init__(self, rebuild=False, filename='category.dump.bz2'):
     if rebuild:
         self.rebuild()
     else:
         try:
             if not os.path.isabs(filename):
                 filename = config.datafilepath(filename)
             f = bz2.BZ2File(filename, 'r')
             pywikibot.output(u'Reading dump from %s' %
                              config.shortpath(filename))
             databases = pickle.load(f)
             f.close()
             # keys are categories, values are 2-tuples with lists as
             # entries.
             self.catContentDB = databases['catContentDB']
             # like the above, but for supercategories
             self.superclassDB = databases['superclassDB']
             del databases
         except:
             # If something goes wrong, just rebuild the database
             self.rebuild()
Example no. 15
def main():
    # Open logfile
    commandLogFilename = config.datafilepath('logs', 'translation_move.log')
    try:
        commandLogFile = codecs.open(commandLogFilename, 'a', 'utf-8')
    except IOError:
        commandLogFile = codecs.open(commandLogFilename, 'w', 'utf-8')

    site = wikipedia.getSite()
    
    categlang=False
    categproj=False
    initlist=False
    debug=0
    debugwrite=False
    startindex=None
    finishindex=None
    finalverif=None
    verbose=False
    pagename=None
    fixlink=False
    fixtitle=False
    
    for arg in wikipedia.handleArgs():
        if arg.startswith('-categlang'):
            categlang = True
            if arg.startswith('-categlang:'):
                parts=re.split(u':', arg)
                subparts=re.split(u'-', parts[1])
                startindex=int(subparts[0])
                finishindex=int(subparts[1])
        elif arg.startswith('-categproj'):
            categproj = True
        elif arg.startswith('-sublist'):
            if arg.startswith('-sublist:'):
                parts=re.split(u':', arg)
                subparts=re.split(u'-', parts[1])
                startindex=int(subparts[0])
                finishindex=int(subparts[1])
        elif arg.startswith('-initlist'):
            initlist = True
            categ = True
        elif arg.startswith('-finalverif'):
            finalverif = True
        elif arg.startswith('-debugw'):
            debug = 2
        elif arg.startswith('-debug'):
            debug = 1
        elif arg.startswith('-verb'):
            verbose = True
        elif arg.startswith('-fixlink'):
            fixlink=True
        elif arg.startswith('-fixtitle'):
            fixtitle=True
        elif arg.startswith('-page:'):
            parts=re.split(u':', arg, 1)
            pagename=parts[1]
        else:
            wikipedia.output(u'Syntax: translation_move.py [-categ[:[start]-[finish]]] [-initlist] [-debug] [-finalverif]')
            exit()
    
    # Get category
     
    if not debug==0:
        artlist=list()

        artlist.append(wikipedia.Page(site, u'Projet:Traduction/Jun Kazama'))
        #artlist.append(wikipedia.Page(site, u'Discussion:Train Protection & Warning System/Traduction'))
        #artlist.append(wikipedia.Page(site, u'Discussion:Cathédrale d\'Espoo/Traduction '))
       
        #catname = u'Catégorie:Traduction du Projet Architecture chrétienne'
        #categ=catlib.Category(site, catname)
        #artlist=categ.articlesList(True)
        #artlist=fullartlist[0:150] 
        translationGenerator=iter(artlist)        
        #translationGenerator=site.prefixindex(u'Traduction/', 102)
        #artlist=list(translationGenerator)
        commandLogFile.write(u'== Traitement de %d article(s) ==\n' % len(artlist))
        if verbose:
            wikipedia.output(u'== Traitement de %d article(s) ==' % len(artlist))
    elif categlang:
        catname = u'Traduction par langue'
        categ=catlib.Category(site, catname)
        translationGenerator=categ.articles(True)
    elif categproj:
        catname = u'Traduction par projet'
        categ=catlib.Category(site, catname)
        translationGenerator=categ.articles(True)
    elif pagename:
        artlist=list()
        artlist.append(wikipedia.Page(site, pagename))
        translationGenerator=iter(artlist)
    elif fixlink:
        artlist=get_moved_pages(site, fixtitle)
        translationGenerator=iter(artlist)
    elif fixtitle:
        catname = u'Catégorie:Page de traduction mal paramétrée'
        categ=catlib.Category(site, catname)
        translationGenerator=categ.articles(True)
    else: 
        translationGenerator=site.prefixindex(u'Traduction/', 102)
    
    allset=0
    processed=0
    total=0
    
    translationPreloadingGen=pagegenerators.PreloadingGenerator(translationGenerator, 60)
    
    index=0
    for page in translationPreloadingGen:
        if (not startindex) or (index>=startindex):
            total=total+1
            namespace=site.namespace(page.namespace())
            title=page.titleWithoutNamespace()
            if verbose:
                wikipedia.output(u'Processing : %s in %s' % (title, namespace))
            if namespace == 'Discussion':
                if re.search (u'/Traduction', title):
                    if fixtitle:
                        fix_title(page, commandLogFile, debug, verbose)
                    elif not finalverif:
                        commandLogFile.write(u'* Page déjà renommée: [[%s:%s]]\n' % (namespace, title))
                        if verbose:
                            wikipedia.output(u'* Page déjà renommée: [[%s:%s]]' % (namespace, title))
                    allset=allset+1
                else:
                    commandLogFile.write(u'* Page ignorée (namespace incorrect): [[%s:%s]]\n' % (namespace, title))
                    if verbose:
                        wikipedia.output(u'* Page ignorée (namespace incorrect): [[%s:%s]]' % (namespace, title))
            elif namespace == u'Projet':
                if re.search(u'Traduction', title):
                    shorttitle=re.sub(u'Traduction/', u'/', title)
                    correctname = False
                    if re.search(u':', shorttitle):
                        if verbose:
                            wikipedia.output(shorttitle+u' has :')
                        parts=re.split(u':', shorttitle)
                        if verbose:
                            wikipedia.output(parts[0][1:])
                        subnamespaceidx=site.getNamespaceIndex(parts[0][1:])
                        if subnamespaceidx==None or subnamespaceidx==100:
                            # Test against old namespaces
                            if parts[0][1:] == 'Discussion':
                                commandLogFile.write(u'* Page ignorée (namespace lié incorrect): [[%s:%s]]\n' % (namespace, title))
                                if verbose:
                                    wikipedia.output(u'* Page ignorée (namespace lié incorrect): [[%s:%s]]' % (namespace, title))
                            else:
                                correctname = True
                        else:
                            commandLogFile.write(u'* Page ignorée (namespace lié incorrect): [[%s:%s]]\n' % (namespace, title))
                            if verbose:
                                wikipedia.output(u'* Page ignorée (namespace lié incorrect): [[%s:%s]]' % (namespace, title))
                    elif re.search(u'\*', shorttitle):
                        commandLogFile.write(u'* Page ignorée (page de maintenance): [[%s:%s]]\n' % (namespace, title))
                        if verbose:
                            wikipedia.output(u'* Page ignorée (page de maintenance): [[%s:%s]]' % (namespace, title))
                    else:
                        correctname = True
                    
                    #if verbose:
                    #   if correctname:
                    #       wikipedia.output(u'Correctname')
                    #   else:
                    #       wikipedia.output(u'Incorrectname')
                    if correctname:                    
                        if initlist:
                            processed=processed+1
                            commandLogFile.write(u'* Page potentiellement traitée: [[%s:%s]]\n' % (namespace, title))
                            if verbose:
                                wikipedia.output(u'* Page potentiellement traitée: [[%s:%s]]' % (namespace, title))
                        else:
                            if page.isRedirectPage():
                                commandLogFile.write(u'* Page ignorée (redirect): [[%s:%s]]\n' % (namespace, title))
                                if verbose:
                                    wikipedia.output(u'* Page ignorée (redirect): [[%s:%s]]' % (namespace, title))
                            elif not page.canBeEdited():
                                commandLogFile.write(u'* Page ignorée (protection): [[%s:%s]]\n' % (namespace, title))
                                if verbose:
                                    wikipedia.output(u'* Page ignorée (protection): [[%s:%s]]' % (namespace, title))
                            elif not page.botMayEdit('AlmabotJunior'):
                                commandLogFile.write(u'* Page ignorée (nobot): [[%s:%s]]\n' % (namespace, title))
                                if verbose:
                                    wikipedia.output(u'* Page ignorée (nobot): [[%s:%s]]' % (namespace, title))
                            elif not page.exists() and not fixtitle:
                                movepage(page, commandLogFile, debug, verbose)
                            else:
                                templates=page.templates()
                                correcttemplate = False
                                for template in templates:
                                    templatetitle=template.title()
                                    if verbose:
                                        wikipedia.output(templatetitle)
                                    if (templatetitle==u'Translation/Header'):
                                        correcttemplate = True
                                    elif (templatetitle==u'Translation/Header2'):
                                        correcttemplate = True
                                    elif (templatetitle==u'Traduction/Instructions'):
                                        correcttemplate = True
                                    elif (templatetitle==u'Traduction/Suivi'):
                                        correcttemplate = True
                                if correcttemplate and not fixtitle:
                                    processed=processed+1
                                    movepage(page, commandLogFile, debug, verbose)
                                else:
                                    commandLogFile.write(u'* Page ignorée (pas de modèle): [[%s:%s]]\n' % (namespace, title))
                                    if verbose:
                                        wikipedia.output(u'* Page ignorée (pas de modèle): [[%s:%s]]' % (namespace, title))
                else:
                    commandLogFile.write(u'* Page ignorée (nom incorrect): [[%s:%s]]\n' % (namespace, title))
                    if verbose:
                        wikipedia.output(u'* Page ignorée (nom incorrect): [[%s:%s]]' % (namespace, title))
            else:
                commandLogFile.write(u'* Page ignorée (namespace incorrect): [[%s:%s]]\n' % (namespace, title))
                if verbose:
                    wikipedia.output(u'* Page ignorée (namespace incorrect): [[%s:%s]]' % (namespace, title))
        index=index+1
        if finishindex and (index>=finishindex):
            break
       
    commandLogFile.write(u'%d page(s) au total\n' % total)
    commandLogFile.write(u'%d page(s) en place\n' % allset)
    commandLogFile.write(u'%d page(s) traitées\n' % processed)
    commandLogFile.close()
Example no. 16
        ],
    },
    # These replacements will convert HTML tag from FCK-editor to wiki syntax.
    #
    'fckeditor': {
        'regex':
        True,
        'msg': {
            'en': u'Robot: Fixing rich-editor html',
            'fa': u'ربات: تصحیح اچ‌تی‌ام‌ال ویرایشگر پیشرفته',
        },
        'replacements': [
            # replace <br> with a new line
            (r'(?i)<br>', r'\n'),
            # replace &nbsp; with a space
            (r'(?i)&nbsp;', r' '),
        ],
    },
}

#
# Load the user fixes file.

import config

try:
    exec open(config.datafilepath(config.base_dir, "user-fixes.py"),
              'r').read()
except IOError:
    pass
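
The exec at the end loads an optional user-fixes.py from the base data directory, so users can add their own entries without editing this file. A minimal sketch of such a file, mirroring the structure of the 'fckeditor' entry above; the key name and the replacement pair are illustrative, and the enclosing dictionary is assumed to be named fixes as in pywikibot's fixes.py.

    # user-fixes.py -- hedged sketch; the fix key and replacement are examples only.
    fixes['example-fix'] = {
        'regex': True,
        'msg': {
            'en': u'Robot: Applying a user-defined fix',
        },
        'replacements': [
            # turn <b>...</b> into wiki bold markup
            (r'(?i)<b>(.*?)</b>', r"'''\1'''"),
        ],
    }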
Example no. 17
            await conn.connection.ping()

            bbb=0
            for d in data:
                vindex=int(d["vehicleid"])
                if (bbb==0):
                    bbb=1
                    s = tbl.update().where(tbl.c.vehicleid == vindex)
                    s = s.values(vehiclenumber=vnumber).values(vehiclevincode=vincode)
                    trans=await conn.begin()
                    result = await conn.execute(s)
                    await trans.commit()
                else:
                    s = tbl.delete().where(tbl.c.vehicleid == vindex)
                    trans=await conn.begin()
                    result = await conn.execute(s)
                    await trans.commit()

            print(result.rowcount)


    pass

if __name__ == '__main__':

    ServerParameters.asyncioloop = asyncio.get_event_loop()
    ServerParameters.asyncioloop.run_until_complete(ServerParameters.InitServer())
    print(config.datafilepath())
    ServerParameters.asyncioloop.run_until_complete(testsql())
    ServerParameters.asyncioloop.run_until_complete(ServerParameters.DropServer())
    print("over")
Example no. 18
            (u'www.mfa.gov.yu',              u'www.mfa.gov.rs'),
            (u'www.drzavnauprava.sr.gov.yu', u'www.drzavnauprava.gov.rs'),
        ],
    },
    # These replacements will convert HTML tag from FCK-editor to wiki syntax.
    #
    'fckeditor': {
        'regex': True,
        'msg': {
            'en': u'Robot: Fixing rich-editor html',
            'fa': u'ربات: تصحیح اچ‌تی‌ام‌ال ویرایشگر پیشرفته',
        },
        'replacements': [
            # replace <br> with a new line
            (r'(?i)<br>', r'\n'),
            # replace &nbsp; with a space
            (r'(?i)&nbsp;', r' '),
        ],
    },
}

#
# Load the user fixes file.

import config

try:
    exec open(config.datafilepath(config.base_dir, "user-fixes.py"), 'r').read()
except IOError:
    pass
Example no. 19
    def handle(self, *args, **options):
        # Set nick
        if not options['nick']:
            options['nick'] = raw_input('Nick > ')
        # Set family
        if not options['family']:
            options['family'] = raw_input('Family (default:wikipedia) > ') or 'wikipedia'
        # Set language
        if not options['language']:
            options['language'] = raw_input('Language (default:en) > ') or 'en'
        # Set index URL
        if not options['url']:
            options['url'] = raw_input('Index URL > ')

        # Set active or not
        if options['active'] is None:
            if raw_input('active [Y/n]') != 'n':
                options['active'] = True

        U = Wiki_User(
            nick=options['nick'],
            family=options['family'],
            language=options['language'],
            url=options['url'],
            comment=options['comment'],
            active=options['active'] or False
        )

        # Try to validate family
        try:
            validate_family(options['family'])
        except ValidationError:
            family_file = config.datafilepath('families')+('/%s_family') % options['family']
            logging.error("Your family file '%s' doesn't exist." % family_file)
            if raw_input('Do you want to create family file ? [Y/n] ') != 'n':
                with open(family_file, 'w') as outfile:
                    t = loader.get_template('family.py')
                    c = Context({
                        'opts': options,
                        'scriptpath': raw_input("scriptpath (default:'/wiki/') >") or '/wiki/',
                    })
                    outfile.write(t.render(c))
                    logging.info("Create family file '%s'." % family_file)

        # Try to validate user in DB
        try:
            U.full_clean()  # Test to validate fields
        except ValidationError as e:
            logging.error('Bad value(s) given for fields.')
        else:
            try:
                # Create bot dir
                bot_path = settings.BASEDIR+'/bots-config/'+U.nick+'/'
                mkdir(bot_path)
                logging.info(u"Create folder '%s'" % bot_path)
            except OSError as e:
                logging.info('Bots config file already exists')
            else:
                # Create families symlink from pwikipedia dir
                families_symlink = settings.WIKI['path']+'/families'
                symlink(families_symlink, bot_path+'families')
                logging.info(u"Create file '%s'" % families_symlink)
                # Create families symlink from pwikipedia dir
                userinterfaces_symlink = settings.WIKI['path']+'/userinterfaces'
                symlink(userinterfaces_symlink, bot_path+'userinterfaces')
                logging.info(u"Create file '%s'" % userinterfaces_symlink)

            # Launch pywikipedia's login.py
            if U.active:
                L = LoginManager()
                logging.info(u"User is set as active, trying to login")
                # Find if password file has been configured
                if not config.password_file:
                    logging.warning("Password file has not been configured. \
                      If you want automatic login please set it in '%s/config.py'." % \
                      settings.WIKI['path'])
                else:
                    # Try to see if user is in passwd file
                    user_found = False
                    passwd_file = wikipedia.config.datafilepath(config.password_file)
                    try:
                        with open(passwd_file, 'r') as f:
                            # Search user by syntax: tuple of 2 or 4
                            for line in f.readlines():
                                if not line.strip(): continue
                                entry = eval(line)
                                if len(entry) == 2:
                                    if entry[0] == U.nick: user_found = True
                                elif len(entry) == 4:
                                    if entry[2] == U.nick and \
                                        entry[0] == U.language and \
                                        entry[1] == U.family:
                                            user_found = True
                        if not user_found:
                            # Purpose to create it
                            logging.info(u"User '%s' hasn't a passwd row in '%s'." % (U.nick, passwd_file))
                            if raw_input('Do you want to append it ? [Y/n] ') != 'n':
                                with open(passwd_file, 'a') as f:
                                    password = getpass('Password > ')
                                    line = '\n'+str((U.language, U.family, U.nick, password))
                                    f.write(line)
                    except IOError as e:
                        # Except files not exists and purpose to create
                        logging.warning("File '%s' does not exist" % passwd_file)
                        if raw_input('Do you want to create it ? [Y/n] ') != 'n':
                            with open(passwd_file, 'w') as f:
                                password = getpass('Password > ')
                                line = str((U.language, U.family, U.nick, password))
                                f.write(line)

                    # Try to see if user exists in user-config
                    user_found = False
                    user_file = wikipedia.config.datafilepath('user-config.py')
                    REG_USER_LINE = re.compile("usernames\['(.*)'\]\['(.*)'\] = u?'(.*)'")
                    try:
                        with open(user_file, 'r') as f:
                            for line in f.readlines():
                                if REG_USER_LINE.match(line):
                                    family, lang, nick = REG_USER_LINE.sub(r'\1 \2 \3', line).split()
                                    if family == U.family and lang == U.language and nick == U.nick:
                                        user_found = True
                                        break

                    except IOError as e:
                        # If file doesn't exist create it.
                        logging.warning("File '%s' does not exist" % user_file)
                        with open(user_file, 'w') as f:
                            f.write("# -*- coding: utf-8  -*-")
                            logging.warning("Create file '%s'" % user_file)
                    finally:
                        if user_found:
                            logging.info(u"User '%s' has a row in '%s'." % (U.nick, user_file))
                        else:
                            logging.info(u"User '%s' hasn't a row in '%s'." % (U.nick, user_file))
                            # Ask for add line
                            if raw_input('Do you want to append user line ? [Y/n] ') != 'n':
                                with open(user_file, 'a') as f:
                                    user_line = "\nusernames['%s']['%s'] = u'%s'" % (U.family, U.language, U.nick)
                                    f.write(user_line)
                                    # Ask for add sysops line
                                    if raw_input('Is user sysops ? [N/y] ') == 'y':
                                        sys_line = "sysopnames['%s']['%s'] = u'%s'" % (U.family, U.language, U.nick)
                                        f.write(sys_line)

                # Launch login script
                L.readPassword()
                is_logged = L.login()

        U.save()
        logging.info(u"Create user '%s' in Db" % U.nick)
Example no. 20
def main():
    # Open logfile
    commandLogFilename = config.datafilepath('logs', 'translation_move.log')
    try:
        commandLogFile = codecs.open(commandLogFilename, 'a', 'utf-8')
    except IOError:
        commandLogFile = codecs.open(commandLogFilename, 'w', 'utf-8')

    site = wikipedia.getSite()

    categlang = False
    categproj = False
    initlist = False
    debug = 0
    debugwrite = False
    startindex = None
    finishindex = None
    finalverif = None
    verbose = False
    pagename = None
    fixlink = False
    fixtitle = False

    for arg in wikipedia.handleArgs():
        if arg.startswith('-categlang'):
            categlang = True
            if arg.startswith('-categlang:'):
                parts = re.split(u':', arg)
                subparts = re.split(u'-', parts[1])
                startindex = int(subparts[0])
                finishindex = int(subparts[1])
        elif arg.startswith('-categproj'):
            categproj = True
        elif arg.startswith('-sublist'):
            if arg.startswith('-sublist:'):
                parts = re.split(u':', arg)
                subparts = re.split(u'-', parts[1])
                startindex = int(subparts[0])
                finishindex = int(subparts[1])
        elif arg.startswith('-initlist'):
            initlist = True
            categ = True
        elif arg.startswith('-finalverif'):
            finalverif = True
        elif arg.startswith('-debugw'):
            debug = 2
        elif arg.startswith('-debug'):
            debug = 1
        elif arg.startswith('-verb'):
            verbose = True
        elif arg.startswith('-fixlink'):
            fixlink = True
        elif arg.startswith('-fixtitle'):
            fixtitle = True
        elif arg.startswith('-page:'):
            parts = re.split(u':', arg, 1)
            pagename = parts[1]
        else:
            wikipedia.output(
                u'Syntax: translation_move.py [-categ[:[start]-[finish]]] [-initlist] [-debug] [-finalverif]'
            )
            exit()

    # Get category

    if not debug == 0:
        artlist = list()

        artlist.append(wikipedia.Page(site, u'Projet:Traduction/Jun Kazama'))
        #artlist.append(wikipedia.Page(site, u'Discussion:Train Protection & Warning System/Traduction'))
        #artlist.append(wikipedia.Page(site, u'Discussion:Cathédrale d\'Espoo/Traduction '))

        #catname = u'Catégorie:Traduction du Projet Architecture chrétienne'
        #categ=catlib.Category(site, catname)
        #artlist=categ.articlesList(True)
        #artlist=fullartlist[0:150]
        translationGenerator = iter(artlist)
        #translationGenerator=site.prefixindex(u'Traduction/', 102)
        #artlist=list(translationGenerator)
        commandLogFile.write(u'== Traitement de %d article(s) ==\n' %
                             len(artlist))
        if verbose:
            wikipedia.output(u'== Traitement de %d article(s) ==' %
                             len(artlist))
    elif categlang:
        catname = u'Traduction par langue'
        categ = catlib.Category(site, catname)
        translationGenerator = categ.articles(True)
    elif categproj:
        catname = u'Traduction par projet'
        categ = catlib.Category(site, catname)
        translationGenerator = categ.articles(True)
    elif pagename:
        artlist = list()
        artlist.append(wikipedia.Page(site, pagename))
        translationGenerator = iter(artlist)
    elif fixlink:
        artlist = get_moved_pages(site, fixtitle)
        translationGenerator = iter(artlist)
    elif fixtitle:
        catname = u'Catégorie:Page de traduction mal paramétrée'
        categ = catlib.Category(site, catname)
        translationGenerator = categ.articles(True)
    else:
        translationGenerator = site.prefixindex(u'Traduction/', 102)

    allset = 0
    processed = 0
    total = 0

    translationPreloadingGen = pagegenerators.PreloadingGenerator(
        translationGenerator, 60)

    index = 0
    for page in translationPreloadingGen:
        if (not startindex) or (index >= startindex):
            total = total + 1
            namespace = site.namespace(page.namespace())
            title = page.titleWithoutNamespace()
            if verbose:
                wikipedia.output(u'Processing : %s in %s' % (title, namespace))
            if namespace == 'Discussion':
                if re.search(u'/Traduction', title):
                    if fixtitle:
                        fix_title(page, commandLogFile, debug, verbose)
                    elif not finalverif:
                        commandLogFile.write(
                            u'* Page déjà renommée: [[%s:%s]]\n' %
                            (namespace, title))
                        if verbose:
                            wikipedia.output(
                                u'* Page déjà renommée: [[%s:%s]]' %
                                (namespace, title))
                    allset = allset + 1
                else:
                    commandLogFile.write(
                        u'* Page ignorée (namespace incorrect): [[%s:%s]]\n' %
                        (namespace, title))
                    if verbose:
                        wikipedia.output(
                            u'* Page ignorée (namespace incorrect): [[%s:%s]]'
                            % (namespace, title))
            elif namespace == u'Projet':
                if re.search(u'Traduction', title):
                    shorttitle = re.sub(u'Traduction/', u'/', title)
                    correctname = False
                    if re.search(u':', shorttitle):
                        if verbose:
                            wikipedia.output(shorttitle + u' has :')
                        parts = re.split(u':', shorttitle)
                        if verbose:
                            wikipedia.output(parts[0][1:])
                        subnamespaceidx = site.getNamespaceIndex(parts[0][1:])
                        if subnamespaceidx == None or subnamespaceidx == 100:
                            # Test against old namespaces
                            if parts[0][1:] == 'Discussion':
                                commandLogFile.write(
                                    u'* Page ignorée (namespace lié incorrect): [[%s:%s]]\n'
                                    % (namespace, title))
                                if verbose:
                                    wikipedia.output(
                                        u'* Page ignorée (namespace lié incorrect): [[%s:%s]]'
                                        % (namespace, title))
                            else:
                                correctname = True
                        else:
                            commandLogFile.write(
                                u'* Page ignorée (namespace lié incorrect): [[%s:%s]]\n'
                                % (namespace, title))
                            if verbose:
                                wikipedia.output(
                                    u'* Page ignorée (namespace lié incorrect): [[%s:%s]]'
                                    % (namespace, title))
                    elif re.search(u'\*', shorttitle):
                        commandLogFile.write(
                            u'* Page ignorée (page de maintenance): [[%s:%s]]\n'
                            % (namespace, title))
                        if verbose:
                            wikipedia.output(
                                u'* Page ignorée (page de maintenance): [[%s:%s]]'
                                % (namespace, title))
                    else:
                        correctname = True

                    #if verbose:
                    #   if correctname:
                    #       wikipedia.output(u'Correctname')
                    #   else:
                    #       wikipedia.output(u'Incorrectname')
                    if correctname:
                        if initlist:
                            processed = processed + 1
                            commandLogFile.write(
                                u'* Page potentiellement traitée: [[%s:%s]]\n'
                                % (namespace, title))
                            if verbose:
                                wikipedia.output(
                                    u'* Page potentiellement traitée: [[%s:%s]]'
                                    % (namespace, title))
                        else:
                            if page.isRedirectPage():
                                commandLogFile.write(
                                    u'* Page ignorée (redirect): [[%s:%s]]\n' %
                                    (namespace, title))
                                if verbose:
                                    wikipedia.output(
                                        u'* Page ignorée (redirect): [[%s:%s]]'
                                        % (namespace, title))
                            elif not page.canBeEdited():
                                commandLogFile.write(
                                    u'* Page ignorée (protection): [[%s:%s]]\n'
                                    % (namespace, title))
                                if verbose:
                                    wikipedia.output(
                                        u'* Page ignorée (protection): [[%s:%s]]'
                                        % (namespace, title))
                            elif not page.botMayEdit('AlmabotJunior'):
                                commandLogFile.write(
                                    u'* Page ignorée (nobot): [[%s:%s]]\n' %
                                    (namespace, title))
                                if verbose:
                                    wikipedia.output(
                                        u'* Page ignorée (nobot): [[%s:%s]]' %
                                        (namespace, title))
                            elif not page.exists() and not fixtitle:
                                movepage(page, commandLogFile, debug, verbose)
                            else:
                                templates = page.templates()
                                correcttemplate = False
                                for template in templates:
                                    templatetitle = template.title()
                                    if verbose:
                                        wikipedia.output(templatetitle)
                                    if (templatetitle == u'Translation/Header'
                                        ):
                                        correcttemplate = True
                                    elif (templatetitle ==
                                          u'Translation/Header2'):
                                        correcttemplate = True
                                    elif (templatetitle ==
                                          u'Traduction/Instructions'):
                                        correcttemplate = True
                                    elif (templatetitle == u'Traduction/Suivi'
                                          ):
                                        correcttemplate = True
                                if correcttemplate and not fixtitle:
                                    processed = processed + 1
                                    movepage(page, commandLogFile, debug,
                                             verbose)
                                else:
                                    commandLogFile.write(
                                        u'* Page ignorée (pas de modèle): [[%s:%s]]\n'
                                        % (namespace, title))
                                    if verbose:
                                        wikipedia.output(
                                            u'* Page ignorée (pas de modèle): [[%s:%s]]'
                                            % (namespace, title))
                else:
                    commandLogFile.write(
                        u'* Page ignorée (nom incorrect): [[%s:%s]]\n' %
                        (namespace, title))
                    if verbose:
                        wikipedia.output(
                            u'* Page ignorée (nom incorrect): [[%s:%s]]' %
                            (namespace, title))
            else:
                commandLogFile.write(
                    u'* Page ignorée (namespace incorrect): [[%s:%s]]\n' %
                    (namespace, title))
                if verbose:
                    wikipedia.output(
                        u'* Page ignorée (namespace incorrect): [[%s:%s]]' %
                        (namespace, title))
        index = index + 1
        if finishindex and (index >= finishindex):
            break

    commandLogFile.write(u'%d page(s) au total\n' % total)
    commandLogFile.write(u'%d page(s) en place\n' % allset)
    commandLogFile.write(u'%d page(s) traitées\n' % processed)
    commandLogFile.close()
Example no. 21
            (u'www.mfa.gov.yu',              u'www.mfa.gov.rs'),
            (u'www.drzavnauprava.sr.gov.yu', u'www.drzavnauprava.gov.rs'),
        ],
    },
    # These replacements will convert HTML tag from FCK-editor to wiki syntax.
    #
    'fckeditor': {
        'regex': True,
        'msg': {
            'en': u'Robot: Fixing rich-editor html',
            'fa': u'ربات: تصحیح اچ‌تی‌ام‌ال ویرایشگر پیشرفته',
         },
         'replacements': [
            # replace <br> with a new line
            (r'(?i)<br>',                      r'\n'),
            # replace &nbsp; with a space
            (r'(?i)&nbsp;',                      r' '),
        ],
    },
}

#
# Load the user fixes file.

import config

try:
    execfile(config.datafilepath(config.base_dir, "user-fixes.py"))
except IOError:
    pass
Example no. 22
            (u'www.drzavnauprava.sr.gov.yu', u'www.drzavnauprava.gov.rs'),
        ],
    },
    # These replacements will convert HTML tag from FCK-editor to wiki syntax.
    #
    'fckeditor': {
        'regex':
        True,
        'msg': {
            'en': u'Robot: Fixing rich-editor html',
            'fa': u'ربات: تصحیح اچ‌تی‌ام‌ال ویرایشگر پیشرفته',
        },
        'replacements': [
            # replace <br> with a new line
            (r'(?i)<br>', r'\n'),
            # replace &nbsp; with a space
            (r'(?i)&nbsp;', r' '),
        ],
    },
}

#
# Load the user fixes file.

import config

try:
    execfile(config.datafilepath(config.base_dir, "user-fixes.py"))
except IOError:
    pass