Example #1
    def rename_file(self, ofilename, issue=None, annualize=None, arc=False, file_format=None): #comicname, issue, comicyear=None, issueid=None)
            comicid = self.comicid   # it's coming in unicoded...
            issueid = self.issueid

            if file_format is None:
                file_format = mylar.CONFIG.FILE_FORMAT

            logger.fdebug(type(comicid))
            logger.fdebug(type(issueid))
            logger.fdebug('comicid: %s' % comicid)
            logger.fdebug('issue# as per cv: %s' % issue)
            logger.fdebug('issueid:' + str(issueid))

            if issueid is None:
                logger.fdebug('annualize is ' + str(annualize))
                if arc:
                    #this has to be adjusted to be able to include story arc issues that span multiple arcs
                    chkissue = self.myDB.selectone("SELECT * from storyarcs WHERE ComicID=? AND Issue_Number=?", [comicid, issue]).fetchone()
                else:
                    chkissue = self.myDB.selectone("SELECT * from issues WHERE ComicID=? AND Issue_Number=?", [comicid, issue]).fetchone()
                    if all([chkissue is None, annualize is None, not mylar.CONFIG.ANNUALS_ON]):
                        chkissue = self.myDB.selectone("SELECT * from annuals WHERE ComicID=? AND Issue_Number=?", [comicid, issue]).fetchone()

                if chkissue is None:
                    #rechk chkissue against int value of issue #
                    if arc:
                        chkissue = self.myDB.selectone("SELECT * from storyarcs WHERE ComicID=? AND Int_IssueNumber=?", [comicid, issuedigits(issue)]).fetchone()
                    else:
                        chkissue = self.myDB.selectone("SELECT * from issues WHERE ComicID=? AND Int_IssueNumber=?", [comicid, issuedigits(issue)]).fetchone()
                        if all([chkissue is None, annualize == 'yes', mylar.CONFIG.ANNUALS_ON]):
                            chkissue = self.myDB.selectone("SELECT * from annuals WHERE ComicID=? AND Int_IssueNumber=?", [comicid, issuedigits(issue)]).fetchone()

                    if chkissue is None:
                        logger.error('Invalid Issue_Number - please validate.')
                        return
                    else:
                        logger.info('Int Issue_number compare found. continuing...')
                        issueid = chkissue['IssueID']
                else:
                    issueid = chkissue['IssueID']

            #use issueid to get publisher, series, year, issue number
            logger.fdebug('issueid is now : ' + str(issueid))
            if arc:
                issueinfo = self.myDB.selectone("SELECT * from storyarcs WHERE ComicID=? AND IssueID=? AND StoryArc=?", [comicid, issueid, arc]).fetchone()
            else:
                issueinfo = self.myDB.selectone("SELECT * from issues WHERE ComicID=? AND IssueID=?", [comicid, issueid]).fetchone()
                if issueinfo is None:
                    logger.fdebug('not an issue, checking against annuals')
                    issueinfo = self.myDB.selectone("SELECT * from annuals WHERE ComicID=? AND IssueID=?", [comicid, issueid]).fetchone()
                    if issueinfo is None:
                        logger.fdebug('Unable to rename - cannot locate issue id within db')
                        return
                    else:
                        annualize = True

            if issueinfo is None:
                logger.fdebug('Unable to rename - cannot locate issue id within db')
                return

            #remap the variables to a common factor.
            if arc:
                issuenum = issueinfo['IssueNumber']
                issuedate = issueinfo['IssueDate']
                publisher = issueinfo['IssuePublisher']
                series = issueinfo['ComicName']
                seriesfilename = series   #Alternate FileNaming is not available with story arcs.
                seriesyear = issueinfo['SeriesYear']
                arcdir = helpers.filesafe(issueinfo['StoryArc'])
                if mylar.CONFIG.REPLACE_SPACES:
                    arcdir = arcdir.replace(' ', mylar.CONFIG.REPLACE_CHAR)
                if mylar.CONFIG.STORYARCDIR:
                    storyarcd = os.path.join(mylar.CONFIG.DESTINATION_DIR, "StoryArcs", arcdir)
                    logger.fdebug('Story Arc Directory set to : ' + storyarcd)
                else:
                    logger.fdebug('Story Arc Directory set to : ' + mylar.CONFIG.GRABBAG_DIR)
                    storyarcd = os.path.join(mylar.CONFIG.DESTINATION_DIR, mylar.CONFIG.GRABBAG_DIR)

                comlocation = storyarcd
                comversion = None   #need to populate this.

            else:
                issuenum = issueinfo['Issue_Number']
                issuedate = issueinfo['IssueDate']
                publisher = self.comic['ComicPublisher']
                series = self.comic['ComicName']
                if self.comic['AlternateFileName'] is None or self.comic['AlternateFileName'] == 'None':
                    seriesfilename = series
                else:
                    seriesfilename = self.comic['AlternateFileName']
                    logger.fdebug('Alternate File Naming has been enabled for this series. Will rename series title to : ' + seriesfilename)
                seriesyear = self.comic['ComicYear']
                comlocation = self.comic['ComicLocation']
                comversion = self.comic['ComicVersion']

            unicodeissue = issuenum

            if type(issuenum) == str:
               vals = {'\xbd':'.5','\xbc':'.25','\xbe':'.75','\u221e':'9999999999','\xe2':'9999999999'}
            else:
               vals = {'\xbd':'.5','\xbc':'.25','\xbe':'.75','\\u221e':'9999999999','\xe2':'9999999999'}
            x = [vals[key] for key in vals if key in issuenum]
            if x:
                issuenum = x[0]
                logger.fdebug('issue number formatted: %s' % issuenum)

            #comicid = issueinfo['ComicID']
            #issueno = str(issuenum).split('.')[0]
            issue_except = 'None'
            issue_exceptions = ['AU',
                                'INH',
                                'NOW',
                                'AI',
                                'MU',
                                'A',
                                'B',
                                'C',
                                'X',
                                'O']
            valid_spaces = ('.', '-')
            for issexcept in issue_exceptions:
                if issexcept.lower() in issuenum.lower():
                    logger.fdebug('ALPHANUMERIC EXCEPTION : [' + issexcept + ']')
                    v_chk = [v for v in valid_spaces if v in issuenum]
                    if v_chk:
                        iss_space = v_chk[0]
                        logger.fdebug('character space denoted as : ' + iss_space)
                    else:
                        logger.fdebug('character space not denoted.')
                        iss_space = ''
#                    if issexcept == 'INH':
#                       issue_except = '.INH'
                    if issexcept == 'NOW':
                       if '!' in issuenum: issuenum = re.sub('\!', '', issuenum)
#                       issue_except = '.NOW'

                    issue_except = iss_space + issexcept
                    logger.fdebug('issue_except denoted as : ' + issue_except)
                    issuenum = re.sub("[^0-9]", "", issuenum)
                    break
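            # For illustration, tracing the exception handling above:
            #   '27AU'   -> issue_except 'AU',  issuenum '27'
            #   '1.MU'   -> issue_except '.MU', issuenum '1'   ('.' kept as the spacing character)
            #   '13NOW!' -> '!' stripped, issue_except 'NOW', issuenum '13'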

#            if 'au' in issuenum.lower() and issuenum[:1].isdigit():
#                issue_except = ' AU'
#            elif 'ai' in issuenum.lower() and issuenum[:1].isdigit():
#                issuenum = re.sub("[^0-9]", "", issuenum)
#                issue_except = ' AI'
#            elif 'inh' in issuenum.lower() and issuenum[:1].isdigit():
#                issuenum = re.sub("[^0-9]", "", issuenum)
#                issue_except = '.INH'
#            elif 'now' in issuenum.lower() and issuenum[:1].isdigit():
#                if '!' in issuenum: issuenum = re.sub('\!', '', issuenum)
#                issuenum = re.sub("[^0-9]", "", issuenum)
#                issue_except = '.NOW'
            if '.' in issuenum:
                iss_find = issuenum.find('.')
                iss_b4dec = issuenum[:iss_find]
                if iss_find == 0:
                    iss_b4dec = '0'
                iss_decval = issuenum[iss_find +1:]
                if iss_decval.endswith('.'):
                    iss_decval = iss_decval[:-1]
                if int(iss_decval) == 0:
                    iss = iss_b4dec
                    issdec = int(iss_decval)
                    issueno = iss
                else:
                    if len(iss_decval) == 1:
                        iss = iss_b4dec + "." + iss_decval
                        issdec = int(iss_decval) * 10
                    else:
                        iss = iss_b4dec + "." + iss_decval.rstrip('0')
                        issdec = int(iss_decval.rstrip('0')) * 10
                    issueno = iss_b4dec
            else:
                iss = issuenum
                issueno = iss
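            # For illustration, tracing the decimal handling above:
            #   issuenum '12.50' -> iss '12.5', issueno '12'
            #   issuenum '13.0'  -> iss '13',   issueno '13'
            #   issuenum '7'     -> iss '7',    issueno '7'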
            # issue zero-suppression here
            if mylar.CONFIG.ZERO_LEVEL == "0":
                zeroadd = ""
            else:
                if mylar.CONFIG.ZERO_LEVEL_N  == "none": zeroadd = ""
                elif mylar.CONFIG.ZERO_LEVEL_N == "0x": zeroadd = "0"
                elif mylar.CONFIG.ZERO_LEVEL_N == "00x": zeroadd = "00"
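            # For illustration: issue '1' becomes '1' with ZERO_LEVEL_N 'none', '01' with '0x',
            # and '001' with '00x'; for issues 10-99 both '0x' and '00x' add a single leading zero (e.g. '012').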

            logger.fdebug('Zero Suppression set to : ' + str(mylar.CONFIG.ZERO_LEVEL_N))
            prettycomiss = None

            if issueno.isalpha():
                logger.fdebug('issue detected as an alpha.')
                prettycomiss = str(issueno)
            else:
                try:
                    x = float(issuenum)
                    #validity check
                    if x < 0:
                        logger.info('I\'ve encountered a negative issue #: %s. Trying to accommodate.' % issueno)
                        prettycomiss = '-' + str(zeroadd) + str(issueno[1:])
                    elif x == 9999999999:
                        logger.fdebug('Infinity issue found.')
                        issuenum = 'infinity'
                    elif x >= 0:
                        pass
                    else:
                        raise ValueError
                except ValueError as e:
                    logger.warn('Unable to properly determine issue number [ %s] - you should probably log this on github for help.' % issueno)
                    return

            if prettycomiss is None and len(str(issueno)) > 0:
                #if int(issueno) < 0:
                #    self._log("issue detected is a negative")
                #    prettycomiss = '-' + str(zeroadd) + str(abs(issueno))
                if int(issueno) < 10:
                    logger.fdebug('issue detected less than 10')
                    if '.' in iss:
                        if int(iss_decval) > 0:
                            issueno = str(iss)
                            prettycomiss = str(zeroadd) + str(iss)
                        else:
                            prettycomiss = str(zeroadd) + str(int(issueno))
                    else:
                        prettycomiss = str(zeroadd) + str(iss)
                    if issue_except != 'None':
                        prettycomiss = str(prettycomiss) + issue_except
                    logger.fdebug('Zero level supplement set to ' + str(mylar.CONFIG.ZERO_LEVEL_N) + '. Issue will be set as : ' + str(prettycomiss))
                elif int(issueno) >= 10 and int(issueno) < 100:
                    logger.fdebug('issue detected greater than 10, but less than 100')
                    if mylar.CONFIG.ZERO_LEVEL_N == "none":
                        zeroadd = ""
                    else:
                        zeroadd = "0"
                    if '.' in iss:
                        if int(iss_decval) > 0:
                            issueno = str(iss)
                            prettycomiss = str(zeroadd) + str(iss)
                        else:
                            prettycomiss = str(zeroadd) + str(int(issueno))
                    else:
                        prettycomiss = str(zeroadd) + str(iss)
                    if issue_except != 'None':
                        prettycomiss = str(prettycomiss) + issue_except
                    logger.fdebug('Zero level supplement set to ' + str(mylar.CONFIG.ZERO_LEVEL_N) + '. Issue will be set as : ' + str(prettycomiss))
                else:
                    logger.fdebug('issue detected greater than 100')
                    if issuenum == 'infinity':
                        prettycomiss = 'infinity'
                    else:
                        if '.' in iss:
                            if int(iss_decval) > 0:
                                issueno = str(iss)
                        prettycomiss = str(issueno)
                    if issue_except != 'None':
                        prettycomiss = str(prettycomiss) + issue_except
                    logger.fdebug('Zero level supplement set to ' + str(mylar.CONFIG.ZERO_LEVEL_N) + '. Issue will be set as : ' + str(prettycomiss))
            elif len(str(issueno)) == 0:
                prettycomiss = str(issueno)
                logger.fdebug('issue length error - cannot determine length. Defaulting to None:  ' + str(prettycomiss))

            logger.fdebug('Pretty Comic Issue is : ' + str(prettycomiss))
            if mylar.CONFIG.UNICODE_ISSUENUMBER:
                logger.fdebug('Setting this to Unicode format as requested: %s' % prettycomiss)
                prettycomiss = unicodeissue

            issueyear = issuedate[:4]
            month = issuedate[5:7].replace('-', '').strip()
            month_name = helpers.fullmonth(month)
            if month_name is None:
                month_name = 'None'
            logger.fdebug('Issue Year : ' + str(issueyear))
            logger.fdebug('Publisher: ' + publisher)
            logger.fdebug('Series: ' + series)
            logger.fdebug('Year: '  + str(seriesyear))
            logger.fdebug('Comic Location: ' + comlocation)

            if self.comic['Corrected_Type'] is not None:
                if self.comic['Type'] != self.comic['Corrected_Type']:
                    booktype = self.comic['Corrected_Type']
                else:
                    booktype = self.comic['Type']
            else:
                booktype = self.comic['Type']

            if booktype == 'Print' or all([booktype != 'Print', mylar.CONFIG.FORMAT_BOOKTYPE is False]):
                chunk_fb = re.sub('\$Type', '', file_format)
                chunk_b = re.compile(r'\s+')
                chunk_file_format = chunk_b.sub(' ', chunk_fb)
            else:
                chunk_file_format = file_format

            if any([comversion is None, booktype != 'Print']):
                comversion = 'None'

            #if comversion is None, remove it so it doesn't populate with 'None'
            if comversion == 'None':
                chunk_f_f = re.sub('\$VolumeN', '', chunk_file_format)
                chunk_f = re.compile(r'\s+')
                chunk_file_format = chunk_f.sub(' ', chunk_f_f)
                logger.fdebug('No version # found for series, removing from filename')
                logger.fdebug("new format: " + str(chunk_file_format))

            if annualize is None:
                chunk_f_f = re.sub('\$Annual', '', chunk_file_format)
                chunk_f = re.compile(r'\s+')
                chunk_file_format = chunk_f.sub(' ', chunk_f_f)
                logger.fdebug('not an annual - removing from filename parameters')
                logger.fdebug('new format: ' + str(chunk_file_format))

            else:
                logger.fdebug('chunk_file_format is: ' + str(chunk_file_format))
                if mylar.CONFIG.ANNUALS_ON:
                    if 'annual' in series.lower():
                        if '$Annual' not in chunk_file_format: # and 'annual' not in ofilename.lower():
                        #if it's an annual, but $annual isn't specified in file_format, we need to
                        #force it in there, by default in the format of $Annual $Issue
                            #prettycomiss = "Annual " + str(prettycomiss)
                            logger.fdebug('[%s][ANNUALS-ON][ANNUAL IN SERIES][NO ANNUAL FORMAT] prettycomiss: %s' % (series, prettycomiss))
                        else:
                            #because it exists within title, strip it then use formatting tag for placement of wording.
                            chunk_f_f = re.sub('\$Annual', '', chunk_file_format)
                            chunk_f = re.compile(r'\s+')
                            chunk_file_format = chunk_f.sub(' ', chunk_f_f)
                            logger.fdebug('[%s][ANNUALS-ON][ANNUAL IN SERIES][ANNUAL FORMAT] prettycomiss: %s' % (series, prettycomiss))
                    else:
                        if '$Annual' not in chunk_file_format: # and 'annual' not in ofilename.lower():
                        #if it's an annual, but $annual isn't specified in file_format, we need to
                        #force it in there, by default in the format of $Annual $Issue
                            prettycomiss = "Annual %s" % prettycomiss
                            logger.fdebug('[%s][ANNUALS-ON][ANNUAL NOT IN SERIES][NO ANNUAL FORMAT] prettycomiss: %s' % (series, prettycomiss))
                        else:
                            logger.fdebug('[%s][ANNUALS-ON][ANNUAL NOT IN SERIES][ANNUAL FORMAT] prettycomiss: %s' % (series, prettycomiss))

                else:
                    #if annuals aren't enabled, then annuals are being tracked as independent series.
                    #annualize will be true since it's an annual in the seriesname.
                    if 'annual' in series.lower():
                        if '$Annual' not in chunk_file_format: # and 'annual' not in ofilename.lower():
                        #if it's an annual, but $annual isn't specified in file_format, we need to
                        #force it in there, by default in the format of $Annual $Issue
                            #prettycomiss = "Annual " + str(prettycomiss)
                            logger.fdebug('[%s][ANNUALS-OFF][ANNUAL IN SERIES][NO ANNUAL FORMAT] prettycomiss: %s' % (series, prettycomiss))
                        else:
                            #because it exists within title, strip it then use formatting tag for placement of wording.
                            chunk_f_f = re.sub('\$Annual', '', chunk_file_format)
                            chunk_f = re.compile(r'\s+')
                            chunk_file_format = chunk_f.sub(' ', chunk_f_f)
                            logger.fdebug('[%s][ANNUALS-OFF][ANNUAL IN SERIES][ANNUAL FORMAT] prettycomiss: %s' % (series, prettycomiss))
                    else:
                        if '$Annual' not in chunk_file_format: # and 'annual' not in ofilename.lower():
                            #if it's an annual, but $annual isn't specified in file_format, we need to
                            #force it in there, by default in the format of $Annual $Issue
                            prettycomiss = "Annual %s" % prettycomiss
                            logger.fdebug('[%s][ANNUALS-OFF][ANNUAL NOT IN SERIES][NO ANNUAL FORMAT] prettycomiss: %s' % (series, prettycomiss))
                        else:
                            logger.fdebug('[%s][ANNUALS-OFF][ANNUAL NOT IN SERIES][ANNUAL FORMAT] prettycomiss: %s' % (series, prettycomiss))


                    logger.fdebug('Annual detected within series title of ' + series + '. Not auto-correcting issue #')

            seriesfilename = seriesfilename #.encode('ascii', 'ignore').strip()
            filebad = [':', ',', '/', '?', '!', '\'', '\"', '*'] #in u_comicname or '/' in u_comicname or ',' in u_comicname or '?' in u_comicname:
            for dbd in filebad:
                if dbd in seriesfilename:
                    if any([dbd == '/', dbd == '*']): 
                        repthechar = '-'
                    else:
                        repthechar = ''
                    seriesfilename = seriesfilename.replace(dbd, repthechar)
                    logger.fdebug('Altering series name due to filenaming restrictions: ' + seriesfilename)

            publisher = re.sub('!', '', publisher)

            file_values = {'$Series':    seriesfilename,
                           '$Issue':     prettycomiss,
                           '$Year':      issueyear,
                           '$series':    series.lower(),
                           '$Publisher': publisher,
                           '$publisher': publisher.lower(),
                           '$VolumeY':   'V' + str(seriesyear),
                           '$VolumeN':   comversion,
                           '$monthname': month_name,
                           '$month':     month,
                           '$Annual':    'Annual',
                           '$Type':      booktype
                          }
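            # For illustration (assuming helpers.replace_all does straight token substitution):
            # a file_format of '$Series $Issue ($Year)' with values for Batman #12 from 2019 and
            # ZERO_LEVEL_N '0x' would come out as 'Batman 012 (2019)'.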

            extensions = ('.cbr', '.cbz', '.cb7')

            if ofilename.lower().endswith(extensions):
                path, ext = os.path.splitext(ofilename)
            else:
                ext = ''

            if file_format == '':
                logger.fdebug('Rename Files is not enabled - keeping original filename.')
                #check if extension is in nzb_name - will screw up otherwise
                if ofilename.lower().endswith(extensions):
                    nfilename = ofilename[:-4]
                else:
                    nfilename = ofilename
            else:
                chunk_file_format = re.sub('[()|[]]', '', chunk_file_format).strip()
                nfilename = helpers.replace_all(chunk_file_format, file_values)
                if mylar.CONFIG.REPLACE_SPACES:
                    #mylar.CONFIG.REPLACE_CHAR ...determines what to replace spaces with underscore or dot
                    nfilename = nfilename.replace(' ', mylar.CONFIG.REPLACE_CHAR)

            nfilename = re.sub('[\,\:]', '', nfilename) + ext.lower()
            logger.fdebug('New Filename: ' + nfilename)

            if mylar.CONFIG.LOWERCASE_FILENAMES:
                nfilename = nfilename.lower()
                dst = os.path.join(comlocation, nfilename)
            else:
                dst = os.path.join(comlocation, nfilename)

            logger.fdebug('Source: ' + ofilename)
            logger.fdebug('Destination: ' + dst)

            rename_this = {"destination_dir": dst,
                           "nfilename": nfilename,
                           "issueid": issueid,
                           "comicid": comicid}

            return rename_this
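A minimal usage sketch for rename_file. Hedged: the enclosing class, its constructor and the sample filename are assumptions (the excerpt only shows that the object carries self.comicid, self.issueid and self.comic); the returned dictionary keys are taken from the code above.

    # hypothetical illustration only - the class name and constructor are assumed, not shown in this excerpt
    handler = FileHandlers(ComicID='1234', IssueID='5678')
    result = handler.rename_file('Some Series 001 (2019).cbz')
    if result is not None:
        # the caller performs the actual move/rename with the returned pieces
        new_name = result['nfilename']           # filename rebuilt from FILE_FORMAT
        destination = result['destination_dir']  # comic folder joined with the new filename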
Example #2
    def folder_create(self, booktype=None):
        # a dictionary called comic needs to be passed in with {'ComicPublisher', 'Corrected_Type', 'Type', 'ComicYear', 'ComicName', 'ComicVersion'}
        # or pass in comicid value from __init__

        # setup default location here
        u_comicnm = self.comic['ComicName']
        # let's remove the non-standard characters here that will break filenaming / searching.
        comicname_filesafe = helpers.filesafe(u_comicnm)
        comicdir = comicname_filesafe

        series = comicdir
        if series[-1:] == '.':
            series = series[:-1]

        publisher = re.sub('!', '', self.comic['ComicPublisher']) # thanks Boom!
        publisher = helpers.filesafe(publisher)

        if booktype is not None:
            if self.comic['Corrected_Type'] is not None:
                booktype = self.comic['Corrected_Type']
            else:
                booktype = booktype
        else:
            booktype = self.comic['Type']

        if any([booktype is None, booktype == 'None', booktype == 'Print']) or all([booktype != 'Print', mylar.CONFIG.FORMAT_BOOKTYPE is False]):
            chunk_fb = re.sub('\$Type', '', mylar.CONFIG.FOLDER_FORMAT)
            chunk_b = re.compile(r'\s+')
            chunk_folder_format = chunk_b.sub(' ', chunk_fb)
        else:
            chunk_folder_format = mylar.CONFIG.FOLDER_FORMAT

        if any([self.comic['ComicVersion'] is None, booktype != 'Print']):
            comicVol = 'None'
        else:
            comicVol = self.comic['ComicVersion']

        #if comversion is None, remove it so it doesn't populate with 'None'
        if comicVol == 'None':
            chunk_f_f = re.sub('\$VolumeN', '', chunk_folder_format)
            chunk_f = re.compile(r'\s+')
            chunk_folder_format = chunk_f.sub(' ', chunk_f_f)
            logger.fdebug('No version # found for series, removing from folder format')
            logger.fdebug("new folder format: " + str(chunk_folder_format))

        #do work to generate folder path
        values = {'$Series':        series,
                  '$Publisher':     publisher,
                  '$Year':          self.comic['ComicYear'],
                  '$series':        series.lower(),
                  '$publisher':     publisher.lower(),
                  '$VolumeY':       'V' + self.comic['ComicYear'],
                  '$VolumeN':       comicVol.upper(),
                  '$Annual':        'Annual',
                  '$Type':          booktype
                  }
        try:
            if mylar.CONFIG.FOLDER_FORMAT == '':
                comlocation = os.path.join(mylar.CONFIG.DESTINATION_DIR, comicdir, " (" + self.comic['ComicYear'] + ")")
            else:
                chunk_folder_format = re.sub('[()|[]]', '', chunk_folder_format).strip()
                comlocation = os.path.join(mylar.CONFIG.DESTINATION_DIR, helpers.replace_all(chunk_folder_format, values))

        except TypeError as e:
            if mylar.CONFIG.DESTINATION_DIR is None:
                logger.error('[ERROR] %s' % e)
                logger.error('No Comic Location specified. This NEEDS to be set before anything can be added successfully.')
                return
            else:
                logger.error('[ERROR] %s' % e)
                return
        except Exception as e:
            logger.error('[ERROR] %s' % e)
            logger.error('Cannot determine Comic Location path properly. Check your Comic Location and Folder Format for any errors.')
            return

        if mylar.CONFIG.DESTINATION_DIR == "":
            logger.error('There is no Comic Location Path specified - please specify one in Config/Web Interface.')
            return

        #enforce proper slashes here..
        cnt1 = comlocation.count('\\')
        cnt2 = comlocation.count('/')
        if cnt1 > cnt2 and '/' in chunk_folder_format:
            comlocation = comlocation.replace('/', '\\')

        if mylar.CONFIG.REPLACE_SPACES:
            #mylar.CONFIG.REPLACE_CHAR ...determines what to replace spaces with underscore or dot
            comlocation = comlocation.replace(' ', mylar.CONFIG.REPLACE_CHAR)

        return comlocation
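A minimal usage sketch for this folder_create variant. Hedged: the object construction follows the comment at the top of the method; the folder format, year and REPLACE_SPACES=off are illustrative values only.

    # hypothetical illustration only - construction and config values are assumed
    fh = FileHandlers(comic={'ComicPublisher': 'DC Comics', 'Corrected_Type': None,
                             'Type': 'Print', 'ComicYear': '2016',
                             'ComicName': 'Batman', 'ComicVersion': 'v3'})
    comlocation = fh.folder_create(booktype='Print')
    # with FOLDER_FORMAT '$Publisher/$Series-($Year)' this comes back roughly as
    # os.path.join(DESTINATION_DIR, 'DC Comics/Batman-(2016)')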
Example #3
    def folder_create(self, booktype=None, update_loc=None, secondary=None, imprint=None):
        # a dictionary called comic needs to be passed in with
        #  {'ComicPublisher', 'Corrected_Type', 'Type', 'ComicYear', 'ComicName', 'ComicVersion'}
        # or pass in comicid value from __init__

        # setup default location here
        if update_loc is not None:
            comic_location = update_loc['temppath']
            enforce_format = update_loc['tempff']
            folder_format = update_loc['tempformat']
            comicid = update_loc['comicid']
        else:
            comic_location = mylar.CONFIG.DESTINATION_DIR
            enforce_format = False
            folder_format = mylar.CONFIG.FOLDER_FORMAT

        if folder_format is None:
            folder_format = '$Series ($Year)'

        publisher = re.sub('!', '', self.comic['ComicPublisher']) # thanks Boom!
        publisher = helpers.filesafe(publisher)

        if mylar.OS_DETECT == 'Windows':
            if '/' in folder_format:
                folder_format = folder_format.replace('/', '\\').strip()
        else:
            if '\\' in folder_format:
                folder_format = folder_format.replace('\\', '/').strip()

        if publisher is not None:
            if publisher.endswith('.'):
                publisher = publisher[:-1]

        u_comicnm = self.comic['ComicName']
        # let's remove the non-standard characters here that will break filenaming / searching.
        comicname_filesafe = helpers.filesafe(u_comicnm)
        comicdir = comicname_filesafe

        series = comicdir
        if any([series.endswith('.'), series.endswith('..'), series.endswith('...'), series.endswith('....')]):
            if series.endswith('....'):
                series = series[:-4]
            elif series.endswith('...'):
                series = series[:-3]
            elif series.endswith('..'):
                series = series[:-2]
            elif series.endswith('.'):
                series = series[:-1]

        if booktype is not None:
            if self.comic['Corrected_Type'] is not None:
                if self.comic['Corrected_Type'] != booktype:
                    booktype = booktype
                else:
                    booktype = self.comic['Corrected_Type']
            else:
                booktype = booktype
        else:
            booktype = self.comic['Type']

        if any([booktype is None, booktype == 'None', booktype == 'Print']) or all([booktype != 'Print', mylar.CONFIG.FORMAT_BOOKTYPE is False]):
            chunk_fb = re.sub('\$Type', '', folder_format)
            chunk_b = re.compile(r'\s+')
            chunk_folder_format = chunk_b.sub(' ', chunk_fb)
            if booktype != 'Print':
                booktype = 'None'
        else:
            chunk_folder_format = folder_format

        if self.comic['ComicVersion'] is None:
            comicVol = 'None'
        else:
            if booktype != 'Print':
                comicVol = self.comic['ComicVersion']
            else:
                comicVol = self.comic['ComicVersion']
            if comicVol is None:
                comicVol = 'None'

        #if comversion is None, remove it so it doesn't populate with 'None'
        if comicVol == 'None':
            chunk_f_f = re.sub('\$VolumeN', '', chunk_folder_format)
            chunk_f = re.compile(r'\s+')
            chunk_folder_format = chunk_f.sub(' ', chunk_f_f)

        if any([imprint is None, imprint == 'None']):
            imprint = self.comic['PublisherImprint']
        if any([imprint is None, imprint == 'None']):
            chunk_f_f = re.sub('\$Imprint', '', chunk_folder_format)
            chunk_f = re.compile(r'\s+')
            chunk_folder_format = chunk_f.sub(' ', chunk_f_f)

        chunk_folder_format = re.sub("[()|[]]", '', chunk_folder_format).strip()
        ccf = chunk_folder_format.find('/ ')
        if ccf != -1:
            chunk_folder_format = chunk_folder_format[:ccf+1] + chunk_folder_format[ccf+2:]
        ccf = chunk_folder_format.find('\ ')
        if ccf != -1:
            chunk_folder_format = chunk_folder_format[:ccf+1] + chunk_folder_format[ccf+2:]
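        # For illustration: a folder format like '$Publisher/$Imprint $Series' with no imprint is left
        # as '$Publisher/ $Series' after the substitution above; the two find/slice passes collapse the
        # orphaned '/ ' (or '\ ') back into a single separator.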

        chunk_folder_format = re.sub(r'\s+', ' ', chunk_folder_format)

        # if the path contains // in linux it will incorrectly parse things out.
        #logger.fdebug('newPath: %s' % re.sub('//', '/', chunk_folder_format).strip())

        #do work to generate folder path
        values = {'$Series':        series,
                  '$Publisher':     publisher,
                  '$Imprint':       imprint,
                  '$Year':          self.comic['ComicYear'],
                  '$series':        series.lower(),
                  '$publisher':     publisher.lower(),
                  '$VolumeY':       'V' + self.comic['ComicYear'],
                  '$VolumeN':       comicVol.upper(),
                  '$Type':          booktype
                  }

        if update_loc is not None:
            #set the paths here with the separator removed allowing for cross-platform altering.
            ccdir = pathlib.PurePath(comic_location)
            ddir = pathlib.PurePath(mylar.CONFIG.DESTINATION_DIR)
            dlc = pathlib.PurePath(self.comic['ComicLocation'])
            path_convert = True
            i = 0
            bb = []
            while i < len(dlc.parts):
                try:
                    if dlc.parts[i] == ddir.parts[i]:
                        i+=1
                        continue
                    else:
                        bb.append(dlc.parts[i])
                        i+=1 #print('d.parts: %s' % ccdir.parts[i])
                except IndexError:
                    bb.append(dlc.parts[i])
                    i+=1
            bb_tuple = pathlib.PurePath(os.path.sep.join(bb))
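            # For illustration: with DESTINATION_DIR '/mnt/comics' and an existing ComicLocation of
            # '/mnt/comics/DC Comics/Batman (2016)', the loop above keeps only the non-shared parts,
            # so bb == ['DC Comics', 'Batman (2016)'] and bb_tuple == PurePath('DC Comics/Batman (2016)').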
            try:
                com_base = pathlib.PurePath(dlc).relative_to(ddir)
            except ValueError as e:
                #if the original path is not located in the same path as the ComicLocation (destination_dir).
                #this can happen when manually altered to a new path, or thru various changes to the ComicLocation path over time.
                #ie. ValueError: '/mnt/Comics/Death of Wolverine The Logan Legacy-(2014)' does not start with '/mnt/mediavg/Comics/Comics-2'
                dir_fix = []
                dir_parts = pathlib.PurePath(dlc).parts
                for dp in dir_parts:
                    try:
                        if self.comic['ComicYear'] is not None:
                            if self.comic['ComicYear'] in dp:
                                break
                        if self.comic['ComicName'] is not None:
                            if self.comic['ComicName'] in dp:
                                break
                        if self.comic['ComicPublisher'] is not None:
                            if self.comic['ComicPublisher'] in dp:
                                break
                        if self.comic['PublisherImprint'] is not None:
                            if self.comic['PublisherImprint'] in dp:
                                break
                        if self.comic['ComicVersion'] is not None:
                            if self.comic['ComicVersion'] in dp:
                                break
                        dir_fix.append(dp)
                    except:
                        pass

                if len(dir_fix) > 0:
                    spath = ''
                    t=0
                    while (t < len(dir_parts)):
                        newpath = os.path.join(spath, dir_parts[t])
                        t+=1
                    com_base = newpath
                    #path_convert = False
            #print('com_base: %s' % com_base)
            #detect comiclocation path based on OS so that the path separators are correct
            #have to figure out how to determine OS of original location...
            if mylar.OS_DETECT == 'Windows':
                p_path = pathlib.PureWindowsPath(ccdir)
            else:
                p_path = pathlib.PurePosixPath(ccdir)
            if enforce_format is True:
                first = helpers.replace_all(chunk_folder_format, values)
                if mylar.CONFIG.REPLACE_SPACES:
                    #mylar.CONFIG.REPLACE_CHAR ...determines what to replace spaces with underscore or dot
                    first = first.replace(' ', mylar.CONFIG.REPLACE_CHAR)
                comlocation = str(p_path.joinpath(first))
            else:
                comlocation = str(p_path.joinpath(com_base))

            return {'comlocation':  comlocation,
                    'path_convert': path_convert,
                    'comicid':      comicid}
        else:
            if secondary is not None:
                ppath = secondary
            else:
                ppath = mylar.CONFIG.DESTINATION_DIR

            ddir = pathlib.PurePath(ppath)
            i = 0
            bb = []
            while i < len(ddir.parts):
                try:
                    bb.append(ddir.parts[i])
                    i+=1
                except IndexError:
                    break

            bb2 = bb[0]
            bb.pop(0)
            bb_tuple = pathlib.PurePath(os.path.sep.join(bb))
            #logger.fdebug('bb_tuple: %s' % bb_tuple)
            if mylar.OS_DETECT == 'Windows':
                p_path = pathlib.PureWindowsPath(pathlib.PurePath(bb2).joinpath(bb_tuple))
            else:
                p_path = pathlib.PurePosixPath(pathlib.PurePath(bb2).joinpath(bb_tuple))

            #logger.fdebug('p_path: %s' % p_path)

            first = helpers.replace_all(chunk_folder_format, values)
            #logger.fdebug('first-1: %s' % first)

            if mylar.CONFIG.REPLACE_SPACES:
                first = first.replace(' ', mylar.CONFIG.REPLACE_CHAR)
            #logger.fdebug('first-2: %s' % first)
            comlocation = str(p_path.joinpath(first))
            com_parentdir = str(p_path.joinpath(first).parent)
            #logger.fdebug('comlocation: %s' % comlocation)

            #try:
            #    if folder_format == '':
            #        #comlocation = pathlib.PurePath(comiclocation).joinpath(comicdir, '(%s)') % comic['SeriesYear']
            #        comlocation = os.path.join(comic_location, comicdir, " (" + comic['SeriesYear'] + ")")
            #    else:
            #except TypeError as e:
            #    if comic_location is None:
            #        logger.error('[ERROR] %s' % e)
            #        logger.error('No Comic Location specified. This NEEDS to be set before anything can be added successfully.')
            #        return
            #    else:
            #        logger.error('[ERROR] %s' % e)
            #        return
            #except Exception as e:
            #    logger.error('[ERROR] %s' % e)
            #    logger.error('Cannot determine Comic Location path properly. Check your Comic Location and Folder Format for any errors.')
            #    return

            if comlocation == "":
                logger.error('There is no Comic Location Path specified - please specify one in Config/Web Interface.')
                return

            return {'comlocation': comlocation,
                    'subpath':     bb_tuple,
                    'com_parentdir': com_parentdir}
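A minimal calling sketch for the update path of this newer variant. Hedged: only the update_loc keys read at the top of the method and the returned dictionary are taken from the code; the object and the values are illustrative.

    # hypothetical illustration only - fh construction is assumed as in the earlier sketches
    res = fh.folder_create(booktype='Print',
                           update_loc={'temppath': '/mnt/newcomics',        # new base comic location
                                       'tempff': True,                      # enforce the folder format
                                       'tempformat': '$Publisher/$Series ($Year)',
                                       'comicid': '12345'})
    # res -> {'comlocation': <rebuilt path>, 'path_convert': True, 'comicid': '12345'}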
Example #4
    def folder_create(self, booktype=None):
        # a dictionary called comic needs to be passed in with {'ComicPublisher', 'Corrected_Type', 'Type', 'ComicYear', 'ComicName', 'ComicVersion'}
        # or pass in comicid value from __init__

        # setup default location here
        u_comicnm = self.comic['ComicName']
        # let's remove the non-standard characters here that will break filenaming / searching.
        comicname_filesafe = helpers.filesafe(u_comicnm)
        comicdir = comicname_filesafe

        series = comicdir
        if series[-1:] == '.':
            series = series[:-1]

        publisher = re.sub('!', '', self.comic['ComicPublisher']) # thanks Boom!
        publisher = helpers.filesafe(publisher)

        if booktype is not None:
            if self.comic['Corrected_Type'] is not None:
                booktype = self.comic['Corrected_Type']
            else:
                booktype = booktype
        else:
            booktype = self.comic['Type']

        if any([booktype is None, booktype == 'None', booktype == 'Print']) or all([booktype != 'Print', mylar.CONFIG.FORMAT_BOOKTYPE is False]):
            chunk_fb = re.sub('\$Type', '', mylar.CONFIG.FOLDER_FORMAT)
            chunk_b = re.compile(r'\s+')
            chunk_folder_format = chunk_b.sub(' ', chunk_fb)
        else:
            chunk_folder_format = mylar.CONFIG.FOLDER_FORMAT

        if any([self.comic['ComicVersion'] is None, booktype != 'Print']):
            comicVol = 'None'
        else:
            comicVol = self.comic['ComicVersion']

        #if comversion is None, remove it so it doesn't populate with 'None'
        if comicVol == 'None':
            chunk_f_f = re.sub('\$VolumeN', '', chunk_folder_format)
            chunk_f = re.compile(r'\s+')
            chunk_folder_format = chunk_f.sub(' ', chunk_f_f)
            logger.fdebug('No version # found for series, removing from folder format')
            logger.fdebug("new folder format: " + str(chunk_folder_format))

        #do work to generate folder path
        values = {'$Series':        series,
                  '$Publisher':     publisher,
                  '$Year':          self.comic['ComicYear'],
                  '$series':        series.lower(),
                  '$publisher':     publisher.lower(),
                  '$VolumeY':       'V' + self.comic['ComicYear'],
                  '$VolumeN':       comicVol.upper(),
                  '$Annual':        'Annual',
                  '$Type':          booktype
                  }
        try:
            if mylar.CONFIG.FOLDER_FORMAT == '':
                comlocation = os.path.join(mylar.CONFIG.DESTINATION_DIR, comicdir, " (" + self.comic['ComicYear'] + ")")
            else:
                chunk_folder_format = re.sub('[()|[]]', '', chunk_folder_format).strip()
                comlocation = os.path.join(mylar.CONFIG.DESTINATION_DIR, helpers.replace_all(chunk_folder_format, values))

        except Exception as e:
            if isinstance(e, TypeError):
                if mylar.CONFIG.DESTINATION_DIR is None:
                    logger.error('[ERROR] %s' % e)
                    logger.error('No Comic Location specified. This NEEDS to be set before anything can be added successfully.')
                    return
            logger.error('[ERROR] %s' % e)
            logger.error('Cannot determine Comic Location path properly. Check your Comic Location and Folder Format for any errors.')
            return

        if mylar.CONFIG.DESTINATION_DIR == "":
            logger.error('There is no Comic Location Path specified - please specify one in Config/Web Interface.')
            return

        #enforce proper slashes here..
        cnt1 = comlocation.count('\\')
        cnt2 = comlocation.count('/')
        if cnt1 > cnt2 and '/' in chunk_folder_format:
            comlocation = comlocation.replace('/', '\\')

        if mylar.CONFIG.REPLACE_SPACES:
            #mylar.CONFIG.REPLACE_CHAR ...determines what to replace spaces with underscore or dot
            comlocation = comlocation.replace(' ', mylar.CONFIG.REPLACE_CHAR)

        return comlocation
Example #5
def initialize():

    with INIT_LOCK:
    
        global __INITIALIZED__, FULL_PATH, PROG_DIR, VERBOSE, DAEMON, COMICSORT, DATA_DIR, CONFIG_FILE, CFG, CONFIG_VERSION, LOG_DIR, CACHE_DIR, LOGVERBOSE, OLDCONFIG_VERSION, \
                HTTP_PORT, HTTP_HOST, HTTP_USERNAME, HTTP_PASSWORD, HTTP_ROOT, LAUNCH_BROWSER, GIT_PATH, \
                CURRENT_VERSION, LATEST_VERSION, CHECK_GITHUB, CHECK_GITHUB_ON_STARTUP, CHECK_GITHUB_INTERVAL, USER_AGENT, DESTINATION_DIR, \
                DOWNLOAD_DIR, USENET_RETENTION, SEARCH_INTERVAL, NZB_STARTUP_SEARCH, INTERFACE, AUTOWANT_ALL, AUTOWANT_UPCOMING, ZERO_LEVEL, ZERO_LEVEL_N, COMIC_COVER_LOCAL, HIGHCOUNT, \
                LIBRARYSCAN, LIBRARYSCAN_INTERVAL, DOWNLOAD_SCAN_INTERVAL, USE_SABNZBD, SAB_HOST, SAB_USERNAME, SAB_PASSWORD, SAB_APIKEY, SAB_CATEGORY, SAB_PRIORITY, SAB_DIRECTORY, BLACKHOLE, BLACKHOLE_DIR, ADD_COMICS, COMIC_DIR, IMP_MOVE, IMP_RENAME, IMP_METADATA, \
                USE_NZBGET, NZBGET_HOST, NZBGET_PORT, NZBGET_USERNAME, NZBGET_PASSWORD, NZBGET_CATEGORY, NZBGET_PRIORITY, NZBSU, NZBSU_APIKEY, DOGNZB, DOGNZB_APIKEY, NZBX,\
                NEWZNAB, NEWZNAB_HOST, NEWZNAB_APIKEY, NEWZNAB_ENABLED, EXTRA_NEWZNABS, NEWZNAB_EXTRA, \
                RAW, RAW_PROVIDER, RAW_USERNAME, RAW_PASSWORD, RAW_GROUPS, EXPERIMENTAL, \
                PROWL_ENABLED, PROWL_PRIORITY, PROWL_KEYS, PROWL_ONSNATCH, NMA_ENABLED, NMA_APIKEY, NMA_PRIORITY, NMA_ONSNATCH, PUSHOVER_ENABLED, PUSHOVER_PRIORITY, PUSHOVER_APIKEY, PUSHOVER_USERKEY, PUSHOVER_ONSNATCH, LOCMOVE, NEWCOM_DIR, FFTONEWCOM_DIR, \
                PREFERRED_QUALITY, MOVE_FILES, RENAME_FILES, LOWERCASE_FILENAMES, USE_MINSIZE, MINSIZE, USE_MAXSIZE, MAXSIZE, CORRECT_METADATA, FOLDER_FORMAT, FILE_FORMAT, REPLACE_CHAR, REPLACE_SPACES, ADD_TO_CSV, CVINFO, LOG_LEVEL, POST_PROCESSING, SEARCH_DELAY, GRABBAG_DIR, READ2FILENAME, STORYARCDIR, CVURL, CVAPIFIX, \
                COMIC_LOCATION, QUAL_ALTVERS, QUAL_SCANNER, QUAL_TYPE, QUAL_QUALITY, ENABLE_EXTRA_SCRIPTS, EXTRA_SCRIPTS, ENABLE_PRE_SCRIPTS, PRE_SCRIPTS, PULLNEW, COUNT_ISSUES, COUNT_HAVES, COUNT_COMICS, SYNO_FIX, CHMOD_FILE, CHMOD_DIR, ANNUALS_ON, CV_ONLY, CV_ONETIMER, WEEKFOLDER
                
        if __INITIALIZED__:
            return False

        # Make sure all the config sections exist
        CheckSection('General')
        CheckSection('SABnzbd')
        CheckSection('NZBGet')
        CheckSection('NZBsu')
        CheckSection('DOGnzb')
        CheckSection('Raw')
        CheckSection('Experimental')        
        CheckSection('Newznab')
        # Set global variables based on config file or use defaults
        try:
            HTTP_PORT = check_setting_int(CFG, 'General', 'http_port', 8090)
        except:
            HTTP_PORT = 8090
            
        if HTTP_PORT < 21 or HTTP_PORT > 65535:
            HTTP_PORT = 8090
            
        CONFIG_VERSION = check_setting_str(CFG, 'General', 'config_version', '')
        HTTP_HOST = check_setting_str(CFG, 'General', 'http_host', '0.0.0.0')
        HTTP_USERNAME = check_setting_str(CFG, 'General', 'http_username', '')
        HTTP_PASSWORD = check_setting_str(CFG, 'General', 'http_password', '')
        HTTP_ROOT = check_setting_str(CFG, 'General', 'http_root', '/')
        LAUNCH_BROWSER = bool(check_setting_int(CFG, 'General', 'launch_browser', 1))
        LOGVERBOSE = bool(check_setting_int(CFG, 'General', 'logverbose', 1))
        GIT_PATH = check_setting_str(CFG, 'General', 'git_path', '')
        LOG_DIR = check_setting_str(CFG, 'General', 'log_dir', '')
        if not CACHE_DIR:
            CACHE_DIR = check_setting_str(CFG, 'General', 'cache_dir', '')
        
        CHECK_GITHUB = bool(check_setting_int(CFG, 'General', 'check_github', 1))
        CHECK_GITHUB_ON_STARTUP = bool(check_setting_int(CFG, 'General', 'check_github_on_startup', 1))
        CHECK_GITHUB_INTERVAL = check_setting_int(CFG, 'General', 'check_github_interval', 360)
        
        DESTINATION_DIR = check_setting_str(CFG, 'General', 'destination_dir', '')
        CHMOD_DIR = check_setting_str(CFG, 'General', 'chmod_dir', '0777')
        CHMOD_FILE = check_setting_str(CFG, 'General', 'chmod_file', '0660')
        USENET_RETENTION = check_setting_int(CFG, 'General', 'usenet_retention', '1500')
        
        SEARCH_INTERVAL = check_setting_int(CFG, 'General', 'search_interval', 360)
        NZB_STARTUP_SEARCH = bool(check_setting_int(CFG, 'General', 'nzb_startup_search', 0))
        LIBRARYSCAN = bool(check_setting_int(CFG, 'General', 'libraryscan', 1))
        LIBRARYSCAN_INTERVAL = check_setting_int(CFG, 'General', 'libraryscan_interval', 300)
        ADD_COMICS = bool(check_setting_int(CFG, 'General', 'add_comics', 0))
        COMIC_DIR = check_setting_str(CFG, 'General', 'comic_dir', '')
        IMP_MOVE = bool(check_setting_int(CFG, 'General', 'imp_move', 0))
        IMP_RENAME = bool(check_setting_int(CFG, 'General', 'imp_rename', 0))
        IMP_METADATA = bool(check_setting_int(CFG, 'General', 'imp_metadata', 0))
        DOWNLOAD_SCAN_INTERVAL = check_setting_int(CFG, 'General', 'download_scan_interval', 5)
        INTERFACE = check_setting_str(CFG, 'General', 'interface', 'default')
        AUTOWANT_ALL = bool(check_setting_int(CFG, 'General', 'autowant_all', 0))
        AUTOWANT_UPCOMING = bool(check_setting_int(CFG, 'General', 'autowant_upcoming', 1))
        COMIC_COVER_LOCAL = bool(check_setting_int(CFG, 'General', 'comic_cover_local', 0))
        PREFERRED_QUALITY = bool(check_setting_int(CFG, 'General', 'preferred_quality', 0))
        CORRECT_METADATA = bool(check_setting_int(CFG, 'General', 'correct_metadata', 0))
        MOVE_FILES = bool(check_setting_int(CFG, 'General', 'move_files', 0))
        RENAME_FILES = bool(check_setting_int(CFG, 'General', 'rename_files', 0))
        FOLDER_FORMAT = check_setting_str(CFG, 'General', 'folder_format', '$Series ($Year)')
        FILE_FORMAT = check_setting_str(CFG, 'General', 'file_format', '$Series $Issue ($Year)')
        BLACKHOLE = bool(check_setting_int(CFG, 'General', 'blackhole', 0))
        BLACKHOLE_DIR = check_setting_str(CFG, 'General', 'blackhole_dir', '')
        REPLACE_SPACES = bool(check_setting_int(CFG, 'General', 'replace_spaces', 0))
        REPLACE_CHAR = check_setting_str(CFG, 'General', 'replace_char', '')
        ZERO_LEVEL = bool(check_setting_int(CFG, 'General', 'zero_level', 0))
        ZERO_LEVEL_N = check_setting_str(CFG, 'General', 'zero_level_n', '')
        LOWERCASE_FILENAMES = bool(check_setting_int(CFG, 'General', 'lowercase_filenames', 0))
        SYNO_FIX = bool(check_setting_int(CFG, 'General', 'syno_fix', 0))
        SEARCH_DELAY = check_setting_int(CFG, 'General', 'search_delay', 1)
        GRABBAG_DIR = check_setting_str(CFG, 'General', 'grabbag_dir', '')
        if not GRABBAG_DIR:
            #default to ComicLocation
            GRABBAG_DIR = DESTINATION_DIR
        WEEKFOLDER = bool(check_setting_int(CFG, 'General', 'weekfolder', 0))
        CVAPIFIX = bool(check_setting_int(CFG, 'General', 'cvapifix', 0))
        if CVAPIFIX is None:
            CVAPIFIX = 0
        LOCMOVE = bool(check_setting_int(CFG, 'General', 'locmove', 0))
        if LOCMOVE is None:
            LOCMOVE = 0
        NEWCOM_DIR = check_setting_str(CFG, 'General', 'newcom_dir', '')
        FFTONEWCOM_DIR = bool(check_setting_int(CFG, 'General', 'fftonewcom_dir', 0))
        if FFTONEWCOM_DIR is None:
            FFTONEWCOM_DIR = 0
        HIGHCOUNT = check_setting_str(CFG, 'General', 'highcount', '')
        if not HIGHCOUNT: HIGHCOUNT = 0
        READ2FILENAME = bool(check_setting_int(CFG, 'General', 'read2filename', 0))
        STORYARCDIR = bool(check_setting_int(CFG, 'General', 'storyarcdir', 0))
        PROWL_ENABLED = bool(check_setting_int(CFG, 'Prowl', 'prowl_enabled', 0))
        PROWL_KEYS = check_setting_str(CFG, 'Prowl', 'prowl_keys', '')
        PROWL_ONSNATCH = bool(check_setting_int(CFG, 'Prowl', 'prowl_onsnatch', 0))
        PROWL_PRIORITY = check_setting_int(CFG, 'Prowl', 'prowl_priority', 0)

        NMA_ENABLED = bool(check_setting_int(CFG, 'NMA', 'nma_enabled', 0))
        NMA_APIKEY = check_setting_str(CFG, 'NMA', 'nma_apikey', '')
        NMA_PRIORITY = check_setting_int(CFG, 'NMA', 'nma_priority', 0)
        NMA_ONSNATCH = bool(check_setting_int(CFG, 'NMA', 'nma_onsnatch', 0))

        PUSHOVER_ENABLED = bool(check_setting_int(CFG, 'PUSHOVER', 'pushover_enabled', 0))
        PUSHOVER_APIKEY = check_setting_str(CFG, 'PUSHOVER', 'pushover_apikey', '')
        PUSHOVER_USERKEY = check_setting_str(CFG, 'PUSHOVER', 'pushover_userkey', '')
        PUSHOVER_PRIORITY = check_setting_int(CFG, 'PUSHOVER', 'pushover_priority', 0)
        PUSHOVER_ONSNATCH = bool(check_setting_int(CFG, 'PUSHOVER', 'pushover_onsnatch', 0))

        USE_MINSIZE = bool(check_setting_int(CFG, 'General', 'use_minsize', 0))
        MINSIZE = check_setting_str(CFG, 'General', 'minsize', '')
        USE_MAXSIZE = bool(check_setting_int(CFG, 'General', 'use_maxsize', 0))
        MAXSIZE = check_setting_str(CFG, 'General', 'maxsize', '')
        ADD_TO_CSV = bool(check_setting_int(CFG, 'General', 'add_to_csv', 1))
        CVINFO = bool(check_setting_int(CFG, 'General', 'cvinfo', 0))
        ANNUALS_ON = bool(check_setting_int(CFG, 'General', 'annuals_on', 0))
        if not ANNUALS_ON:
            #default to on
            ANNUALS_ON = 0
        CV_ONLY = bool(check_setting_int(CFG, 'General', 'cv_only', 1))
        if not CV_ONLY:
            #default to on
            CV_ONLY = 1
        CV_ONETIMER = bool(check_setting_int(CFG, 'General', 'cv_onetimer', 1))
        if not CV_ONETIMER:
            CV_ONETIMER = 1
        LOG_LEVEL = check_setting_str(CFG, 'General', 'log_level', '')
        ENABLE_EXTRA_SCRIPTS = bool(check_setting_int(CFG, 'General', 'enable_extra_scripts', 0))
        EXTRA_SCRIPTS = check_setting_str(CFG, 'General', 'extra_scripts', '')

        ENABLE_PRE_SCRIPTS = bool(check_setting_int(CFG, 'General', 'enable_pre_scripts', 0))
        PRE_SCRIPTS = check_setting_str(CFG, 'General', 'pre_scripts', '')
        POST_PROCESSING = bool(check_setting_int(CFG, 'General', 'post_processing', 1))

        USE_SABNZBD = bool(check_setting_int(CFG, 'SABnzbd', 'use_sabnzbd', 0))
        SAB_HOST = check_setting_str(CFG, 'SABnzbd', 'sab_host', '')
        SAB_USERNAME = check_setting_str(CFG, 'SABnzbd', 'sab_username', '')
        SAB_PASSWORD = check_setting_str(CFG, 'SABnzbd', 'sab_password', '')
        SAB_APIKEY = check_setting_str(CFG, 'SABnzbd', 'sab_apikey', '')
        SAB_CATEGORY = check_setting_str(CFG, 'SABnzbd', 'sab_category', '')
        SAB_DIRECTORY = check_setting_str(CFG, 'SABnzbd', 'sab_directory', '')
        SAB_PRIORITY = check_setting_str(CFG, 'SABnzbd', 'sab_priority', '')
        if SAB_PRIORITY.isdigit():
            if SAB_PRIORITY == "0": SAB_PRIORITY = "Default"
            elif SAB_PRIORITY == "1": SAB_PRIORITY = "Low"
            elif SAB_PRIORITY == "2": SAB_PRIORITY = "Normal"
            elif SAB_PRIORITY == "3": SAB_PRIORITY = "High"
            elif SAB_PRIORITY == "4": SAB_PRIORITY = "Paused"
            else: SAB_PRIORITY = "Default"

        USE_NZBGET = bool(check_setting_int(CFG, 'NZBGet', 'use_nzbget', 0))
        NZBGET_HOST = check_setting_str(CFG, 'NZBGet', 'nzbget_host', '')
        NZBGET_PORT = check_setting_str(CFG, 'NZBGet', 'nzbget_port', '')
        NZBGET_USERNAME = check_setting_str(CFG, 'NZBGet', 'nzbget_username', '')
        NZBGET_PASSWORD = check_setting_str(CFG, 'NZBGet', 'nzbget_password', '')
        NZBGET_CATEGORY = check_setting_str(CFG, 'NZBGet', 'nzbget_category', '')
        NZBGET_PRIORITY = check_setting_str(CFG, 'NZBGet', 'nzbget_priority', '')

        NZBSU = bool(check_setting_int(CFG, 'NZBsu', 'nzbsu', 0))
        NZBSU_APIKEY = check_setting_str(CFG, 'NZBsu', 'nzbsu_apikey', '')

        DOGNZB = bool(check_setting_int(CFG, 'DOGnzb', 'dognzb', 0))
        DOGNZB_APIKEY = check_setting_str(CFG, 'DOGnzb', 'dognzb_apikey', '')

        NZBX = bool(check_setting_int(CFG, 'nzbx', 'nzbx', 0))

        RAW = bool(check_setting_int(CFG, 'Raw', 'raw', 0))
        RAW_PROVIDER = check_setting_str(CFG, 'Raw', 'raw_provider', '')
        RAW_USERNAME = check_setting_str(CFG, 'Raw', 'raw_username', '')
        RAW_PASSWORD  = check_setting_str(CFG, 'Raw', 'raw_password', '')
        RAW_GROUPS = check_setting_str(CFG, 'Raw', 'raw_groups', '')

        EXPERIMENTAL = bool(check_setting_int(CFG, 'Experimental', 'experimental', 0))

        NEWZNAB = bool(check_setting_int(CFG, 'Newznab', 'newznab', 0))

        if CONFIG_VERSION:
            NEWZNAB_HOST = check_setting_str(CFG, 'Newznab', 'newznab_host', '')
            NEWZNAB_APIKEY = check_setting_str(CFG, 'Newznab', 'newznab_apikey', '')
            NEWZNAB_ENABLED = bool(check_setting_int(CFG, 'Newznab', 'newznab_enabled', 1))

        # Need to pack the extra newznabs back into a list of tuples
        flattened_newznabs = check_setting_str(CFG, 'Newznab', 'extra_newznabs', [], log=False)
        EXTRA_NEWZNABS = list(itertools.izip(*[itertools.islice(flattened_newznabs, i, None, 3) for i in range(3)]))
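        # For illustration (assumed data shape): with
        #   flattened_newznabs = ['host1', 'key1', '1', 'host2', 'key2', '0']
        # the islice/izip grouping above yields
        #   EXTRA_NEWZNABS == [('host1', 'key1', '1'), ('host2', 'key2', '0')]
        # i.e. one (host, apikey, enabled) tuple per extra newznab provider.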

        #to counteract the loss of the 1st newznab entry because of a switch, let's rewrite to the tuple
        if NEWZNAB_HOST and CONFIG_VERSION:
            EXTRA_NEWZNABS.append((NEWZNAB_HOST, NEWZNAB_APIKEY, int(NEWZNAB_ENABLED)))
            # Need to rewrite config here and bump up config version
            CONFIG_VERSION = '3'
            config_write()        
         
        # update folder formats in the config & bump up config version
        if CONFIG_VERSION == '0':
            from mylar.helpers import replace_all
            file_values = { 'issue':  'Issue', 'title': 'Title', 'series' : 'Series', 'year' : 'Year' }
            folder_values = { 'series' : 'Series', 'publisher':'Publisher', 'year' : 'Year', 'first' : 'First', 'lowerfirst' : 'first' }
            FILE_FORMAT = replace_all(FILE_FORMAT, file_values)
            FOLDER_FORMAT = replace_all(FOLDER_FORMAT, folder_values)
            
            CONFIG_VERSION = '1'
            
        if CONFIG_VERSION == '1':

            from mylar.helpers import replace_all

            file_values = { 'Issue':        '$Issue',
                            'Title':        '$Title',
                            'Series':       '$Series',
                            'Year':         '$Year',
                            'title':        '$title',
                            'series':       '$series',
                            'year':         '$year'
                            }
            folder_values = {   'Series':       '$Series',
                                'Publisher':    '$Publisher',
                                'Year':         '$Year',
                                'First':        '$First',
                                'series':       '$series',
                                'publisher':    '$publisher',
                                'year':         '$year',
                                'first':        '$first'
                            }   
            FILE_FORMAT = replace_all(FILE_FORMAT, file_values)
            FOLDER_FORMAT = replace_all(FOLDER_FORMAT, folder_values)
            
            CONFIG_VERSION = '2'
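            # Worked example of the two migrations above (illustrative format strings):
            #   v0 -> v1: 'series issue (year)'       -> 'Series Issue (Year)'
            #   v1 -> v2: 'Series Issue (Year)'       -> '$Series $Issue ($Year)'
            #             'Publisher/Series (Year)'   -> '$Publisher/$Series ($Year)'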

        if 'http://' not in SAB_HOST[:7] and 'https://' not in SAB_HOST[:8]:
            SAB_HOST = 'http://' + SAB_HOST
            #print ("SAB_HOST:" + SAB_HOST)

        if not LOG_DIR:
            LOG_DIR = os.path.join(DATA_DIR, 'logs')

        if not os.path.exists(LOG_DIR):
            try:
                os.makedirs(LOG_DIR)
            except OSError:
                if VERBOSE:
                    print 'Unable to create the log directory. Logging to screen only.'

        # Start the logger, silence console logging if we need to
        logger.mylar_log.initLogger(verbose=VERBOSE)

        # Put the cache dir in the data dir for now
        if not CACHE_DIR:
            CACHE_DIR = os.path.join(str(DATA_DIR), 'cache')
        #logger.info("cache set to : " + str(CACHE_DIR))
        if not os.path.exists(CACHE_DIR):
            try:
                os.makedirs(CACHE_DIR)
            except OSError:
                logger.error('Could not create cache dir. Check permissions of datadir: ' + DATA_DIR)

        # Sanity check for search interval. Set it to at least 6 hours
        if SEARCH_INTERVAL < 360:
            logger.info("Search interval too low. Resetting to 6 hour minimum")
            SEARCH_INTERVAL = 360


        # Initialize the database
        logger.info('Checking to see if the database has all tables....')
        try:
            dbcheck()
        except Exception, e:
            logger.error("Can't connect to the database: %s" % e)

        # With the addition of NZBGet, it's possible that both SAB and NZBget are unchecked initially.
        # let's force default SAB.
        if USE_NZBGET == 0 and USE_SABNZBD == 0 :
            logger.info("No Download Server option given - defaulting to SABnzbd.")
            USE_SABNZBD = 1

        # Get the currently installed version - returns None, 'win32' or the git hash
        # Also sets INSTALL_TYPE variable to 'win', 'git' or 'source'
        CURRENT_VERSION = versioncheck.getVersion()
        if CURRENT_VERSION is not None:
            hash = CURRENT_VERSION[:7]
        else:
            hash = "unknown"

        if version.MYLAR_VERSION == 'master':
            vers = 'M'
        else:
            vers = 'D'

        USER_AGENT = 'Mylar/'+str(hash)+'('+vers+') +http://www.github.com/evilhero/mylar/'

        # Check for new versions
        if CHECK_GITHUB_ON_STARTUP:
            try:
                LATEST_VERSION = versioncheck.checkGithub()
            except:
                LATEST_VERSION = CURRENT_VERSION
        else:
            LATEST_VERSION = CURRENT_VERSION

        #check for syno_fix here
        if SYNO_FIX:
            parsepath = os.path.join(DATA_DIR, 'bs4', 'builder', '_lxml.py')
            if os.path.isfile(parsepath):
                print ("found bs4...renaming appropriate file.")
                src = os.path.join(parsepath)
                dst = os.path.join(DATA_DIR, 'bs4', 'builder', 'lxml.py')
                try:
                    shutil.move(src, dst)
                except (OSError, IOError):
                    logger.error("Unable to rename file...shutdown Mylar and go to " + src.encode('utf-8') + " and rename the _lxml.py file to lxml.py")
                    logger.error("NOT doing this will result in errors when adding / refreshing a series")
            else:
                logger.info("Synology Parsing Fix already implemented. No changes required at this time.")

        #CV sometimes points to the incorrect DNS - here's the fix.
        if CVAPIFIX == 1:
            CVURL = 'http://beta.comicvine.com/api/'
            logger.info("CVAPIFIX enabled: ComicVine set to beta API site")
        else:
            CVURL = 'http://api.comicvine.com/'
            logger.info("CVAPIFIX disabled: Comicvine set to normal API site")

        if LOCMOVE:
            helpers.updateComicLocation()

        #Ordering comics here
        logger.info("Remapping the sorting to allow for new additions.")
        COMICSORT = helpers.ComicSort(sequence='startup')
                                    
        __INITIALIZED__ = True
        return True
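The config accessors used throughout the block above (check_setting_str / check_setting_int) are defined elsewhere in mylar and are not part of this excerpt. A minimal sketch of the pattern, assuming CFG behaves like a ConfigObj-style nested mapping (the real helpers may differ), could look like:

def check_setting_int(config, section, key, default):
    # Read an int setting; fall back to the default when it is missing or invalid.
    try:
        return int(config[section][key])
    except (KeyError, ValueError, TypeError):
        return default

def check_setting_str(config, section, key, default, log=True):
    # Read a string setting; fall back to the default when it is missing.
    try:
        return str(config[section][key])
    except (KeyError, TypeError):
        return default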
Exemple #6
0
    def Process_next(self, comicid, issueid, issuenumOG, ml=None):
        annchk = "no"
        extensions = ('.cbr', '.cbz')
        myDB = db.DBConnection()
        comicnzb = myDB.action("SELECT * from comics WHERE comicid=?",
                               [comicid]).fetchone()
        issuenzb = myDB.action(
            "SELECT * from issues WHERE issueid=? AND comicid=? AND ComicName NOT NULL",
            [issueid, comicid]).fetchone()
        logger.fdebug('issueid: ' + str(issueid))
        logger.fdebug('issuenumOG: ' + str(issuenumOG))
        if issuenzb is None:
            issuenzb = myDB.action(
                "SELECT * from annuals WHERE issueid=? and comicid=?",
                [issueid, comicid]).fetchone()
            annchk = "yes"
        #issueno = str(issuenum).split('.')[0]
        #new CV API - removed all decimals...here we go AGAIN!
        issuenum = issuenzb['Issue_Number']
        issue_except = 'None'

        if 'au' in issuenum.lower() and issuenum[:1].isdigit():
            issuenum = re.sub("[^0-9]", "", issuenum)
            issue_except = ' AU'
        elif 'ai' in issuenum.lower() and issuenum[:1].isdigit():
            issuenum = re.sub("[^0-9]", "", issuenum)
            issue_except = ' AI'
        elif 'inh' in issuenum.lower() and issuenum[:1].isdigit():
            issuenum = re.sub("[^0-9]", "", issuenum)
            issue_except = '.INH'
        elif 'now' in issuenum.lower() and issuenum[:1].isdigit():
            if '!' in issuenum: issuenum = re.sub('\!', '', issuenum)
            issuenum = re.sub("[^0-9]", "", issuenum)
            issue_except = '.NOW'
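        # Illustrative results of the suffix handling above (made-up issue numbers):
        #   '27AU'   -> issuenum '27', issue_except ' AU'
        #   '16.INH' -> issuenum '16', issue_except '.INH'
        #   '1.NOW!' -> issuenum '1',  issue_except '.NOW'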

        if '.' in issuenum:
            iss_find = issuenum.find('.')
            iss_b4dec = issuenum[:iss_find]
            iss_decval = issuenum[iss_find + 1:]
            if int(iss_decval) == 0:
                iss = iss_b4dec
                issdec = int(iss_decval)
                issueno = str(iss)
                self._log("Issue Number: " + str(issueno), logger.DEBUG)
                logger.fdebug("Issue Number: " + str(issueno))
            else:
                if len(iss_decval) == 1:
                    iss = iss_b4dec + "." + iss_decval
                    issdec = int(iss_decval) * 10
                else:
                    iss = iss_b4dec + "." + iss_decval.rstrip('0')
                    issdec = int(iss_decval.rstrip('0')) * 10
                issueno = iss_b4dec
                self._log("Issue Number: " + str(iss), logger.DEBUG)
                logger.fdebug("Issue Number: " + str(iss))
        else:
            iss = issuenum
            issueno = str(iss)
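        # Worked examples for the decimal handling above (illustrative only):
        #   '19.1'  -> iss '19.1', issueno '19', issdec 10
        #   '19.10' -> iss '19.1', issueno '19', issdec 10
        #   '19.0'  -> iss '19',   issueno '19', issdec 0
        #   '19'    -> iss '19',   issueno '19'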

        # issue zero-suppression here
        if mylar.ZERO_LEVEL == "0":
            zeroadd = ""
        else:
            if mylar.ZERO_LEVEL_N == "none": zeroadd = ""
            elif mylar.ZERO_LEVEL_N == "0x": zeroadd = "0"
            elif mylar.ZERO_LEVEL_N == "00x": zeroadd = "00"

        logger.fdebug("Zero Suppression set to : " + str(mylar.ZERO_LEVEL_N))

        if str(len(issueno)) > 1:
            if int(issueno) < 10:
                self._log("issue detected less than 10", logger.DEBUG)
                if '.' in iss:
                    if int(iss_decval) > 0:
                        issueno = str(iss)
                        prettycomiss = str(zeroadd) + str(iss)
                    else:
                        prettycomiss = str(zeroadd) + str(int(issueno))
                else:
                    prettycomiss = str(zeroadd) + str(iss)
                if issue_except != 'None':
                    prettycomiss = str(prettycomiss) + issue_except
                self._log(
                    "Zero level supplement set to " + str(mylar.ZERO_LEVEL_N) +
                    ". Issue will be set as : " + str(prettycomiss),
                    logger.DEBUG)
            elif int(issueno) >= 10 and int(issueno) < 100:
                self._log("issue detected greater than 10, but less than 100",
                          logger.DEBUG)
                if mylar.ZERO_LEVEL_N == "none":
                    zeroadd = ""
                else:
                    zeroadd = "0"
                if '.' in iss:
                    if int(iss_decval) > 0:
                        issueno = str(iss)
                        prettycomiss = str(zeroadd) + str(iss)
                    else:
                        prettycomiss = str(zeroadd) + str(int(issueno))
                else:
                    prettycomiss = str(zeroadd) + str(iss)
                if issue_except != 'None':
                    prettycomiss = str(prettycomiss) + issue_except
                self._log(
                    "Zero level supplement set to " + str(mylar.ZERO_LEVEL_N) +
                    ".Issue will be set as : " + str(prettycomiss),
                    logger.DEBUG)
            else:
                self._log("issue detected greater than 100", logger.DEBUG)
                if '.' in iss:
                    if int(iss_decval) > 0:
                        issueno = str(iss)
                prettycomiss = str(issueno)
                if issue_except != 'None':
                    prettycomiss = str(prettycomiss) + issue_except
                self._log(
                    "Zero level supplement set to " + str(mylar.ZERO_LEVEL_N) +
                    ". Issue will be set as : " + str(prettycomiss),
                    logger.DEBUG)
        else:
            prettycomiss = str(issueno)
            self._log(
                "issue length error - cannot determine length. Defaulting to None:  "
                + str(prettycomiss), logger.DEBUG)
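        # Note: str(len(issueno)) > 1 compares a string with an int, which is always
        # True under CPython 2, so every issue number takes the padding path above.
        # Illustrative padding results, assuming ZERO_LEVEL_N == '00x' (zeroadd '00'):
        #   issueno '5'   -> prettycomiss '005'
        #   issueno '52'  -> prettycomiss '052'
        #   issueno '127' -> prettycomiss '127'
        # Any issue_except suffix (' AU', '.NOW', ...) is then re-appended.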

        if annchk == "yes":
            self._log("Annual detected.")
        logger.fdebug("Pretty Comic Issue is : " + str(prettycomiss))
        issueyear = issuenzb['IssueDate'][:4]
        self._log("Issue Year: " + str(issueyear), logger.DEBUG)
        logger.fdebug("Issue Year : " + str(issueyear))
        month = issuenzb['IssueDate'][5:7].replace('-', '').strip()
        month_name = helpers.fullmonth(month)
        #            comicnzb= myDB.action("SELECT * from comics WHERE comicid=?", [comicid]).fetchone()
        publisher = comicnzb['ComicPublisher']
        self._log("Publisher: " + publisher, logger.DEBUG)
        logger.fdebug("Publisher: " + str(publisher))
        #we need to un-unicode this to make sure we can write the filenames properly for spec.chars
        series = comicnzb['ComicName'].encode('ascii', 'ignore').strip()
        self._log("Series: " + series, logger.DEBUG)
        logger.fdebug("Series: " + str(series))
        seriesyear = comicnzb['ComicYear']
        self._log("Year: " + seriesyear, logger.DEBUG)
        logger.fdebug("Year: " + str(seriesyear))
        comlocation = comicnzb['ComicLocation']
        self._log("Comic Location: " + comlocation, logger.DEBUG)
        logger.fdebug("Comic Location: " + str(comlocation))
        comversion = comicnzb['ComicVersion']
        self._log("Comic Version: " + str(comversion), logger.DEBUG)
        logger.fdebug("Comic Version: " + str(comversion))
        if comversion is None:
            comversion = 'None'
        #if comversion is None, remove it so it doesn't populate with 'None'
        if comversion == 'None':
            chunk_f_f = re.sub('\$VolumeN', '', mylar.FILE_FORMAT)
            chunk_f = re.compile(r'\s+')
            chunk_file_format = chunk_f.sub(' ', chunk_f_f)
            self._log(
                "No version # found for series - tag will not be available for renaming.",
                logger.DEBUG)
            logger.fdebug(
                "No version # found for series, removing from filename")
            logger.fdebug("new format is now: " + str(chunk_file_format))
        else:
            chunk_file_format = mylar.FILE_FORMAT

        if annchk == "no":
            chunk_f_f = re.sub('\$Annual', '', chunk_file_format)
            chunk_f = re.compile(r'\s+')
            chunk_file_format = chunk_f.sub(' ', chunk_f_f)
            logger.fdebug('not an annual - removing from filename parameters')
            logger.fdebug('new format: ' + str(chunk_file_format))

        else:
            logger.fdebug('chunk_file_format is: ' + str(chunk_file_format))
            if '$Annual' not in chunk_file_format:
                #if it's an annual, but $Annual isn't specified in file_format, we need to
                #force it in there, by default in the format of $Annual $Issue
                prettycomiss = "Annual " + str(prettycomiss)
                logger.fdebug('prettycomiss: ' + str(prettycomiss))

        ofilename = None

        #if meta-tagging is not enabled, we need to declare the check as being fail
        #if meta-tagging is enabled, it gets changed just below to a default of pass
        pcheck = "fail"

        #tag the meta.
        if mylar.ENABLE_META:
            self._log("Metatagging enabled - proceeding...")
            logger.fdebug("Metatagging enabled - proceeding...")
            pcheck = "pass"
            try:
                import cmtagmylar
                if ml is None:
                    pcheck = cmtagmylar.run(self.nzb_folder, issueid=issueid)
                else:
                    pcheck = cmtagmylar.run(self.nzb_folder,
                                            issueid=issueid,
                                            manual="yes",
                                            filename=ml['ComicLocation'])

            except ImportError:
                logger.fdebug(
                    "comictaggerlib not found on system. Ensure the ENTIRE lib directory is located within mylar/lib/comictaggerlib/"
                )
                logger.fdebug(
                    "continuing with PostProcessing, but I'm not using metadata."
                )
                pcheck = "fail"

            if pcheck == "fail":
                self._log(
                    "Unable to write metadata successfully - check mylar.log file. Attempting to continue without tagging..."
                )
                logger.fdebug(
                    "Unable to write metadata successfully - check mylar.log file. Attempting to continue without tagging..."
                )
            elif pcheck == "unrar error":
                self._log(
                    "This is a corrupt archive - whether CRC errors or it's incomplete. Marking as BAD, and retrying a different copy."
                )
                logger.error(
                    "This is a corrupt archive - whether CRC errors or it's incomplete. Marking as BAD, and retrying a different copy."
                )
                return self.log
            else:
                otofilename = pcheck
                self._log("Sucessfully wrote metadata to .cbz - Continuing..")
                logger.fdebug("Sucessfully wrote metadata to .cbz (" +
                              str(otofilename) + ") - Continuing..")
        #Run Pre-script

        if mylar.ENABLE_PRE_SCRIPTS:
            nzbn = self.nzb_name  #original nzb name
            nzbf = self.nzb_folder  #original nzb folder
            #name, comicyear, comicid , issueid, issueyear, issue, publisher
            #create the dic and send it.
            seriesmeta = []
            seriesmetadata = {}
            seriesmeta.append({
                'name': series,
                'comicyear': seriesyear,
                'comicid': comicid,
                'issueid': issueid,
                'issueyear': issueyear,
                'issue': issuenum,
                'publisher': publisher
            })
            seriesmetadata['seriesmeta'] = seriesmeta
            self._run_pre_scripts(nzbn, nzbf, seriesmetadata)

    #rename file and move to new path
    #nfilename = series + " " + issueno + " (" + seriesyear + ")"

        file_values = {
            '$Series': series,
            '$Issue': prettycomiss,
            '$Year': issueyear,
            '$series': series.lower(),
            '$Publisher': publisher,
            '$publisher': publisher.lower(),
            '$VolumeY': 'V' + str(seriesyear),
            '$VolumeN': comversion,
            '$monthname': month_name,
            '$month': month,
            '$Annual': 'Annual'
        }
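        # Illustrative substitution (values made up): with a file format of
        #   '$Series $Issue ($Year)'
        # and the tokens above, helpers.replace_all() further below would produce
        #   'Invincible 019 (2005)'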

        #if it's a Manual Run, use the ml['ComicLocation'] for the exact filename.
        if ml is None:

            for root, dirnames, filenames in os.walk(self.nzb_folder):
                for filename in filenames:
                    if filename.lower().endswith(extensions):
                        ofilename = filename
                        path, ext = os.path.splitext(ofilename)
        else:
            if pcheck == "fail":
                otofilename = ml['ComicLocation']
            logger.fdebug('otofilename:' + str(otofilename))
            odir, ofilename = os.path.split(otofilename)
            logger.fdebug('ofilename: ' + str(ofilename))
            path, ext = os.path.splitext(ofilename)
            logger.fdebug('path: ' + str(path))
            logger.fdebug('ext:' + str(ext))

        if ofilename is None:
            logger.error(
                u"Aborting PostProcessing - the filename doesn't exist in the location given. Make sure that "
                + str(self.nzb_folder) +
                " exists and is the correct location.")
            return
        self._log("Original Filename: " + ofilename, logger.DEBUG)
        self._log("Original Extension: " + ext, logger.DEBUG)
        logger.fdebug("Original Filname: " + str(ofilename))
        logger.fdebug("Original Extension: " + str(ext))

        if mylar.FILE_FORMAT == '' or not mylar.RENAME_FILES:
            self._log(
                "Rename Files isn't enabled...keeping original filename.",
                logger.DEBUG)
            logger.fdebug(
                "Rename Files isn't enabled - keeping original filename.")
            #check if extension is in nzb_name - will screw up otherwise
            if ofilename.lower().endswith(extensions):
                nfilename = ofilename[:-4]
            else:
                nfilename = ofilename
        else:
            nfilename = helpers.replace_all(chunk_file_format, file_values)
            if mylar.REPLACE_SPACES:
                #mylar.REPLACE_CHAR ...determines what to replace spaces with underscore or dot
                nfilename = nfilename.replace(' ', mylar.REPLACE_CHAR)
        nfilename = re.sub('[\,\:\?]', '', nfilename)
        nfilename = re.sub('[\/]', '-', nfilename)
        self._log("New Filename: " + nfilename, logger.DEBUG)
        logger.fdebug("New Filename: " + str(nfilename))

        src = os.path.join(self.nzb_folder, ofilename)

        filechecker.validateAndCreateDirectory(comlocation, True)

        if mylar.LOWERCASE_FILENAMES:
            dst = (comlocation + "/" + nfilename + ext).lower()
        else:
            dst = comlocation + "/" + nfilename + ext.lower()
        self._log("Source:" + src, logger.DEBUG)
        self._log("Destination:" + dst, logger.DEBUG)
        logger.fdebug("Source: " + str(src))
        logger.fdebug("Destination: " + str(dst))

        if ml is None:
            #non-manual run moving/deleting...
            logger.fdebug('self.nzb_folder: ' + self.nzb_folder)
            logger.fdebug('ofilename:' + str(ofilename))
            logger.fdebug('nfilename:' + str(nfilename + ext))
            os.rename(os.path.join(self.nzb_folder, str(ofilename)),
                      os.path.join(self.nzb_folder, str(nfilename + ext)))
            src = os.path.join(self.nzb_folder, str(nfilename + ext))
            try:
                shutil.move(src, dst)
            except (OSError, IOError):
                self._log(
                    "Failed to move directory - check directories and manually re-run.",
                    logger.DEBUG)
                self._log("Post-Processing ABORTED.", logger.DEBUG)
                return
            #tidyup old path
            try:
                shutil.rmtree(self.nzb_folder)
            except (OSError, IOError):
                self._log(
                    "Failed to remove temporary directory - check directory and manually re-run.",
                    logger.DEBUG)
                self._log("Post-Processing ABORTED.", logger.DEBUG)
                return

            self._log("Removed temporary directory : " + str(self.nzb_folder),
                      logger.DEBUG)
        else:
            #Manual Run, this is the portion.
            logger.fdebug("Renaming " +
                          os.path.join(self.nzb_folder, str(ofilename)) +
                          " ..to.. " +
                          os.path.join(self.nzb_folder, str(nfilename + ext)))
            os.rename(os.path.join(self.nzb_folder, str(ofilename)),
                      os.path.join(self.nzb_folder, str(nfilename + ext)))
            src = os.path.join(self.nzb_folder, str(nfilename + ext))
            logger.fdebug("Moving " + src + " ... to ... " + dst)
            try:
                shutil.move(src, dst)
            except (OSError, IOError):
                logger.fdebug(
                    "Failed to move directory - check directories and manually re-run."
                )
                logger.fdebug("Post-Processing ABORTED.")
                return
            logger.fdebug("Successfully moved to : " + dst)
            #tidyup old path
            #try:
            #    os.remove(os.path.join(self.nzb_folder, str(ofilename)))
            #    logger.fdebug("Deleting : " + os.path.join(self.nzb_folder, str(ofilename)))
            #except (OSError, IOError):
            #    logger.fdebug("Failed to remove temporary directory - check directory and manually re-run.")
            #    logger.fdebug("Post-Processing ABORTED.")
            #    return
            #logger.fdebug("Removed temporary directory : " + str(self.nzb_folder))

            #delete entry from nzblog table
        myDB.action('DELETE from nzblog WHERE issueid=?', [issueid])
        #update snatched table to change status to Downloaded
        if annchk == "no":
            updater.foundsearch(comicid, issueid, down='True')
            dispiss = 'issue: ' + str(issuenumOG)
        else:
            updater.foundsearch(comicid, issueid, mode='want_ann', down='True')
            dispiss = 'annual issue: ' + str(issuenumOG)

            #force rescan of files
        updater.forceRescan(comicid)
        logger.info(u"Post-Processing completed for: " + series + " " +
                    dispiss)
        self._log(u"Post Processing SUCCESSFULL! ", logger.DEBUG)

        # retrieve/create the corresponding comic objects
        if mylar.ENABLE_EXTRA_SCRIPTS:
            folderp = str(dst)  #folder location after move/rename
            nzbn = self.nzb_name  #original nzb name
            filen = str(nfilename + ext)  #new filename
            #name, comicyear, comicid , issueid, issueyear, issue, publisher
            #create the dic and send it.
            seriesmeta = []
            seriesmetadata = {}
            seriesmeta.append({
                'name': series,
                'comicyear': seriesyear,
                'comicid': comicid,
                'issueid': issueid,
                'issueyear': issueyear,
                'issue': issuenum,
                'publisher': publisher
            })
            seriesmetadata['seriesmeta'] = seriesmeta
            self._run_extra_scripts(nzbn, self.nzb_folder, filen, folderp,
                                    seriesmetadata)

        if ml is not None:
            return self.log
        else:
            if mylar.PROWL_ENABLED:
                pushmessage = series + '(' + issueyear + ') - issue #' + issuenumOG
                logger.info(u"Prowl request")
                prowl = notifiers.PROWL()
                prowl.notify(pushmessage,
                             "Download and Postprocessing completed")

            if mylar.NMA_ENABLED:
                nma = notifiers.NMA()
                nma.notify(series, str(issueyear), str(issuenumOG))

            if mylar.PUSHOVER_ENABLED:
                pushmessage = series + ' (' + str(
                    issueyear) + ') - issue #' + str(issuenumOG)
                logger.info(u"Pushover request")
                pushover = notifiers.PUSHOVER()
                pushover.notify(pushmessage,
                                "Download and Post-Processing completed")

            if mylar.BOXCAR_ENABLED:
                boxcar = notifiers.BOXCAR()
                boxcar.notify(series, str(issueyear), str(issuenumOG))

        return self.log
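helpers.replace_all(), used above to expand the filename template, is defined elsewhere in mylar/helpers.py and is not part of this excerpt. A minimal sketch, under the assumption that it is a plain token-for-token substitution (the real helper may differ):

def replace_all(text, substitutions):
    # Replace every occurrence of each template token with its value.
    for token, value in substitutions.items():
        text = text.replace(token, str(value))
    return text

# For example (illustrative values):
#   replace_all('$Series $Issue ($Year)',
#               {'$Series': 'Invincible', '$Issue': '019', '$Year': '2005'})
#   -> 'Invincible 019 (2005)'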
Exemple #7
0
    def Process_next(self,comicid,issueid,issuenumOG,ml=None):
            annchk = "no"
            extensions = ('.cbr', '.cbz')
            myDB = db.DBConnection()
            comicnzb = myDB.action("SELECT * from comics WHERE comicid=?", [comicid]).fetchone()
            issuenzb = myDB.action("SELECT * from issues WHERE issueid=? AND comicid=? AND ComicName NOT NULL", [issueid,comicid]).fetchone()
            print "issueid: " + str(issueid)
            print "issuenumOG: " + str(issuenumOG)
            if issuenzb is None:
                print "chk1"
                issuenzb = myDB.action("SELECT * from annuals WHERE issueid=? and comicid=?", [issueid,comicid]).fetchone()
                print "chk2"
                annchk = "yes"
            print issuenzb
            #issueno = str(issuenum).split('.')[0]
            #new CV API - removed all decimals...here we go AGAIN!
            issuenum = issuenzb['Issue_Number']
            issue_except = 'None'
            if 'au' in issuenum.lower():
                issuenum = re.sub("[^0-9]", "", issuenum)
                issue_except = ' AU'
            elif 'ai' in issuenum.lower():
                issuenum = re.sub("[^0-9]", "", issuenum)
                issue_except = ' AI'
            if '.' in issuenum:
                iss_find = issuenum.find('.')
                iss_b4dec = issuenum[:iss_find]
                iss_decval = issuenum[iss_find+1:]
                if int(iss_decval) == 0:
                    iss = iss_b4dec
                    issdec = int(iss_decval)
                    issueno = str(iss)
                    self._log("Issue Number: " + str(issueno), logger.DEBUG)
                    logger.fdebug("Issue Number: " + str(issueno))
                else:
                    if len(iss_decval) == 1:
                        iss = iss_b4dec + "." + iss_decval
                        issdec = int(iss_decval) * 10
                    else:
                        iss = iss_b4dec + "." + iss_decval.rstrip('0')
                        issdec = int(iss_decval.rstrip('0')) * 10
                    issueno = iss_b4dec
                    self._log("Issue Number: " + str(iss), logger.DEBUG)
                    logger.fdebug("Issue Number: " + str(iss))
            else:
                iss = issuenum
                issueno = str(iss)
            # issue zero-suppression here
            if mylar.ZERO_LEVEL == "0": 
                zeroadd = ""
            else:
                if mylar.ZERO_LEVEL_N  == "none": zeroadd = ""
                elif mylar.ZERO_LEVEL_N == "0x": zeroadd = "0"
                elif mylar.ZERO_LEVEL_N == "00x": zeroadd = "00"

            logger.fdebug("Zero Suppression set to : " + str(mylar.ZERO_LEVEL_N))

            if str(len(issueno)) > 1:
                if int(issueno) < 10:
                    self._log("issue detected less than 10", logger.DEBUG)
                    if '.' in iss:
                        if int(iss_decval) > 0:
                            issueno = str(iss)
                            prettycomiss = str(zeroadd) + str(iss)
                        else:
                            prettycomiss = str(zeroadd) + str(int(issueno))
                    else:
                        prettycomiss = str(zeroadd) + str(iss)
                    if issue_except != 'None': 
                        prettycomiss = str(prettycomiss) + issue_except
                    self._log("Zero level supplement set to " + str(mylar.ZERO_LEVEL_N) + ". Issue will be set as : " + str(prettycomiss), logger.DEBUG)
                elif int(issueno) >= 10 and int(issueno) < 100:
                    self._log("issue detected greater than 10, but less than 100", logger.DEBUG)
                    if mylar.ZERO_LEVEL_N == "none":
                        zeroadd = ""
                    else:
                        zeroadd = "0"
                    if '.' in iss:
                        if int(iss_decval) > 0:
                            issueno = str(iss)
                            prettycomiss = str(zeroadd) + str(iss)
                        else:
                            prettycomiss = str(zeroadd) + str(int(issueno))
                    else:
                        prettycomiss = str(zeroadd) + str(iss)
                    if issue_except != 'None':
                        prettycomiss = str(prettycomiss) + issue_except
                    self._log("Zero level supplement set to " + str(mylar.ZERO_LEVEL_N) + ".Issue will be set as : " + str(prettycomiss), logger.DEBUG)
                else:
                    self._log("issue detected greater than 100", logger.DEBUG)
                    if '.' in iss:
                        if int(iss_decval) > 0:
                            issueno = str(iss)
                    prettycomiss = str(issueno)
                    if issue_except != 'None':
                        prettycomiss = str(prettycomiss) + issue_except
                    self._log("Zero level supplement set to " + str(mylar.ZERO_LEVEL_N) + ". Issue will be set as : " + str(prettycomiss), logger.DEBUG)
            else:
                prettycomiss = str(issueno)
                self._log("issue length error - cannot determine length. Defaulting to None:  " + str(prettycomiss), logger.DEBUG)

            if annchk == "yes":
                prettycomiss = "Annual " + str(prettycomiss)
                self._log("Annual detected.")
            logger.fdebug("Pretty Comic Issue is : " + str(prettycomiss))
            issueyear = issuenzb['IssueDate'][:4]
            self._log("Issue Year: " + str(issueyear), logger.DEBUG)
            logger.fdebug("Issue Year : " + str(issueyear))
#            comicnzb= myDB.action("SELECT * from comics WHERE comicid=?", [comicid]).fetchone()
            publisher = comicnzb['ComicPublisher']
            self._log("Publisher: " + publisher, logger.DEBUG)
            logger.fdebug("Publisher: " + str(publisher))
            #we need to un-unicode this to make sure we can write the filenames properly for spec.chars
            series = comicnzb['ComicName'].encode('ascii', 'ignore').strip()
            self._log("Series: " + series, logger.DEBUG)
            logger.fdebug("Series: " + str(series))
            seriesyear = comicnzb['ComicYear']
            self._log("Year: " + seriesyear, logger.DEBUG)
            logger.fdebug("Year: "  + str(seriesyear))
            comlocation = comicnzb['ComicLocation']
            self._log("Comic Location: " + comlocation, logger.DEBUG)
            logger.fdebug("Comic Location: " + str(comlocation))
            comversion = comicnzb['ComicVersion']
            self._log("Comic Version: " + str(comversion), logger.DEBUG)
            logger.fdebug("Comic Version: " + str(comversion))
            if comversion is None:
                comversion = 'None'
            #if comversion is None, remove it so it doesn't populate with 'None'
            if comversion == 'None':
                chunk_f_f = re.sub('\$VolumeN','',mylar.FILE_FORMAT)
                chunk_f = re.compile(r'\s+')
                chunk_file_format = chunk_f.sub(' ', chunk_f_f)
                self._log("No version # found for series - tag will not be available for renaming.", logger.DEBUG)
                logger.fdebug("No version # found for series, removing from filename")
                logger.fdebug("new format is now: " + str(chunk_file_format))
            else:
                chunk_file_format = mylar.FILE_FORMAT

            ofilename = None

            #if meta-tagging is not enabled, we need to declare the check as being fail
            #if meta-tagging is enabled, it gets changed just below to a default of pass
            pcheck = "fail"

            #tag the meta.
            if mylar.ENABLE_META:
                self._log("Metatagging enabled - proceeding...")
                logger.fdebug("Metatagging enabled - proceeding...")
                pcheck = "pass"
                try:
                    import cmtagmylar
                    if ml is None:
                        pcheck = cmtagmylar.run(self.nzb_folder, issueid=issueid)
                    else:
                        pcheck = cmtagmylar.run(self.nzb_folder, issueid=issueid, manual="yes", filename=ml['ComicLocation'])

                except ImportError:
                    logger.fdebug("comictaggerlib not found on system. Ensure the ENTIRE lib directory is located within mylar/lib/comictaggerlib/")
                    logger.fdebug("continuing with PostProcessing, but I'm not using metadata.")
                    pcheck = "fail"
                
                if pcheck == "fail":
                    self._log("Unable to write metadata successfully - check mylar.log file. Attempting to continue without tagging...")
                    logger.fdebug("Unable to write metadata successfully - check mylar.log file. Attempting to continue without tagging...")
                elif pcheck == "unrar error":
                    self._log("This is a corrupt archive - whether CRC errors or it's incomplete. Marking as BAD, and retrying a different copy.")
                    logger.error("This is a corrupt archive - whether CRC errors or it's incomplete. Marking as BAD, and retrying a different copy.")
                    return self.log
                else:
                    otofilename = pcheck
                    self._log("Sucessfully wrote metadata to .cbz - Continuing..")
                    logger.fdebug("Sucessfully wrote metadata to .cbz (" + str(otofilename) + ") - Continuing..")
            #Run Pre-script

            if mylar.ENABLE_PRE_SCRIPTS:
                nzbn = self.nzb_name #original nzb name
                nzbf = self.nzb_folder #original nzb folder
                #name, comicyear, comicid , issueid, issueyear, issue, publisher
                #create the dic and send it.
                seriesmeta = []
                seriesmetadata = {}
                seriesmeta.append({
                            'name':                 series,
                            'comicyear':            seriesyear,
                            'comicid':              comicid,
                            'issueid':              issueid,
                            'issueyear':            issueyear,
                            'issue':                issuenum,
                            'publisher':            publisher
                            })
                seriesmetadata['seriesmeta'] = seriesmeta
                self._run_pre_scripts(nzbn, nzbf, seriesmetadata )

        #rename file and move to new path
        #nfilename = series + " " + issueno + " (" + seriesyear + ")"

            file_values = {'$Series':    series,
                           '$Issue':     prettycomiss,
                           '$Year':      issueyear,
                           '$series':    series.lower(),
                           '$Publisher': publisher,
                           '$publisher': publisher.lower(),
                           '$VolumeY':   'V' + str(seriesyear),
                           '$VolumeN':   comversion
                          }


            #if it's a Manual Run, use the ml['ComicLocation'] for the exact filename.
            if ml is None:

                for root, dirnames, filenames in os.walk(self.nzb_folder):
                    for filename in filenames:
                        if filename.lower().endswith(extensions):
                            ofilename = filename
                            path, ext = os.path.splitext(ofilename)
            else:
                if pcheck == "fail":
                    otofilename = ml['ComicLocation']
                print "otofilename:" + str(otofilename)
                odir, ofilename = os.path.split(otofilename)
                print "ofilename: " + str(ofilename)
                path, ext = os.path.splitext(ofilename)
                print "path: " + str(path)
                print "ext:" + str(ext)

            if ofilename is None:
                logger.error(u"Aborting PostProcessing - the filename doesn't exist in the location given. Make sure that " + str(self.nzb_folder) + " exists and is the correct location.")
                return
            self._log("Original Filename: " + ofilename, logger.DEBUG)
            self._log("Original Extension: " + ext, logger.DEBUG)
            logger.fdebug("Original Filname: " + str(ofilename))
            logger.fdebug("Original Extension: " + str(ext))

            if mylar.FILE_FORMAT == '' or not mylar.RENAME_FILES:
                self._log("Rename Files isn't enabled...keeping original filename.", logger.DEBUG)
                logger.fdebug("Rename Files isn't enabled - keeping original filename.")
                #check if extension is in nzb_name - will screw up otherwise
                if ofilename.lower().endswith(extensions):
                    nfilename = ofilename[:-4]
                else:
                    nfilename = ofilename
            else:
                nfilename = helpers.replace_all(chunk_file_format, file_values)
                if mylar.REPLACE_SPACES:
                    #mylar.REPLACE_CHAR ...determines what to replace spaces with underscore or dot
                    nfilename = nfilename.replace(' ', mylar.REPLACE_CHAR)
            nfilename = re.sub('[\,\:\?]', '', nfilename)
            self._log("New Filename: " + nfilename, logger.DEBUG)
            logger.fdebug("New Filename: " + str(nfilename))

            src = os.path.join(self.nzb_folder, ofilename)

            filechecker.validateAndCreateDirectory(comlocation, True)

            if mylar.LOWERCASE_FILENAMES:
                dst = (comlocation + "/" + nfilename + ext).lower()
            else:
                dst = comlocation + "/" + nfilename + ext.lower()    
            self._log("Source:" + src, logger.DEBUG)
            self._log("Destination:" +  dst, logger.DEBUG)
            logger.fdebug("Source: " + str(src))
            logger.fdebug("Destination: " + str(dst))

            if ml is None:
                #non-manual run moving/deleting...
                os.rename(os.path.join(self.nzb_folder, str(ofilename)), os.path.join(self.nzb_folder,str(nfilename + ext)))
                src = os.path.join(self.nzb_folder, str(nfilename + ext))
                try:
                    shutil.move(src, dst)
                except (OSError, IOError):
                    self._log("Failed to move directory - check directories and manually re-run.", logger.DEBUG)
                    self._log("Post-Processing ABORTED.", logger.DEBUG)
                    return
                #tidyup old path
                try:
                    shutil.rmtree(self.nzb_folder)
                except (OSError, IOError):
                    self._log("Failed to remove temporary directory - check directory and manually re-run.", logger.DEBUG)
                    self._log("Post-Processing ABORTED.", logger.DEBUG)
                    return

                self._log("Removed temporary directory : " + str(self.nzb_folder), logger.DEBUG)
            else:
                #Manual Run, this is the portion.
                logger.fdebug("Renaming " + os.path.join(self.nzb_folder, str(ofilename)) + " ..to.. " + os.path.join(self.nzb_folder,str(nfilename + ext)))
                os.rename(os.path.join(self.nzb_folder, str(ofilename)), os.path.join(self.nzb_folder,str(nfilename + ext)))
                src = os.path.join(self.nzb_folder, str(nfilename + ext))
                logger.fdebug("Moving " + src + " ... to ... " + dst)
                try:
                    shutil.move(src, dst)
                except (OSError, IOError):
                    logger.fdebug("Failed to move directory - check directories and manually re-run.")
                    logger.fdebug("Post-Processing ABORTED.")
                    return
                logger.fdebug("Successfully moved to : " + dst)
                #tidyup old path
                #try:
                #    os.remove(os.path.join(self.nzb_folder, str(ofilename)))
                #    logger.fdebug("Deleting : " + os.path.join(self.nzb_folder, str(ofilename)))
                #except (OSError, IOError):
                #    logger.fdebug("Failed to remove temporary directory - check directory and manually re-run.")
                #    logger.fdebug("Post-Processing ABORTED.")
                #    return
                #logger.fdebug("Removed temporary directory : " + str(self.nzb_folder))

                    #delete entry from nzblog table
            myDB.action('DELETE from nzblog WHERE issueid=?', [issueid])
                    #update snatched table to change status to Downloaded
            if annchk == "no":
                updater.foundsearch(comicid, issueid, down='True')
            else:
                updater.foundsearch(comicid, issueid, mode='want_ann', down='True')
                    #force rescan of files
            updater.forceRescan(comicid)
            logger.info(u"Post-Processing completed for: " + series + " issue: " + str(issuenumOG) )
            self._log(u"Post Processing SUCCESSFULL! ", logger.DEBUG)
            if ml is not None: 
                return
            else:
                if mylar.PROWL_ENABLED:
                    pushmessage = series + '(' + issueyear + ') - issue #' + issuenumOG
                    logger.info(u"Prowl request")
                    prowl = notifiers.PROWL()
                    prowl.notify(pushmessage,"Download and Postprocessing completed")
    
                if mylar.NMA_ENABLED:
                    nma = notifiers.NMA()
                    nma.notify(series, str(issueyear), str(issuenumOG))

                if mylar.PUSHOVER_ENABLED:
                    pushmessage = series + ' (' + str(issueyear) + ') - issue #' + str(issuenumOG)
                    logger.info(u"Pushover request")
                    pushover = notifiers.PUSHOVER()
                    pushover.notify(pushmessage, "Download and Post-Processing completed")

                if mylar.BOXCAR_ENABLED:
                    boxcar = notifiers.BOXCAR()
                    boxcar.notify(series, str(issueyear), str(issuenumOG))

             
            # retrieve/create the corresponding comic objects

            if mylar.ENABLE_EXTRA_SCRIPTS:
                folderp = str(dst) #folder location after move/rename
                nzbn = self.nzb_name #original nzb name
                filen = str(nfilename + ext) #new filename
                #name, comicyear, comicid , issueid, issueyear, issue, publisher
                #create the dic and send it.
                seriesmeta = []
                seriesmetadata = {}
                seriesmeta.append({
                            'name':                 series,
                            'comicyear':            seriesyear,
                            'comicid':              comicid,
                            'issueid':              issueid,
                            'issueyear':            issueyear,
                            'issue':                issuenum,
                            'publisher':            publisher
                            })
                seriesmetadata['seriesmeta'] = seriesmeta
                self._run_extra_scripts(nzbn, self.nzb_folder, filen, folderp, seriesmetadata)

            return self.log
Exemple #8
0
def addComictoDB(comicid,mismatch=None,pullupd=None,imported=None,ogcname=None):
    # Putting this here to get around the circular import. Will try to use this to update images at later date.
#    from mylar import cache
    
    myDB = db.DBConnection()
    
    # We need the current minimal info in the database instantly
    # so we don't throw a 500 error when we redirect to the artistPage

    controlValueDict = {"ComicID":     comicid}

    dbcomic = myDB.action('SELECT * FROM comics WHERE ComicID=?', [comicid]).fetchone()
    if dbcomic is None:
        newValueDict = {"ComicName":   "Comic ID: %s" % (comicid),
                "Status":   "Loading"}
        comlocation = None
        oldcomversion = None
    else:
        newValueDict = {"Status":   "Loading"}
        comlocation = dbcomic['ComicLocation']
        filechecker.validateAndCreateDirectory(comlocation, True)
        oldcomversion = dbcomic['ComicVersion'] #store the comicversion and chk if it exists before hammering.

    myDB.upsert("comics", newValueDict, controlValueDict)

    #run the re-sortorder here in order to properly display the page
    if pullupd is None:
        helpers.ComicSort(comicorder=mylar.COMICSORT, imported=comicid)

    # we need to lookup the info for the requested ComicID in full now        
    comic = cv.getComic(comicid,'comic')
    #comic = myDB.action('SELECT * FROM comics WHERE ComicID=?', [comicid]).fetchone()
    if not comic:
        logger.warn("Error fetching comic. ID for : " + comicid)
        if dbcomic is None:
            newValueDict = {"ComicName":   "Fetch failed, try refreshing. (%s)" % (comicid),
                    "Status":   "Active"}
        else:
            newValueDict = {"Status":   "Active"}
        myDB.upsert("comics", newValueDict, controlValueDict)
        return
    
    if comic['ComicName'].startswith('The '):
        sortname = comic['ComicName'][4:]
    else:
        sortname = comic['ComicName']
        

    logger.info(u"Now adding/updating: " + comic['ComicName'])
    #--Now that we know ComicName, let's try some scraping
    #--Start
    # gcd will return issue details (most importantly publishing date)
    if not mylar.CV_ONLY:
        if mismatch == "no" or mismatch is None:
            gcdinfo=parseit.GCDScraper(comic['ComicName'], comic['ComicYear'], comic['ComicIssues'], comicid) 
            #print ("gcdinfo: " + str(gcdinfo))
            mismatch_com = "no"
            if gcdinfo == "No Match":
                updater.no_searchresults(comicid)
                nomatch = "true"
                logger.info(u"There was an error when trying to add " + comic['ComicName'] + " (" + comic['ComicYear'] + ")" )
                return nomatch
            else:
                mismatch_com = "yes"
                #print ("gcdinfo:" + str(gcdinfo))

        elif mismatch == "yes":
            CV_EXcomicid = myDB.action("SELECT * from exceptions WHERE ComicID=?", [comicid]).fetchone()
            if CV_EXcomicid['variloop'] is None: pass
            else:
                vari_loop = CV_EXcomicid['variloop']
                NewComicID = CV_EXcomicid['NewComicID']
                gcomicid = CV_EXcomicid['GComicID']
                resultURL = "/series/" + str(NewComicID) + "/"
                #print ("variloop" + str(CV_EXcomicid['variloop']))
                #if vari_loop == '99':
                gcdinfo = parseit.GCDdetails(comseries=None, resultURL=resultURL, vari_loop=0, ComicID=comicid, TotalIssues=0, issvariation="no", resultPublished=None)

    logger.info(u"Sucessfully retrieved details for " + comic['ComicName'] )
    # print ("Series Published" + parseit.resultPublished)

    CV_NoYearGiven = "no"
    #if the SeriesYear returned by CV is blank or none (0000), let's use the gcd one.
    if comic['ComicYear'] is None or comic['ComicYear'] == '0000':
        if mylar.CV_ONLY:
            #we'll defer this until later when we grab all the issues and then figure it out
            logger.info("Uh-oh. I can't find a Series Year for this series. I'm going to try analyzing deeper.")
            SeriesYear = cv.getComic(comicid,'firstissue',comic['FirstIssueID'])
            if SeriesYear == '0000':
                logger.info("Ok - I couldn't find a Series Year at all. Loading in the issue data now and will figure out the Series Year.")
                CV_NoYearGiven = "yes"
                issued = cv.getComic(comicid,'issue')
                SeriesYear = issued['firstdate'][:4]
        else:
            SeriesYear = gcdinfo['SeriesYear']
    else:
        SeriesYear = comic['ComicYear']

    #let's do the Annual check here.
    if mylar.ANNUALS_ON:
        annualcomicname = re.sub('[\,\:]', '', comic['ComicName'])
        annuals = comicbookdb.cbdb(annualcomicname, SeriesYear)
        print ("Number of Annuals returned: " + str(annuals['totalissues']))
        nb = 0
        while (nb <= int(annuals['totalissues'])):
            try:
                annualval = annuals['annualslist'][nb]
            except IndexError:
                break
            newCtrl = {"IssueID": str(annualval['AnnualIssue'] + annualval['AnnualDate'])}
            newVals = {"Issue_Number":  annualval['AnnualIssue'],
                       "IssueDate":     annualval['AnnualDate'],
                       "IssueName":    annualval['AnnualTitle'],
                       "ComicID":       comicid,
                       "Status":        "Skipped"}
            myDB.upsert("annuals", newVals, newCtrl)
            nb+=1
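        # Each annual row gets a synthetic IssueID built by concatenating its issue
        # number and date, e.g. (illustrative) AnnualIssue '1' + AnnualDate '2013-06-01'
        # -> IssueID '12013-06-01', and is stored with Status 'Skipped'.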

    #parseit.annualCheck(gcomicid=gcdinfo['GCDComicID'], comicid=comicid, comicname=comic['ComicName'], comicyear=SeriesYear)
    #comic book location on machine
    # setup default location here

    if comlocation is None:
        # let's remove the non-standard characters here.
        u_comicnm = comic['ComicName']
        u_comicname = u_comicnm.encode('ascii', 'ignore').strip()
        if ':' in u_comicname or '/' in u_comicname or ',' in u_comicname or '?' in u_comicname:
            comicdir = u_comicname
            if ':' in comicdir:
                comicdir = comicdir.replace(':','')
            if '/' in comicdir:
                comicdir = comicdir.replace('/','-')
            if ',' in comicdir:
                comicdir = comicdir.replace(',','')
            if '?' in comicdir:
                comicdir = comicdir.replace('?','')
        else: comicdir = u_comicname
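        # Illustrative example of the character scrubbing above (made-up title):
        #   'Batman: Gotham/Knights, Vol.?' -> 'Batman Gotham-Knights Vol.'
        # (':' , ',' and '?' are dropped; '/' becomes '-')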

        series = comicdir
        publisher = re.sub('!','',comic['ComicPublisher']) # thanks Boom!
        year = SeriesYear
        comversion = comic['ComicVersion']
        if comversion is None:
            comversion = 'None'
        #if comversion is None, remove it so it doesn't populate with 'None'
        if comversion == 'None':
            chunk_f_f = re.sub('\$VolumeN','',mylar.FILE_FORMAT)
            chunk_f = re.compile(r'\s+')
            mylar.FILE_FORMAT = chunk_f.sub(' ', chunk_f_f)
         
        #do work to generate folder path

        values = {'$Series':        series,
                  '$Publisher':     publisher,
                  '$Year':          year,
                  '$series':        series.lower(),
                  '$publisher':     publisher.lower(),
                  '$VolumeY':       'V' + str(year),
                  '$VolumeN':       comversion
                  }



        #print mylar.FOLDER_FORMAT
        #print 'working dir:'
        #print helpers.replace_all(mylar.FOLDER_FORMAT, values)

        if mylar.FOLDER_FORMAT == '':
            comlocation = mylar.DESTINATION_DIR + "/" + comicdir + " (" + SeriesYear + ")"
        else:
            comlocation = mylar.DESTINATION_DIR + "/" + helpers.replace_all(mylar.FOLDER_FORMAT, values)


        #comlocation = mylar.DESTINATION_DIR + "/" + comicdir + " (" + comic['ComicYear'] + ")"
        if mylar.DESTINATION_DIR == "":
            logger.error(u"There is no general directory specified - please specify in Config/Post-Processing.")
            return
        if mylar.REPLACE_SPACES:
            #mylar.REPLACE_CHAR ...determines what to replace spaces with underscore or dot
            comlocation = comlocation.replace(' ', mylar.REPLACE_CHAR)
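        # Illustrative path (the folder format string is an assumption, not taken from
        # this config): FOLDER_FORMAT '$Publisher/$Series ($Year)' with publisher
        # 'Image Comics', series 'Invincible' and year '2003' gives
        #   comlocation = DESTINATION_DIR + '/Image Comics/Invincible (2003)'
        # with spaces then swapped for REPLACE_CHAR when REPLACE_SPACES is enabled.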

    #moved this out of the above loop so it will check for the existence of comlocation in case it was moved
    #if it doesn't exist - create it (otherwise will bugger up later on)
    if os.path.isdir(str(comlocation)):
        logger.info(u"Directory (" + str(comlocation) + ") already exists! Continuing...")
    else:
        #print ("Directory doesn't exist!")
        #try:
        #    os.makedirs(str(comlocation))
        #    logger.info(u"Directory successfully created at: " + str(comlocation))
        #except OSError:
        #    logger.error(u"Could not create comicdir : " + str(comlocation))
        filechecker.validateAndCreateDirectory(comlocation, True)

    #try to account for CV not updating new issues as fast as GCD
    #seems CV doesn't update total counts
    #comicIssues = gcdinfo['totalissues']
    comicIssues = comic['ComicIssues']

    if not mylar.CV_ONLY:
        if gcdinfo['gcdvariation'] == "cv":
            comicIssues = str(int(comic['ComicIssues']) + 1)

    #let's download the image...
    if os.path.exists(mylar.CACHE_DIR):pass
    else:
        #let's make the dir.
        try:
            os.makedirs(str(mylar.CACHE_DIR))
            logger.info(u"Cache Directory successfully created at: " + str(mylar.CACHE_DIR))

        except OSError:
            logger.error('Could not create cache dir. Check permissions of cache dir: ' + str(mylar.CACHE_DIR))

    coverfile = os.path.join(mylar.CACHE_DIR,  str(comicid) + ".jpg")

    #try:
    urllib.urlretrieve(str(comic['ComicImage']), str(coverfile))
    try:
        with open(str(coverfile)) as f:
            ComicImage = os.path.join('cache',str(comicid) + ".jpg")

            #this is for Firefox when outside the LAN...it works, but I don't know how to implement it
            #without breaking the normal flow for inside the LAN (above)
            #ComicImage = "http://" + str(mylar.HTTP_HOST) + ":" + str(mylar.HTTP_PORT) + "/cache/" + str(comicid) + ".jpg"

            logger.info(u"Sucessfully retrieved cover for " + comic['ComicName'])
            #if the comic cover local is checked, save a cover.jpg to the series folder.
            if mylar.COMIC_COVER_LOCAL:
                comiclocal = os.path.join(str(comlocation) + "/cover.jpg")
                shutil.copy(ComicImage,comiclocal)
    except IOError as e:
        logger.error(u"Unable to save cover locally at this time.")

    if oldcomversion is None:
        if comic['ComicVersion'].isdigit():
            comicVol = "v" + comic['ComicVersion']
        else:
            comicVol = None
    else:
        comicVol = oldcomversion

    #for description ...
    #Cdesc = helpers.cleanhtml(comic['ComicDescription'])
    #cdes_find = Cdesc.find("Collected")
    #cdes_removed = Cdesc[:cdes_find]
    #print cdes_removed

    controlValueDict = {"ComicID":      comicid}
    newValueDict = {"ComicName":        comic['ComicName'],
                    "ComicSortName":    sortname,
                    "ComicYear":        SeriesYear,
                    "ComicImage":       ComicImage,
                    "Total":            comicIssues,
                    "ComicVersion":     comicVol,
                    "ComicLocation":    comlocation,
                    "ComicPublisher":   comic['ComicPublisher'],
                    #"Description":      Cdesc.decode('utf-8', 'replace'),
                    "DetailURL":        comic['ComicURL'],
#                    "ComicPublished":   gcdinfo['resultPublished'],
                    "ComicPublished":   'Unknown',
                    "DateAdded":        helpers.today(),
                    "Status":           "Loading"}
    
    myDB.upsert("comics", newValueDict, controlValueDict)

    #comicsort here...
    #run the re-sortorder here in order to properly display the page
    if pullupd is None:
        helpers.ComicSort(sequence='update')

    if CV_NoYearGiven == 'no':
        #if set to 'no' then we haven't pulled down the issues, otherwise we did it already
        issued = cv.getComic(comicid,'issue')
    logger.info(u"Sucessfully retrieved issue details for " + comic['ComicName'] )
    n = 0
    iscnt = int(comicIssues)
    issid = []
    issnum = []
    issname = []
    issdate = []
    int_issnum = []
    #let's start issue #'s at 0 -- thanks to DC for the new 52 reboot! :)
    latestiss = "0"
    latestdate = "0000-00-00"
    firstiss = "10000000"
    firstdate = "2099-00-00"
    #print ("total issues:" + str(iscnt))
    #---removed NEW code here---
    logger.info(u"Now adding/updating issues for " + comic['ComicName'])

    if not mylar.CV_ONLY:
        #fccnt = int(fc['comiccount'])
        #logger.info(u"Found " + str(fccnt) + "/" + str(iscnt) + " issues of " + comic['ComicName'] + "...verifying")
        #fcnew = []
        if iscnt > 0: #if a series is brand new, it wont have any issues/details yet so skip this part
            while (n <= iscnt):
            #---NEW.code
                try:
                    firstval = issued['issuechoice'][n]
                except IndexError:
                    break
                cleanname = helpers.cleanName(firstval['Issue_Name'])
                issid = str(firstval['Issue_ID'])
                issnum = str(firstval['Issue_Number'])
                #print ("issnum: " + str(issnum))
                issname = cleanname
                if '.' in str(issnum):
                    issn_st = str(issnum).find('.')
                    issn_b4dec = str(issnum)[:issn_st]
                    #if the length of decimal is only 1 digit, assume it's a tenth
                    dec_is = str(issnum)[issn_st + 1:]
                    if len(dec_is) == 1:
                        dec_nisval = int(dec_is) * 10
                        iss_naftdec = str(dec_nisval)
                    if len(dec_is) == 2:
                        dec_nisval = int(dec_is)
                        iss_naftdec = str(dec_nisval)
                    iss_issue = issn_b4dec + "." + iss_naftdec
                    issis = (int(issn_b4dec) * 1000) + dec_nisval
                elif 'au' in issnum.lower():
                    print ("au detected")
                    stau = issnum.lower().find('au')
                    issnum_au = issnum[:stau] 
                    print ("issnum_au: " + str(issnum_au))
                    #account for Age of Ultron mucked up numbering
                    issis = str(int(issnum_au) * 1000) + 'AU'
                else: issis = int(issnum) * 1000

                bb = 0
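                #cross-reference this ComicVine issue against the scraped GCD issue list below:
                #when the normalized numbers match, take the GCD cover date and track the latest issue/date.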
                while (bb <= iscnt):
                    try: 
                        gcdval = gcdinfo['gcdchoice'][bb]
                        #print ("gcdval: " + str(gcdval))
                    except IndexError:
                        #account for gcd variation here
                        if gcdinfo['gcdvariation'] == 'gcd':
                            #logger.fdebug("gcd-variation accounted for.")
                            issdate = '0000-00-00'
                            int_issnum =  int ( issis / 1000 )
                        break
                    if 'nn' in str(gcdval['GCDIssue']):
                        #no number detected - GN, TP or the like
                        logger.warn(u"Non Series detected (Graphic Novel, etc) - cannot proceed at this time.")
                        updater.no_searchresults(comicid)
                        return
                    elif 'au' in gcdval['GCDIssue'].lower():
                        #account for Age of Ultron mucked up numbering - this is in format of 5AU.00
                        gstau = gcdval['GCDIssue'].lower().find('au')
                        gcdis_au = gcdval['GCDIssue'][:gstau]
                        gcdis = str(int(gcdis_au) * 1000) + 'AU'
                    elif '.' in str(gcdval['GCDIssue']):
                        #logger.fdebug("g-issue:" + str(gcdval['GCDIssue']))
                        issst = str(gcdval['GCDIssue']).find('.')
                        #logger.fdebug("issst:" + str(issst))
                        issb4dec = str(gcdval['GCDIssue'])[:issst]
                        #logger.fdebug("issb4dec:" + str(issb4dec))
                        #if the length of decimal is only 1 digit, assume it's a tenth
                        decis = str(gcdval['GCDIssue'])[issst+1:]
                        #logger.fdebug("decis:" + str(decis))
                        if len(decis) == 1:
                            decisval = int(decis) * 10
                            issaftdec = str(decisval)
                        if len(decis) == 2:
                            decisval = int(decis)
                            issaftdec = str(decisval)
                        gcd_issue = issb4dec + "." + issaftdec
                        #logger.fdebug("gcd_issue:" + str(gcd_issue))
                        try:
                            gcdis = (int(issb4dec) * 1000) + decisval
                        except ValueError:
                            logger.error("This has no issue #'s for me to get - Either a Graphic Novel or one-shot. This feature to allow these will be added in the near future.")
                            updater.no_searchresults(comicid)
                            return
                    else:
                        gcdis = int(str(gcdval['GCDIssue'])) * 1000
                    if gcdis == issis:
                        issdate = str(gcdval['GCDDate'])
                        if str(issis).isdigit():
                            int_issnum = int( gcdis / 1000 )
                        else:
                            if 'au' in issis.lower():
                                int_issnum = str(int(gcdis[:-2]) / 1000) + 'AU'
                            else:
                                logger.error("this has an alpha-numeric in the issue # which I cannot account for. Get on github and log the issue for evilhero.")
                                return
                        #get the latest issue / date using the date.
                        if gcdval['GCDDate'] > latestdate:
                            latestiss = str(issnum)
                            latestdate = str(gcdval['GCDDate'])
                            break
                       #bb = iscnt
                    bb+=1
                #print("(" + str(n) + ") IssueID: " + str(issid) + " IssueNo: " + str(issnum) + " Date" + str(issdate))
                #---END.NEW.

                # check if the issue already exists
                iss_exists = myDB.action('SELECT * from issues WHERE IssueID=?', [issid]).fetchone()

                controlValueDict = {"IssueID":  issid}
                newValueDict = {"ComicID":            comicid,
                                "ComicName":          comic['ComicName'],
                                "IssueName":          issname,
                                "Issue_Number":       issnum,
                                "IssueDate":          issdate,
                                "Int_IssueNumber":    int_issnum
                                }

                # only add DateAdded if the issue is not already in the database (existing status is preserved below)
                if iss_exists is None:
                    newValueDict['DateAdded'] = helpers.today()
                if mylar.AUTOWANT_ALL:
                    newValueDict['Status'] = "Wanted"
                elif issdate > helpers.today() and mylar.AUTOWANT_UPCOMING:
                    newValueDict['Status'] = "Wanted"
                else:
                    newValueDict['Status'] = "Skipped"

                if iss_exists:
                    #print ("Existing status : " + str(iss_exists['Status']))
                    newValueDict['Status'] = iss_exists['Status']     
            
                try:     
                    myDB.upsert("issues", newValueDict, controlValueDict)
                except sqlite3.InterfaceError, e:
                    #raise sqlite3.InterfaceError(e)
                    logger.error("MAJOR error trying to get issue data, this is most likey a MULTI-VOLUME series and you need to use the custom_exceptions.csv file.")
                    myDB.action("DELETE FROM comics WHERE ComicID=?", [comicid])
                    return
                n+=1
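
The decimal and 'AU' handling above boils down to turning an issue number into a sortable key: whole issues are scaled by 1000 and any decimal part fills the gap in between, while 'AU' issues are carried as a suffixed string. A minimal sketch of that normalization as a hypothetical standalone helper (not Mylar's own code; like the loop above it only covers one- and two-digit decimals) might look like this:

def normalize_issue_number(issnum):
    #whole issues scale by 1000 so decimal issues sort between them
    #e.g. "3" -> 3000, "3.1" -> 3010, "3.25" -> 3025, "5AU" -> "5000AU"
    issnum = str(issnum).strip()
    if 'au' in issnum.lower():
        #Age of Ultron style numbering keeps the AU suffix on a string key
        whole = int(issnum.lower().split('au')[0])
        return str(whole * 1000) + 'AU'
    if '.' in issnum:
        whole, dec = issnum.split('.', 1)
        decval = int(dec) * 10 if len(dec) == 1 else int(dec[:2])
        return int(whole) * 1000 + decval
    return int(issnum) * 1000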
Exemple #9
0
    def Process(self):
            self._log("nzb name: " + str(self.nzb_name), logger.DEBUG)
            self._log("nzb folder: " + str(self.nzb_folder), logger.DEBUG)
            logger.fdebug("nzb name: " + str(self.nzb_name))
            logger.fdebug("nzb folder: " + str(self.nzb_folder))
            #lookup nzb_name in nzblog table to get issueid

            #query SAB to find out if Replace Spaces enabled / not as well as Replace Decimals
            #http://localhost:8080/sabnzbd/api?mode=set_config&section=misc&keyword=dirscan_speed&value=5
            querysab = str(mylar.SAB_HOST) + "/api?mode=get_config&section=misc&output=xml&apikey=" + str(mylar.SAB_APIKEY)
            #logger.info("querysab_string:" + str(querysab))
            file = urllib2.urlopen(querysab)
            data = file.read()
            file.close()
            dom = parseString(data)

            sabreps = dom.getElementsByTagName('replace_spaces')[0].firstChild.wholeText
            sabrepd = dom.getElementsByTagName('replace_dots')[0].firstChild.wholeText
            logger.fdebug("SAB Replace Spaces: " + str(sabreps))
            logger.fdebug("SAB Replace Dots: " + str(sabrepd))
            myDB = db.DBConnection()

            nzbname = self.nzb_name
            #remove extensions from nzb_name if they somehow got through (Experimental most likely)
            extensions = ('.cbr', '.cbz')

            if nzbname.lower().endswith(extensions):
                fd, ext = os.path.splitext(nzbname)
                self._log("Removed extension from nzb: " + ext, logger.DEBUG)
                nzbname = re.sub(str(ext), '', str(nzbname))

            #replace spaces
            nzbname = re.sub(' ', '.', str(nzbname))
            nzbname = re.sub('[\,\:]', '', str(nzbname))

            logger.fdebug("After conversions, nzbname is : " + str(nzbname))
            self._log("nzbname: " + str(nzbname), logger.DEBUG)

            nzbiss = myDB.action("SELECT * from nzblog WHERE nzbname=?", [nzbname]).fetchone()

            if nzbiss is None:
                self._log("Failure - could not initially locate nzbfile in my database to rename.", logger.DEBUG)
                logger.fdebug("Failure - could not locate nzbfile initially.")
                # if failed on spaces, change it all to decimals and try again.
                nzbname = re.sub('_', '.', str(nzbname))
                self._log("trying again with this nzbname: " + str(nzbname), logger.DEBUG)
                logger.fdebug("trying again with nzbname of : " + str(nzbname))
                nzbiss = myDB.action("SELECT * from nzblog WHERE nzbname=?", [nzbname]).fetchone()
                if nzbiss is None:
                    logger.error(u"Unable to locate downloaded file to rename. PostProcessing aborted.")
                    return
                else:
                    self._log("I corrected and found the nzb as : " + str(nzbname))
                    logger.fdebug("auto-corrected and found the nzb as : " + str(nzbname))
                    issueid = nzbiss['IssueID']
            else: 
                issueid = nzbiss['IssueID']
                #use issueid to get publisher, series, year, issue number
            issuenzb = myDB.action("SELECT * from issues WHERE issueid=?", [issueid]).fetchone()
            comicid = issuenzb['ComicID']
            issuenum = issuenzb['Issue_Number']
            #issueno = str(issuenum).split('.')[0]

            #guard against issue numbers with no decimal component at all
            if '.' in issuenum:
                iss_find = issuenum.find('.')
                iss_b4dec = issuenum[:iss_find]
                iss_decval = issuenum[iss_find+1:]
            else:
                iss_b4dec = issuenum
                iss_decval = '0'
            if int(iss_decval) == 0:
                iss = iss_b4dec
                issdec = int(iss_decval)
                issueno = str(iss)
                self._log("Issue Number: " + str(issueno), logger.DEBUG)
                logger.fdebug("Issue Number: " + str(issueno))
            else:
                if len(iss_decval) == 1:
                    iss = iss_b4dec + "." + iss_decval
                    issdec = int(iss_decval) * 10
                else:
                    iss = iss_b4dec + "." + iss_decval.rstrip('0')
                    issdec = int(iss_decval.rstrip('0')) * 10
                issueno = iss_b4dec
                self._log("Issue Number: " + str(iss), logger.DEBUG)
                logger.fdebug("Issue Number: " + str(iss))

            # issue zero-suppression here
            if mylar.ZERO_LEVEL == "0": 
                zeroadd = ""
            else:
                if mylar.ZERO_LEVEL_N  == "none": zeroadd = ""
                elif mylar.ZERO_LEVEL_N == "0x": zeroadd = "0"
                elif mylar.ZERO_LEVEL_N == "00x": zeroadd = "00"

            logger.fdebug("Zero Suppression set to : " + str(mylar.ZERO_LEVEL_N))

            if str(len(issueno)) > 1:
                if int(issueno) < 10:
                    self._log("issue detected less than 10", logger.DEBUG)
                    if int(iss_decval) > 0:
                        issueno = str(iss)
                        prettycomiss = str(zeroadd) + str(iss)
                    else:
                        prettycomiss = str(zeroadd) + str(int(issueno))
                    self._log("Zero level supplement set to " + str(mylar.ZERO_LEVEL_N) + ". Issue will be set as : " + str(prettycomiss), logger.DEBUG)
                elif int(issueno) >= 10 and int(issueno) < 100:
                    self._log("issue detected greater than 10, but less than 100", logger.DEBUG)
                    if mylar.ZERO_LEVEL_N == "none":
                        zeroadd = ""
                    else:
                        zeroadd = "0"
                    if int(iss_decval) > 0:
                        issueno = str(iss)
                        prettycomiss = str(zeroadd) + str(iss)
                    else:
                        prettycomiss = str(zeroadd) + str(int(issueno))
                    self._log("Zero level supplement set to " + str(mylar.ZERO_LEVEL_N) + ".Issue will be set as : " + str(prettycomiss), logger.DEBUG)
                else:
                    self._log("issue detected greater than 100", logger.DEBUG)
                    if int(iss_decval) > 0:
                        issueno = str(iss)
                    prettycomiss = str(issueno)
                    self._log("Zero level supplement set to " + str(mylar.ZERO_LEVEL_N) + ". Issue will be set as : " + str(prettycomiss), logger.DEBUG)
            else:
                prettycomiss = str(issueno)
                self._log("issue length error - cannot determine length. Defaulting to None:  " + str(prettycomiss), logger.DEBUG)

            logger.fdebug("Pretty Comic Issue is : " + str(prettycomiss))
            issueyear = issuenzb['IssueDate'][:4]
            self._log("Issue Year: " + str(issueyear), logger.DEBUG)
            logger.fdebug("Issue Year : " + str(issueyear))
            comicnzb= myDB.action("SELECT * from comics WHERE comicid=?", [comicid]).fetchone()
            publisher = comicnzb['ComicPublisher']
            self._log("Publisher: " + publisher, logger.DEBUG)
            logger.fdebug("Publisher: " + str(publisher))
            series = comicnzb['ComicName']
            self._log("Series: " + series, logger.DEBUG)
            logger.fdebug("Series: " + str(series))
            seriesyear = comicnzb['ComicYear']
            self._log("Year: " + seriesyear, logger.DEBUG)
            logger.fdebug("Year: "  + str(seriesyear))
            comlocation = comicnzb['ComicLocation']
            self._log("Comic Location: " + comlocation, logger.DEBUG)
            logger.fdebug("Comic Location: " + str(comlocation))

            #Run Pre-script

            if mylar.ENABLE_PRE_SCRIPTS:
                nzbn = self.nzb_name #original nzb name
                nzbf = self.nzb_folder #original nzb folder
                #name, comicyear, comicid , issueid, issueyear, issue, publisher
                #create the dic and send it.
                seriesmeta = []
                seriesmetadata = {}
                seriesmeta.append({
                            'name':                 series,
                            'comicyear':            seriesyear,
                            'comicid':              comicid,
                            'issueid':              issueid,
                            'issueyear':            issueyear,
                            'issue':                issuenum,
                            'publisher':            publisher
                            })
                seriesmetadata['seriesmeta'] = seriesmeta
                self._run_pre_scripts(nzbn, nzbf, seriesmetadata )

        #rename file and move to new path
        #nfilename = series + " " + issueno + " (" + seriesyear + ")"

            file_values = {'$Series':    series,
                           '$Issue':     prettycomiss,
                           '$Year':      issueyear,
                           '$series':    series.lower(),
                           '$Publisher': publisher,
                           '$publisher': publisher.lower(),
                           '$Volume':    seriesyear
                          }

            for root, dirnames, filenames in os.walk(self.nzb_folder):
                for filename in filenames:
                    if filename.lower().endswith(extensions):
                        ofilename = filename
                        path, ext = os.path.splitext(ofilename)
            self._log("Original Filename: " + ofilename, logger.DEBUG)
            self._log("Original Extension: " + ext, logger.DEBUG)
            logger.fdebug("Original Filname: " + str(ofilename))
            logger.fdebug("Original Extension: " + str(ext))

            if mylar.FILE_FORMAT == '':
                self._log("Rename Files isn't enabled...keeping original filename.", logger.DEBUG)
                logger.fdebug("Rename Files isn't enabled - keeping original filename.")
                #check if extension is in nzb_name - will screw up otherwise
                if ofilename.lower().endswith(extensions):
                    nfilename = ofilename[:-4]
                else:
                    nfilename = ofilename
            else:
                nfilename = helpers.replace_all(mylar.FILE_FORMAT, file_values)
                if mylar.REPLACE_SPACES:
                    #mylar.REPLACE_CHAR ...determines what to replace spaces with underscore or dot
                    nfilename = nfilename.replace(' ', mylar.REPLACE_CHAR)
            nfilename = re.sub('[\,\:]', '', nfilename)
            self._log("New Filename: " + nfilename, logger.DEBUG)
            logger.fdebug("New Filename: " + str(nfilename))

            src = os.path.join(self.nzb_folder, ofilename)
            if mylar.LOWERCASE_FILENAMES:
                dst = (comlocation + "/" + nfilename + ext).lower()
            else:
                dst = comlocation + "/" + nfilename + ext.lower()    
            self._log("Source:" + src, logger.DEBUG)
            self._log("Destination:" +  dst, logger.DEBUG)
            logger.fdebug("Source: " + str(src))
            logger.fdebug("Destination: " + str(dst))

            os.rename(os.path.join(self.nzb_folder, str(ofilename)), os.path.join(self.nzb_folder,str(nfilename + ext)))
            src = os.path.join(self.nzb_folder, str(nfilename + ext))
            try:
                shutil.move(src, dst)
            except (OSError, IOError):
                self._log("Failed to move directory - check directories and manually re-run.", logger.DEBUG)
                self._log("Post-Processing ABORTED.", logger.DEBUG)
                return
            #tidyup old path
            try:
                shutil.rmtree(self.nzb_folder)
            except (OSError, IOError):
                self._log("Failed to remove temporary directory - check directory and manually re-run.", logger.DEBUG)
                self._log("Post-Processing ABORTED.", logger.DEBUG)
                return

            self._log("Removed temporary directory : " + str(self.nzb_folder), logger.DEBUG)
            #delete entry from nzblog table
            myDB.action('DELETE from nzblog WHERE issueid=?', [issueid])
            #force rescan of files
            updater.forceRescan(comicid)
            logger.info(u"Post-Processing completed for: " + series + " issue: " + str(issuenum) )
            self._log(u"Post Processing SUCCESSFULL! ", logger.DEBUG)

            # retrieve/create the corresponding comic objects

            if mylar.ENABLE_EXTRA_SCRIPTS:
                folderp = str(dst) #folder location after move/rename
                nzbn = self.nzb_name #original nzb name
                filen = str(nfilename + ext) #new filename
                #name, comicyear, comicid , issueid, issueyear, issue, publisher
                #create the dic and send it.
                seriesmeta = []
                seriesmetadata = {}
                seriesmeta.append({
                            'name':                 series,
                            'comicyear':            seriesyear,
                            'comicid':              comicid,
                            'issueid':              issueid,
                            'issueyear':            issueyear,
                            'issue':                issuenum,
                            'publisher':            publisher
                            })
                seriesmetadata['seriesmeta'] = seriesmeta
                self._run_extra_scripts(nzbname, self.nzb_folder, filen, folderp, seriesmetadata )

            return self.log
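
The zero-suppression block above pads the whole part of the issue number according to ZERO_LEVEL_N before it goes into the filename. One condensed reading of those branches, written as a hypothetical helper and assuming '0x' means pad-to-two-digits and '00x' means pad-to-three (any decimal part is re-appended unchanged), could be:

def pretty_issue(issueno, zero_level_n='00x'):
    #map the setting to a minimum width for the whole part of the issue number
    pad = {'none': 0, '0x': 2, '00x': 3}.get(zero_level_n, 0)
    whole, _, dec = str(issueno).partition('.')
    padded = whole.zfill(pad)
    return padded + ('.' + dec if dec else '')

#e.g. pretty_issue('5', '00x') -> '005', pretty_issue('5.1', '0x') -> '05.1', pretty_issue('150', '00x') -> '150'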
Exemple #10
0
def addComictoDB(comicid,mismatch=None,pullupd=None,imported=None,ogcname=None):
    # Putting this here to get around the circular import. Will try to use this to update images at later date.
#    from mylar import cache
    
    myDB = db.DBConnection()
    
    # We need the current minimal info in the database instantly
    # so we don't throw a 500 error when we redirect to the artistPage

    controlValueDict = {"ComicID":     comicid}

    dbcomic = myDB.action('SELECT * FROM comics WHERE ComicID=?', [comicid]).fetchone()
    if dbcomic is None:
        newValueDict = {"ComicName":   "Comic ID: %s" % (comicid),
                "Status":   "Loading"}
        comlocation = None
    else:
        newValueDict = {"Status":   "Loading"}
        comlocation = dbcomic['ComicLocation']

    myDB.upsert("comics", newValueDict, controlValueDict)

    # we need to lookup the info for the requested ComicID in full now        
    comic = cv.getComic(comicid,'comic')
    #comic = myDB.action('SELECT * FROM comics WHERE ComicID=?', [comicid]).fetchone()
    if not comic:
        logger.warn("Error fetching comic. ID for : " + comicid)
        if dbcomic is None:
            newValueDict = {"ComicName":   "Fetch failed, try refreshing. (%s)" % (comicid),
                    "Status":   "Active"}
        else:
            newValueDict = {"Status":   "Active"}
        myDB.upsert("comics", newValueDict, controlValueDict)
        return
    
    if comic['ComicName'].startswith('The '):
        sortname = comic['ComicName'][4:]
    else:
        sortname = comic['ComicName']
        

    logger.info(u"Now adding/updating: " + comic['ComicName'])
    #--Now that we know ComicName, let's try some scraping
    #--Start
    # gcd will return issue details (most importantly publishing date)
    if mismatch == "no" or mismatch is None:
        gcdinfo=parseit.GCDScraper(comic['ComicName'], comic['ComicYear'], comic['ComicIssues'], comicid) 
        #print ("gcdinfo: " + str(gcdinfo))
        mismatch_com = "no"
        if gcdinfo == "No Match":
            updater.no_searchresults(comicid)
            nomatch = "true"
            logger.info(u"There was an error when trying to add " + comic['ComicName'] + " (" + comic['ComicYear'] + ")" )
            return nomatch
        else:
            mismatch_com = "yes"
            #print ("gcdinfo:" + str(gcdinfo))

    elif mismatch == "yes":
        CV_EXcomicid = myDB.action("SELECT * from exceptions WHERE ComicID=?", [comicid]).fetchone()
        if CV_EXcomicid['variloop'] is None: pass
        else:
            vari_loop = CV_EXcomicid['variloop']
            NewComicID = CV_EXcomicid['NewComicID']
            gcomicid = CV_EXcomicid['GComicID']
            resultURL = "/series/" + str(NewComicID) + "/"
            #print ("variloop" + str(CV_EXcomicid['variloop']))
            #if vari_loop == '99':
            gcdinfo = parseit.GCDdetails(comseries=None, resultURL=resultURL, vari_loop=0, ComicID=comicid, TotalIssues=0, issvariation="no", resultPublished=None)

    logger.info(u"Sucessfully retrieved details for " + comic['ComicName'] )
    # print ("Series Published" + parseit.resultPublished)

    #comic book location on machine
    # setup default location here

    if comlocation is None:
        if ':' in comic['ComicName'] or '/' in comic['ComicName'] or ',' in comic['ComicName']:
            comicdir = comic['ComicName']
            if ':' in comicdir:
                comicdir = comicdir.replace(':','')
            if '/' in comicdir:
                comicdir = comicdir.replace('/','-')
            if ',' in comicdir:
                comicdir = comicdir.replace(',','')
        else: comicdir = comic['ComicName']

        series = comicdir
        publisher = comic['ComicPublisher']
        year = comic['ComicYear']

        #do work to generate folder path

        values = {'$Series':        series,
                  '$Publisher':     publisher,
                  '$Year':          year,
                  '$series':        series.lower(),
                  '$publisher':     publisher.lower(),
                  '$Volume':        year
                  }

        #print mylar.FOLDER_FORMAT
        #print 'working dir:'
        #print helpers.replace_all(mylar.FOLDER_FORMAT, values)

        if mylar.FOLDER_FORMAT == '':
            comlocation = mylar.DESTINATION_DIR + "/" + comicdir + " (" + comic['ComicYear'] + ")"
        else:
            comlocation = mylar.DESTINATION_DIR + "/" + helpers.replace_all(mylar.FOLDER_FORMAT, values)


        #comlocation = mylar.DESTINATION_DIR + "/" + comicdir + " (" + comic['ComicYear'] + ")"
        if mylar.DESTINATION_DIR == "":
            logger.error(u"There is no general directory specified - please specify in Config/Post-Processing.")
            return
        if mylar.REPLACE_SPACES:
            #mylar.REPLACE_CHAR ...determines what to replace spaces with underscore or dot
            comlocation = comlocation.replace(' ', mylar.REPLACE_CHAR)
        #if it doesn't exist - create it (otherwise will bugger up later on)
        if os.path.isdir(str(comlocation)):
            logger.info(u"Directory (" + str(comlocation) + ") already exists! Continuing...")
        else:
            #print ("Directory doesn't exist!")
            try:
                os.makedirs(str(comlocation))
                logger.info(u"Directory successfully created at: " + str(comlocation))
            except OSError:
                logger.error(u"Could not create comicdir : " + str(comlocation))

    #try to account for CV not updating new issues as fast as GCD
    #seems CV doesn't update total counts
    #comicIssues = gcdinfo['totalissues']
    if gcdinfo['gcdvariation'] == "cv":
        comicIssues = str(int(comic['ComicIssues']) + 1)
    else:
        comicIssues = comic['ComicIssues']

    #let's download the image...
    if not os.path.exists(mylar.CACHE_DIR):
        #let's make the dir.
        try:
            os.makedirs(str(mylar.CACHE_DIR))
            logger.info(u"Cache Directory successfully created at: " + str(mylar.CACHE_DIR))

        except OSError:
            logger.error('Could not create cache dir. Check permissions of cache dir: ' + str(mylar.CACHE_DIR))

    coverfile = mylar.CACHE_DIR + "/" + str(comicid) + ".jpg"

    #try:
    urllib.urlretrieve(str(comic['ComicImage']), str(coverfile))
    try:
        with open(str(coverfile)) as f:
            ComicImage = os.path.join('cache',str(comicid) + ".jpg")
            logger.info(u"Sucessfully retrieved cover for " + str(comic['ComicName']))
            #if the comic cover local is checked, save a cover.jpg to the series folder.
            if mylar.COMIC_COVER_LOCAL:
                comiclocal = os.path.join(str(comlocation), "cover.jpg")
                shutil.copy(ComicImage,comiclocal)
    except IOError as e:
        logger.error(u"Unable to save cover locally at this time.")



    controlValueDict = {"ComicID":      comicid}
    newValueDict = {"ComicName":        comic['ComicName'],
                    "ComicSortName":    sortname,
                    "ComicYear":        comic['ComicYear'],
                    "ComicImage":       ComicImage,
                    "Total":            comicIssues,
                    "ComicLocation":    comlocation,
                    "ComicPublisher":   comic['ComicPublisher'],
                    "ComicPublished":   gcdinfo['resultPublished'],
                    "DateAdded":        helpers.today(),
                    "Status":           "Loading"}
    
    myDB.upsert("comics", newValueDict, controlValueDict)

    issued = cv.getComic(comicid,'issue')
    logger.info(u"Sucessfully retrieved issue details for " + comic['ComicName'] )
    n = 0
    iscnt = int(comicIssues)
    issid = []
    issnum = []
    issname = []
    issdate = []
    int_issnum = []
    #let's start issue #'s at 0 -- thanks to DC for the new 52 reboot! :)
    latestiss = "0"
    latestdate = "0000-00-00"
    #print ("total issues:" + str(iscnt))
    #---removed NEW code here---
    logger.info(u"Now adding/updating issues for " + comic['ComicName'])

    # file check to see if issue exists
    logger.info(u"Checking directory for existing issues.")
    #fc = filechecker.listFiles(dir=comlocation, watchcomic=comic['ComicName'])
    #havefiles = 0

    #fccnt = int(fc['comiccount'])
    #logger.info(u"Found " + str(fccnt) + "/" + str(iscnt) + " issues of " + comic['ComicName'] + "...verifying")
    #fcnew = []
    if iscnt > 0: #if a series is brand new, it wont have any issues/details yet so skip this part
        while (n <= iscnt):
        #---NEW.code
            try:
                firstval = issued['issuechoice'][n]
            except IndexError:
                break
            cleanname = helpers.cleanName(firstval['Issue_Name'])
            issid = str(firstval['Issue_ID'])
            issnum = str(firstval['Issue_Number'])
            issname = cleanname
            if '.' in str(issnum):
                issn_st = str(issnum).find('.')
                issn_b4dec = str(issnum)[:issn_st]
                #if the length of decimal is only 1 digit, assume it's a tenth
                dec_is = str(issnum)[issn_st + 1:]
                if len(dec_is) == 1:
                    dec_nisval = int(dec_is) * 10
                    iss_naftdec = str(dec_nisval)
                if len(dec_is) == 2:
                    dec_nisval = int(dec_is)
                    iss_naftdec = str(dec_nisval)
                iss_issue = issn_b4dec + "." + iss_naftdec
                issis = (int(issn_b4dec) * 1000) + dec_nisval
            else: issis = int(issnum) * 1000

            bb = 0
            while (bb <= iscnt):
                try: 
                    gcdval = gcdinfo['gcdchoice'][bb]
                except IndexError:
                    #account for gcd variation here
                    if gcdinfo['gcdvariation'] == 'gcd':
                        #logger.fdebug("gcd-variation accounted for.")
                        issdate = '0000-00-00'
                        int_issnum =  int ( issis / 1000 )
                    break
                if 'nn' in str(gcdval['GCDIssue']):
                    #no number detected - GN, TP or the like
                    logger.warn(u"Non Series detected (Graphic Novel, etc) - cannot proceed at this time.")
                    updater.no_searchresults(comicid)
                    return
                elif '.' in str(gcdval['GCDIssue']):
                    #logger.fdebug("g-issue:" + str(gcdval['GCDIssue']))
                    issst = str(gcdval['GCDIssue']).find('.')
                    #logger.fdebug("issst:" + str(issst))
                    issb4dec = str(gcdval['GCDIssue'])[:issst]
                    #logger.fdebug("issb4dec:" + str(issb4dec))
                    #if the length of decimal is only 1 digit, assume it's a tenth
                    decis = str(gcdval['GCDIssue'])[issst+1:]
                    #logger.fdebug("decis:" + str(decis))
                    if len(decis) == 1:
                        decisval = int(decis) * 10
                        issaftdec = str(decisval)
                    if len(decis) == 2:
                        decisval = int(decis)
                        issaftdec = str(decisval)
                    gcd_issue = issb4dec + "." + issaftdec
                    #logger.fdebug("gcd_issue:" + str(gcd_issue))
                    gcdis = (int(issb4dec) * 1000) + decisval
                else:
                    gcdis = int(str(gcdval['GCDIssue'])) * 1000
                if gcdis == issis:
                    issdate = str(gcdval['GCDDate'])
                    int_issnum = int( gcdis / 1000 )
                    #get the latest issue / date using the date.
                    if gcdval['GCDDate'] > latestdate:
                        latestiss = str(issnum)
                        latestdate = str(gcdval['GCDDate'])
                        break
                   #bb = iscnt
                bb+=1
            #print("(" + str(n) + ") IssueID: " + str(issid) + " IssueNo: " + str(issnum) + " Date" + str(issdate))
            #---END.NEW.

            # check if the issue already exists
            iss_exists = myDB.action('SELECT * from issues WHERE IssueID=?', [issid]).fetchone()

            controlValueDict = {"IssueID":  issid}
            newValueDict = {"ComicID":            comicid,
                            "ComicName":          comic['ComicName'],
                            "IssueName":          issname,
                            "Issue_Number":       issnum,
                            "IssueDate":          issdate,
                            "Int_IssueNumber":    int_issnum
                            }

            # only add DateAdded if the issue is not already in the database (existing status is preserved below)
            if iss_exists is None:
                newValueDict['DateAdded'] = helpers.today()
            if mylar.AUTOWANT_ALL:
                newValueDict['Status'] = "Wanted"
            elif issdate > helpers.today() and mylar.AUTOWANT_UPCOMING:
                newValueDict['Status'] = "Wanted"
            else:
                newValueDict['Status'] = "Skipped"

            if iss_exists:
                #print ("Existing status : " + str(iss_exists['Status']))
                newValueDict['Status'] = iss_exists['Status']     
            
            try:     
                myDB.upsert("issues", newValueDict, controlValueDict)
            except sqlite3.InterfaceError, e:
                #raise sqlite3.InterfaceError(e)
                logger.error("MAJOR error trying to get issue data, this is most likey a MULTI-VOLUME series and you need to use the custom_exceptions.csv file.")
                myDB.action("DELETE FROM comics WHERE ComicID=?", [comicid])
                return
            n+=1
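
Both the comics and issues rows above are written through myDB.upsert(table, newValueDict, controlValueDict). As a rough sketch of what that pattern amounts to over sqlite3 (Mylar's actual db.DBConnection.upsert may well be implemented differently), an update-then-insert helper might look like this:

import sqlite3

def upsert(conn, table, new_value_dict, control_value_dict):
    #try an UPDATE keyed on the control columns first...
    set_clause = ', '.join('%s = ?' % k for k in new_value_dict)
    where_clause = ' AND '.join('%s = ?' % k for k in control_value_dict)
    args = list(new_value_dict.values()) + list(control_value_dict.values())
    cur = conn.execute('UPDATE %s SET %s WHERE %s' % (table, set_clause, where_clause), args)
    #...and fall back to an INSERT when no existing row was touched
    if cur.rowcount == 0:
        merged = dict(control_value_dict)
        merged.update(new_value_dict)
        cols = ', '.join(merged)
        marks = ', '.join('?' for _ in merged)
        conn.execute('INSERT INTO %s (%s) VALUES (%s)' % (table, cols, marks), list(merged.values()))
    conn.commit()

#usage sketch: conn = sqlite3.connect('mylar.db'); upsert(conn, 'comics', newValueDict, controlValueDict)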
Exemple #11
0
    def Process(self):
            self._log("nzb name: " + str(self.nzb_name), logger.DEBUG)
            self._log("nzb folder: " + str(self.nzb_folder), logger.DEBUG)
            logger.fdebug("nzb name: " + str(self.nzb_name))
            logger.fdebug("nzb folder: " + str(self.nzb_folder))
            if mylar.USE_SABNZBD==0:
                logger.fdebug("Not using SABNzbd")
            else:
                # if the SAB Directory option is enabled, let's use that folder name and append the jobname.
                if mylar.SAB_DIRECTORY is not None and mylar.SAB_DIRECTORY != 'None' and len(mylar.SAB_DIRECTORY) > 4:
                    self.nzb_folder = os.path.join(mylar.SAB_DIRECTORY, self.nzb_name).encode(mylar.SYS_ENCODING)
    
                #lookup nzb_name in nzblog table to get issueid
    
                #query SAB to find out if Replace Spaces enabled / not as well as Replace Decimals
                #http://localhost:8080/sabnzbd/api?mode=set_config&section=misc&keyword=dirscan_speed&value=5
                querysab = str(mylar.SAB_HOST) + "/api?mode=get_config&section=misc&output=xml&apikey=" + str(mylar.SAB_APIKEY)
                #logger.info("querysab_string:" + str(querysab))
                file = urllib2.urlopen(querysab)
                data = file.read()
                file.close()
                dom = parseString(data)

                try:
                    sabreps = dom.getElementsByTagName('replace_spaces')[0].firstChild.wholeText
                except:
                    errorm = dom.getElementsByTagName('error')[0].firstChild.wholeText
                    logger.error(u"Error detected attempting to retrieve SAB data : " + errorm)
                    return
                sabrepd = dom.getElementsByTagName('replace_dots')[0].firstChild.wholeText
                logger.fdebug("SAB Replace Spaces: " + str(sabreps))
                logger.fdebug("SAB Replace Dots: " + str(sabrepd))
            if mylar.USE_NZBGET==1:
                logger.fdebug("Using NZBGET")
                logger.fdebug("NZB name as passed from NZBGet: " + self.nzb_name)
            myDB = db.DBConnection()

            nzbname = self.nzb_name
            #remove extensions from nzb_name if they somehow got through (Experimental most likely)
            extensions = ('.cbr', '.cbz')

            if nzbname.lower().endswith(extensions):
                fd, ext = os.path.splitext(nzbname)
                self._log("Removed extension from nzb: " + ext, logger.DEBUG)
                nzbname = re.sub(str(ext), '', str(nzbname))

            #replace spaces
            nzbname = re.sub(' ', '.', str(nzbname))
            nzbname = re.sub('[\,\:\?]', '', str(nzbname))
            nzbname = re.sub('[\&]', 'and', str(nzbname))

            logger.fdebug("After conversions, nzbname is : " + str(nzbname))
#            if mylar.USE_NZBGET==1:
#                nzbname=self.nzb_name
            self._log("nzbname: " + str(nzbname), logger.DEBUG)

            nzbiss = myDB.action("SELECT * from nzblog WHERE nzbname=?", [nzbname]).fetchone()

            if nzbiss is None:
                self._log("Failure - could not initially locate nzbfile in my database to rename.", logger.DEBUG)
                logger.fdebug("Failure - could not locate nzbfile initially.")
                # if failed on spaces, change it all to decimals and try again.
                nzbname = re.sub('_', '.', str(nzbname))
                self._log("trying again with this nzbname: " + str(nzbname), logger.DEBUG)
                logger.fdebug("trying again with nzbname of : " + str(nzbname))
                nzbiss = myDB.action("SELECT * from nzblog WHERE nzbname=?", [nzbname]).fetchone()
                if nzbiss is None:
                    logger.error(u"Unable to locate downloaded file to rename. PostProcessing aborted.")
                    return
                else:
                    self._log("I corrected and found the nzb as : " + str(nzbname))
                    logger.fdebug("auto-corrected and found the nzb as : " + str(nzbname))
                    issueid = nzbiss['IssueID']
            else: 
                issueid = nzbiss['IssueID']
                logger.fdebug("issueid:" + str(issueid))
                sarc = nzbiss['SARC']
                #use issueid to get publisher, series, year, issue number
            issuenzb = myDB.action("SELECT * from issues WHERE issueid=?", [issueid]).fetchone()
            if issuenzb is not None:
                if helpers.is_number(issueid):
                    sandwich = int(issuenzb['IssueID'])
            else:
                #if it's non-numeric, it contains a 'G' at the beginning indicating it's a multi-volume
                #using GCD data. Set sandwich to 1 so it will bypass and continue post-processing.
                if 'S' in issueid:
                    sandwich = issueid
                elif 'G' in issueid: 
                    sandwich = 1
            if helpers.is_number(sandwich):
                if sandwich < 900000:
                    # if sandwich is less than 900000 it's a normal watchlist download. Bypass.
                    pass
            else:
                if issuenzb is None or 'S' in sandwich or int(sandwich) >= 900000:
                    # this has no issueID, therefore it's a one-off or a manual post-proc.
                    # At this point, let's just drop it into the Comic Location folder and forget about it..
                    if 'S' in sandwich:
                        self._log("One-off STORYARC mode enabled for Post-Processing for " + str(sarc))
                        logger.info("One-off STORYARC mode enabled for Post-Processing for " + str(sarc))
                        if mylar.STORYARCDIR:
                            storyarcd = os.path.join(mylar.DESTINATION_DIR, "StoryArcs", sarc)
                            self._log("StoryArc Directory set to : " + storyarcd, logger.DEBUG)
                        else:
                            self._log("Grab-Bag Directory set to : " + mylar.GRABBAG_DIR, logger.DEBUG)

                    else:
                        self._log("One-off mode enabled for Post-Processing. All I'm doing is moving the file untouched into the Grab-bag directory.", logger.DEBUG)
                        logger.info("One-off mode enabled for Post-Processing. Will move into Grab-bag directory.")
                        self._log("Grab-Bag Directory set to : " + mylar.GRABBAG_DIR, logger.DEBUG)

                    for root, dirnames, filenames in os.walk(self.nzb_folder):
                        for filename in filenames:
                            if filename.lower().endswith(extensions):
                                ofilename = filename
                                path, ext = os.path.splitext(ofilename)
      
                    if 'S' in sandwich:
                        if mylar.STORYARCDIR:
                            grdst = storyarcd
                        else:
                            grdst = mylar.DESTINATION_DIR
                    else:
                        if mylar.GRABBAG_DIR:
                            grdst = mylar.GRABBAG_DIR
                        else:
                            grdst = mylar.DESTINATION_DIR

                    filechecker.validateAndCreateDirectory(grdst, True)
    
                    grab_dst = os.path.join(grdst, ofilename)
                    self._log("Destination Path : " + grab_dst, logger.DEBUG)
                    logger.info("Destination Path : " + grab_dst)
                    grab_src = os.path.join(self.nzb_folder, ofilename)
                    self._log("Source Path : " + grab_src, logger.DEBUG)
                    logger.info("Source Path : " + grab_src)

                    logger.info("Moving " + str(ofilename) + " into directory : " + str(grdst))

                    try:
                        shutil.move(grab_src, grab_dst)
                    except (OSError, IOError):
                        self._log("Failed to move directory - check directories and manually re-run.", logger.DEBUG)
                        logger.debug("Failed to move directory - check directories and manually re-run.")
                        return
                    #tidyup old path
                    try:
                        shutil.rmtree(self.nzb_folder)
                    except (OSError, IOError):
                        self._log("Failed to remove temporary directory.", logger.DEBUG)
                        logger.debug("Failed to remove temporary directory - check directory and manually re-run.")
                        return

                    logger.debug("Removed temporary directory : " + str(self.nzb_folder))
                    self._log("Removed temporary directory : " + self.nzb_folder, logger.DEBUG)
                    #delete entry from nzblog table
                    myDB.action('DELETE from nzblog WHERE issueid=?', [issueid])

                    if 'S' in issueid:
                        issuearcid = re.sub('S', '', issueid)
                        logger.info("IssueArcID is : " + str(issuearcid))
                        ctrlVal = {"IssueArcID":  issuearcid}
                        newVal = {"Status":    "Downloaded",
                                  "Location":  grab_dst }
                        myDB.upsert("readinglist",newVal,ctrlVal)
                        logger.info("updated status to Downloaded")
                    return self.log

            comicid = issuenzb['ComicID']
            issuenumOG = issuenzb['Issue_Number']
            #issueno = str(issuenum).split('.')[0]
            #new CV API - removed all decimals...here we go AGAIN!
            issuenum = issuenumOG
            issue_except = 'None'
            if 'au' in issuenum.lower():
                issuenum = re.sub("[^0-9]", "", issuenum)
                issue_except = ' AU'
            if '.' in issuenum:
                iss_find = issuenum.find('.')
                iss_b4dec = issuenum[:iss_find]
                iss_decval = issuenum[iss_find+1:]
                if int(iss_decval) == 0:
                    iss = iss_b4dec
                    issdec = int(iss_decval)
                    issueno = str(iss)
                    self._log("Issue Number: " + str(issueno), logger.DEBUG)
                    logger.fdebug("Issue Number: " + str(issueno))
                else:
                    if len(iss_decval) == 1:
                        iss = iss_b4dec + "." + iss_decval
                        issdec = int(iss_decval) * 10
                    else:
                        iss = iss_b4dec + "." + iss_decval.rstrip('0')
                        issdec = int(iss_decval.rstrip('0')) * 10
                    issueno = iss_b4dec
                    self._log("Issue Number: " + str(iss), logger.DEBUG)
                    logger.fdebug("Issue Number: " + str(iss))
            else:
                iss = issuenum
                issueno = str(iss)
            # issue zero-suppression here
            if mylar.ZERO_LEVEL == "0": 
                zeroadd = ""
            else:
                if mylar.ZERO_LEVEL_N  == "none": zeroadd = ""
                elif mylar.ZERO_LEVEL_N == "0x": zeroadd = "0"
                elif mylar.ZERO_LEVEL_N == "00x": zeroadd = "00"

            logger.fdebug("Zero Suppression set to : " + str(mylar.ZERO_LEVEL_N))

            if str(len(issueno)) > 1:
                if int(issueno) < 10:
                    self._log("issue detected less than 10", logger.DEBUG)
                    if '.' in iss:
                        if int(iss_decval) > 0:
                            issueno = str(iss)
                            prettycomiss = str(zeroadd) + str(iss)
                        else:
                            prettycomiss = str(zeroadd) + str(int(issueno))
                    else:
                        prettycomiss = str(zeroadd) + str(iss)
                    if issue_except != 'None': 
                        prettycomiss = str(prettycomiss) + issue_except
                    self._log("Zero level supplement set to " + str(mylar.ZERO_LEVEL_N) + ". Issue will be set as : " + str(prettycomiss), logger.DEBUG)
                elif int(issueno) >= 10 and int(issueno) < 100:
                    self._log("issue detected greater than 10, but less than 100", logger.DEBUG)
                    if mylar.ZERO_LEVEL_N == "none":
                        zeroadd = ""
                    else:
                        zeroadd = "0"
                    if '.' in iss:
                        if int(iss_decval) > 0:
                            issueno = str(iss)
                            prettycomiss = str(zeroadd) + str(iss)
                        else:
                            prettycomiss = str(zeroadd) + str(int(issueno))
                    else:
                        prettycomiss = str(zeroadd) + str(iss)
                    if issue_except != 'None':
                        prettycomiss = str(prettycomiss) + issue_except
                    self._log("Zero level supplement set to " + str(mylar.ZERO_LEVEL_N) + ".Issue will be set as : " + str(prettycomiss), logger.DEBUG)
                else:
                    self._log("issue detected greater than 100", logger.DEBUG)
                    if '.' in iss:
                        if int(iss_decval) > 0:
                            issueno = str(iss)
                    prettycomiss = str(issueno)
                    if issue_except != 'None':
                        prettycomiss = str(prettycomiss) + issue_except
                    self._log("Zero level supplement set to " + str(mylar.ZERO_LEVEL_N) + ". Issue will be set as : " + str(prettycomiss), logger.DEBUG)
            else:
                prettycomiss = str(issueno)
                self._log("issue length error - cannot determine length. Defaulting to None:  " + str(prettycomiss), logger.DEBUG)

            logger.fdebug("Pretty Comic Issue is : " + str(prettycomiss))
            issueyear = issuenzb['IssueDate'][:4]
            self._log("Issue Year: " + str(issueyear), logger.DEBUG)
            logger.fdebug("Issue Year : " + str(issueyear))
            comicnzb= myDB.action("SELECT * from comics WHERE comicid=?", [comicid]).fetchone()
            publisher = comicnzb['ComicPublisher']
            self._log("Publisher: " + publisher, logger.DEBUG)
            logger.fdebug("Publisher: " + str(publisher))
            #we need to un-unicode this to make sure we can write the filenames properly for special characters
            series = comicnzb['ComicName'].encode('ascii', 'ignore').strip()
            self._log("Series: " + series, logger.DEBUG)
            logger.fdebug("Series: " + str(series))
            seriesyear = comicnzb['ComicYear']
            self._log("Year: " + seriesyear, logger.DEBUG)
            logger.fdebug("Year: "  + str(seriesyear))
            comlocation = comicnzb['ComicLocation']
            self._log("Comic Location: " + comlocation, logger.DEBUG)
            logger.fdebug("Comic Location: " + str(comlocation))
            comversion = comicnzb['ComicVersion']
            self._log("Comic Version: " + str(comversion), logger.DEBUG)
            logger.fdebug("Comic Version: " + str(comversion))
            if comversion is None:
                comversion = 'None'
            #if comversion is None, remove it so it doesn't populate with 'None'
            if comversion == 'None':
                chunk_f_f = re.sub('\$VolumeN','',mylar.FILE_FORMAT)
                chunk_f = re.compile(r'\s+')
                chunk_file_format = chunk_f.sub(' ', chunk_f_f)
                self._log("No version # found for series - tag will not be available for renaming.", logger.DEBUG)
                logger.fdebug("No version # found for series, removing from filename")
                logger.fdebug("new format is now: " + str(chunk_file_format))
            else:
                chunk_file_format = mylar.FILE_FORMAT
            #Run Pre-script

            if mylar.ENABLE_PRE_SCRIPTS:
                nzbn = self.nzb_name #original nzb name
                nzbf = self.nzb_folder #original nzb folder
                #name, comicyear, comicid , issueid, issueyear, issue, publisher
                #create the dic and send it.
                seriesmeta = []
                seriesmetadata = {}
                seriesmeta.append({
                            'name':                 series,
                            'comicyear':            seriesyear,
                            'comicid':              comicid,
                            'issueid':              issueid,
                            'issueyear':            issueyear,
                            'issue':                issuenum,
                            'publisher':            publisher
                            })
                seriesmetadata['seriesmeta'] = seriesmeta
                self._run_pre_scripts(nzbn, nzbf, seriesmetadata )

        #rename file and move to new path
        #nfilename = series + " " + issueno + " (" + seriesyear + ")"

            file_values = {'$Series':    series,
                           '$Issue':     prettycomiss,
                           '$Year':      issueyear,
                           '$series':    series.lower(),
                           '$Publisher': publisher,
                           '$publisher': publisher.lower(),
                           '$VolumeY':   'V' + str(seriesyear),
                           '$VolumeN':   comversion
                          }

            ofilename = None

            for root, dirnames, filenames in os.walk(self.nzb_folder):
                for filename in filenames:
                    if filename.lower().endswith(extensions):
                        ofilename = filename
                        path, ext = os.path.splitext(ofilename)

            if ofilename is None:
                logger.error(u"Aborting PostProcessing - the filename doesn't exist in the location given. Make sure that " + str(self.nzb_folder) + " exists and is the correct location.")
                return
            self._log("Original Filename: " + ofilename, logger.DEBUG)
            self._log("Original Extension: " + ext, logger.DEBUG)
            logger.fdebug("Original Filname: " + str(ofilename))
            logger.fdebug("Original Extension: " + str(ext))

            if mylar.FILE_FORMAT == '' or not mylar.RENAME_FILES:
                self._log("Rename Files isn't enabled...keeping original filename.", logger.DEBUG)
                logger.fdebug("Rename Files isn't enabled - keeping original filename.")
                #check if extension is in nzb_name - will screw up otherwise
                if ofilename.lower().endswith(extensions):
                    nfilename = ofilename[:-4]
                else:
                    nfilename = ofilename
            else:
                nfilename = helpers.replace_all(chunk_file_format, file_values)
                if mylar.REPLACE_SPACES:
                    #mylar.REPLACE_CHAR ...determines what to replace spaces with underscore or dot
                    nfilename = nfilename.replace(' ', mylar.REPLACE_CHAR)
            nfilename = re.sub('[\,\:\?]', '', nfilename)
            self._log("New Filename: " + nfilename, logger.DEBUG)
            logger.fdebug("New Filename: " + str(nfilename))

            src = os.path.join(self.nzb_folder, ofilename)

            filechecker.validateAndCreateDirectory(comlocation, True)

            if mylar.LOWERCASE_FILENAMES:
                dst = (comlocation + "/" + nfilename + ext).lower()
            else:
                dst = comlocation + "/" + nfilename + ext.lower()    
            self._log("Source:" + src, logger.DEBUG)
            self._log("Destination:" +  dst, logger.DEBUG)
            logger.fdebug("Source: " + str(src))
            logger.fdebug("Destination: " + str(dst))

            os.rename(os.path.join(self.nzb_folder, str(ofilename)), os.path.join(self.nzb_folder,str(nfilename + ext)))
            src = os.path.join(self.nzb_folder, str(nfilename + ext))
            try:
                shutil.move(src, dst)
            except (OSError, IOError):
                self._log("Failed to move directory - check directories and manually re-run.", logger.DEBUG)
                self._log("Post-Processing ABORTED.", logger.DEBUG)
                return
            #tidyup old path
            try:
                shutil.rmtree(self.nzb_folder)
            except (OSError, IOError):
                self._log("Failed to remove temporary directory - check directory and manually re-run.", logger.DEBUG)
                self._log("Post-Processing ABORTED.", logger.DEBUG)
                return

            self._log("Removed temporary directory : " + str(self.nzb_folder), logger.DEBUG)
            #delete entry from nzblog table
            myDB.action('DELETE from nzblog WHERE issueid=?', [issueid])
            #update snatched table to change status to Downloaded
            updater.foundsearch(comicid, issueid, down='True')
            #force rescan of files
            updater.forceRescan(comicid)
            logger.info(u"Post-Processing completed for: " + series + " issue: " + str(issuenumOG) )
            self._log(u"Post Processing SUCCESSFUL! ", logger.DEBUG)

            if mylar.PROWL_ENABLED:
                pushmessage = series + '(' + issueyear + ') - issue #' + issuenumOG
                logger.info(u"Prowl request")
                prowl = notifiers.PROWL()
                prowl.notify(pushmessage,"Download and Postprocessing completed")

            if mylar.NMA_ENABLED:
                nma = notifiers.NMA()
                nma.notify(series, str(issueyear), str(issuenumOG))

            if mylar.PUSHOVER_ENABLED:
                pushmessage = series + ' (' + str(issueyear) + ') - issue #' + str(issuenumOG)
                logger.info(u"Pushover request")
                pushover = notifiers.PUSHOVER()
                pushover.notify(pushmessage, "Download and Post-Processing completed")
             
            # retrieve/create the corresponding comic objects

            if mylar.ENABLE_EXTRA_SCRIPTS:
                folderp = str(dst) #folder location after move/rename
                nzbn = self.nzb_name #original nzb name
                filen = str(nfilename + ext) #new filename
                #name, comicyear, comicid , issueid, issueyear, issue, publisher
                #create the dic and send it.
                seriesmeta = []
                seriesmetadata = {}
                seriesmeta.append({
                            'name':                 series,
                            'comicyear':            seriesyear,
                            'comicid':              comicid,
                            'issueid':              issueid,
                            'issueyear':            issueyear,
                            'issue':                issuenum,
                            'publisher':            publisher
                            })
                seriesmetadata['seriesmeta'] = seriesmeta
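                #illustrative shape of the payload handed to the extra script (example values only):
                #  {'seriesmeta': [{'name': 'Batman', 'comicyear': '2011', 'comicid': '2740',
                #                   'issueid': '12345', 'issueyear': '2012', 'issue': '5',
                #                   'publisher': 'DC Comics'}]}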
                self._run_extra_scripts(nzbname, self.nzb_folder, filen, folderp, seriesmetadata )

            return self.log
Exemple #12
0
def GCDimport(gcomicid, pullupd=None, imported=None, ogcname=None):
    # this is for importing via GCD only and not using CV.
    # used when volume spanning is discovered for a Comic (and can't be added using CV).
    # Issue Counts are wrong (and can't be added).

    # because Comicvine ComicID and GCD ComicID could be identical at some random point, let's distinguish.
    # CV = comicid, GCD = gcomicid :) (ie. CV=2740, GCD=G3719)

    gcdcomicid = gcomicid
    myDB = db.DBConnection()

    # We need the current minimal info in the database instantly
    # so we don't throw a 500 error when we redirect to the artistPage

    controlValueDict = {"ComicID": gcdcomicid}

    comic = myDB.action(
        'SELECT ComicName, ComicYear, Total, ComicPublished, ComicImage, ComicLocation, ComicPublisher FROM comics WHERE ComicID=?',
        [gcomicid]).fetchone()
    ComicName = comic[0]
    ComicYear = comic[1]
    ComicIssues = comic[2]
    ComicPublished = comic[3]
    comlocation = comic[5]
    ComicPublisher = comic[6]
    #ComicImage = comic[4]
    #print ("Comic:" + str(ComicName))

    newValueDict = {"Status": "Loading"}
    myDB.upsert("comics", newValueDict, controlValueDict)

    # we need to lookup the info for the requested ComicID in full now
    #comic = cv.getComic(comicid,'comic')

    if not comic:
        logger.warn("Error fetching comic data for ID: " + gcdcomicid)
        if comic is None:
            newValueDict = {
                "ComicName":
                "Fetch failed, try refreshing. (%s)" % (gcdcomicid),
                "Status": "Active"
            }
        else:
            newValueDict = {"Status": "Active"}
        myDB.upsert("comics", newValueDict, controlValueDict)
        return

    #run the re-sortorder here in order to properly display the page
    if pullupd is None:
        helpers.ComicSort(comicorder=mylar.COMICSORT, imported=gcomicid)

    if ComicName.startswith('The '):
        sortname = ComicName[4:]
    else:
        sortname = ComicName

    logger.info(u"Now adding/updating: " + ComicName)
    #--Now that we know ComicName, let's try some scraping
    #--Start
    # gcd will return issue details (most importantly publishing date)
    comicid = gcomicid[1:]
    resultURL = "/series/" + str(comicid) + "/"
    gcdinfo = parseit.GCDdetails(comseries=None,
                                 resultURL=resultURL,
                                 vari_loop=0,
                                 ComicID=gcdcomicid,
                                 TotalIssues=ComicIssues,
                                 issvariation=None,
                                 resultPublished=None)
    if gcdinfo == "No Match":
        logger.warn("No matching result found for " + ComicName + " (" +
                    ComicYear + ")")
        updater.no_searchresults(gcomicid)
        nomatch = "true"
        return nomatch
    logger.info(u"Successfully retrieved details for " + ComicName)
    # print ("Series Published" + parseit.resultPublished)
    #--End

    ComicImage = gcdinfo['ComicImage']

    #comic book location on machine
    # setup default location here
    if comlocation is None:
        # let's remove the non-standard characters here.
        u_comicnm = ComicName
        u_comicname = u_comicnm.encode('ascii', 'ignore').strip()
        if ':' in u_comicname or '/' in u_comicname or ',' in u_comicname:
            comicdir = u_comicname
            if ':' in comicdir:
                comicdir = comicdir.replace(':', '')
            if '/' in comicdir:
                comicdir = comicdir.replace('/', '-')
            if ',' in comicdir:
                comicdir = comicdir.replace(',', '')
        else:
            comicdir = u_comicname

        series = comicdir
        publisher = ComicPublisher
        year = ComicYear

        #do work to generate folder path
        values = {
            '$Series': series,
            '$Publisher': publisher,
            '$Year': year,
            '$series': series.lower(),
            '$publisher': publisher.lower(),
            '$Volume': year
        }

        if mylar.FOLDER_FORMAT == '':
            comlocation = mylar.DESTINATION_DIR + "/" + comicdir + " (" + comic[
                'ComicYear'] + ")"
        else:
            comlocation = mylar.DESTINATION_DIR + "/" + helpers.replace_all(
                mylar.FOLDER_FORMAT, values)

        #comlocation = mylar.DESTINATION_DIR + "/" + comicdir + " (" + ComicYear + ")"
        if mylar.DESTINATION_DIR == "":
            logger.error(
                u"There is no general directory specified - please specify in Config/Post-Processing."
            )
            return
        if mylar.REPLACE_SPACES:
            #mylar.REPLACE_CHAR ...determines what to replace spaces with underscore or dot
            comlocation = comlocation.replace(' ', mylar.REPLACE_CHAR)

    #if it doesn't exist - create it (otherwise will bugger up later on)
    if os.path.isdir(str(comlocation)):
        logger.info(u"Directory (" + str(comlocation) +
                    ") already exists! Continuing...")
    else:
        #print ("Directory doesn't exist!")
        #try:
        #    os.makedirs(str(comlocation))
        #    logger.info(u"Directory successfully created at: " + str(comlocation))
        #except OSError:
        #    logger.error(u"Could not create comicdir : " + str(comlocation))
        filechecker.validateAndCreateDirectory(comlocation, True)

    comicIssues = gcdinfo['totalissues']

    #let's download the image...
    if os.path.exists(mylar.CACHE_DIR): pass
    else:
        #let's make the dir.
        try:
            os.makedirs(str(mylar.CACHE_DIR))
            logger.info(u"Cache Directory successfully created at: " +
                        str(mylar.CACHE_DIR))

        except OSError:
            logger.error(u"Could not create cache dir : " +
                         str(mylar.CACHE_DIR))

    coverfile = os.path.join(mylar.CACHE_DIR, str(gcomicid) + ".jpg")

    #try:
    urllib.urlretrieve(str(ComicImage), str(coverfile))
    try:
        with open(str(coverfile)) as f:
            ComicImage = os.path.join('cache', str(gcomicid) + ".jpg")

            #this is for Firefox when outside the LAN...it works, but I don't know how to implement it
            #without breaking the normal flow for inside the LAN (above)
            #ComicImage = "http://" + str(mylar.HTTP_HOST) + ":" + str(mylar.HTTP_PORT) + "/cache/" + str(comi$

            logger.info(u"Successfully retrieved cover for " + ComicName)
            #if the comic cover local is checked, save a cover.jpg to the series folder.
            if mylar.COMIC_COVER_LOCAL:
                comiclocal = os.path.join(str(comlocation) + "/cover.jpg")
                shutil.copy(ComicImage, comiclocal)
    except IOError as e:
        logger.error(u"Unable to save cover locally at this time.")

    #if comic['ComicVersion'].isdigit():
    #    comicVol = "v" + comic['ComicVersion']
    #else:
    #    comicVol = None

    controlValueDict = {"ComicID": gcomicid}
    newValueDict = {
        "ComicName": ComicName,
        "ComicSortName": sortname,
        "ComicYear": ComicYear,
        "Total": comicIssues,
        "ComicLocation": comlocation,
        #"ComicVersion":     comicVol,
        "ComicImage": ComicImage,
        #"ComicPublisher":   comic['ComicPublisher'],
        #"ComicPublished":   comicPublished,
        "DateAdded": helpers.today(),
        "Status": "Loading"
    }

    myDB.upsert("comics", newValueDict, controlValueDict)

    #comicsort here...
    #run the re-sortorder here in order to properly display the page
    if pullupd is None:
        helpers.ComicSort(sequence='update')

    logger.info(u"Successfully retrieved issue details for " + ComicName)
    n = 0
    iscnt = int(comicIssues)
    issnum = []
    issname = []
    issdate = []
    int_issnum = []
    #let's start issue #'s at 0 -- thanks to DC for the new 52 reboot! :)
    latestiss = "0"
    latestdate = "0000-00-00"
    #print ("total issues:" + str(iscnt))
    #---removed NEW code here---
    logger.info(u"Now adding/updating issues for " + ComicName)
    bb = 0
    while (bb <= iscnt):
        #---NEW.code
        try:
            gcdval = gcdinfo['gcdchoice'][bb]
            #print ("gcdval: " + str(gcdval))
        except IndexError:
            #account for gcd variation here
            if gcdinfo['gcdvariation'] == 'gcd':
                #print ("gcd-variation accounted for.")
                issdate = '0000-00-00'
                int_issnum = int(issis / 1000)
            break
        if 'nn' in str(gcdval['GCDIssue']):
            #no number detected - GN, TP or the like
            logger.warn(
                u"Non Series detected (Graphic Novel, etc) - cannot proceed at this time."
            )
            updater.no_searchresults(comicid)
            return
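        #build an internal integer key: whole issue number * 1000 plus a two-digit decimal part
        #(e.g. issue '2.5' -> 2050), so issue numbers sort numerically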
        elif '.' in str(gcdval['GCDIssue']):
            issst = str(gcdval['GCDIssue']).find('.')
            issb4dec = str(gcdval['GCDIssue'])[:issst]
            #if the length of decimal is only 1 digit, assume it's a tenth
            decis = str(gcdval['GCDIssue'])[issst + 1:]
            if len(decis) == 1:
                decisval = int(decis) * 10
                issaftdec = str(decisval)
            if len(decis) == 2:
                decisval = int(decis)
                issaftdec = str(decisval)
            if int(issaftdec) == 0: issaftdec = "00"
            gcd_issue = issb4dec + "." + issaftdec
            gcdis = (int(issb4dec) * 1000) + decisval
        else:
            gcdis = int(str(gcdval['GCDIssue'])) * 1000
            gcd_issue = str(gcdval['GCDIssue'])
        #get the latest issue / date using the date.
        int_issnum = int(gcdis / 1000)
        issdate = str(gcdval['GCDDate'])
        issid = "G" + str(gcdval['IssueID'])
        if gcdval['GCDDate'] > latestdate:
            latestiss = str(gcd_issue)
            latestdate = str(gcdval['GCDDate'])
        #print("(" + str(bb) + ") IssueID: " + str(issid) + " IssueNo: " + str(gcd_issue) + " Date" + str(issdate) )
        #---END.NEW.

        # check if the issue already exists
        iss_exists = myDB.action('SELECT * from issues WHERE IssueID=?',
                                 [issid]).fetchone()

        # Only change the status & add DateAdded if the issue is not already in the database
        if iss_exists is None:
            newValueDict['DateAdded'] = helpers.today()

        #adjust for inconsistencies in GCD date format - some dates have ? which borks up things.
        if "?" in str(issdate):
            issdate = "0000-00-00"

        controlValueDict = {"IssueID": issid}
        newValueDict = {
            "ComicID": gcomicid,
            "ComicName": ComicName,
            "Issue_Number": gcd_issue,
            "IssueDate": issdate,
            "Int_IssueNumber": int_issnum
        }

        #print ("issueid:" + str(controlValueDict))
        #print ("values:" + str(newValueDict))

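        #initial status: Wanted when AUTOWANT_ALL is set, Wanted for future-dated issues when AUTOWANT_UPCOMING is set,
        #otherwise Skipped (the ISO 'YYYY-MM-DD' strings compare correctly as plain text)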
        if mylar.AUTOWANT_ALL:
            newValueDict['Status'] = "Wanted"
        elif issdate > helpers.today() and mylar.AUTOWANT_UPCOMING:
            newValueDict['Status'] = "Wanted"
        else:
            newValueDict['Status'] = "Skipped"

        if iss_exists:
            #print ("Existing status : " + str(iss_exists['Status']))
            newValueDict['Status'] = iss_exists['Status']

        myDB.upsert("issues", newValueDict, controlValueDict)
        bb += 1

#        logger.debug(u"Updating comic cache for " + ComicName)
#        cache.getThumb(ComicID=issue['issueid'])

#        logger.debug(u"Updating cache for: " + ComicName)
#        cache.getThumb(ComicIDcomicid)

    controlValueStat = {"ComicID": gcomicid}
    newValueStat = {
        "Status": "Active",
        "LatestIssue": latestiss,
        "LatestDate": latestdate,
        "LastUpdated": helpers.now()
    }

    myDB.upsert("comics", newValueStat, controlValueStat)

    if mylar.CVINFO:
        if not os.path.exists(comlocation + "/cvinfo"):
            with open(comlocation + "/cvinfo", "w") as text_file:
                text_file.write("http://www.comicvine.com/volume/49-" +
                                str(comicid))

    logger.info(u"Updating complete for: " + ComicName)

    #move the files...if imported is not empty (meaning it came from the mass importer).
    if imported is None or imported == 'None':
        pass
    else:
        if mylar.IMP_MOVE:
            logger.info("Mass import - Move files")
            moveit.movefiles(gcomicid, comlocation, ogcname)
        else:
            logger.info(
                "Mass import - Moving not Enabled. Setting Archived Status for import."
            )
            moveit.archivefiles(gcomicid, ogcname)

    #check for existing files...
    updater.forceRescan(gcomicid)

    if pullupd is None:
        # let's check the pullist for anything at this time as well since we're here.
        if mylar.AUTOWANT_UPCOMING and 'Present' in ComicPublished:
            logger.info(u"Checking this week's pullist for new issues of " +
                        ComicName)
            updater.newpullcheck(comic['ComicName'], gcomicid)

        #here we grab issues that have been marked as wanted above...

        results = myDB.select(
            "SELECT * FROM issues where ComicID=? AND Status='Wanted'",
            [gcomicid])
        if results:
            logger.info(u"Attempting to grab wanted issues for : " + ComicName)

            for result in results:
                foundNZB = "none"
                if (mylar.NZBSU or mylar.DOGNZB or mylar.EXPERIMENTAL
                        or mylar.NEWZNAB or mylar.NZBX) and (mylar.SAB_HOST):
                    foundNZB = search.searchforissue(result['IssueID'])
                    if foundNZB == "yes":
                        updater.foundsearch(result['ComicID'],
                                            result['IssueID'])
        else:
            logger.info(u"No issues marked as wanted for " + ComicName)

        logger.info(u"Finished grabbing what I could.")
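#A minimal sketch of the '$token' substitution relied on by FOLDER_FORMAT/FILE_FORMAT above;
#this is an assumption for illustration only, not the actual helpers.replace_all implementation:
#
#    def replace_all(text, dic):
#        #replace longer tokens first so a shorter key can't clobber '$VolumeY'/'$VolumeN'
#        for token in sorted(dic, key=len, reverse=True):
#            text = text.replace(token, str(dic[token]))
#        return text
#
#    #e.g. replace_all('$Publisher/$Series ($Year)', values) -> 'DC Comics/Batman (2011)'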
Exemple #13
0
def addComictoDB(comicid,
                 mismatch=None,
                 pullupd=None,
                 imported=None,
                 ogcname=None):
    # Putting this here to get around the circular import. Will try to use this to update images at later date.
    #    from mylar import cache

    myDB = db.DBConnection()

    # We need the current minimal info in the database instantly
    # so we don't throw a 500 error when we redirect to the artistPage

    controlValueDict = {"ComicID": comicid}

    dbcomic = myDB.action('SELECT * FROM comics WHERE ComicID=?',
                          [comicid]).fetchone()
    if dbcomic is None:
        newValueDict = {
            "ComicName": "Comic ID: %s" % (comicid),
            "Status": "Loading"
        }
        comlocation = None
        oldcomversion = None
    else:
        newValueDict = {"Status": "Loading"}
        comlocation = dbcomic['ComicLocation']
        filechecker.validateAndCreateDirectory(comlocation, True)
        oldcomversion = dbcomic[
            'ComicVersion']  #store the comicversion and chk if it exists before hammering.

    myDB.upsert("comics", newValueDict, controlValueDict)

    #run the re-sortorder here in order to properly display the page
    if pullupd is None:
        helpers.ComicSort(comicorder=mylar.COMICSORT, imported=comicid)

    # we need to lookup the info for the requested ComicID in full now
    comic = cv.getComic(comicid, 'comic')
    #comic = myDB.action('SELECT * FROM comics WHERE ComicID=?', [comicid]).fetchone()
    if not comic:
        logger.warn("Error fetching comic data for ID: " + comicid)
        if dbcomic is None:
            newValueDict = {
                "ComicName": "Fetch failed, try refreshing. (%s)" % (comicid),
                "Status": "Active"
            }
        else:
            newValueDict = {"Status": "Active"}
        myDB.upsert("comics", newValueDict, controlValueDict)
        return

    if comic['ComicName'].startswith('The '):
        sortname = comic['ComicName'][4:]
    else:
        sortname = comic['ComicName']

    logger.info(u"Now adding/updating: " + comic['ComicName'])
    #--Now that we know ComicName, let's try some scraping
    #--Start
    # gcd will return issue details (most importantly publishing date)
    if not mylar.CV_ONLY:
        if mismatch == "no" or mismatch is None:
            gcdinfo = parseit.GCDScraper(comic['ComicName'],
                                         comic['ComicYear'],
                                         comic['ComicIssues'], comicid)
            #print ("gcdinfo: " + str(gcdinfo))
            mismatch_com = "no"
            if gcdinfo == "No Match":
                updater.no_searchresults(comicid)
                nomatch = "true"
                logger.info(u"There was an error when trying to add " +
                            comic['ComicName'] + " (" + comic['ComicYear'] +
                            ")")
                return nomatch
            else:
                mismatch_com = "yes"
                #print ("gcdinfo:" + str(gcdinfo))

        elif mismatch == "yes":
            CV_EXcomicid = myDB.action(
                "SELECT * from exceptions WHERE ComicID=?",
                [comicid]).fetchone()
            if CV_EXcomicid['variloop'] is None: pass
            else:
                vari_loop = CV_EXcomicid['variloop']
                NewComicID = CV_EXcomicid['NewComicID']
                gcomicid = CV_EXcomicid['GComicID']
                resultURL = "/series/" + str(NewComicID) + "/"
                #print ("variloop" + str(CV_EXcomicid['variloop']))
                #if vari_loop == '99':
                gcdinfo = parseit.GCDdetails(comseries=None,
                                             resultURL=resultURL,
                                             vari_loop=0,
                                             ComicID=comicid,
                                             TotalIssues=0,
                                             issvariation="no",
                                             resultPublished=None)

    logger.info(u"Successfully retrieved details for " + comic['ComicName'])
    # print ("Series Published" + parseit.resultPublished)

    CV_NoYearGiven = "no"
    #if the SeriesYear returned by CV is blank or none (0000), let's use the gcd one.
    if comic['ComicYear'] is None or comic['ComicYear'] == '0000':
        if mylar.CV_ONLY:
            #we'll defer this until later when we grab all the issues and then figure it out
            logger.info(
                "Uh-oh. I can't find a Series Year for this series. I'm going to try analyzing deeper."
            )
            SeriesYear = cv.getComic(comicid, 'firstissue',
                                     comic['FirstIssueID'])
            if SeriesYear == '0000':
                logger.info(
                    "Ok - I couldn't find a Series Year at all. Loading in the issue data now and will figure out the Series Year."
                )
                CV_NoYearGiven = "yes"
                issued = cv.getComic(comicid, 'issue')
                SeriesYear = issued['firstdate'][:4]
        else:
            SeriesYear = gcdinfo['SeriesYear']
    else:
        SeriesYear = comic['ComicYear']

    #let's do the Annual check here.
    if mylar.ANNUALS_ON:
        annualcomicname = re.sub('[\,\:]', '', comic['ComicName'])
        annuals = comicbookdb.cbdb(annualcomicname, SeriesYear)
        print("Number of Annuals returned: " + str(annuals['totalissues']))
        nb = 0
        while (nb <= int(annuals['totalissues'])):
            try:
                annualval = annuals['annualslist'][nb]
            except IndexError:
                break
            newCtrl = {
                "IssueID":
                str(annualval['AnnualIssue'] + annualval['AnnualDate'])
            }
            newVals = {
                "Issue_Number": annualval['AnnualIssue'],
                "IssueDate": annualval['AnnualDate'],
                "IssueName": annualval['AnnualTitle'],
                "ComicID": comicid,
                "Status": "Skipped"
            }
            myDB.upsert("annuals", newVals, newCtrl)
            nb += 1

    #parseit.annualCheck(gcomicid=gcdinfo['GCDComicID'], comicid=comicid, comicname=comic['ComicName'], comicyear=SeriesYear)
    #comic book location on machine
    # setup default location here

    if comlocation is None:
        # let's remove the non-standard characters here.
        u_comicnm = comic['ComicName']
        u_comicname = u_comicnm.encode('ascii', 'ignore').strip()
        if ':' in u_comicname or '/' in u_comicname or ',' in u_comicname or '?' in u_comicname:
            comicdir = u_comicname
            if ':' in comicdir:
                comicdir = comicdir.replace(':', '')
            if '/' in comicdir:
                comicdir = comicdir.replace('/', '-')
            if ',' in comicdir:
                comicdir = comicdir.replace(',', '')
            if '?' in comicdir:
                comicdir = comicdir.replace('?', '')
        else:
            comicdir = u_comicname

        series = comicdir
        publisher = re.sub('!', '', comic['ComicPublisher'])  # thanks Boom!
        year = SeriesYear
        comversion = comic['ComicVersion']
        if comversion is None:
            comversion = 'None'
        #if comversion is None, remove it so it doesn't populate with 'None'
        if comversion == 'None':
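            #strip the $VolumeN token from the file format and collapse the leftover double spaces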
            chunk_f_f = re.sub('\$VolumeN', '', mylar.FILE_FORMAT)
            chunk_f = re.compile(r'\s+')
            mylar.FILE_FORMAT = chunk_f.sub(' ', chunk_f_f)

        #do work to generate folder path

        values = {
            '$Series': series,
            '$Publisher': publisher,
            '$Year': year,
            '$series': series.lower(),
            '$publisher': publisher.lower(),
            '$VolumeY': 'V' + str(year),
            '$VolumeN': comversion
        }
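        #FOLDER_FORMAT uses the same $token substitution as FILE_FORMAT; an empty FOLDER_FORMAT falls back to 'Series (Year)' below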

        #print mylar.FOLDER_FORMAT
        #print 'working dir:'
        #print helpers.replace_all(mylar.FOLDER_FORMAT, values)

        if mylar.FOLDER_FORMAT == '':
            comlocation = mylar.DESTINATION_DIR + "/" + comicdir + " (" + SeriesYear + ")"
        else:
            comlocation = mylar.DESTINATION_DIR + "/" + helpers.replace_all(
                mylar.FOLDER_FORMAT, values)

        #comlocation = mylar.DESTINATION_DIR + "/" + comicdir + " (" + comic['ComicYear'] + ")"
        if mylar.DESTINATION_DIR == "":
            logger.error(
                u"There is no general directory specified - please specify in Config/Post-Processing."
            )
            return
        if mylar.REPLACE_SPACES:
            #mylar.REPLACE_CHAR ...determines what to replace spaces with underscore or dot
            comlocation = comlocation.replace(' ', mylar.REPLACE_CHAR)

    #moved this out of the above loop so it will check for the existence of comlocation in case it was moved
    #if it doesn't exist - create it (otherwise will bugger up later on)
    if os.path.isdir(str(comlocation)):
        logger.info(u"Directory (" + str(comlocation) +
                    ") already exists! Continuing...")
    else:
        #print ("Directory doesn't exist!")
        #try:
        #    os.makedirs(str(comlocation))
        #    logger.info(u"Directory successfully created at: " + str(comlocation))
        #except OSError:
        #    logger.error(u"Could not create comicdir : " + str(comlocation))
        filechecker.validateAndCreateDirectory(comlocation, True)

    #try to account for CV not updating new issues as fast as GCD
    #seems CV doesn't update total counts
    #comicIssues = gcdinfo['totalissues']
    comicIssues = comic['ComicIssues']

    if not mylar.CV_ONLY:
        if gcdinfo['gcdvariation'] == "cv":
            comicIssues = str(int(comic['ComicIssues']) + 1)

    #let's download the image...
    if os.path.exists(mylar.CACHE_DIR): pass
    else:
        #let's make the dir.
        try:
            os.makedirs(str(mylar.CACHE_DIR))
            logger.info(u"Cache Directory successfully created at: " +
                        str(mylar.CACHE_DIR))

        except OSError:
            logger.error(
                'Could not create cache dir. Check permissions of cache dir: '
                + str(mylar.CACHE_DIR))

    coverfile = os.path.join(mylar.CACHE_DIR, str(comicid) + ".jpg")

    #try:
    urllib.urlretrieve(str(comic['ComicImage']), str(coverfile))
    try:
        with open(str(coverfile)) as f:
            ComicImage = os.path.join('cache', str(comicid) + ".jpg")

            #this is for Firefox when outside the LAN...it works, but I don't know how to implement it
            #without breaking the normal flow for inside the LAN (above)
            #ComicImage = "http://" + str(mylar.HTTP_HOST) + ":" + str(mylar.HTTP_PORT) + "/cache/" + str(comicid) + ".jpg"

            logger.info(u"Successfully retrieved cover for " +
                        comic['ComicName'])
            #if the comic cover local is checked, save a cover.jpg to the series folder.
            if mylar.COMIC_COVER_LOCAL:
                comiclocal = os.path.join(str(comlocation) + "/cover.jpg")
                shutil.copy(ComicImage, comiclocal)
    except IOError as e:
        logger.error(u"Unable to save cover locally at this time.")

    if oldcomversion is None:
        if comic['ComicVersion'].isdigit():
            comicVol = "v" + comic['ComicVersion']
        else:
            comicVol = None
    else:
        comicVol = oldcomversion

    #for description ...
    #Cdesc = helpers.cleanhtml(comic['ComicDescription'])
    #cdes_find = Cdesc.find("Collected")
    #cdes_removed = Cdesc[:cdes_find]
    #print cdes_removed

    controlValueDict = {"ComicID": comicid}
    newValueDict = {
        "ComicName": comic['ComicName'],
        "ComicSortName": sortname,
        "ComicYear": SeriesYear,
        "ComicImage": ComicImage,
        "Total": comicIssues,
        "ComicVersion": comicVol,
        "ComicLocation": comlocation,
        "ComicPublisher": comic['ComicPublisher'],
        #"Description":      Cdesc.decode('utf-8', 'replace'),
        "DetailURL": comic['ComicURL'],
        #                    "ComicPublished":   gcdinfo['resultPublished'],
        "ComicPublished": 'Unknown',
        "DateAdded": helpers.today(),
        "Status": "Loading"
    }

    myDB.upsert("comics", newValueDict, controlValueDict)

    #comicsort here...
    #run the re-sortorder here in order to properly display the page
    if pullupd is None:
        helpers.ComicSort(sequence='update')

    if CV_NoYearGiven == 'no':
        #if set to 'no' then we haven't pulled down the issues, otherwise we did it already
        issued = cv.getComic(comicid, 'issue')
    logger.info(u"Successfully retrieved issue details for " +
                comic['ComicName'])
    n = 0
    iscnt = int(comicIssues)
    issid = []
    issnum = []
    issname = []
    issdate = []
    int_issnum = []
    #let's start issue #'s at 0 -- thanks to DC for the new 52 reboot! :)
    latestiss = "0"
    latestdate = "0000-00-00"
    firstiss = "10000000"
    firstdate = "2099-00-00"
    #print ("total issues:" + str(iscnt))
    #---removed NEW code here---
    logger.info(u"Now adding/updating issues for " + comic['ComicName'])

    if not mylar.CV_ONLY:
        #fccnt = int(fc['comiccount'])
        #logger.info(u"Found " + str(fccnt) + "/" + str(iscnt) + " issues of " + comic['ComicName'] + "...verifying")
        #fcnew = []
        if iscnt > 0:  #if a series is brand new, it wont have any issues/details yet so skip this part
            while (n <= iscnt):
                #---NEW.code
                try:
                    firstval = issued['issuechoice'][n]
                except IndexError:
                    break
                cleanname = helpers.cleanName(firstval['Issue_Name'])
                issid = str(firstval['Issue_ID'])
                issnum = str(firstval['Issue_Number'])
                #print ("issnum: " + str(issnum))
                issname = cleanname
                if '.' in str(issnum):
                    issn_st = str(issnum).find('.')
                    issn_b4dec = str(issnum)[:issn_st]
                    #if the length of decimal is only 1 digit, assume it's a tenth
                    dec_is = str(issnum)[issn_st + 1:]
                    if len(dec_is) == 1:
                        dec_nisval = int(dec_is) * 10
                        iss_naftdec = str(dec_nisval)
                    if len(dec_is) == 2:
                        dec_nisval = int(dec_is)
                        iss_naftdec = str(dec_nisval)
                    iss_issue = issn_b4dec + "." + iss_naftdec
                    issis = (int(issn_b4dec) * 1000) + dec_nisval
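                #Marvel's 'Age of Ultron' issues are numbered like '5AU' - keep the numeric part * 1000 and tag the key with 'AU'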
                elif 'au' in issnum.lower():
                    print("au detected")
                    stau = issnum.lower().find('au')
                    issnum_au = issnum[:stau]
                    print("issnum_au: " + str(issnum_au))
                    #account for Age of Ultron mucked up numbering
                    issis = str(int(issnum_au) * 1000) + 'AU'
                else:
                    issis = int(issnum) * 1000

                bb = 0
                while (bb <= iscnt):
                    try:
                        gcdval = gcdinfo['gcdchoice'][bb]
                        #print ("gcdval: " + str(gcdval))
                    except IndexError:
                        #account for gcd variation here
                        if gcdinfo['gcdvariation'] == 'gcd':
                            #logger.fdebug("gcd-variation accounted for.")
                            issdate = '0000-00-00'
                            int_issnum = int(issis / 1000)
                        break
                    if 'nn' in str(gcdval['GCDIssue']):
                        #no number detected - GN, TP or the like
                        logger.warn(
                            u"Non Series detected (Graphic Novel, etc) - cannot proceed at this time."
                        )
                        updater.no_searchresults(comicid)
                        return
                    elif 'au' in gcdval['GCDIssue'].lower():
                        #account for Age of Ultron mucked up numbering - this is in format of 5AU.00
                        gstau = gcdval['GCDIssue'].lower().find('au')
                        gcdis_au = gcdval['GCDIssue'][:gstau]
                        gcdis = str(int(gcdis_au) * 1000) + 'AU'
                    elif '.' in str(gcdval['GCDIssue']):
                        #logger.fdebug("g-issue:" + str(gcdval['GCDIssue']))
                        issst = str(gcdval['GCDIssue']).find('.')
                        #logger.fdebug("issst:" + str(issst))
                        issb4dec = str(gcdval['GCDIssue'])[:issst]
                        #logger.fdebug("issb4dec:" + str(issb4dec))
                        #if the length of decimal is only 1 digit, assume it's a tenth
                        decis = str(gcdval['GCDIssue'])[issst + 1:]
                        #logger.fdebug("decis:" + str(decis))
                        if len(decis) == 1:
                            decisval = int(decis) * 10
                            issaftdec = str(decisval)
                        if len(decis) == 2:
                            decisval = int(decis)
                            issaftdec = str(decisval)
                        gcd_issue = issb4dec + "." + issaftdec
                        #logger.fdebug("gcd_issue:" + str(gcd_issue))
                        try:
                            gcdis = (int(issb4dec) * 1000) + decisval
                        except ValueError:
                            logger.error(
                                "This has no issue #'s for me to get - Either a Graphic Novel or one-shot. This feature to allow these will be added in the near future."
                            )
                            updater.no_searchresults(comicid)
                            return
                    else:
                        gcdis = int(str(gcdval['GCDIssue'])) * 1000
                    if gcdis == issis:
                        issdate = str(gcdval['GCDDate'])
                        if str(issis).isdigit():
                            int_issnum = int(gcdis / 1000)
                        else:
                            if 'au' in issis.lower():
                                int_issnum = str(int(gcdis[:-2]) / 1000) + 'AU'
                            else:
                                logger.error(
                                    "this has an alpha-numeric in the issue # which I cannot account for. Get on github and log the issue for evilhero."
                                )
                                return
                        #get the latest issue / date using the date.
                        if gcdval['GCDDate'] > latestdate:
                            latestiss = str(issnum)
                            latestdate = str(gcdval['GCDDate'])
                            break
                    #bb = iscnt
                    bb += 1
                #print("(" + str(n) + ") IssueID: " + str(issid) + " IssueNo: " + str(issnum) + " Date" + str(issdate))
                #---END.NEW.

                # check if the issue already exists
                iss_exists = myDB.action(
                    'SELECT * from issues WHERE IssueID=?',
                    [issid]).fetchone()

                # Only change the status & add DateAdded if the issue is not already in the database
                if iss_exists is None:
                    newValueDict['DateAdded'] = helpers.today()

                controlValueDict = {"IssueID": issid}
                newValueDict = {
                    "ComicID": comicid,
                    "ComicName": comic['ComicName'],
                    "IssueName": issname,
                    "Issue_Number": issnum,
                    "IssueDate": issdate,
                    "Int_IssueNumber": int_issnum
                }
                if mylar.AUTOWANT_ALL:
                    newValueDict['Status'] = "Wanted"
                elif issdate > helpers.today() and mylar.AUTOWANT_UPCOMING:
                    newValueDict['Status'] = "Wanted"
                else:
                    newValueDict['Status'] = "Skipped"

                if iss_exists:
                    #print ("Existing status : " + str(iss_exists['Status']))
                    newValueDict['Status'] = iss_exists['Status']

                try:
                    myDB.upsert("issues", newValueDict, controlValueDict)
                except sqlite3.InterfaceError as e:
                    #raise sqlite3.InterfaceError(e)
                    logger.error(
                        "MAJOR error trying to get issue data, this is most likely a MULTI-VOLUME series and you need to use the custom_exceptions.csv file."
                    )
                    myDB.action("DELETE FROM comics WHERE ComicID=?",
                                [comicid])
                    return
                n += 1
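#Worked examples of the integer issue key built in the loops above (illustration only):
#    '12'    -> 12000
#    '12.5'  -> 12050    (a single decimal digit is treated as tenths)
#    '12.05' -> 12005
#    '5AU'   -> '5000AU' (string key used for the Age of Ultron numbering)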
Exemple #14
0
    def Process_next(self,comicid,issueid,issuenumOG,ml=None):
            annchk = "no"
            extensions = ('.cbr', '.cbz')
            snatchedtorrent = False
            myDB = db.DBConnection()
            comicnzb = myDB.selectone("SELECT * from comics WHERE comicid=?", [comicid]).fetchone()
            issuenzb = myDB.selectone("SELECT * from issues WHERE issueid=? AND comicid=? AND ComicName NOT NULL", [issueid,comicid]).fetchone()
            if ml is not None and mylar.SNATCHEDTORRENT_NOTIFY:
                snatchnzb = myDB.selectone("SELECT * from snatched WHERE IssueID=? AND ComicID=? AND (provider=? OR provider=?) AND Status='Snatched'", [issueid,comicid,'KAT','CBT']).fetchone() 
                if snatchnzb is None:
                    logger.fdebug('Was not downloaded with Mylar and the usage of torrents. Disabling torrent manual post-processing completion notification.')
                else:
                    logger.fdebug('Was downloaded from ' + snatchnzb['Provider'] + '. Enabling torrent manual post-processing completion notification.')
                    snatchedtorrent = True
            logger.fdebug('issueid: ' + str(issueid))
            logger.fdebug('issuenumOG: ' + str(issuenumOG))
            if issuenzb is None:
                issuenzb = myDB.selectone("SELECT * from annuals WHERE issueid=? and comicid=?", [issueid,comicid]).fetchone()
                annchk = "yes"
            #issueno = str(issuenum).split('.')[0]
            #new CV API - removed all decimals...here we go AGAIN!
            issuenum = issuenzb['Issue_Number']
            issue_except = 'None'

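            #strip the alpha suffixes (AU / AI / INH / NOW) from the issue number, remembering the suffix
            #so it can be re-appended to the pretty issue string later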
            if 'au' in issuenum.lower() and issuenum[:1].isdigit():
                issuenum = re.sub("[^0-9]", "", issuenum)
                issue_except = ' AU'
            elif 'ai' in issuenum.lower() and issuenum[:1].isdigit():
                issuenum = re.sub("[^0-9]", "", issuenum)
                issue_except = ' AI'
            elif 'inh' in issuenum.lower() and issuenum[:1].isdigit():
                issuenum = re.sub("[^0-9]", "", issuenum)
                issue_except = '.INH'
            elif 'now' in issuenum.lower() and issuenum[:1].isdigit():
                if '!' in issuenum: issuenum = re.sub('\!', '', issuenum)
                issuenum = re.sub("[^0-9]", "", issuenum)
                issue_except = '.NOW'

            if '.' in issuenum:
                iss_find = issuenum.find('.')
                iss_b4dec = issuenum[:iss_find]
                iss_decval = issuenum[iss_find+1:]
                if int(iss_decval) == 0:
                    iss = iss_b4dec
                    issdec = int(iss_decval)
                    issueno = str(iss)
                    self._log("Issue Number: " + str(issueno))
                    logger.fdebug("Issue Number: " + str(issueno))
                else:
                    if len(iss_decval) == 1:
                        iss = iss_b4dec + "." + iss_decval
                        issdec = int(iss_decval) * 10
                    else:
                        iss = iss_b4dec + "." + iss_decval.rstrip('0')
                        issdec = int(iss_decval.rstrip('0')) * 10
                    issueno = iss_b4dec
                    self._log("Issue Number: " + str(iss))
                    logger.fdebug("Issue Number: " + str(iss))
            else:
                iss = issuenum
                issueno = str(iss)

            # issue zero-suppression here
            if mylar.ZERO_LEVEL == "0": 
                zeroadd = ""
            else:
                if mylar.ZERO_LEVEL_N  == "none": zeroadd = ""
                elif mylar.ZERO_LEVEL_N == "0x": zeroadd = "0"
                elif mylar.ZERO_LEVEL_N == "00x": zeroadd = "00"

            logger.fdebug("Zero Suppression set to : " + str(mylar.ZERO_LEVEL_N))

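            #build the zero-padded display issue, e.g. (illustrative) issue '4' with ZERO_LEVEL_N '00x' -> '004'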
            if str(len(issueno)) > 1:
                if int(issueno) < 0:
                    self._log("issue detected is a negative")
                    prettycomiss = '-' + str(zeroadd) + str(abs(int(issueno)))
                elif int(issueno) < 10:
                    self._log("issue detected less than 10")
                    if '.' in iss:
                        if int(iss_decval) > 0:
                            issueno = str(iss)
                            prettycomiss = str(zeroadd) + str(iss)
                        else:
                            prettycomiss = str(zeroadd) + str(int(issueno))
                    else:
                        prettycomiss = str(zeroadd) + str(iss)
                    if issue_except != 'None': 
                        prettycomiss = str(prettycomiss) + issue_except
                    self._log("Zero level supplement set to " + str(mylar.ZERO_LEVEL_N) + ". Issue will be set as : " + str(prettycomiss))
                elif int(issueno) >= 10 and int(issueno) < 100:
                    self._log("issue detected greater than 10, but less than 100")
                    if mylar.ZERO_LEVEL_N == "none":
                        zeroadd = ""
                    else:
                        zeroadd = "0"
                    if '.' in iss:
                        if int(iss_decval) > 0:
                            issueno = str(iss)
                            prettycomiss = str(zeroadd) + str(iss)
                        else:
                            prettycomiss = str(zeroadd) + str(int(issueno))
                    else:
                        prettycomiss = str(zeroadd) + str(iss)
                    if issue_except != 'None':
                        prettycomiss = str(prettycomiss) + issue_except
                    self._log("Zero level supplement set to " + str(mylar.ZERO_LEVEL_N) + ". Issue will be set as : " + str(prettycomiss))
                else:
                    self._log("issue detected greater than 100")
                    if '.' in iss:
                        if int(iss_decval) > 0:
                            issueno = str(iss)
                    prettycomiss = str(issueno)
                    if issue_except != 'None':
                        prettycomiss = str(prettycomiss) + issue_except
                    self._log("Zero level supplement set to " + str(mylar.ZERO_LEVEL_N) + ". Issue will be set as : " + str(prettycomiss))
            else:
                prettycomiss = str(issueno)
                self._log("issue length error - cannot determine length. Defaulting to None:  " + str(prettycomiss))

            if annchk == "yes":
                self._log("Annual detected.")
            logger.fdebug("Pretty Comic Issue is : " + str(prettycomiss))
            issueyear = issuenzb['IssueDate'][:4]
            self._log("Issue Year: " + str(issueyear))
            logger.fdebug("Issue Year : " + str(issueyear))
            month = issuenzb['IssueDate'][5:7].replace('-','').strip()
            month_name = helpers.fullmonth(month)
#            comicnzb= myDB.action("SELECT * from comics WHERE comicid=?", [comicid]).fetchone()
            publisher = comicnzb['ComicPublisher']
            self._log("Publisher: " + publisher)
            logger.fdebug("Publisher: " + str(publisher))
            #we need to un-unicode this to make sure we can write the filenames properly for spec.chars
            series = comicnzb['ComicName'].encode('ascii', 'ignore').strip()
            self._log("Series: " + series)
            logger.fdebug("Series: " + str(series))
            seriesyear = comicnzb['ComicYear']
            self._log("Year: " + seriesyear)
            logger.fdebug("Year: "  + str(seriesyear))
            comlocation = comicnzb['ComicLocation']
            self._log("Comic Location: " + comlocation)
            logger.fdebug("Comic Location: " + str(comlocation))
            comversion = comicnzb['ComicVersion']
            self._log("Comic Version: " + str(comversion))
            logger.fdebug("Comic Version: " + str(comversion))
            if comversion is None:
                comversion = 'None'
            #if comversion is None, remove it so it doesn't populate with 'None'
            if comversion == 'None':
                chunk_f_f = re.sub('\$VolumeN','',mylar.FILE_FORMAT)
                chunk_f = re.compile(r'\s+')
                chunk_file_format = chunk_f.sub(' ', chunk_f_f)
                self._log("No version # found for series - tag will not be available for renaming.")
                logger.fdebug("No version # found for series, removing from filename")
                logger.fdebug("new format is now: " + str(chunk_file_format))
            else:
                chunk_file_format = mylar.FILE_FORMAT

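            #non-annuals: drop the $Annual token from the file format; annuals whose format lacks $Annual get 'Annual' prefixed to the issue number instead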
            if annchk == "no":
                chunk_f_f = re.sub('\$Annual','',chunk_file_format)
                chunk_f = re.compile(r'\s+')
                chunk_file_format = chunk_f.sub(' ', chunk_f_f)
                logger.fdebug('not an annual - removing from filename parameters')
                logger.fdebug('new format: ' + str(chunk_file_format))

            else:
                logger.fdebug('chunk_file_format is: ' + str(chunk_file_format))
                if '$Annual' not in chunk_file_format:
                #if it's an annual, but $Annual isn't specified in file_format, we need to
                #force it in there, by default in the format of $Annual $Issue
                    prettycomiss = "Annual " + str(prettycomiss)
                    logger.fdebug('prettycomiss: ' + str(prettycomiss))


            ofilename = None

            #if meta-tagging is not enabled, we need to declare the check as being fail
            #if meta-tagging is enabled, it gets changed just below to a default of pass
            pcheck = "fail"

            #tag the meta.
            if mylar.ENABLE_META:
                self._log("Metatagging enabled - proceeding...")
                logger.fdebug("Metatagging enabled - proceeding...")
                pcheck = "pass"
                try:
                    import cmtagmylar
                    if ml is None:
                        pcheck = cmtagmylar.run(self.nzb_folder, issueid=issueid)
                    else:
                        pcheck = cmtagmylar.run(self.nzb_folder, issueid=issueid, manual="yes", filename=ml['ComicLocation'])

                except ImportError:
                    logger.fdebug("comictaggerlib not found on system. Ensure the ENTIRE lib directory is located within mylar/lib/comictaggerlib/")
                    logger.fdebug("continuing with PostProcessing, but I'm not using metadata.")
                    pcheck = "fail"
                
                if pcheck == "fail":
                    self._log("Unable to write metadata successfully - check mylar.log file. Attempting to continue without tagging...")
                    logger.fdebug("Unable to write metadata successfully - check mylar.log file. Attempting to continue without tagging...")
                elif pcheck == "unrar error":
                    self._log("This is a corrupt archive - either it has CRC errors or it's incomplete. Marking as BAD, and retrying a different copy.")
                    logger.error("This is a corrupt archive - either it has CRC errors or it's incomplete. Marking as BAD, and retrying a different copy.")
                    return self.log
                else:
                    otofilename = pcheck
                    self._log("Successfully wrote metadata to .cbz - Continuing..")
                    logger.fdebug("Successfully wrote metadata to .cbz (" + str(otofilename) + ") - Continuing..")
            #Run Pre-script

            if mylar.ENABLE_PRE_SCRIPTS:
                nzbn = self.nzb_name #original nzb name
                nzbf = self.nzb_folder #original nzb folder
                #name, comicyear, comicid , issueid, issueyear, issue, publisher
                #create the dic and send it.
                seriesmeta = []
                seriesmetadata = {}
                seriesmeta.append({
                            'name':                 series,
                            'comicyear':            seriesyear,
                            'comicid':              comicid,
                            'issueid':              issueid,
                            'issueyear':            issueyear,
                            'issue':                issuenum,
                            'publisher':            publisher
                            })
                seriesmetadata['seriesmeta'] = seriesmeta
                self._run_pre_scripts(nzbn, nzbf, seriesmetadata )

        #rename file and move to new path
        #nfilename = series + " " + issueno + " (" + seriesyear + ")"

            file_values = {'$Series':    series,
                           '$Issue':     prettycomiss,
                           '$Year':      issueyear,
                           '$series':    series.lower(),
                           '$Publisher': publisher,
                           '$publisher': publisher.lower(),
                           '$VolumeY':   'V' + str(seriesyear),
                           '$VolumeN':   comversion,
                           '$monthname': month_name,
                           '$month':     month,
                           '$Annual':    'Annual'
                          }


            #if it's a Manual Run, use the ml['ComicLocation'] for the exact filename.
            if ml is None:
                odir = None
                for root, dirnames, filenames in os.walk(self.nzb_folder):
                    for filename in filenames:
                        if filename.lower().endswith(extensions):
                            odir = root
                            ofilename = filename
                            path, ext = os.path.splitext(ofilename)
 
                if odir is None:
                    logger.fdebug('no root folder set.')
                    odir = self.nzb_folder
                logger.fdebug('odir: ' + str(odir))
                logger.fdebug('ofilename: ' + str(ofilename))

            else:
                if pcheck == "fail":
                    otofilename = ml['ComicLocation']
                logger.fdebug('otofilename:' + str(otofilename))
                odir, ofilename = os.path.split(otofilename)
                logger.fdebug('odir: ' + str(odir))
                logger.fdebug('ofilename: ' + str(ofilename))
                path, ext = os.path.splitext(ofilename)
                logger.fdebug('path: ' + str(path))
                logger.fdebug('ext:' + str(ext))

            if ofilename is None:
                logger.error(u"Aborting PostProcessing - the filename doesn't exist in the location given. Make sure that " + str(self.nzb_folder) + " exists and is the correct location.")
                return
            self._log("Original Filename: " + ofilename)
            self._log("Original Extension: " + ext)
            logger.fdebug("Original Filename: " + str(ofilename))
            logger.fdebug("Original Extension: " + str(ext))

            if mylar.FILE_FORMAT == '' or not mylar.RENAME_FILES:
                self._log("Rename Files isn't enabled...keeping original filename.")
                logger.fdebug("Rename Files isn't enabled - keeping original filename.")
                #check if extension is in nzb_name - will screw up otherwise
                if ofilename.lower().endswith(extensions):
                    nfilename = ofilename[:-4]
                else:
                    nfilename = ofilename
            else:
                nfilename = helpers.replace_all(chunk_file_format, file_values)
                if mylar.REPLACE_SPACES:
                    #mylar.REPLACE_CHAR ...determines what to replace spaces with underscore or dot
                    nfilename = nfilename.replace(' ', mylar.REPLACE_CHAR)
            nfilename = re.sub('[\,\:\?]', '', nfilename)
            nfilename = re.sub('[\/]', '-', nfilename)
            self._log("New Filename: " + nfilename)
            logger.fdebug("New Filename: " + str(nfilename))

            #src = os.path.join(self.nzb_folder, ofilename)
            src = os.path.join(odir, ofilename)
            filechecker.validateAndCreateDirectory(comlocation, True)

            if mylar.LOWERCASE_FILENAMES:
                dst = (comlocation + "/" + nfilename + ext).lower()
            else:
                dst = comlocation + "/" + nfilename + ext.lower()    
            self._log("Source: " + src)
            self._log("Destination: " + dst)
            logger.fdebug("Source: " + str(src))
            logger.fdebug("Destination: " + str(dst))

            if ml is None:
                #downtype = for use with updater on history table to set status to 'Downloaded'
                downtype = 'True'
                #non-manual run moving/deleting...
                logger.fdebug('self.nzb_folder: ' + self.nzb_folder)
                logger.fdebug('odir: ' + str(odir))
                logger.fdebug('ofilename:' + str(ofilename))
                logger.fdebug('nfilename:' + str(nfilename + ext))
                if mylar.RENAME_FILES:
                    if str(ofilename) != str(nfilename + ext):
                        logger.fdebug("Renaming " + os.path.join(odir, str(ofilename)) + " ..to.. " + os.path.join(odir,str(nfilename + ext)))
                        os.rename(os.path.join(odir, str(ofilename)), os.path.join(odir,str(nfilename + ext)))
                    else:
                        logger.fdebug('filename is identical as original, not renaming.')

                #src = os.path.join(self.nzb_folder, str(nfilename + ext))
                src = os.path.join(odir, str(nfilename + ext))
                try:
                    shutil.move(src, dst)
                except (OSError, IOError):
                    self._log("Failed to move directory - check directories and manually re-run.")
                    self._log("Post-Processing ABORTED.")
                    return
                #tidyup old path
                try:
                    shutil.rmtree(self.nzb_folder)
                except (OSError, IOError):
                    self._log("Failed to remove temporary directory - check directory and manually re-run.")
                    self._log("Post-Processing ABORTED.")
                    return

                self._log("Removed temporary directory : " + str(self.nzb_folder))
            else:
                #downtype = for use with updater on history table to set status to 'Post-Processed'
                downtype = 'PP'
                #Manual Run, this is the portion.
                if mylar.RENAME_FILES:
                    if str(ofilename) != str(nfilename + ext):
                        logger.fdebug("Renaming " + os.path.join(self.nzb_folder, str(ofilename)) + " ..to.. " + os.path.join(self.nzb_folder,str(nfilename + ext)))
                        os.rename(os.path.join(odir, str(ofilename)), os.path.join(odir ,str(nfilename + ext)))
                    else:
                        logger.fdebug('filename is identical as original, not renaming.')
                src = os.path.join(odir, str(nfilename + ext))
                logger.fdebug('odir rename: ' + os.path.join(odir, str(ofilename)) + ' TO ' + os.path.join(odir, str(nfilename + ext)))
                logger.fdebug('odir src : ' + os.path.join(odir, str(nfilename + ext)))
                logger.fdebug("Moving " + src + " ... to ... " + dst)
                try:
                    shutil.move(src, dst)
                except (OSError, IOError):
                    logger.fdebug("Failed to move directory - check directories and manually re-run.")
                    logger.fdebug("Post-Processing ABORTED.")
                    return
                logger.fdebug("Successfully moved to : " + dst)
                #tidyup old path
                #try:
                #    os.remove(os.path.join(self.nzb_folder, str(ofilename)))
                #    logger.fdebug("Deleting : " + os.path.join(self.nzb_folder, str(ofilename)))
                #except (OSError, IOError):
                #    logger.fdebug("Failed to remove temporary directory - check directory and manually re-run.")
                #    logger.fdebug("Post-Processing ABORTED.")
                #    return
                #logger.fdebug("Removed temporary directory : " + str(self.nzb_folder))

            #Hopefully set permissions on downloaded file
            try:
                permission = int(mylar.CHMOD_FILE, 8)
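                # CHMOD_FILE is stored as an octal string in the config (e.g. '0660'); int(..., 8) converts it to the numeric mode os.chmod expects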
                os.umask(0)
                os.chmod(dst.rstrip(), permission)
            except OSError:
                logger.error('Failed to change file permissions. Ensure that the user running Mylar has proper permissions to change permissions in : ' + dst)
                logger.fdebug('Continuing post-processing but unable to change file permissions in ' + dst)
            #delete entry from nzblog table
            myDB.action('DELETE from nzblog WHERE issueid=?', [issueid])
            #update snatched table to change status to Downloaded

            if annchk == "no":
                updater.foundsearch(comicid, issueid, down=downtype)
                dispiss = 'issue: ' + str(issuenumOG)
            else:
                updater.foundsearch(comicid, issueid, mode='want_ann', down=downtype)
                dispiss = 'annual issue: ' + str(issuenumOG)

            #force rescan of files
            updater.forceRescan(comicid)
            logger.info(u"Post-Processing completed for: " + series + " " + dispiss )
            self._log(u"Post Processing SUCCESSFUL! ")

            if mylar.WEEKFOLDER:
                #if enabled, will *copy* the post-processed file to the weeklypull list folder for the given week.
                weeklypull.weekly_singlecopy(comicid,issuenum,str(nfilename+ext),dst)

            # retrieve/create the corresponding comic objects
            if mylar.ENABLE_EXTRA_SCRIPTS:
                folderp = str(dst) #folder location after move/rename
                nzbn = self.nzb_name #original nzb name
                filen = str(nfilename + ext) #new filename
                #name, comicyear, comicid , issueid, issueyear, issue, publisher
                #create the dic and send it.
                seriesmeta = []
                seriesmetadata = {}
                seriesmeta.append({
                            'name':                 series,
                            'comicyear':            seriesyear,
                            'comicid':              comicid,
                            'issueid':              issueid,
                            'issueyear':            issueyear,
                            'issue':                issuenum,
                            'publisher':            publisher
                            })
                seriesmetadata['seriesmeta'] = seriesmeta
                self._run_extra_scripts(nzbn, self.nzb_folder, filen, folderp, seriesmetadata )

            if ml is not None:
                #we only need to return self.log if it's a manual run and it's not a snatched torrent
                if snatchedtorrent: 
                    #manual run + snatched torrent
                    pass
                else:
                    #manual run + not snatched torrent (or normal manual-run)
                    return self.log

            if annchk == "no":
                prline = series + '(' + issueyear + ') - issue #' + issuenumOG
            else:
                prline = series + ' Annual (' + issueyear + ') - issue #' + issuenumOG
            prline2 = 'Mylar has downloaded and post-processed: ' + prline

            if mylar.PROWL_ENABLED:
                pushmessage = prline
                logger.info(u"Prowl request")
                prowl = notifiers.PROWL()
                prowl.notify(pushmessage,"Download and Postprocessing completed")
    
            if mylar.NMA_ENABLED:
                nma = notifiers.NMA()
                nma.notify(prline=prline, prline2=prline2)

            if mylar.PUSHOVER_ENABLED:
                logger.info(u"Pushover request")
                pushover = notifiers.PUSHOVER()
                pushover.notify(prline, "Download and Post-Processing completed")

            if mylar.BOXCAR_ENABLED:
                boxcar = notifiers.BOXCAR()
                boxcar.notify(prline=prline, prline2=prline2)

            if mylar.PUSHBULLET_ENABLED:
                pushbullet = notifiers.PUSHBULLET()
                pushbullet.notify(prline=prline, prline2=prline2)
             
            return self.log
Exemple #15
0
def addComictoDB(comicid, mismatch=None):
    # Putting this here to get around the circular import. Will try to use this to update images at later date.
    from mylar import cache

    myDB = db.DBConnection()

    # We need the current minimal info in the database instantly
    # so we don't throw a 500 error when we redirect to the artistPage

    controlValueDict = {"ComicID": comicid}

    dbcomic = myDB.action("SELECT * FROM comics WHERE ComicID=?", [comicid]).fetchone()
    if dbcomic is None:
        newValueDict = {"ComicName": "Comic ID: %s" % (comicid), "Status": "Loading"}
        comlocation = None
    else:
        newValueDict = {"Status": "Loading"}
        comlocation = dbcomic["ComicLocation"]

    myDB.upsert("comics", newValueDict, controlValueDict)

    # we need to lookup the info for the requested ComicID in full now
    comic = cv.getComic(comicid, "comic")
    # comic = myDB.action('SELECT * FROM comics WHERE ComicID=?', [comicid]).fetchone()
    if not comic:
        logger.warn("Error fetching comic. ID for : " + comicid)
        if dbcomic is None:
            newValueDict = {"ComicName": "Fetch failed, try refreshing. (%s)" % (comicid), "Status": "Active"}
        else:
            newValueDict = {"Status": "Active"}
        myDB.upsert("comics", newValueDict, controlValueDict)
        return

    if comic["ComicName"].startswith("The "):
        sortname = comic["ComicName"][4:]
    else:
        sortname = comic["ComicName"]

    logger.info(u"Now adding/updating: " + comic["ComicName"])
    # --Now that we know ComicName, let's try some scraping
    # --Start
    # gcd will return issue details (most importantly publishing date)
    if mismatch == "no" or mismatch is None:
        gcdinfo = parseit.GCDScraper(comic["ComicName"], comic["ComicYear"], comic["ComicIssues"], comicid)
        mismatch_com = "no"
        if gcdinfo == "No Match":
            updater.no_searchresults(comicid)
            nomatch = "true"
            logger.info(
                u"There was an error when trying to add " + comic["ComicName"] + " (" + comic["ComicYear"] + ")"
            )
            return nomatch
        else:
            mismatch_com = "yes"
            # print ("gcdinfo:" + str(gcdinfo))

    elif mismatch == "yes":
        CV_EXcomicid = myDB.action("SELECT * from exceptions WHERE ComicID=?", [comicid]).fetchone()
        if CV_EXcomicid["variloop"] is None:
            pass
        else:
            vari_loop = CV_EXcomicid["variloop"]
            NewComicID = CV_EXcomicid["NewComicID"]
            gcomicid = CV_EXcomicid["GComicID"]
            resultURL = "/series/" + str(NewComicID) + "/"
            # print ("variloop" + str(CV_EXcomicid['variloop']))
            # if vari_loop == '99':
            gcdinfo = parseit.GCDdetails(
                comseries=None,
                resultURL=resultURL,
                vari_loop=0,
                ComicID=comicid,
                TotalIssues=0,
                issvariation="no",
                resultPublished=None,
            )

    logger.info(u"Sucessfully retrieved details for " + comic["ComicName"])
    # print ("Series Published" + parseit.resultPublished)

    # comic book location on machine
    # setup default location here

    if comlocation is None:
        if ":" in comic["ComicName"] or "/" in comic["ComicName"] or "," in comic["ComicName"]:
            comicdir = comic["ComicName"]
            if ":" in comicdir:
                comicdir = comicdir.replace(":", "")
            if "/" in comicdir:
                comicdir = comicdir.replace("/", "-")
            if "," in comicdir:
                comicdir = comicdir.replace(",", "")
        else:
            comicdir = comic["ComicName"]

        series = comicdir
        publisher = comic["ComicPublisher"]
        year = comic["ComicYear"]

        # do work to generate folder path

        values = {"$Series": series, "$Publisher": publisher, "$Year": year}

        # print mylar.FOLDER_FORMAT
        # print 'working dir:'
        # print helpers.replace_all(mylar.FOLDER_FORMAT, values)

        if mylar.FOLDER_FORMAT == "":
            comlocation = mylar.DESTINATION_DIR + "/" + comicdir + " (" + comic["ComicYear"] + ")"
        else:
            comlocation = mylar.DESTINATION_DIR + "/" + helpers.replace_all(mylar.FOLDER_FORMAT, values)

        # comlocation = mylar.DESTINATION_DIR + "/" + comicdir + " (" + comic['ComicYear'] + ")"
        if mylar.DESTINATION_DIR == "":
            logger.error(u"There is no general directory specified - please specify in Config/Post-Processing.")
            return
        if mylar.REPLACE_SPACES:
            # mylar.REPLACE_CHAR ...determines what to replace spaces with underscore or dot
            comlocation = comlocation.replace(" ", mylar.REPLACE_CHAR)
        # if it doesn't exist - create it (otherwise will bugger up later on)
        if os.path.isdir(str(comlocation)):
            logger.info(u"Directory (" + str(comlocation) + ") already exists! Continuing...")
        else:
            # print ("Directory doesn't exist!")
            try:
                os.makedirs(str(comlocation))
                logger.info(u"Directory successfully created at: " + str(comlocation))
            except OSError:
                logger.error(u"Could not create comicdir : " + str(comlocation))

    # try to account for CV not updating new issues as fast as GCD
    # seems CV doesn't update total counts
    # comicIssues = gcdinfo['totalissues']
    if gcdinfo["gcdvariation"] == "cv":
        comicIssues = str(int(comic["ComicIssues"]) + 1)
    else:
        comicIssues = comic["ComicIssues"]

    # let's download the image...
    if os.path.exists(mylar.CACHE_DIR):
        pass
    else:
        # let's make the dir.
        try:
            os.makedirs(str(mylar.CACHE_DIR))
            logger.info(u"Cache Directory successfully created at: " + str(mylar.CACHE_DIR))

        except OSError:
            logger.error("Could not create cache dir. Check permissions of cache dir: " + str(mylar.CACHE_DIR))

    coverfile = mylar.CACHE_DIR + "/" + str(comicid) + ".jpg"

    # try:
    urllib.urlretrieve(str(comic["ComicImage"]), str(coverfile))
    try:
        with open(str(coverfile)) as f:
            ComicImage = "cache/" + str(comicid) + ".jpg"
            logger.info(u"Sucessfully retrieved cover for " + str(comic["ComicName"]))
    except IOError as e:
        logger.error(u"Unable to save cover locally at this time.")
        # assumption: fall back to the expected cache path so ComicImage is still defined for the upsert below
        ComicImage = "cache/" + str(comicid) + ".jpg"

    controlValueDict = {"ComicID": comicid}
    newValueDict = {
        "ComicName": comic["ComicName"],
        "ComicSortName": sortname,
        "ComicYear": comic["ComicYear"],
        "ComicImage": ComicImage,
        "Total": comicIssues,
        "ComicLocation": comlocation,
        "ComicPublisher": comic["ComicPublisher"],
        "ComicPublished": gcdinfo["resultPublished"],
        "DateAdded": helpers.today(),
        "Status": "Loading",
    }

    myDB.upsert("comics", newValueDict, controlValueDict)

    issued = cv.getComic(comicid, "issue")
    logger.info(u"Sucessfully retrieved issue details for " + comic["ComicName"])
    n = 0
    iscnt = int(comicIssues)
    issid = []
    issnum = []
    issname = []
    issdate = []
    int_issnum = []
    # let's start issue #'s at 0 -- thanks to DC for the new 52 reboot! :)
    latestiss = "0"
    latestdate = "0000-00-00"
    # print ("total issues:" + str(iscnt))
    # ---removed NEW code here---
    logger.info(u"Now adding/updating issues for " + comic["ComicName"])

    # file check to see if issue exists
    logger.info(u"Checking directory for existing issues.")
    # fc = filechecker.listFiles(dir=comlocation, watchcomic=comic['ComicName'])
    # havefiles = 0

    # fccnt = int(fc['comiccount'])
    # logger.info(u"Found " + str(fccnt) + "/" + str(iscnt) + " issues of " + comic['ComicName'] + "...verifying")
    # fcnew = []

    while n <= iscnt:
        # ---NEW.code
        try:
            firstval = issued["issuechoice"][n]
        except IndexError:
            break
        cleanname = helpers.cleanName(firstval["Issue_Name"])
        issid = str(firstval["Issue_ID"])
        issnum = str(firstval["Issue_Number"])
        issname = cleanname
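        # worked example: issue '5.1' -> issn_b4dec '5', dec_is '1' (treated as a tenth) -> issis 5010; a whole number like '5' simply becomes 5000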
        if "." in str(issnum):
            issn_st = str(issnum).find(".")
            issn_b4dec = str(issnum)[:issn_st]
            # if the length of decimal is only 1 digit, assume it's a tenth
            dec_is = str(issnum)[issn_st + 1 :]
            if len(dec_is) == 1:
                dec_nisval = int(dec_is) * 10
                iss_naftdec = str(dec_nisval)
            if len(dec_is) == 2:
                dec_nisval = int(dec_is)
                iss_naftdec = str(dec_nisval)
            iss_issue = issn_b4dec + "." + iss_naftdec
            issis = (int(issn_b4dec) * 1000) + dec_nisval
        else:
            issis = int(issnum) * 1000

        bb = 0
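        # walk the GCD issue list, encode each GCD number the same way (x1000 plus decimals), and when it matches issis pull the publication date from GCD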
        while bb <= iscnt:
            try:
                gcdval = gcdinfo["gcdchoice"][bb]
            except IndexError:
                # account for gcd variation here
                if gcdinfo["gcdvariation"] == "gcd":
                    # print ("gcd-variation accounted for.")
                    issdate = "0000-00-00"
                    int_issnum = int(issis / 1000)
                break
            if "nn" in str(gcdval["GCDIssue"]):
                # no number detected - GN, TP or the like
                logger.warn(u"Non Series detected (Graphic Novel, etc) - cannot proceed at this time.")
                updater.no_searchresults(comicid)
                return
            elif "." in str(gcdval["GCDIssue"]):
                # print ("g-issue:" + str(gcdval['GCDIssue']))
                issst = str(gcdval["GCDIssue"]).find(".")
                # print ("issst:" + str(issst))
                issb4dec = str(gcdval["GCDIssue"])[:issst]
                # print ("issb4dec:" + str(issb4dec))
                # if the length of decimal is only 1 digit, assume it's a tenth
                decis = str(gcdval["GCDIssue"])[issst + 1 :]
                # print ("decis:" + str(decis))
                if len(decis) == 1:
                    decisval = int(decis) * 10
                    issaftdec = str(decisval)
                if len(decis) == 2:
                    decisval = int(decis)
                    issaftdec = str(decisval)
                gcd_issue = issb4dec + "." + issaftdec
                # print ("gcd_issue:" + str(gcd_issue))
                gcdis = (int(issb4dec) * 1000) + decisval
            else:
                gcdis = int(str(gcdval["GCDIssue"])) * 1000
            if gcdis == issis:
                issdate = str(gcdval["GCDDate"])
                int_issnum = int(gcdis / 1000)
                # get the latest issue / date using the date.
                if gcdval["GCDDate"] > latestdate:
                    latestiss = str(issnum)
                    latestdate = str(gcdval["GCDDate"])
                    break
                # bb = iscnt
            bb += 1
        # print("(" + str(n) + ") IssueID: " + str(issid) + " IssueNo: " + str(issnum) + " Date" + str(issdate))
        # ---END.NEW.

        # check if the issue already exists
        iss_exists = myDB.action("SELECT * from issues WHERE IssueID=?", [issid]).fetchone()

        # Only change the status & add DateAdded if the issue is already in the database
        if iss_exists is None:
            newValueDict["DateAdded"] = helpers.today()

        controlValueDict = {"IssueID": issid}
        newValueDict = {
            "ComicID": comicid,
            "ComicName": comic["ComicName"],
            "IssueName": issname,
            "Issue_Number": issnum,
            "IssueDate": issdate,
            "Int_IssueNumber": int_issnum,
        }
        if mylar.AUTOWANT_ALL:
            newValueDict["Status"] = "Wanted"
            # elif release_dict['releasedate'] > helpers.today() and mylar.AUTOWANT_UPCOMING:
            #    newValueDict['Status'] = "Wanted"
        else:
            newValueDict["Status"] = "Skipped"

        if iss_exists:
            # print ("Existing status : " + str(iss_exists['Status']))
            newValueDict["Status"] = iss_exists["Status"]

        myDB.upsert("issues", newValueDict, controlValueDict)
        n += 1

    #        logger.debug(u"Updating comic cache for " + comic['ComicName'])
    #        cache.getThumb(ComicID=issue['issueid'])

    #        logger.debug(u"Updating cache for: " + comic['ComicName'])
    #        cache.getThumb(ComicIDcomicid)

    # check for existing files...
    updater.forceRescan(comicid)

    controlValueStat = {"ComicID": comicid}
    newValueStat = {
        "Status": "Active",
        "LatestIssue": latestiss,
        "LatestDate": latestdate,
        "LastUpdated": helpers.now(),
    }

    myDB.upsert("comics", newValueStat, controlValueStat)

    logger.info(u"Updating complete for: " + comic["ComicName"])

    # let's check the pullist for anything at this time as well since we're here.
    if mylar.AUTOWANT_UPCOMING:
        logger.info(u"Checking this week's pullist for new issues of " + str(comic["ComicName"]))
        updater.newpullcheck()

    # here we grab issues that have been marked as wanted above...

    results = myDB.select("SELECT * FROM issues where ComicID=? AND Status='Wanted'", [comicid])
    if results:
        logger.info(u"Attempting to grab wanted issues for : " + comic["ComicName"])

        for result in results:
            foundNZB = "none"
            if (mylar.NZBSU or mylar.DOGNZB or mylar.EXPERIMENTAL) and (mylar.SAB_HOST):
                foundNZB = search.searchforissue(result["IssueID"])
                if foundNZB == "yes":
                    updater.foundsearch(result["ComicID"], result["IssueID"])
    else:
        logger.info(u"No issues marked as wanted for " + comic["ComicName"])

    logger.info(u"Finished grabbing what I could.")
Exemple #16
0
def initialize():

    with INIT_LOCK:
    
        global __INITIALIZED__, FULL_PATH, PROG_DIR, VERBOSE, DAEMON, DATA_DIR, CONFIG_FILE, CFG, CONFIG_VERSION, LOG_DIR, CACHE_DIR, LOGVERBOSE, \
                HTTP_PORT, HTTP_HOST, HTTP_USERNAME, HTTP_PASSWORD, HTTP_ROOT, LAUNCH_BROWSER, GIT_PATH, \
                CURRENT_VERSION, LATEST_VERSION, CHECK_GITHUB, CHECK_GITHUB_ON_STARTUP, CHECK_GITHUB_INTERVAL, USER_AGENT, MUSIC_DIR, DESTINATION_DIR, \
                DOWNLOAD_DIR, USENET_RETENTION, SEARCH_INTERVAL, NZB_STARTUP_SEARCH, INTERFACE, AUTOWANT_ALL, AUTOWANT_UPCOMING, ZERO_LEVEL, ZERO_LEVEL_N, COMIC_COVER_LOCAL, \
                LIBRARYSCAN, LIBRARYSCAN_INTERVAL, DOWNLOAD_SCAN_INTERVAL, USE_SABNZBD, SAB_HOST, SAB_USERNAME, SAB_PASSWORD, SAB_APIKEY, SAB_CATEGORY, SAB_PRIORITY, SAB_DIRECTORY, BLACKHOLE, BLACKHOLE_DIR, ADD_COMICS, COMIC_DIR, IMP_MOVE, IMP_RENAME, IMP_METADATA, \
                USE_NZBGET, NZBGET_HOST, NZBGET_PORT, NZBGET_USERNAME, NZBGET_PASSWORD, NZBGET_CATEGORY, NZBGET_PRIORITY, NZBSU, NZBSU_APIKEY, DOGNZB, DOGNZB_APIKEY, NZBX,\
                NEWZNAB, NEWZNAB_HOST, NEWZNAB_APIKEY, NEWZNAB_ENABLED, EXTRA_NEWZNABS,\
                RAW, RAW_PROVIDER, RAW_USERNAME, RAW_PASSWORD, RAW_GROUPS, EXPERIMENTAL, \
                PROWL_ENABLED, PROWL_PRIORITY, PROWL_KEYS, PROWL_ONSNATCH, NMA_ENABLED, NMA_APIKEY, NMA_PRIORITY, NMA_ONSNATCH, \
                PREFERRED_QUALITY, MOVE_FILES, RENAME_FILES, LOWERCASE_FILENAMES, USE_MINSIZE, MINSIZE, USE_MAXSIZE, MAXSIZE, CORRECT_METADATA, FOLDER_FORMAT, FILE_FORMAT, REPLACE_CHAR, REPLACE_SPACES, ADD_TO_CSV, CVINFO, LOG_LEVEL, POST_PROCESSING, \
                COMIC_LOCATION, QUAL_ALTVERS, QUAL_SCANNER, QUAL_TYPE, QUAL_QUALITY, ENABLE_EXTRA_SCRIPTS, EXTRA_SCRIPTS, ENABLE_PRE_SCRIPTS, PRE_SCRIPTS, PULLNEW, COUNT_ISSUES, COUNT_HAVES, COUNT_COMICS
                
        if __INITIALIZED__:
            return False

        # Make sure all the config sections exist
        CheckSection('General')
        CheckSection('SABnzbd')
        CheckSection('NZBGet')
        CheckSection('NZBsu')
        CheckSection('DOGnzb')
        CheckSection('Raw')
        CheckSection('Experimental')        
        CheckSection('Newznab')
        # Set global variables based on config file or use defaults
        try:
            HTTP_PORT = check_setting_int(CFG, 'General', 'http_port', 8090)
        except:
            HTTP_PORT = 8090
            
        if HTTP_PORT < 21 or HTTP_PORT > 65535:
            HTTP_PORT = 8090
            
#        CONFIG_VERSION = check_setting_str(CFG, 'General', 'config_version', '')
        HTTP_HOST = check_setting_str(CFG, 'General', 'http_host', '0.0.0.0')
        HTTP_USERNAME = check_setting_str(CFG, 'General', 'http_username', '')
        HTTP_PASSWORD = check_setting_str(CFG, 'General', 'http_password', '')
        HTTP_ROOT = check_setting_str(CFG, 'General', 'http_root', '/')
        LAUNCH_BROWSER = bool(check_setting_int(CFG, 'General', 'launch_browser', 1))
        LOGVERBOSE = bool(check_setting_int(CFG, 'General', 'logverbose', 1))
        GIT_PATH = check_setting_str(CFG, 'General', 'git_path', '')
        LOG_DIR = check_setting_str(CFG, 'General', 'log_dir', '')
        if not CACHE_DIR:
            CACHE_DIR = check_setting_str(CFG, 'General', 'cache_dir', '')
        
        CHECK_GITHUB = bool(check_setting_int(CFG, 'General', 'check_github', 1))
        CHECK_GITHUB_ON_STARTUP = bool(check_setting_int(CFG, 'General', 'check_github_on_startup', 1))
        CHECK_GITHUB_INTERVAL = check_setting_int(CFG, 'General', 'check_github_interval', 360)
        
        DESTINATION_DIR = check_setting_str(CFG, 'General', 'destination_dir', '')
        USENET_RETENTION = check_setting_int(CFG, 'General', 'usenet_retention', '1500')
        
        SEARCH_INTERVAL = check_setting_int(CFG, 'General', 'search_interval', 360)
        NZB_STARTUP_SEARCH = bool(check_setting_int(CFG, 'General', 'nzb_startup_search', 0))
        LIBRARYSCAN = bool(check_setting_int(CFG, 'General', 'libraryscan', 1))
        LIBRARYSCAN_INTERVAL = check_setting_int(CFG, 'General', 'libraryscan_interval', 300)
        ADD_COMICS = bool(check_setting_int(CFG, 'General', 'add_comics', 0))
        COMIC_DIR = check_setting_str(CFG, 'General', 'comic_dir', '')
        IMP_MOVE = bool(check_setting_int(CFG, 'General', 'imp_move', 0))
        IMP_RENAME = bool(check_setting_int(CFG, 'General', 'imp_rename', 0))
        IMP_METADATA = bool(check_setting_int(CFG, 'General', 'imp_metadata', 0))
        DOWNLOAD_SCAN_INTERVAL = check_setting_int(CFG, 'General', 'download_scan_interval', 5)
        INTERFACE = check_setting_str(CFG, 'General', 'interface', 'default')
        AUTOWANT_ALL = bool(check_setting_int(CFG, 'General', 'autowant_all', 0))
        AUTOWANT_UPCOMING = bool(check_setting_int(CFG, 'General', 'autowant_upcoming', 1))
        COMIC_COVER_LOCAL = bool(check_setting_int(CFG, 'General', 'comic_cover_local', 0))
        PREFERRED_QUALITY = bool(check_setting_int(CFG, 'General', 'preferred_quality', 0))
        CORRECT_METADATA = bool(check_setting_int(CFG, 'General', 'correct_metadata', 0))
        MOVE_FILES = bool(check_setting_int(CFG, 'General', 'move_files', 0))
        RENAME_FILES = bool(check_setting_int(CFG, 'General', 'rename_files', 0))
        FOLDER_FORMAT = check_setting_str(CFG, 'General', 'folder_format', '$Series-($Year)')
        FILE_FORMAT = check_setting_str(CFG, 'General', 'file_format', '$Series $Issue ($Year)')
        BLACKHOLE = bool(check_setting_int(CFG, 'General', 'blackhole', 0))
        BLACKHOLE_DIR = check_setting_str(CFG, 'General', 'blackhole_dir', '')
        REPLACE_SPACES = bool(check_setting_int(CFG, 'General', 'replace_spaces', 0))
        REPLACE_CHAR = check_setting_str(CFG, 'General', 'replace_char', '')
        ZERO_LEVEL = bool(check_setting_int(CFG, 'General', 'zero_level', 0))
        ZERO_LEVEL_N = check_setting_str(CFG, 'General', 'zero_level_n', '')
        LOWERCASE_FILENAMES = bool(check_setting_int(CFG, 'General', 'lowercase_filenames', 0))

        PROWL_ENABLED = bool(check_setting_int(CFG, 'Prowl', 'prowl_enabled', 0))
        PROWL_KEYS = check_setting_str(CFG, 'Prowl', 'prowl_keys', '')
        PROWL_ONSNATCH = bool(check_setting_int(CFG, 'Prowl', 'prowl_onsnatch', 0))
        PROWL_PRIORITY = check_setting_int(CFG, 'Prowl', 'prowl_priority', 0)

        NMA_ENABLED = bool(check_setting_int(CFG, 'NMA', 'nma_enabled', 0))
        NMA_APIKEY = check_setting_str(CFG, 'NMA', 'nma_apikey', '')
        NMA_PRIORITY = check_setting_int(CFG, 'NMA', 'nma_priority', 0)
        NMA_ONSNATCH = bool(check_setting_int(CFG, 'NMA', 'nma_onsnatch', 0))

        USE_MINSIZE = bool(check_setting_int(CFG, 'General', 'use_minsize', 0))
        MINSIZE = check_setting_str(CFG, 'General', 'minsize', '')
        USE_MAXSIZE = bool(check_setting_int(CFG, 'General', 'use_maxsize', 0))
        MAXSIZE = check_setting_str(CFG, 'General', 'maxsize', '')
        ADD_TO_CSV = bool(check_setting_int(CFG, 'General', 'add_to_csv', 1))
        CVINFO = bool(check_setting_int(CFG, 'General', 'cvinfo', 0))
        LOG_LEVEL = check_setting_str(CFG, 'General', 'log_level', '')
        ENABLE_EXTRA_SCRIPTS = bool(check_setting_int(CFG, 'General', 'enable_extra_scripts', 0))
        EXTRA_SCRIPTS = check_setting_str(CFG, 'General', 'extra_scripts', '')

        ENABLE_PRE_SCRIPTS = bool(check_setting_int(CFG, 'General', 'enable_pre_scripts', 0))
        PRE_SCRIPTS = check_setting_str(CFG, 'General', 'pre_scripts', '')
        POST_PROCESSING = bool(check_setting_int(CFG, 'General', 'post_processing', 1))

        USE_SABNZBD = bool(check_setting_int(CFG, 'SABnzbd', 'use_sabnzbd', 0))
        SAB_HOST = check_setting_str(CFG, 'SABnzbd', 'sab_host', '')
        SAB_USERNAME = check_setting_str(CFG, 'SABnzbd', 'sab_username', '')
        SAB_PASSWORD = check_setting_str(CFG, 'SABnzbd', 'sab_password', '')
        SAB_APIKEY = check_setting_str(CFG, 'SABnzbd', 'sab_apikey', '')
        SAB_CATEGORY = check_setting_str(CFG, 'SABnzbd', 'sab_category', '')
        SAB_DIRECTORY = check_setting_str(CFG, 'SABnzbd', 'sab_directory', '')
        SAB_PRIORITY = check_setting_str(CFG, 'SABnzbd', 'sab_priority', '')
        if SAB_PRIORITY.isdigit():
            if SAB_PRIORITY == "0": SAB_PRIORITY = "Default"
            elif SAB_PRIORITY == "1": SAB_PRIORITY = "Low"
            elif SAB_PRIORITY == "2": SAB_PRIORITY = "Normal"
            elif SAB_PRIORITY == "3": SAB_PRIORITY = "High"
            elif SAB_PRIORITY == "4": SAB_PRIORITY = "Paused"
            else: SAB_PRIORITY = "Default"

        USE_NZBGET = bool(check_setting_int(CFG, 'NZBGet', 'use_nzbget', 0))
        NZBGET_HOST = check_setting_str(CFG, 'NZBGet', 'nzbget_host', '')
        NZBGET_PORT = check_setting_str(CFG, 'NZBGet', 'nzbget_port', '')
        NZBGET_USERNAME = check_setting_str(CFG, 'NZBGet', 'nzbget_username', '')
        NZBGET_PASSWORD = check_setting_str(CFG, 'NZBGet', 'nzbget_password', '')
        NZBGET_CATEGORY = check_setting_str(CFG, 'NZBGet', 'nzbget_category', '')
        NZBGET_PRIORITY = check_setting_str(CFG, 'NZBGet', 'nzbget_priority', '')

        NZBSU = bool(check_setting_int(CFG, 'NZBsu', 'nzbsu', 0))
        NZBSU_APIKEY = check_setting_str(CFG, 'NZBsu', 'nzbsu_apikey', '')

        DOGNZB = bool(check_setting_int(CFG, 'DOGnzb', 'dognzb', 0))
        DOGNZB_APIKEY = check_setting_str(CFG, 'DOGnzb', 'dognzb_apikey', '')

        NZBX = bool(check_setting_int(CFG, 'nzbx', 'nzbx', 0))

        RAW = bool(check_setting_int(CFG, 'Raw', 'raw', 0))
        RAW_PROVIDER = check_setting_str(CFG, 'Raw', 'raw_provider', '')
        RAW_USERNAME = check_setting_str(CFG, 'Raw', 'raw_username', '')
        RAW_PASSWORD  = check_setting_str(CFG, 'Raw', 'raw_password', '')
        RAW_GROUPS = check_setting_str(CFG, 'Raw', 'raw_groups', '')

        EXPERIMENTAL = bool(check_setting_int(CFG, 'Experimental', 'experimental', 0))

        NEWZNAB = bool(check_setting_int(CFG, 'Newznab', 'newznab', 0))
        NEWZNAB_HOST = check_setting_str(CFG, 'Newznab', 'newznab_host', '')
        NEWZNAB_APIKEY = check_setting_str(CFG, 'Newznab', 'newznab_apikey', '')
        NEWZNAB_ENABLED = bool(check_setting_int(CFG, 'Newznab', 'newznab_enabled', 1))

        # Need to pack the extra newznabs back into a list of tuples
        flattened_newznabs = check_setting_str(CFG, 'Newznab', 'extra_newznabs', [], log=False)
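        # the next line packs the flat sequence into 3-tuples, e.g. ['host1', 'key1', '1', 'host2', 'key2', '0'] -> [('host1', 'key1', '1'), ('host2', 'key2', '0')]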
        EXTRA_NEWZNABS = list(itertools.izip(*[itertools.islice(flattened_newznabs, i, None, 3) for i in range(3)]))

        
        # update folder formats in the config & bump up config version
        if CONFIG_VERSION == '0':
            from mylar.helpers import replace_all
            file_values = { 'issue':  'Issue', 'title': 'Title', 'series' : 'Series', 'year' : 'Year' }
            folder_values = { 'series' : 'Series', 'publisher':'Publisher', 'year' : 'Year', 'first' : 'First', 'lowerfirst' : 'first' }
            FILE_FORMAT = replace_all(FILE_FORMAT, file_values)
            FOLDER_FORMAT = replace_all(FOLDER_FORMAT, folder_values)
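            # e.g. a v0 format like 'series issue (year)' becomes 'Series Issue (Year)' here; the v1 -> v2 block below then prefixes each token with '$'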
            
            CONFIG_VERSION = '1'
            
        if CONFIG_VERSION == '1':

            from mylar.helpers import replace_all

            file_values = { 'Issue':        '$Issue',
                            'Title':        '$Title',
                            'Series':       '$Series',
                            'Year':         '$Year',
                            'title':        '$title',
                            'series':       '$series',
                            'year':         '$year'
                            }
            folder_values = {   'Series':       '$Series',
                                'Publisher':    '$Publisher',
                                'Year':         '$Year',
                                'First':        '$First',
                                'series':       '$series',
                                'publisher':    '$publisher',
                                'year':         '$year',
                                'first':        '$first'
                            }   
            FILE_FORMAT = replace_all(FILE_FORMAT, file_values)
            FOLDER_FORMAT = replace_all(FOLDER_FORMAT, folder_values)
            
            CONFIG_VERSION = '2'

        if 'http://' not in SAB_HOST[:7] and 'https://' not in SAB_HOST[:8]:
            SAB_HOST = 'http://' + SAB_HOST
            #print ("SAB_HOST:" + SAB_HOST)

        if not LOG_DIR:
            LOG_DIR = os.path.join(DATA_DIR, 'logs')

        if not os.path.exists(LOG_DIR):
            try:
                os.makedirs(LOG_DIR)
            except OSError:
                if VERBOSE:
                    print 'Unable to create the log directory. Logging to screen only.'

        # Start the logger, silence console logging if we need to
        logger.mylar_log.initLogger(verbose=VERBOSE)

        # Put the cache dir in the data dir for now
        if not CACHE_DIR:
            CACHE_DIR = os.path.join(str(DATA_DIR), 'cache')
        logger.info("cache set to : " + str(CACHE_DIR))
        if not os.path.exists(CACHE_DIR):
            try:
                os.makedirs(CACHE_DIR)
            except OSError:
                logger.error('Could not create cache dir. Check permissions of datadir: ' + DATA_DIR)

        # Sanity check for search interval. Set it to at least 6 hours
        if SEARCH_INTERVAL < 360:
            logger.info("Search interval too low. Resetting to 6 hour minimum")
            SEARCH_INTERVAL = 360


        # Initialize the database
        logger.info('Checking to see if the database has all tables....')
        try:
            dbcheck()
        except Exception, e:
            logger.error("Can't connect to the database: %s" % e)

        # Get the currently installed version - returns None, 'win32' or the git hash
        # Also sets INSTALL_TYPE variable to 'win', 'git' or 'source'
        CURRENT_VERSION = versioncheck.getVersion()
        if CURRENT_VERSION is not None:
            hash = CURRENT_VERSION[:7]
        else:
            hash = "unknown"

        if version.MYLAR_VERSION == 'master':
            vers = 'M'
        else:
            vers = 'D'

        USER_AGENT = 'Mylar/'+str(hash)+'('+vers+') +http://www.github.com/evilhero/mylar/'

        # Check for new versions
        if CHECK_GITHUB_ON_STARTUP:
            try:
                LATEST_VERSION = versioncheck.checkGithub()
            except:
                LATEST_VERSION = CURRENT_VERSION
        else:
            LATEST_VERSION = CURRENT_VERSION

        __INITIALIZED__ = True
        return True
Exemple #17
0
    def Process(self):
            self._log("nzb name: " + str(self.nzb_name), logger.DEBUG)
            self._log("nzb folder: " + str(self.nzb_folder), logger.DEBUG)
            logger.fdebug("nzb name: " + str(self.nzb_name))
            logger.fdebug("nzb folder: " + str(self.nzb_folder))
            if mylar.USE_SABNZBD==0:
                logger.fdebug("Not using SABNzbd")
            else:
                # if the SAB Directory option is enabled, let's use that folder name and append the jobname.
                if mylar.SAB_DIRECTORY is not None and mylar.SAB_DIRECTORY != 'None' and len(mylar.SAB_DIRECTORY) > 4:
                    self.nzb_folder = os.path.join(mylar.SAB_DIRECTORY, self.nzb_name).encode(mylar.SYS_ENCODING)
    
                #lookup nzb_name in nzblog table to get issueid
    
                #query SAB to find out if Replace Spaces enabled / not as well as Replace Decimals
                #http://localhost:8080/sabnzbd/api?mode=set_config&section=misc&keyword=dirscan_speed&value=5
                querysab = str(mylar.SAB_HOST) + "/api?mode=get_config&section=misc&output=xml&apikey=" + str(mylar.SAB_APIKEY)
                #logger.info("querysab_string:" + str(querysab))
                file = urllib2.urlopen(querysab)
                data = file.read()
                file.close()
                dom = parseString(data)
    
                sabreps = dom.getElementsByTagName('replace_spaces')[0].firstChild.wholeText
                sabrepd = dom.getElementsByTagName('replace_dots')[0].firstChild.wholeText
                logger.fdebug("SAB Replace Spaces: " + str(sabreps))
                logger.fdebug("SAB Replace Dots: " + str(sabrepd))
            if mylar.USE_NZBGET==1:
                logger.fdebug("Using NZBGET")
                logger.fdebug("NZB name as passed from NZBGet: " + self.nzb_name)
            myDB = db.DBConnection()

            nzbname = self.nzb_name
            #remove extensions from nzb_name if they somehow got through (Experimental most likely)
            extensions = ('.cbr', '.cbz')

            if nzbname.lower().endswith(extensions):
                fd, ext = os.path.splitext(nzbname)
                self._log("Removed extension from nzb: " + ext, logger.DEBUG)
                nzbname = fd  #splitext already gives us the name without the extension

            #replace spaces
            nzbname = re.sub(' ', '.', str(nzbname))
            nzbname = re.sub('[\,\:\?]', '', str(nzbname))
            nzbname = re.sub('[\&]', 'and', str(nzbname))

            logger.fdebug("After conversions, nzbname is : " + str(nzbname))
#            if mylar.USE_NZBGET==1:
#                nzbname=self.nzb_name
            self._log("nzbname: " + str(nzbname), logger.DEBUG)

            nzbiss = myDB.action("SELECT * from nzblog WHERE nzbname=?", [nzbname]).fetchone()

            if nzbiss is None:
                self._log("Failure - could not initially locate nzbfile in my database to rename.", logger.DEBUG)
                logger.fdebug("Failure - could not locate nzbfile initially.")
                # if failed on spaces, change it all to decimals and try again.
                nzbname = re.sub('_', '.', str(nzbname))
                self._log("trying again with this nzbname: " + str(nzbname), logger.DEBUG)
                logger.fdebug("trying again with nzbname of : " + str(nzbname))
                nzbiss = myDB.action("SELECT * from nzblog WHERE nzbname=?", [nzbname]).fetchone()
                if nzbiss is None:
                    logger.error(u"Unable to locate downloaded file to rename. PostProcessing aborted.")
                    return
                else:
                    self._log("I corrected and found the nzb as : " + str(nzbname))
                    logger.fdebug("auto-corrected and found the nzb as : " + str(nzbname))
                    issueid = nzbiss['IssueID']
            else: 
                issueid = nzbiss['IssueID']
                print "issueid:" + str(issueid)
                #use issueid to get publisher, series, year, issue number
            issuenzb = myDB.action("SELECT * from issues WHERE issueid=?", [issueid]).fetchone()
            if helpers.is_number(issueid):
                sandwich = int(issuenzb['IssueID'])
            else:
                #if it's non-numeric, it contains a 'G' at the beginning indicating it's a multi-volume
                #using GCD data. Set sandwich to 1 so it will bypass and continue post-processing.
                sandwich = 1
            if issuenzb is None or sandwich >= 900000:
                # this has no issueID, therefore it's a one-off or a manual post-proc.
                # At this point, let's just drop it into the Comic Location folder and forget about it..
                self._log("One-off mode enabled for Post-Processing. All I'm doing is moving the file untouched into the Grab-bag directory.", logger.DEBUG)
                logger.info("One-off mode enabled for Post-Processing. Will move into Grab-bag directory.")
                self._log("Grab-Bag Directory set to : " + mylar.GRABBAG_DIR, logger.DEBUG)
                for root, dirnames, filenames in os.walk(self.nzb_folder):
                    for filename in filenames:
                        if filename.lower().endswith(extensions):
                            ofilename = filename
                            path, ext = os.path.splitext(ofilename)

                if mylar.GRABBAG_DIR:
                    grdst = mylar.GRABBAG_DIR
                else:
                    grdst = mylar.DESTINATION_DIR

                grab_dst = os.path.join(grdst, ofilename)
                self._log("Destination Path : " + grab_dst, logger.DEBUG)
                grab_src = os.path.join(self.nzb_folder, ofilename)
                self._log("Source Path : " + grab_src, logger.DEBUG)
                logger.info("Moving " + str(ofilename) + " into grab-bag directory : " + str(grdst))

                try:
                    shutil.move(grab_src, grab_dst)
                except (OSError, IOError):
                    self.log("Failed to move directory - check directories and manually re-run.", logger.DEBUG)
                    logger.debug("Failed to move directory - check directories and manually re-run.")
                    return self.log
                #tidyup old path
                try:
                    shutil.rmtree(self.nzb_folder)
                except (OSError, IOError):
                    self._log("Failed to remove temporary directory.", logger.DEBUG)
                    logger.debug("Failed to remove temporary directory - check directory and manually re-run.")
                    return self.log

                logger.debug("Removed temporary directory : " + str(self.nzb_folder))
                self._log("Removed temporary directory : " + self.nzb_folder, logger.DEBUG)
                #delete entry from nzblog table
                myDB.action('DELETE from nzblog WHERE issueid=?', [issueid])
                return self.log

            comicid = issuenzb['ComicID']
            issuenumOG = issuenzb['Issue_Number']
            #issueno = str(issuenum).split('.')[0]
            #new CV API - removed all decimals...here we go AGAIN!
            issuenum = issuenumOG
            issue_except = 'None'
            if 'au' in issuenum.lower():
                issuenum = re.sub("[^0-9]", "", issuenum)
                issue_except = ' AU'
            if '.' in issuenum:
                iss_find = issuenum.find('.')
                iss_b4dec = issuenum[:iss_find]
                iss_decval = issuenum[iss_find+1:]
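                # e.g. '5.1' -> iss_b4dec '5', iss_decval '1' -> iss '5.1', issueno '5'; a trailing-zero decimal like '5.0' collapses to iss '5'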
                if int(iss_decval) == 0:
                    iss = iss_b4dec
                    issdec = int(iss_decval)
                    issueno = str(iss)
                    self._log("Issue Number: " + str(issueno), logger.DEBUG)
                    logger.fdebug("Issue Number: " + str(issueno))
                else:
                    if len(iss_decval) == 1:
                        iss = iss_b4dec + "." + iss_decval
                        issdec = int(iss_decval) * 10
                    else:
                        iss = iss_b4dec + "." + iss_decval.rstrip('0')
                        issdec = int(iss_decval.rstrip('0')) * 10
                    issueno = iss_b4dec
                    self._log("Issue Number: " + str(iss), logger.DEBUG)
                    logger.fdebug("Issue Number: " + str(iss))
            else:
                iss = issuenum
                issueno = str(iss)
            # issue zero-suppression here
            if mylar.ZERO_LEVEL == "0": 
                zeroadd = ""
            else:
                if mylar.ZERO_LEVEL_N  == "none": zeroadd = ""
                elif mylar.ZERO_LEVEL_N == "0x": zeroadd = "0"
                elif mylar.ZERO_LEVEL_N == "00x": zeroadd = "00"

            logger.fdebug("Zero Suppression set to : " + str(mylar.ZERO_LEVEL_N))

            if len(str(issueno)) > 0:
                if int(issueno) < 10:
                    self._log("issue detected less than 10", logger.DEBUG)
                    if '.' in iss:
                        if int(iss_decval) > 0:
                            issueno = str(iss)
                            prettycomiss = str(zeroadd) + str(iss)
                        else:
                            prettycomiss = str(zeroadd) + str(int(issueno))
                    else:
                        prettycomiss = str(zeroadd) + str(iss)
                    if issue_except != 'None': 
                        prettycomiss = str(prettycomiss) + issue_except
                    self._log("Zero level supplement set to " + str(mylar.ZERO_LEVEL_N) + ". Issue will be set as : " + str(prettycomiss), logger.DEBUG)
                elif int(issueno) >= 10 and int(issueno) < 100:
                    self._log("issue detected greater than 10, but less than 100", logger.DEBUG)
                    if mylar.ZERO_LEVEL_N == "none":
                        zeroadd = ""
                    else:
                        zeroadd = "0"
                    if '.' in iss:
                        if int(iss_decval) > 0:
                            issueno = str(iss)
                            prettycomiss = str(zeroadd) + str(iss)
                        else:
                            prettycomiss = str(zeroadd) + str(int(issueno))
                    else:
                        prettycomiss = str(zeroadd) + str(iss)
                    if issue_except != 'None':
                        prettycomiss = str(prettycomiss) + issue_except
                    self._log("Zero level supplement set to " + str(mylar.ZERO_LEVEL_N) + ".Issue will be set as : " + str(prettycomiss), logger.DEBUG)
                else:
                    self._log("issue detected greater than 100", logger.DEBUG)
                    if '.' in iss:
                        if int(iss_decval) > 0:
                            issueno = str(iss)
                    prettycomiss = str(issueno)
                    if issue_except != 'None':
                        prettycomiss = str(prettycomiss) + issue_except
                    self._log("Zero level supplement set to " + str(mylar.ZERO_LEVEL_N) + ". Issue will be set as : " + str(prettycomiss), logger.DEBUG)
            else:
                prettycomiss = str(issueno)
                self._log("issue length error - cannot determine length. Defaulting to None:  " + str(prettycomiss), logger.DEBUG)

            logger.fdebug("Pretty Comic Issue is : " + str(prettycomiss))
            issueyear = issuenzb['IssueDate'][:4]
            self._log("Issue Year: " + str(issueyear), logger.DEBUG)
            logger.fdebug("Issue Year : " + str(issueyear))
            comicnzb= myDB.action("SELECT * from comics WHERE comicid=?", [comicid]).fetchone()
            publisher = comicnzb['ComicPublisher']
            self._log("Publisher: " + publisher, logger.DEBUG)
            logger.fdebug("Publisher: " + str(publisher))
            #we need to un-unicode this to make sure we can write the filenames properly for spec.chars
            series = comicnzb['ComicName'].encode('ascii', 'ignore').strip()
            self._log("Series: " + series, logger.DEBUG)
            logger.fdebug("Series: " + str(series))
            seriesyear = comicnzb['ComicYear']
            self._log("Year: " + seriesyear, logger.DEBUG)
            logger.fdebug("Year: "  + str(seriesyear))
            comlocation = comicnzb['ComicLocation']
            self._log("Comic Location: " + comlocation, logger.DEBUG)
            logger.fdebug("Comic Location: " + str(comlocation))
            comversion = comicnzb['ComicVersion']
            self._log("Comic Version: " + str(comversion), logger.DEBUG)
            logger.fdebug("Comic Version: " + str(comversion))
            if comversion is None:
                comversion = 'None'
            #if comversion is None, remove it so it doesn't populate with 'None'
            if comversion == 'None':
                chunk_f_f = re.sub('\$VolumeN','',mylar.FILE_FORMAT)
                chunk_f = re.compile(r'\s+')
                chunk_file_format = chunk_f.sub(' ', chunk_f_f)
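                # e.g. '$Series $VolumeN $Issue ($Year)' becomes '$Series $Issue ($Year)' once the token is stripped and the doubled space collapsed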
                self._log("No version # found for series - tag will not be available for renaming.", logger.DEBUG)
                logger.fdebug("No version # found for series, removing from filename")
                logger.fdebug("new format is now: " + str(chunk_file_format))
            else:
                chunk_file_format = mylar.FILE_FORMAT
            #Run Pre-script

            if mylar.ENABLE_PRE_SCRIPTS:
                nzbn = self.nzb_name #original nzb name
                nzbf = self.nzb_folder #original nzb folder
                #name, comicyear, comicid , issueid, issueyear, issue, publisher
                #create the dic and send it.
                seriesmeta = []
                seriesmetadata = {}
                seriesmeta.append({
                            'name':                 series,
                            'comicyear':            seriesyear,
                            'comicid':              comicid,
                            'issueid':              issueid,
                            'issueyear':            issueyear,
                            'issue':                issuenum,
                            'publisher':            publisher
                            })
                seriesmetadata['seriesmeta'] = seriesmeta
                self._run_pre_scripts(nzbn, nzbf, seriesmetadata )

        #rename file and move to new path
        #nfilename = series + " " + issueno + " (" + seriesyear + ")"

            file_values = {'$Series':    series,
                           '$Issue':     prettycomiss,
                           '$Year':      issueyear,
                           '$series':    series.lower(),
                           '$Publisher': publisher,
                           '$publisher': publisher.lower(),
                           '$VolumeY':   'V' + str(seriesyear),
                           '$VolumeN':   comversion
                          }
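            # e.g. with the default FILE_FORMAT '$Series $Issue ($Year)', series 'Batman', issue '005' and year '2012' this yields 'Batman 005 (2012)'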

            for root, dirnames, filenames in os.walk(self.nzb_folder):
                for filename in filenames:
                    if filename.lower().endswith(extensions):
                        ofilename = filename
                        path, ext = os.path.splitext(ofilename)
            self._log("Original Filename: " + ofilename, logger.DEBUG)
            self._log("Original Extension: " + ext, logger.DEBUG)
            logger.fdebug("Original Filname: " + str(ofilename))
            logger.fdebug("Original Extension: " + str(ext))

            if mylar.FILE_FORMAT == '' or not mylar.RENAME_FILES:
                self._log("Rename Files isn't enabled...keeping original filename.", logger.DEBUG)
                logger.fdebug("Rename Files isn't enabled - keeping original filename.")
                #check if extension is in nzb_name - will screw up otherwise
                if ofilename.lower().endswith(extensions):
                    nfilename = ofilename[:-4]
                else:
                    nfilename = ofilename
            else:
                nfilename = helpers.replace_all(chunk_file_format, file_values)
                if mylar.REPLACE_SPACES:
                    #mylar.REPLACE_CHAR ...determines what to replace spaces with underscore or dot
                    nfilename = nfilename.replace(' ', mylar.REPLACE_CHAR)
            nfilename = re.sub('[\,\:\?]', '', nfilename)
            self._log("New Filename: " + nfilename, logger.DEBUG)
            logger.fdebug("New Filename: " + str(nfilename))

            src = os.path.join(self.nzb_folder, ofilename)

            filechecker.validateAndCreateDirectory(comlocation, True)

            if mylar.LOWERCASE_FILENAMES:
                dst = (comlocation + "/" + nfilename + ext).lower()
            else:
                dst = comlocation + "/" + nfilename + ext.lower()    
            self._log("Source:" + src, logger.DEBUG)
            self._log("Destination:" +  dst, logger.DEBUG)
            logger.fdebug("Source: " + str(src))
            logger.fdebug("Destination: " + str(dst))

            os.rename(os.path.join(self.nzb_folder, str(ofilename)), os.path.join(self.nzb_folder,str(nfilename + ext)))
            src = os.path.join(self.nzb_folder, str(nfilename + ext))
            try:
                shutil.move(src, dst)
            except (OSError, IOError):
                self._log("Failed to move directory - check directories and manually re-run.", logger.DEBUG)
                self._log("Post-Processing ABORTED.", logger.DEBUG)
                return
            #tidyup old path
            try:
                shutil.rmtree(self.nzb_folder)
            except (OSError, IOError):
                self._log("Failed to remove temporary directory - check directory and manually re-run.", logger.DEBUG)
                self._log("Post-Processing ABORTED.", logger.DEBUG)
                return

            self._log("Removed temporary directory : " + str(self.nzb_folder), logger.DEBUG)
            #delete entry from nzblog table
            myDB.action('DELETE from nzblog WHERE issueid=?', [issueid])
            #force rescan of files
            updater.forceRescan(comicid)
            logger.info(u"Post-Processing completed for: " + series + " issue: " + str(issuenumOG) )
            self._log(u"Post Processing SUCCESSFULL! ", logger.DEBUG)

            if mylar.PROWL_ENABLED:
                pushmessage = series + '(' + issueyear + ') - issue #' + issuenumOG
                logger.info(u"Prowl request")
                prowl = notifiers.PROWL()
                prowl.notify(pushmessage,"Download and Postprocessing completed")

            if mylar.NMA_ENABLED:
                nma = notifiers.NMA()
                nma.notify(series, str(issueyear), str(issuenumOG))

            if mylar.PUSHOVER_ENABLED:
                pushmessage = series + ' (' + str(issueyear) + ') - issue #' + str(issuenumOG)
                logger.info(u"Pushover request")
                pushover = notifiers.PUSHOVER()
                pushover.notify(pushmessage, "Download and Post-Processing completed")
             
            # retrieve/create the corresponding comic objects

            if mylar.ENABLE_EXTRA_SCRIPTS:
                folderp = str(dst) #folder location after move/rename
                nzbn = self.nzb_name #original nzb name
                filen = str(nfilename + ext) #new filename
                #name, comicyear, comicid , issueid, issueyear, issue, publisher
                #create the dic and send it.
                seriesmeta = []
                seriesmetadata = {}
                seriesmeta.append({
                            'name':                 series,
                            'comicyear':            seriesyear,
                            'comicid':              comicid,
                            'issueid':              issueid,
                            'issueyear':            issueyear,
                            'issue':                issuenum,
                            'publisher':            publisher
                            })
                seriesmetadata['seriesmeta'] = seriesmeta
                self._run_extra_scripts(nzbname, self.nzb_folder, filen, folderp, seriesmetadata )

            return self.log
Exemple #18
0
def GCDimport(gcomicid, pullupd=None,imported=None,ogcname=None):
    # this is for importing via GCD only and not using CV.
    # used when volume spanning is discovered for a Comic (and can't be added using CV).
    # Issue Counts are wrong (and can't be added).

    # because Comicvine ComicID and GCD ComicID could be identical at some random point, let's distinguish.
    # CV = comicid, GCD = gcomicid :) (ie. CV=2740, GCD=G3719)
    
    gcdcomicid = gcomicid
    myDB = db.DBConnection()

    # We need the current minimal info in the database instantly
    # so we don't throw a 500 error when we redirect to the artistPage

    controlValueDict = {"ComicID":     gcdcomicid}

    comic = myDB.action('SELECT ComicName, ComicYear, Total, ComicPublished, ComicImage, ComicLocation, ComicPublisher FROM comics WHERE ComicID=?', [gcomicid]).fetchone()
    ComicName = comic[0]
    ComicYear = comic[1]
    ComicIssues = comic[2]
    ComicPublished = comic[3]
    comlocation = comic[5]
    ComicPublisher = comic[6]
    #ComicImage = comic[4]
    #print ("Comic:" + str(ComicName))

    newValueDict = {"Status":   "Loading"}
    myDB.upsert("comics", newValueDict, controlValueDict)

    # we need to lookup the info for the requested ComicID in full now
    #comic = cv.getComic(comicid,'comic')

    if not comic:
        logger.warn("Error fetching comic. ID for : " + gcdcomicid)
        if dbcomic is None:
            newValueDict = {"ComicName":   "Fetch failed, try refreshing. (%s)" % (gcdcomicid),
                    "Status":   "Active"}
        else:
            newValueDict = {"Status":   "Active"}
        myDB.upsert("comics", newValueDict, controlValueDict)
        return

    #run the re-sortorder here in order to properly display the page
    if pullupd is None:
        helpers.ComicSort(comicorder=mylar.COMICSORT, imported=gcomicid)

    if ComicName.startswith('The '):
        sortname = ComicName[4:]
    else:
        sortname = ComicName


    logger.info(u"Now adding/updating: " + ComicName)
    #--Now that we know ComicName, let's try some scraping
    #--Start
    # gcd will return issue details (most importantly publishing date)
    comicid = gcomicid[1:]
    resultURL = "/series/" + str(comicid) + "/"
    gcdinfo=parseit.GCDdetails(comseries=None, resultURL=resultURL, vari_loop=0, ComicID=gcdcomicid, TotalIssues=ComicIssues, issvariation=None, resultPublished=None)
    if gcdinfo == "No Match":
        logger.warn("No matching result found for " + ComicName + " (" + ComicYear + ")" )
        updater.no_searchresults(gcomicid)
        nomatch = "true"
        return nomatch
    logger.info(u"Sucessfully retrieved details for " + ComicName )
    # print ("Series Published" + parseit.resultPublished)
    #--End
    
    ComicImage = gcdinfo['ComicImage']

    #comic book location on machine
    # setup default location here
    if comlocation is None:
        # let's remove the non-standard characters here.
        u_comicnm = ComicName
        u_comicname = u_comicnm.encode('ascii', 'ignore').strip()
        if ':' in u_comicname or '/' in u_comicname or ',' in u_comicname:
            comicdir = u_comicname
            if ':' in comicdir:
                comicdir = comicdir.replace(':','')
            if '/' in comicdir:
                comicdir = comicdir.replace('/','-')
            if ',' in comicdir:
                comicdir = comicdir.replace(',','')            
        else: comicdir = u_comicname

        series = comicdir
        publisher = ComicPublisher
        year = ComicYear

        #do work to generate folder path
        values = {'$Series':        series,
                  '$Publisher':     publisher,
                  '$Year':          year,
                  '$series':        series.lower(),
                  '$publisher':     publisher.lower(),
                  '$Volume':        year
                  }

        if mylar.FOLDER_FORMAT == '':
            comlocation = mylar.DESTINATION_DIR + "/" + comicdir + " (" + comic['ComicYear'] + ")"
        else:
            comlocation = mylar.DESTINATION_DIR + "/" + helpers.replace_all(mylar.FOLDER_FORMAT, values)
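        # worked example (illustrative values, not from the source): with a folder format of
        # "$Publisher/$Series ($Year)" and series "Watchmen", publisher "DC Comics", year "1986",
        # replace_all() above would yield DESTINATION_DIR + "/DC Comics/Watchmen (1986)".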

        #comlocation = mylar.DESTINATION_DIR + "/" + comicdir + " (" + ComicYear + ")"
        if mylar.DESTINATION_DIR == "":
            logger.error(u"There is no general directory specified - please specify in Config/Post-Processing.")
            return
        if mylar.REPLACE_SPACES:
            #mylar.REPLACE_CHAR ...determines what to replace spaces with underscore or dot
            comlocation = comlocation.replace(' ', mylar.REPLACE_CHAR)

    #if it doesn't exist - create it (otherwise will bugger up later on)
    if os.path.isdir(str(comlocation)):
        logger.info(u"Directory (" + str(comlocation) + ") already exists! Continuing...")
    else:
        #print ("Directory doesn't exist!")
        #try:
        #    os.makedirs(str(comlocation))
        #    logger.info(u"Directory successfully created at: " + str(comlocation))
        #except OSError:
        #    logger.error(u"Could not create comicdir : " + str(comlocation))
        filechecker.validateAndCreateDirectory(comlocation, True)

    comicIssues = gcdinfo['totalissues']

    #let's download the image...
    if not os.path.exists(mylar.CACHE_DIR):
        #let's make the dir.
        try:
            os.makedirs(str(mylar.CACHE_DIR))
            logger.info(u"Cache Directory successfully created at: " + str(mylar.CACHE_DIR))
        except OSError:
            logger.error(u"Could not create cache dir : " + str(mylar.CACHE_DIR))

    coverfile = os.path.join(mylar.CACHE_DIR, str(gcomicid) + ".jpg")

    #try:
    urllib.urlretrieve(str(ComicImage), str(coverfile))
    try:
        with open(str(coverfile)) as f:
            ComicImage = os.path.join('cache',str(gcomicid) + ".jpg")

            #this is for Firefox when outside the LAN...it works, but I don't know how to implement it
            #without breaking the normal flow for inside the LAN (above)
            #ComicImage = "http://" + str(mylar.HTTP_HOST) + ":" + str(mylar.HTTP_PORT) + "/cache/" + str(comi$

            logger.info(u"Sucessfully retrieved cover for " + ComicName)
            #if the comic cover local is checked, save a cover.jpg to the series folder.
            if mylar.COMIC_COVER_LOCAL:
                comiclocal = os.path.join(str(comlocation) + "/cover.jpg")
                shutil.copy(ComicImage,comiclocal)
    except IOError as e:
        logger.error(u"Unable to save cover locally at this time.")
        
    #if comic['ComicVersion'].isdigit():
    #    comicVol = "v" + comic['ComicVersion']
    #else:
    #    comicVol = None


    controlValueDict = {"ComicID":      gcomicid}
    newValueDict = {"ComicName":        ComicName,
                    "ComicSortName":    sortname,
                    "ComicYear":        ComicYear,
                    "Total":            comicIssues,
                    "ComicLocation":    comlocation,
                    #"ComicVersion":     comicVol,
                    "ComicImage":       ComicImage,
                    #"ComicPublisher":   comic['ComicPublisher'],
                    #"ComicPublished":   comicPublished,
                    "DateAdded":        helpers.today(),
                    "Status":           "Loading"}

    myDB.upsert("comics", newValueDict, controlValueDict)

    #comicsort here...
    #run the re-sortorder here in order to properly display the page
    if pullupd is None:
        helpers.ComicSort(sequence='update')

    logger.info(u"Sucessfully retrieved issue details for " + ComicName )
    n = 0
    iscnt = int(comicIssues)
    issnum = []
    issname = []
    issdate = []
    int_issnum = []
    #let's start issue #'s at 0 -- thanks to DC for the new 52 reboot! :)
    latestiss = "0"
    latestdate = "0000-00-00"
    #print ("total issues:" + str(iscnt))
    #---removed NEW code here---
    logger.info(u"Now adding/updating issues for " + ComicName)
    bb = 0
    while (bb <= iscnt):
        #---NEW.code
        try:
            gcdval = gcdinfo['gcdchoice'][bb]
            #print ("gcdval: " + str(gcdval))
        except IndexError:
            #account for gcd variation here
            if gcdinfo['gcdvariation'] == 'gcd':
                #print ("gcd-variation accounted for.")
                issdate = '0000-00-00'
                int_issnum = int(gcdis / 1000)
            break
        if 'nn' in str(gcdval['GCDIssue']):
            #no number detected - GN, TP or the like
            logger.warn(u"Non Series detected (Graphic Novel, etc) - cannot proceed at this time.")
            updater.no_searchresults(comicid)
            return
        elif '.' in str(gcdval['GCDIssue']):
            issst = str(gcdval['GCDIssue']).find('.')
            issb4dec = str(gcdval['GCDIssue'])[:issst]
            #if the length of decimal is only 1 digit, assume it's a tenth
            decis = str(gcdval['GCDIssue'])[issst+1:]
            if len(decis) == 1:
                decisval = int(decis) * 10
                issaftdec = str(decisval)
            if len(decis) == 2:
                decisval = int(decis)
                issaftdec = str(decisval)
            if int(issaftdec) == 0: issaftdec = "00"
            gcd_issue = issb4dec + "." + issaftdec
            gcdis = (int(issb4dec) * 1000) + decisval
        else:
            gcdis = int(str(gcdval['GCDIssue'])) * 1000
            gcd_issue = str(gcdval['GCDIssue'])
        #get the latest issue / date using the date.
        int_issnum = int( gcdis / 1000 )
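        # worked example of the x1000 encoding above (illustrative): GCDIssue "12.5" gives
        # issb4dec "12", decis "5", decisval 50, so gcdis = 12*1000 + 50 = 12050 and
        # gcd_issue "12.50"; a whole number such as "12" simply becomes 12000.
        # int(gcdis / 1000) then recovers 12 as the sortable Int_IssueNumber.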
        issdate = str(gcdval['GCDDate'])
        issid = "G" + str(gcdval['IssueID'])
        if gcdval['GCDDate'] > latestdate:
            latestiss = str(gcd_issue)
            latestdate = str(gcdval['GCDDate'])
        #print("(" + str(bb) + ") IssueID: " + str(issid) + " IssueNo: " + str(gcd_issue) + " Date" + str(issdate) )
        #---END.NEW.

        # check if the issue already exists
        iss_exists = myDB.action('SELECT * from issues WHERE IssueID=?', [issid]).fetchone()

        #adjust for inconsistencies in GCD date format - some dates have ? which borks up things.
        if "?" in str(issdate):
            issdate = "0000-00-00"

        controlValueDict = {"IssueID":  issid}
        newValueDict = {"ComicID":            gcomicid,
                        "ComicName":          ComicName,
                        "Issue_Number":       gcd_issue,
                        "IssueDate":          issdate,
                        "Int_IssueNumber":    int_issnum
                        }

        # Only change the status & add DateAdded if the issue is not already in the database
        if iss_exists is None:
            newValueDict['DateAdded'] = helpers.today()

        #print ("issueid:" + str(controlValueDict))
        #print ("values:" + str(newValueDict))

        if mylar.AUTOWANT_ALL:
            newValueDict['Status'] = "Wanted"
        elif issdate > helpers.today() and mylar.AUTOWANT_UPCOMING:
            newValueDict['Status'] = "Wanted"
        else:
            newValueDict['Status'] = "Skipped"

        if iss_exists:
            #print ("Existing status : " + str(iss_exists['Status']))
            newValueDict['Status'] = iss_exists['Status']


        myDB.upsert("issues", newValueDict, controlValueDict)
        bb+=1

#        logger.debug(u"Updating comic cache for " + ComicName)
#        cache.getThumb(ComicID=issue['issueid'])

#        logger.debug(u"Updating cache for: " + ComicName)
#        cache.getThumb(ComicIDcomicid)


    controlValueStat = {"ComicID":     gcomicid}
    newValueStat = {"Status":          "Active",
                    "LatestIssue":     latestiss,
                    "LatestDate":      latestdate,
                    "LastUpdated":     helpers.now()
                   }

    myDB.upsert("comics", newValueStat, controlValueStat)

    if mylar.CVINFO:
        if not os.path.exists(comlocation + "/cvinfo"):
            with open(comlocation + "/cvinfo","w") as text_file:
                text_file.write("http://www.comicvine.com/volume/49-" + str(comicid))

    logger.info(u"Updating complete for: " + ComicName)

    #move the files...if imported is not empty (meaning it's not from the mass importer.)
    if imported is None or imported == 'None':
        pass
    else:
        if mylar.IMP_MOVE:
            logger.info("Mass import - Move files")
            moveit.movefiles(gcomicid,comlocation,ogcname)
        else:
            logger.info("Mass import - Moving not Enabled. Setting Archived Status for import.")
            moveit.archivefiles(gcomicid,ogcname)

    #check for existing files...
    updater.forceRescan(gcomicid)


    if pullupd is None:
        # let's check the pullist for anything at this time as well since we're here.
        if mylar.AUTOWANT_UPCOMING and 'Present' in ComicPublished:
            logger.info(u"Checking this week's pullist for new issues of " + ComicName)
            updater.newpullcheck(comic['ComicName'], gcomicid)

        #here we grab issues that have been marked as wanted above...

        results = myDB.select("SELECT * FROM issues where ComicID=? AND Status='Wanted'", [gcomicid])
        if results:
            logger.info(u"Attempting to grab wanted issues for : "  + ComicName)

            for result in results:
                foundNZB = "none"
                if (mylar.NZBSU or mylar.DOGNZB or mylar.EXPERIMENTAL or mylar.NEWZNAB or mylar.NZBX) and (mylar.SAB_HOST):
                    foundNZB = search.searchforissue(result['IssueID'])
                    if foundNZB == "yes":
                        updater.foundsearch(result['ComicID'], result['IssueID'])
        else: logger.info(u"No issues marked as wanted for " + ComicName)

        logger.info(u"Finished grabbing what I could.")
Exemple #19
0
def initialize():

    with INIT_LOCK:

        global __INITIALIZED__, FULL_PATH, PROG_DIR, VERBOSE, DAEMON, DATA_DIR, CONFIG_FILE, CFG, CONFIG_VERSION, LOG_DIR, CACHE_DIR, LOGVERBOSE, HTTP_PORT, HTTP_HOST, HTTP_USERNAME, HTTP_PASSWORD, HTTP_ROOT, LAUNCH_BROWSER, GIT_PATH, CURRENT_VERSION, LATEST_VERSION, CHECK_GITHUB, CHECK_GITHUB_ON_STARTUP, CHECK_GITHUB_INTERVAL, MUSIC_DIR, DESTINATION_DIR, DOWNLOAD_DIR, USENET_RETENTION, SEARCH_INTERVAL, INTERFACE, AUTOWANT_ALL, AUTOWANT_UPCOMING, ZERO_LEVEL, ZERO_LEVEL_N, COMIC_COVER_LOCAL, LIBRARYSCAN_INTERVAL, DOWNLOAD_SCAN_INTERVAL, SAB_HOST, SAB_USERNAME, SAB_PASSWORD, SAB_APIKEY, SAB_CATEGORY, SAB_PRIORITY, BLACKHOLE, BLACKHOLE_DIR, NZBSU, NZBSU_APIKEY, DOGNZB, DOGNZB_APIKEY, NZBX, NEWZNAB, NEWZNAB_HOST, NEWZNAB_APIKEY, NEWZNAB_ENABLED, EXTRA_NEWZNABS, RAW, RAW_PROVIDER, RAW_USERNAME, RAW_PASSWORD, RAW_GROUPS, EXPERIMENTAL, PREFERRED_QUALITY, MOVE_FILES, RENAME_FILES, USE_MINSIZE, MINSIZE, USE_MAXSIZE, MAXSIZE, CORRECT_METADATA, FOLDER_FORMAT, FILE_FORMAT, REPLACE_CHAR, REPLACE_SPACES, COMIC_LOCATION, QUAL_ALTVERS, QUAL_SCANNER, QUAL_TYPE, QUAL_QUALITY, ENABLE_EXTRA_SCRIPTS, EXTRA_SCRIPTS, ENABLE_PRE_SCRIPTS, PRE_SCRIPTS

        if __INITIALIZED__:
            return False

        # Make sure all the config sections exist
        CheckSection("General")
        CheckSection("SABnzbd")
        CheckSection("NZBsu")
        CheckSection("DOGnzb")
        CheckSection("Raw")
        CheckSection("Experimental")
        CheckSection("Newznab")
        # Set global variables based on config file or use defaults
        try:
            HTTP_PORT = check_setting_int(CFG, "General", "http_port", 8090)
        except:
            HTTP_PORT = 8090

        if HTTP_PORT < 21 or HTTP_PORT > 65535:
            HTTP_PORT = 8090

        #        CONFIG_VERSION = check_setting_str(CFG, 'General', 'config_version', '')
        HTTP_HOST = check_setting_str(CFG, "General", "http_host", "0.0.0.0")
        HTTP_USERNAME = check_setting_str(CFG, "General", "http_username", "")
        HTTP_PASSWORD = check_setting_str(CFG, "General", "http_password", "")
        HTTP_ROOT = check_setting_str(CFG, "General", "http_root", "/")
        LAUNCH_BROWSER = bool(check_setting_int(CFG, "General", "launch_browser", 1))
        LOGVERBOSE = bool(check_setting_int(CFG, "General", "logverbose", 1))
        GIT_PATH = check_setting_str(CFG, "General", "git_path", "")
        LOG_DIR = check_setting_str(CFG, "General", "log_dir", "")

        CHECK_GITHUB = bool(check_setting_int(CFG, "General", "check_github", 1))
        CHECK_GITHUB_ON_STARTUP = bool(check_setting_int(CFG, "General", "check_github_on_startup", 1))
        CHECK_GITHUB_INTERVAL = check_setting_int(CFG, "General", "check_github_interval", 360)

        DESTINATION_DIR = check_setting_str(CFG, "General", "destination_dir", "")
        USENET_RETENTION = check_setting_int(CFG, "General", "usenet_retention", "1500")

        SEARCH_INTERVAL = check_setting_int(CFG, "General", "search_interval", 360)
        LIBRARYSCAN_INTERVAL = check_setting_int(CFG, "General", "libraryscan_interval", 300)
        DOWNLOAD_SCAN_INTERVAL = check_setting_int(CFG, "General", "download_scan_interval", 5)
        INTERFACE = check_setting_str(CFG, "General", "interface", "default")
        AUTOWANT_ALL = bool(check_setting_int(CFG, "General", "autowant_all", 0))
        AUTOWANT_UPCOMING = bool(check_setting_int(CFG, "General", "autowant_upcoming", 1))
        COMIC_COVER_LOCAL = bool(check_setting_int(CFG, "General", "comic_cover_local", 0))
        PREFERRED_QUALITY = check_setting_int(CFG, "General", "preferred_quality", 0)
        CORRECT_METADATA = bool(check_setting_int(CFG, "General", "correct_metadata", 0))
        MOVE_FILES = bool(check_setting_int(CFG, "General", "move_files", 0))
        RENAME_FILES = bool(check_setting_int(CFG, "General", "rename_files", 0))
        FOLDER_FORMAT = check_setting_str(CFG, "General", "folder_format", "$Series-($Year)")
        FILE_FORMAT = check_setting_str(CFG, "General", "file_format", "$Series $Issue ($Year)")
        BLACKHOLE = bool(check_setting_int(CFG, "General", "blackhole", 0))
        BLACKHOLE_DIR = check_setting_str(CFG, "General", "blackhole_dir", "")
        REPLACE_SPACES = bool(check_setting_int(CFG, "General", "replace_spaces", 0))
        REPLACE_CHAR = check_setting_str(CFG, "General", "replace_char", "")
        ZERO_LEVEL = bool(check_setting_int(CFG, "General", "zero_level", 0))
        ZERO_LEVEL_N = check_setting_str(CFG, "General", "zero_level_n", "")
        USE_MINSIZE = bool(check_setting_int(CFG, "General", "use_minsize", 0))
        MINSIZE = check_setting_str(CFG, "General", "minsize", "")
        USE_MAXSIZE = bool(check_setting_int(CFG, "General", "use_maxsize", 0))
        MAXSIZE = check_setting_str(CFG, "General", "maxsize", "")

        ENABLE_EXTRA_SCRIPTS = bool(check_setting_int(CFG, "General", "enable_extra_scripts", 0))
        EXTRA_SCRIPTS = check_setting_str(CFG, "General", "extra_scripts", "")

        ENABLE_PRE_SCRIPTS = bool(check_setting_int(CFG, "General", "enable_pre_scripts", 0))
        PRE_SCRIPTS = check_setting_str(CFG, "General", "pre_scripts", "")

        SAB_HOST = check_setting_str(CFG, "SABnzbd", "sab_host", "")
        SAB_USERNAME = check_setting_str(CFG, "SABnzbd", "sab_username", "")
        SAB_PASSWORD = check_setting_str(CFG, "SABnzbd", "sab_password", "")
        SAB_APIKEY = check_setting_str(CFG, "SABnzbd", "sab_apikey", "")
        SAB_CATEGORY = check_setting_str(CFG, "SABnzbd", "sab_category", "")
        SAB_PRIORITY = check_setting_str(CFG, "SABnzbd", "sab_priority", "")
        if SAB_PRIORITY.isdigit():
            if SAB_PRIORITY == "0":
                SAB_PRIORITY = "Default"
            elif SAB_PRIORITY == "1":
                SAB_PRIORITY = "Low"
            elif SAB_PRIORITY == "2":
                SAB_PRIORITY = "Normal"
            elif SAB_PRIORITY == "3":
                SAB_PRIORITY = "High"
            elif SAB_PRIORITY == "4":
                SAB_PRIORITY = "Paused"
            else:
                SAB_PRIORITY = "Default"
        NZBSU = bool(check_setting_int(CFG, "NZBsu", "nzbsu", 0))
        NZBSU_APIKEY = check_setting_str(CFG, "NZBsu", "nzbsu_apikey", "")

        DOGNZB = bool(check_setting_int(CFG, "DOGnzb", "dognzb", 0))
        DOGNZB_APIKEY = check_setting_str(CFG, "DOGnzb", "dognzb_apikey", "")

        NZBX = bool(check_setting_int(CFG, "nzbx", "nzbx", 0))

        RAW = bool(check_setting_int(CFG, "Raw", "raw", 0))
        RAW_PROVIDER = check_setting_str(CFG, "Raw", "raw_provider", "")
        RAW_USERNAME = check_setting_str(CFG, "Raw", "raw_username", "")
        RAW_PASSWORD = check_setting_str(CFG, "Raw", "raw_password", "")
        RAW_GROUPS = check_setting_str(CFG, "Raw", "raw_groups", "")

        EXPERIMENTAL = bool(check_setting_int(CFG, "Experimental", "experimental", 0))

        NEWZNAB = bool(check_setting_int(CFG, "Newznab", "newznab", 0))
        NEWZNAB_HOST = check_setting_str(CFG, "Newznab", "newznab_host", "")
        NEWZNAB_APIKEY = check_setting_str(CFG, "Newznab", "newznab_apikey", "")
        NEWZNAB_ENABLED = bool(check_setting_int(CFG, "Newznab", "newznab_enabled", 1))

        # Need to pack the extra newznabs back into a list of tuples
        flattened_newznabs = check_setting_str(CFG, "Newznab", "extra_newznabs", [], log=False)
        EXTRA_NEWZNABS = list(itertools.izip(*[itertools.islice(flattened_newznabs, i, None, 3) for i in range(3)]))
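        # illustrative only: a flattened list such as ['host1', 'apikey1', '1', 'host2', 'apikey2', '0']
        # is regrouped by the islice/izip trick above into [('host1', 'apikey1', '1'), ('host2', 'apikey2', '0')],
        # i.e. one tuple of three consecutive settings per extra newznab provider
        # (host, apikey and an enabled flag, going by the three-column grouping).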

        # update folder formats in the config & bump up config version
        if CONFIG_VERSION == "0":
            from mylar.helpers import replace_all

            file_values = {"issue": "Issue", "title": "Title", "series": "Series", "year": "Year"}
            folder_values = {
                "series": "Series",
                "publisher": "Publisher",
                "year": "Year",
                "first": "First",
                "lowerfirst": "first",
            }
            FILE_FORMAT = replace_all(FILE_FORMAT, file_values)
            FOLDER_FORMAT = replace_all(FOLDER_FORMAT, folder_values)

            CONFIG_VERSION = "1"

        if CONFIG_VERSION == "1":

            from mylar.helpers import replace_all

            file_values = {
                "Issue": "$Issue",
                "Title": "$Title",
                "Series": "$Series",
                "Year": "$Year",
                "title": "$title",
                "series": "$series",
                "year": "$year",
            }
            folder_values = {
                "Series": "$Series",
                "Publisher": "$Publisher",
                "Year": "$Year",
                "First": "$First",
                "series": "$series",
                "publisher": "$publisher",
                "year": "$year",
                "first": "$first",
            }
            FILE_FORMAT = replace_all(FILE_FORMAT, file_values)
            FOLDER_FORMAT = replace_all(FOLDER_FORMAT, folder_values)
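            # illustrative only: a version-1 folder format of "Series-(Year)" is rewritten by the
            # substitutions above to "$Series-($Year)", matching the $-token style used by the
            # current defaults (e.g. folder_format "$Series-($Year)").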

            CONFIG_VERSION = "2"

        if "http://" not in SAB_HOST[:7] and "https://" not in SAB_HOST[:8]:
            SAB_HOST = "http://" + SAB_HOST
            # print ("SAB_HOST:" + SAB_HOST)

        if not LOG_DIR:
            LOG_DIR = os.path.join(DATA_DIR, "logs")

        if not os.path.exists(LOG_DIR):
            try:
                os.makedirs(LOG_DIR)
            except OSError:
                if VERBOSE:
                    print "Unable to create the log directory. Logging to screen only."

        # Start the logger, silence console logging if we need to
        logger.mylar_log.initLogger(verbose=VERBOSE)

        # Put the cache dir in the data dir for now
        CACHE_DIR = os.path.join(DATA_DIR, "cache")
        if not os.path.exists(CACHE_DIR):
            try:
                os.makedirs(CACHE_DIR)
            except OSError:
                logger.error("Could not create cache dir. Check permissions of datadir: " + DATA_DIR)

        # Sanity check for search interval. Set it to at least 6 hours
        if SEARCH_INTERVAL < 360:
            logger.info("Search interval too low. Resetting to 6 hour minimum")
            SEARCH_INTERVAL = 360

        # Initialize the database
        logger.info("Checking to see if the database has all tables....")
        try:
            dbcheck()
        except Exception, e:
            logger.error("Can't connect to the database: %s" % e)

        # Get the currently installed version - returns None, 'win32' or the git hash
        # Also sets INSTALL_TYPE variable to 'win', 'git' or 'source'
        CURRENT_VERSION = versioncheck.getVersion()

        # Check for new versions
        if CHECK_GITHUB_ON_STARTUP:
            try:
                LATEST_VERSION = versioncheck.checkGithub()
            except:
                LATEST_VERSION = CURRENT_VERSION
        else:
            LATEST_VERSION = CURRENT_VERSION

        __INITIALIZED__ = True
        return True
Exemple #20
0
def initialize():

    with INIT_LOCK:
    
        global __INITIALIZED__, FULL_PATH, PROG_DIR, VERBOSE, DAEMON, DATA_DIR, CONFIG_FILE, CFG, CONFIG_VERSION, LOG_DIR, CACHE_DIR, \
                HTTP_PORT, HTTP_HOST, HTTP_USERNAME, HTTP_PASSWORD, HTTP_ROOT, LAUNCH_BROWSER, GIT_PATH, \
                CURRENT_VERSION, LATEST_VERSION, CHECK_GITHUB, CHECK_GITHUB_ON_STARTUP, CHECK_GITHUB_INTERVAL, MUSIC_DIR, DESTINATION_DIR, \
                DOWNLOAD_DIR, USENET_RETENTION, SEARCH_INTERVAL, INTERFACE, AUTOWANT_ALL, AUTOWANT_UPCOMING, \
                LIBRARYSCAN_INTERVAL, DOWNLOAD_SCAN_INTERVAL, SAB_HOST, SAB_USERNAME, SAB_PASSWORD, SAB_APIKEY, SAB_CATEGORY, BLACKHOLE, BLACKHOLE_DIR, \
                NZBSU, NZBSU_APIKEY, DOGNZB, DOGNZB_APIKEY, \
                RAW, RAW_PROVIDER, RAW_USERNAME, RAW_PASSWORD, RAW_GROUPS, EXPERIMENTAL,\
                PREFERRED_QUALITY, MOVE_FILES, RENAME_FILES, CORRECT_METADATA, FOLDER_FORMAT, FILE_FORMAT, \
                COMIC_LOCATION, QUAL_ALTVERS, QUAL_SCANNER, QUAL_TYPE, QUAL_QUALITY
                
        if __INITIALIZED__:
            return False
                
        # Make sure all the config sections exist
        CheckSection('General')
        CheckSection('SABnzbd')
        CheckSection('NZBsu')
        CheckSection('DOGnzb')
        CheckSection('Raw')
        CheckSection('Experimental')        
        # Set global variables based on config file or use defaults
        try:
            HTTP_PORT = check_setting_int(CFG, 'General', 'http_port', 8090)
        except:
            HTTP_PORT = 8090
            
        if HTTP_PORT < 21 or HTTP_PORT > 65535:
            HTTP_PORT = 8090
            
        HTTP_HOST = check_setting_str(CFG, 'General', 'http_host', '0.0.0.0')
        HTTP_USERNAME = check_setting_str(CFG, 'General', 'http_username', '')
        HTTP_PASSWORD = check_setting_str(CFG, 'General', 'http_password', '')
        HTTP_ROOT = check_setting_str(CFG, 'General', 'http_root', '/')
        LAUNCH_BROWSER = bool(check_setting_int(CFG, 'General', 'launch_browser', 1))
        GIT_PATH = check_setting_str(CFG, 'General', 'git_path', '')
        LOG_DIR = check_setting_str(CFG, 'General', 'log_dir', '')
        
        CHECK_GITHUB = bool(check_setting_int(CFG, 'General', 'check_github', 1))
        CHECK_GITHUB_ON_STARTUP = bool(check_setting_int(CFG, 'General', 'check_github_on_startup', 1))
        CHECK_GITHUB_INTERVAL = check_setting_int(CFG, 'General', 'check_github_interval', 360)
        
        DESTINATION_DIR = check_setting_str(CFG, 'General', 'destination_dir', '')
        USENET_RETENTION = check_setting_int(CFG, 'General', 'usenet_retention', '1500')
        
        SEARCH_INTERVAL = check_setting_int(CFG, 'General', 'search_interval', 360)
        LIBRARYSCAN_INTERVAL = check_setting_int(CFG, 'General', 'libraryscan_interval', 300)
        DOWNLOAD_SCAN_INTERVAL = check_setting_int(CFG, 'General', 'download_scan_interval', 5)
        INTERFACE = check_setting_str(CFG, 'General', 'interface', 'default')
        AUTOWANT_ALL = bool(check_setting_int(CFG, 'General', 'autowant_all', 0))
        AUTOWANT_UPCOMING = bool(check_setting_int(CFG, 'General', 'autowant_upcoming', 0))
        PREFERRED_QUALITY = check_setting_int(CFG, 'General', 'preferred_quality', 0)
        CORRECT_METADATA = bool(check_setting_int(CFG, 'General', 'correct_metadata', 0))
        MOVE_FILES = bool(check_setting_int(CFG, 'General', 'move_files', 0))
        RENAME_FILES = bool(check_setting_int(CFG, 'General', 'rename_files', 0))
        FOLDER_FORMAT = check_setting_str(CFG, 'General', 'folder_format', 'Artist/Album [Year]')
        FILE_FORMAT = check_setting_str(CFG, 'General', 'file_format', 'Track Artist - Album [Year]- Title')
        BLACKHOLE = bool(check_setting_int(CFG, 'General', 'blackhole', 0))
        BLACKHOLE_DIR = check_setting_str(CFG, 'General', 'blackhole_dir', '')
        
        SAB_HOST = check_setting_str(CFG, 'SABnzbd', 'sab_host', '')
        SAB_USERNAME = check_setting_str(CFG, 'SABnzbd', 'sab_username', '')
        SAB_PASSWORD = check_setting_str(CFG, 'SABnzbd', 'sab_password', '')
        SAB_APIKEY = check_setting_str(CFG, 'SABnzbd', 'sab_apikey', '')
        SAB_CATEGORY = check_setting_str(CFG, 'SABnzbd', 'sab_category', '')
        
        NZBSU = bool(check_setting_int(CFG, 'NZBsu', 'nzbsu', 0))
        NZBSU_APIKEY = check_setting_str(CFG, 'NZBsu', 'nzbsu_apikey', '')

        DOGNZB = bool(check_setting_int(CFG, 'DOGnzb', 'dognzb', 0))
        DOGNZB_APIKEY = check_setting_str(CFG, 'DOGnzb', 'dognzb_apikey', '')

        RAW = bool(check_setting_int(CFG, 'Raw', 'raw', 0))
        RAW_PROVIDER = check_setting_str(CFG, 'Raw', 'raw_provider', '')
        RAW_USERNAME = check_setting_str(CFG, 'Raw', 'raw_username', '')
        RAW_PASSWORD  = check_setting_str(CFG, 'Raw', 'raw_password', '')
        RAW_GROUPS = check_setting_str(CFG, 'Raw', 'raw_groups', '')

        EXPERIMENTAL = bool(check_setting_int(CFG, 'Experimental', 'experimental', 0))
        
        # update folder formats in the config & bump up config version
        if CONFIG_VERSION == '0':
            from mylar.helpers import replace_all
            file_values = { 'tracknumber':  'Track', 'title': 'Title','artist' : 'Artist', 'album' : 'Album', 'year' : 'Year' }
            folder_values = { 'artist' : 'Artist', 'album':'Album', 'year' : 'Year', 'releasetype' : 'Type', 'first' : 'First', 'lowerfirst' : 'first' }
            FILE_FORMAT = replace_all(FILE_FORMAT, file_values)
            FOLDER_FORMAT = replace_all(FOLDER_FORMAT, folder_values)
            
            CONFIG_VERSION = '1'
            
        if CONFIG_VERSION == '1':

            from mylar.helpers import replace_all

            file_values = { 'Track':        '$Track',
                            'Title':        '$Title',
                            'Artist':       '$Artist',
                            'Album':        '$Album',
                            'Year':         '$Year',
                            'track':        '$track',
                            'title':        '$title',
                            'artist':       '$artist',
                            'album':        '$album',
                            'year':         '$year'
                            }
            folder_values = {   'Artist':   '$Artist',
                                'Album':    '$Album',
                                'Year':     '$Year',
                                'Type':     '$Type',
                                'First':    '$First',
                                'artist':   '$artist',
                                'album':    '$album',
                                'year':     '$year',
                                'type':     '$type',
                                'first':    '$first'
                            }   
            FILE_FORMAT = replace_all(FILE_FORMAT, file_values)
            FOLDER_FORMAT = replace_all(FOLDER_FORMAT, folder_values)
            
            CONFIG_VERSION = '2'
        
        if not LOG_DIR:
            LOG_DIR = os.path.join(DATA_DIR, 'logs')
        
        if not os.path.exists(LOG_DIR):
            try:
                os.makedirs(LOG_DIR)
            except OSError:
                if VERBOSE:
                    print 'Unable to create the log directory. Logging to screen only.'
        
        # Start the logger, silence console logging if we need to
        logger.mylar_log.initLogger(verbose=VERBOSE)
        
        # Put the cache dir in the data dir for now
        CACHE_DIR = os.path.join(DATA_DIR, 'cache')
        if not os.path.exists(CACHE_DIR):
            try:
                os.makedirs(CACHE_DIR)
            except OSError:
                logger.error('Could not create cache dir. Check permissions of datadir: ' + DATA_DIR)
        
        # Initialize the database
        logger.info('Checking to see if the database has all tables....')
        try:
            dbcheck()
        except Exception, e:
            logger.error("Can't connect to the database: %s" % e)
            
        # Get the currently installed version - returns None, 'win32' or the git hash
        # Also sets INSTALL_TYPE variable to 'win', 'git' or 'source'
        CURRENT_VERSION = versioncheck.getVersion()
        
        # Check for new versions
        if CHECK_GITHUB_ON_STARTUP:
            try:
                LATEST_VERSION = versioncheck.checkGithub()
            except:
                LATEST_VERSION = CURRENT_VERSION
        else:
            LATEST_VERSION = CURRENT_VERSION

        __INITIALIZED__ = True
        return True
Exemple #21
0
def PostProcess(nzb_name, nzb_folder):
        log2screen = ""
        log2screen = log2screen + "Nzb Name:" + nzb_name + "\n"
        log2screen = log2screen + "Nzb Folder:"  + nzb_folder + "\n"
        #lookup nzb_name in nzblog table to get issueid
        myDB = db.DBConnection()
        nzbiss = myDB.action("SELECT * from nzblog WHERE nzbname=?", [nzb_name]).fetchone()
        if nzbiss is None:
            log2screen = log2screen + "Epic failure - could not locate file to rename." + "\n"
            logger.error(u"Unable to locate downloaded file to rename. PostProcessing aborted.")
            return
        else: 
            issueid = nzbiss['IssueID']
        #log2screen = log2screen + "IssueID: " + issueid + "\n"
        #use issueid to get publisher, series, year, issue number
        issuenzb = myDB.action("SELECT * from issues WHERE issueid=?", [issueid]).fetchone()
        comicid = issuenzb['ComicID']
        #log2screen = log2screen + "ComicID: " + comicid + "\n"
        issuenum = issuenzb['Issue_Number']
        issueno = str(issuenum).split('.')[0]
        log2screen = log2screen + "Issue Number: " + str(issueno) + "\n"
        # issue zero-suppression here
        if mylar.ZERO_LEVEL == "0": 
            zeroadd = ""
        else:
            if mylar.ZERO_LEVEL_N  == "none": zeroadd = ""
            elif mylar.ZERO_LEVEL_N == "0x": zeroadd = "0"
            elif mylar.ZERO_LEVEL_N == "00x": zeroadd = "00"


        if len(issueno) >= 1:
            if int(issueno) < 10:
                log2screen = log2screen + "issue detected less than 10" + "\n"
                prettycomiss = str(zeroadd) + str(int(issueno))
                log2screen = log2screen + "Zero level supplement set to " + str(mylar.ZERO_LEVEL_N) + ". Issue will be set as : " + str(prettycomiss) + "\n"
            elif int(issueno) >= 10 and int(issueno) < 100:
                log2screen = log2screen + "issue detected greater than 10, but less than 100" + "\n"
                if mylar.ZERO_LEVEL_N == "none":
                    zeroadd = ""
                else:
                    zeroadd = "0"
                prettycomiss = str(zeroadd) + str(int(issueno))
                log2screen = log2screen + "Zero level supplement set to " + str(mylar.ZERO_LEVEL_N) + ". Issue will be set as : " + str(prettycomiss) + "\n"
            else:
                log2screen = log2screen + "issue detected greater than 100" + "\n"
                prettycomiss = str(issueno)
                log2screen = log2screen + "Zero level supplement set to " + str(mylar.ZERO_LEVEL_N) + ". Issue will be set as : " + str(prettycomiss) + "\n"
        else:
            prettycomiss = str(issueno)
            log2screen = log2screen + "issue length error - cannot determine length. Defaulting to None:  " + str(prettycomiss) + "\n"
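        # worked example (illustrative): with ZERO_LEVEL_N set to "00x", issue "7" becomes "007",
        # issue "34" becomes "034" (the supplement drops to a single "0" for two-digit issues),
        # and issue "150" is left as "150".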

        issueyear = issuenzb['IssueDate'][:4]
        log2screen = log2screen + "Issue Year: " + str(issueyear) + "\n"
        comicnzb= myDB.action("SELECT * from comics WHERE comicid=?", [comicid]).fetchone()
        publisher = comicnzb['ComicPublisher']
        log2screen = log2screen + "Publisher: " + publisher + "\n"
        series = comicnzb['ComicName']
        log2screen = log2screen + "Series: " + series + "\n"
        seriesyear = comicnzb['ComicYear']
        log2screen = log2screen + "Year: " + seriesyear + "\n"
        comlocation = comicnzb['ComicLocation']
        log2screen = log2screen + "Comic Location: " + comlocation + "\n"
#---move to importer.py
                #get output path format
#        if ':' in series:
#            series = series.replace(':','')
                #do work to generate folder path
#        values = {'$Series':    series,
#              '$Publisher': publisher,
#              '$Year':      seriesyear
#              }
#        comlocation = mylar.DESTINATION_DIR + "/" + helpers.replace_all(mylar.FOLDER_FORMAT, values)
            #last perform space replace
#        if mylar.REPLACE_SPACES:
            #mylar.REPLACE_CHAR ...determines what to replace spaces with underscore or dot
#            comlocation = comlocation.replace(' ', mylar.REPLACE_CHAR)
#        log2screen = log2screen + "Final Location: " + comlocation + "\n"
#---
        #rename file and move to new path
        #nfilename = series + " " + issueno + " (" + seriesyear + ")"
        file_values = {'$Series':    series,
                       '$Issue':     prettycomiss,
                       '$Year':      issueyear
                      }

        extensions = ('.cbr', '.cbz')

        for root, dirnames, filenames in os.walk(nzb_folder):
            for filename in filenames:
                if filename.lower().endswith(extensions):
                    ofilename = filename
                    path, ext = os.path.splitext(ofilename)
        log2screen = log2screen + "Original Filename: " + ofilename + "\n"
        log2screen = log2screen + "Original Extension: " + ext + "\n"
        if mylar.FILE_FORMAT == '':
            log2screen = log2screen + "Rename Files isn't enabled...keeping original filename." + "\n"
            #check if extension is in nzb_name - will screw up otherwise
            if ofilename.lower().endswith(extensions):
                nfilename = ofilename[:-4]
            else:
                nfilename = ofilename
        else:
            nfilename = helpers.replace_all(mylar.FILE_FORMAT, file_values)
            if mylar.REPLACE_SPACES:
                #mylar.REPLACE_CHAR ...determines what to replace spaces with underscore or dot
                nfilename = nfilename.replace(' ', mylar.REPLACE_CHAR)
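        # worked example (illustrative values): with a file format like "$Series $Issue ($Year)",
        # series "Watchmen", prettycomiss "007" and issueyear "1987", nfilename becomes
        # "Watchmen 007 (1987)"; with REPLACE_SPACES enabled and a "." replace_char it becomes
        # "Watchmen.007.(1987)".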
        #TODO - sort issue numbering 12.00 should be 12
        log2screen = log2screen + "New Filename: " + nfilename + "\n"

        src = nzb_folder + "/" + ofilename
        dst = comlocation + "/" + nfilename + ext
        log2screen = log2screen + "Source:" + src + "\n"
        log2screen = log2screen + "Destination:" +  dst + "\n"
        os.rename(nzb_folder + "/" + ofilename, nzb_folder + "/" + nfilename + ext)
        src = nzb_folder + "/" + nfilename + ext
        try:
            shutil.move(src, dst)
        except (OSError, IOError):
            log2screen = log2screen + "Failed to move file - check directories and manually re-run." + "\n"
            log2screen = log2screen + "Post-Processing ABORTED." + "\n"
            return log2screen
        #tidyup old path
        try:
            shutil.rmtree(nzb_folder)
        except (OSError, IOError):
            log2screen = log2screen + "Failed to remove temporary directory - check directory and manually re-run." + "\n"
            log2screen = log2screen + "Post-Processing ABORTED." + "\n"
            return log2screen

        log2screen = log2screen + "Removed temporary directory : " + str(nzb_folder) + "\n"
        #delete entry from nzblog table
        myDB.action('DELETE from nzblog WHERE issueid=?', [issueid])
        #force rescan of files
        updater.forceRescan(comicid)
        logger.info(u"Post-Processing completed for: " + series + " issue: " + str(issuenum) )
        log2screen = log2screen + "Post Processing SUCCESSFUL!" + "\n"
        #print log2screen
        return log2screen