def Process_next(self,comicid,issueid,issuenumOG,ml=None):
    """Post-process a single downloaded issue.

    Looks up the comic/issue (falling back to the annuals table), derives a
    "pretty" issue number per the zero-suppression settings, optionally
    meta-tags the file via comictagger, runs pre-scripts, renames/moves the
    file into the series folder, updates the DB status tables, and fires any
    configured notifiers.

    Parameters:
        comicid     -- ComicID key into the comics table.
        issueid     -- IssueID key into the issues (or annuals) table.
        issuenumOG  -- original issue number string, used only for display.
        ml          -- manual-run dict (expects key 'ComicLocation' pointing
                       at the exact file); None for a normal (NZB) run.

    Returns:
        self.log on completion or on a fatal/corrupt-archive abort; plain
        ``return`` (None) on mid-stream aborts (missing file, failed move).
    """
    annchk = "no"
    # Only these archive extensions are recognized as comic files.
    extensions = ('.cbr', '.cbz')
    snatchedtorrent = False
    myDB = db.DBConnection()
    comicnzb = myDB.selectone("SELECT * from comics WHERE comicid=?", [comicid]).fetchone()
    issuenzb = myDB.selectone("SELECT * from issues WHERE issueid=? AND comicid=? AND ComicName NOT NULL", [issueid,comicid]).fetchone()
    # For manual runs with torrent-completion notification enabled, check
    # whether this issue was snatched via a torrent provider (KAT/CBT) so we
    # can suppress the early manual-run return later and still notify.
    if ml is not None and mylar.SNATCHEDTORRENT_NOTIFY:
        snatchnzb = myDB.selectone("SELECT * from snatched WHERE IssueID=? AND ComicID=? AND (provider=? OR provider=?) AND Status='Snatched'", [issueid,comicid,'KAT','CBT']).fetchone()
        if snatchnzb is None:
            logger.fdebug('Was not downloaded with Mylar and the usage of torrents. Disabling torrent manual post-processing completion notification.')
        else:
            logger.fdebug('Was downloaded from ' + snatchnzb['Provider'] + '. Enabling torrent manual post-processing completion notification.')
            snatchedtorrent = True
    logger.fdebug('issueid: ' + str(issueid))
    logger.fdebug('issuenumOG: ' + str(issuenumOG))
    # Not a regular issue - try the annuals table instead.
    if issuenzb is None:
        issuenzb = myDB.selectone("SELECT * from annuals WHERE issueid=? and comicid=?", [issueid,comicid]).fetchone()
        annchk = "yes"
    #issueno = str(issuenum).split('.')[0]
    #new CV API - removed all decimals...here we go AGAIN!
    issuenum = issuenzb['Issue_Number']
    # issue_except carries an alpha suffix ('AU', 'AI', '.INH', '.NOW') that
    # is stripped from the numeric part here and re-appended to the pretty
    # issue number later. 'None' (string) means no suffix.
    issue_except = 'None'
    if 'au' in issuenum.lower() and issuenum[:1].isdigit():
        issuenum = re.sub("[^0-9]", "", issuenum)
        issue_except = ' AU'
    elif 'ai' in issuenum.lower() and issuenum[:1].isdigit():
        issuenum = re.sub("[^0-9]", "", issuenum)
        issue_except = ' AI'
    elif 'inh' in issuenum.lower() and issuenum[:1].isdigit():
        issuenum = re.sub("[^0-9]", "", issuenum)
        issue_except = '.INH'
    elif 'now' in issuenum.lower() and issuenum[:1].isdigit():
        # e.g. '1.NOW!' - drop the bang before stripping non-digits.
        if '!' in issuenum:
            issuenum = re.sub('\!', '', issuenum)
        issuenum = re.sub("[^0-9]", "", issuenum)
        issue_except = '.NOW'
    # Split a decimal issue number (e.g. '12.5') into its whole part
    # (iss_b4dec) and decimal part (iss_decval); iss becomes the normalized
    # display form, issueno the whole-number part used for range checks.
    if '.' in issuenum:
        iss_find = issuenum.find('.')
        iss_b4dec = issuenum[:iss_find]
        iss_decval = issuenum[iss_find+1:]
        if int(iss_decval) == 0:
            # '.0' decimal - treat as a whole number.
            iss = iss_b4dec
            issdec = int(iss_decval)
            issueno = str(iss)
            self._log("Issue Number: " + str(issueno))
            logger.fdebug("Issue Number: " + str(issueno))
        else:
            if len(iss_decval) == 1:
                # single digit decimal is assumed to be a tenth.
                iss = iss_b4dec + "." + iss_decval
                issdec = int(iss_decval) * 10
            else:
                iss = iss_b4dec + "." + iss_decval.rstrip('0')
                issdec = int(iss_decval.rstrip('0')) * 10
            issueno = iss_b4dec
            self._log("Issue Number: " + str(iss))
            logger.fdebug("Issue Number: " + str(iss))
    else:
        iss = issuenum
        issueno = str(iss)
    # issue zero-suppression here
    # zeroadd is the zero-padding prefix dictated by ZERO_LEVEL_N
    # ('none' -> '', '0x' -> '0', '00x' -> '00').
    if mylar.ZERO_LEVEL == "0":
        zeroadd = ""
    else:
        if mylar.ZERO_LEVEL_N == "none":
            zeroadd = ""
        elif mylar.ZERO_LEVEL_N == "0x":
            zeroadd = "0"
        elif mylar.ZERO_LEVEL_N == "00x":
            zeroadd = "00"
    logger.fdebug("Zero Suppression set to : " + str(mylar.ZERO_LEVEL_N))
    # NOTE(review): str(len(issueno)) > 1 compares a *string* to an int -
    # under Python 2 this is always True, so the else branch below is
    # effectively unreachable. Presumably len(issueno) > 1 was intended -
    # TODO confirm before changing, since prettycomiss derivation depends
    # on this branch always being taken.
    if str(len(issueno)) > 1:
        if int(issueno) < 0:
            self._log("issue detected is a negative")
            # NOTE(review): issueno is a str at this point, so abs(issueno)
            # would raise TypeError if a negative issue ever reached here -
            # verify whether negative issue numbers can actually occur.
            prettycomiss = '-' + str(zeroadd) + str(abs(issueno))
        elif int(issueno) < 10:
            self._log("issue detected less than 10")
            if '.' in iss:
                if int(iss_decval) > 0:
                    issueno = str(iss)
                    prettycomiss = str(zeroadd) + str(iss)
                else:
                    prettycomiss = str(zeroadd) + str(int(issueno))
            else:
                prettycomiss = str(zeroadd) + str(iss)
            if issue_except != 'None':
                # re-attach the alpha suffix stripped earlier (AU/AI/INH/NOW).
                prettycomiss = str(prettycomiss) + issue_except
            self._log("Zero level supplement set to " + str(mylar.ZERO_LEVEL_N) + ". Issue will be set as : " + str(prettycomiss))
        elif int(issueno) >= 10 and int(issueno) < 100:
            self._log("issue detected greater than 10, but less than 100")
            # two-digit issues only ever get a single leading zero at most.
            if mylar.ZERO_LEVEL_N == "none":
                zeroadd = ""
            else:
                zeroadd = "0"
            if '.' in iss:
                if int(iss_decval) > 0:
                    issueno = str(iss)
                    prettycomiss = str(zeroadd) + str(iss)
                else:
                    prettycomiss = str(zeroadd) + str(int(issueno))
            else:
                prettycomiss = str(zeroadd) + str(iss)
            if issue_except != 'None':
                prettycomiss = str(prettycomiss) + issue_except
            self._log("Zero level supplement set to " + str(mylar.ZERO_LEVEL_N) + ".Issue will be set as : " + str(prettycomiss))
        else:
            # 100+ - no zero padding at all.
            self._log("issue detected greater than 100")
            if '.' in iss:
                if int(iss_decval) > 0:
                    issueno = str(iss)
            prettycomiss = str(issueno)
            if issue_except != 'None':
                prettycomiss = str(prettycomiss) + issue_except
            self._log("Zero level supplement set to " + str(mylar.ZERO_LEVEL_N) + ". Issue will be set as : " + str(prettycomiss))
    else:
        prettycomiss = str(issueno)
        self._log("issue length error - cannot determine length. Defaulting to None: " + str(prettycomiss))
    if annchk == "yes":
        self._log("Annual detected.")
    logger.fdebug("Pretty Comic Issue is : " + str(prettycomiss))
    issueyear = issuenzb['IssueDate'][:4]
    self._log("Issue Year: " + str(issueyear))
    logger.fdebug("Issue Year : " + str(issueyear))
    month = issuenzb['IssueDate'][5:7].replace('-','').strip()
    month_name = helpers.fullmonth(month)
    # comicnzb= myDB.action("SELECT * from comics WHERE comicid=?", [comicid]).fetchone()
    publisher = comicnzb['ComicPublisher']
    self._log("Publisher: " + publisher)
    logger.fdebug("Publisher: " + str(publisher))
    #we need to un-unicode this to make sure we can write the filenames properly for spec.chars
    series = comicnzb['ComicName'].encode('ascii', 'ignore').strip()
    self._log("Series: " + series)
    logger.fdebug("Series: " + str(series))
    seriesyear = comicnzb['ComicYear']
    self._log("Year: " + seriesyear)
    logger.fdebug("Year: " + str(seriesyear))
    comlocation = comicnzb['ComicLocation']
    self._log("Comic Location: " + comlocation)
    logger.fdebug("Comic Location: " + str(comlocation))
    comversion = comicnzb['ComicVersion']
    self._log("Comic Version: " + str(comversion))
    logger.fdebug("Comic Version: " + str(comversion))
    if comversion is None:
        comversion = 'None'
    #if comversion is None, remove it so it doesn't populate with 'None'
    if comversion == 'None':
        # drop the $VolumeN token from the rename format and collapse the
        # doubled whitespace left behind.
        chunk_f_f = re.sub('\$VolumeN','',mylar.FILE_FORMAT)
        chunk_f = re.compile(r'\s+')
        chunk_file_format = chunk_f.sub(' ', chunk_f_f)
        self._log("No version # found for series - tag will not be available for renaming.")
        logger.fdebug("No version # found for series, removing from filename")
        logger.fdebug("new format is now: " + str(chunk_file_format))
    else:
        chunk_file_format = mylar.FILE_FORMAT
    if annchk == "no":
        # Not an annual: strip the $Annual token from the rename format.
        chunk_f_f = re.sub('\$Annual','',chunk_file_format)
        chunk_f = re.compile(r'\s+')
        chunk_file_format = chunk_f.sub(' ', chunk_f_f)
        logger.fdebug('not an annual - removing from filename paramaters')
        logger.fdebug('new format: ' + str(chunk_file_format))
    else:
        logger.fdebug('chunk_file_format is: ' + str(chunk_file_format))
        if '$Annual' not in chunk_file_format:
            #if it's an annual, but $Annual isn't specified in file_format, we need to
            #force it in there, by default in the format of $Annual $Issue
            prettycomiss = "Annual " + str(prettycomiss)
            logger.fdebug('prettycomiss: ' + str(prettycomiss))
    ofilename = None
    #if meta-tagging is not enabled, we need to declare the check as being fail
    #if meta-tagging is enabled, it gets changed just below to a default of pass
    pcheck = "fail"
    #tag the meta.
    if mylar.ENABLE_META:
        self._log("Metatagging enabled - proceeding...")
        logger.fdebug("Metatagging enabled - proceeding...")
        pcheck = "pass"
        try:
            # deferred import: comictaggerlib is an optional bundled lib.
            import cmtagmylar
            if ml is None:
                pcheck = cmtagmylar.run(self.nzb_folder, issueid=issueid)
            else:
                pcheck = cmtagmylar.run(self.nzb_folder, issueid=issueid, manual="yes", filename=ml['ComicLocation'])
        except ImportError:
            logger.fdebug("comictaggerlib not found on system. Ensure the ENTIRE lib directory is located within mylar/lib/comictaggerlib/")
            logger.fdebug("continuing with PostProcessing, but I'm not using metadata.")
            pcheck = "fail"
        if pcheck == "fail":
            self._log("Unable to write metadata successfully - check mylar.log file. Attempting to continue without tagging...")
            logger.fdebug("Unable to write metadata successfully - check mylar.log file. Attempting to continue without tagging...")
        elif pcheck == "unrar error":
            # corrupt archive - abort post-processing entirely for this issue.
            self._log("This is a corrupt archive - whether CRC errors or it's incomplete. Marking as BAD, and retrying a different copy.")
            logger.error("This is a corrupt archive - whether CRC errors or it's incomplete. Marking as BAD, and retrying a different copy.")
            return self.log
        else:
            # on success, pcheck is the path of the freshly-tagged .cbz.
            otofilename = pcheck
            self._log("Sucessfully wrote metadata to .cbz - Continuing..")
            logger.fdebug("Sucessfully wrote metadata to .cbz (" + str(otofilename) + ") - Continuing..")
    #Run Pre-script
    if mylar.ENABLE_PRE_SCRIPTS:
        nzbn = self.nzb_name #original nzb name
        nzbf = self.nzb_folder #original nzb folder
        #name, comicyear, comicid , issueid, issueyear, issue, publisher
        #create the dic and send it.
        seriesmeta = []
        seriesmetadata = {}
        seriesmeta.append({
                    'name': series,
                    'comicyear': seriesyear,
                    'comicid': comicid,
                    'issueid': issueid,
                    'issueyear': issueyear,
                    'issue': issuenum,
                    'publisher': publisher
                    })
        seriesmetadata['seriesmeta'] = seriesmeta
        self._run_pre_scripts(nzbn, nzbf, seriesmetadata )
    #rename file and move to new path
    #nfilename = series + " " + issueno + " (" + seriesyear + ")"
    # substitution table applied against the FILE_FORMAT template.
    file_values = {'$Series': series,
                   '$Issue': prettycomiss,
                   '$Year': issueyear,
                   '$series': series.lower(),
                   '$Publisher': publisher,
                   '$publisher': publisher.lower(),
                   '$VolumeY': 'V' + str(seriesyear),
                   '$VolumeN': comversion,
                   '$monthname': month_name,
                   '$month': month,
                   '$Annual': 'Annual'
                  }
    #if it's a Manual Run, use the ml['ComicLocation'] for the exact filename.
    if ml is None:
        # normal run: scan the download folder for the comic archive.
        # NOTE: the walk keeps the *last* matching file if several exist.
        for root, dirnames, filenames in os.walk(self.nzb_folder):
            for filename in filenames:
                if filename.lower().endswith(extensions):
                    odir = root
                    ofilename = filename
                    path, ext = os.path.splitext(ofilename)
        # NOTE(review): odir is only bound inside the loop - if no archive
        # is found this 'is None' check would hit a NameError, not None.
        if odir is None:
            logger.fdebug('no root folder set.')
            odir = self.nzb_folder
        logger.fdebug('odir: ' + str(odir))
        logger.fdebug('ofilename: ' + str(ofilename))
    else:
        # manual run: exact file path comes from ml (or the tagger output).
        if pcheck == "fail":
            otofilename = ml['ComicLocation']
        logger.fdebug('otofilename:' + str(otofilename))
        odir, ofilename = os.path.split(otofilename)
        logger.fdebug('odir: ' + str(odir))
        logger.fdebug('ofilename: ' + str(ofilename))
        path, ext = os.path.splitext(ofilename)
        logger.fdebug('path: ' + str(path))
        logger.fdebug('ext:' + str(ext))
    if ofilename is None:
        logger.error(u"Aborting PostProcessing - the filename doesn't exist in the location given. Make sure that " + str(self.nzb_folder) + " exists and is the correct location.")
        return
    self._log("Original Filename: " + ofilename)
    self._log("Original Extension: " + ext)
    logger.fdebug("Original Filname: " + str(ofilename))
    logger.fdebug("Original Extension: " + str(ext))
    if mylar.FILE_FORMAT == '' or not mylar.RENAME_FILES:
        self._log("Rename Files isn't enabled...keeping original filename.")
        logger.fdebug("Rename Files isn't enabled - keeping original filename.")
        #check if extension is in nzb_name - will screw up otherwise
        if ofilename.lower().endswith(extensions):
            nfilename = ofilename[:-4]
        else:
            nfilename = ofilename
    else:
        nfilename = helpers.replace_all(chunk_file_format, file_values)
        if mylar.REPLACE_SPACES:
            #mylar.REPLACE_CHAR ...determines what to replace spaces with underscore or dot
            nfilename = nfilename.replace(' ', mylar.REPLACE_CHAR)
    # strip characters that are unsafe/confusing in filenames.
    nfilename = re.sub('[\,\:\?]', '', nfilename)
    nfilename = re.sub('[\/]', '-', nfilename)
    self._log("New Filename: " + nfilename)
    logger.fdebug("New Filename: " + str(nfilename))
    #src = os.path.join(self.nzb_folder, ofilename)
    src = os.path.join(odir, ofilename)
    filechecker.validateAndCreateDirectory(comlocation, True)
    if mylar.LOWERCASE_FILENAMES:
        dst = (comlocation + "/" + nfilename + ext).lower()
    else:
        # only the extension is lowercased when LOWERCASE_FILENAMES is off.
        dst = comlocation + "/" + nfilename + ext.lower()
    self._log("Source:" + src)
    self._log("Destination:" + dst)
    logger.fdebug("Source: " + str(src))
    logger.fdebug("Destination: " + str(dst))
    if ml is None:
        #downtype = for use with updater on history table to set status to 'Downloaded'
        downtype = 'True'
        #non-manual run moving/deleting...
        logger.fdebug('self.nzb_folder: ' + self.nzb_folder)
        logger.fdebug('odir: ' + str(odir))
        logger.fdebug('ofilename:' + str(ofilename))
        logger.fdebug('nfilename:' + str(nfilename + ext))
        if mylar.RENAME_FILES:
            if str(ofilename) != str(nfilename + ext):
                logger.fdebug("Renaming " + os.path.join(odir, str(ofilename)) + " ..to.. " + os.path.join(odir,str(nfilename + ext)))
                os.rename(os.path.join(odir, str(ofilename)), os.path.join(odir,str(nfilename + ext)))
            else:
                logger.fdebug('filename is identical as original, not renaming.')
        #src = os.path.join(self.nzb_folder, str(nfilename + ext))
        src = os.path.join(odir, str(nfilename + ext))
        try:
            shutil.move(src, dst)
        except (OSError, IOError):
            self._log("Failed to move directory - check directories and manually re-run.")
            self._log("Post-Processing ABORTED.")
            return
        #tidyup old path
        # the whole download folder is removed on a normal run.
        try:
            shutil.rmtree(self.nzb_folder)
        except (OSError, IOError):
            self._log("Failed to remove temporary directory - check directory and manually re-run.")
            self._log("Post-Processing ABORTED.")
            return
        self._log("Removed temporary directory : " + str(self.nzb_folder))
    else:
        #downtype = for use with updater on history table to set status to 'Post-Processed'
        downtype = 'PP'
        #Manual Run, this is the portion.
        if mylar.RENAME_FILES:
            if str(ofilename) != str(nfilename + ext):
                logger.fdebug("Renaming " + os.path.join(self.nzb_folder, str(ofilename)) + " ..to.. " + os.path.join(self.nzb_folder,str(nfilename + ext)))
                os.rename(os.path.join(odir, str(ofilename)), os.path.join(odir ,str(nfilename + ext)))
            else:
                logger.fdebug('filename is identical as original, not renaming.')
        src = os.path.join(odir, str(nfilename + ext))
        logger.fdebug('odir rename: ' + os.path.join(odir, str(ofilename)) + ' TO ' + os.path.join(odir, str(nfilename + ext)))
        logger.fdebug('odir src : ' + os.path.join(odir, str(nfilename + ext)))
        logger.fdebug("Moving " + src + " ... to ... " + dst)
        try:
            shutil.move(src, dst)
        except (OSError, IOError):
            logger.fdebug("Failed to move directory - check directories and manually re-run.")
            logger.fdebug("Post-Processing ABORTED.")
            return
        logger.fdebug("Successfully moved to : " + dst)
        #tidyup old path
        #try:
        #    os.remove(os.path.join(self.nzb_folder, str(ofilename)))
        #    logger.fdebug("Deleting : " + os.path.join(self.nzb_folder, str(ofilename)))
        #except (OSError, IOError):
        #    logger.fdebug("Failed to remove temporary directory - check directory and manually re-run.")
        #    logger.fdebug("Post-Processing ABORTED.")
        #    return
        #logger.fdebug("Removed temporary directory : " + str(self.nzb_folder))
    #Hopefully set permissions on downloaded file
    try:
        # CHMOD_FILE is stored as an octal string (e.g. '0644').
        permission = int(mylar.CHMOD_FILE, 8)
        os.umask(0)
        os.chmod(dst.rstrip(), permission)
    except OSError:
        logger.error('Failed to change file permissions. Ensure that the user running Mylar has proper permissions to change permissions in : ' + dst)
        logger.fdebug('Continuing post-processing but unable to change file permissions in ' + dst)
    #delete entry from nzblog table
    myDB.action('DELETE from nzblog WHERE issueid=?', [issueid])
    #update snatched table to change status to Downloaded
    if annchk == "no":
        updater.foundsearch(comicid, issueid, down=downtype)
        dispiss = 'issue: ' + str(issuenumOG)
    else:
        updater.foundsearch(comicid, issueid, mode='want_ann', down=downtype)
        dispiss = 'annual issue: ' + str(issuenumOG)
    #force rescan of files
    updater.forceRescan(comicid)
    logger.info(u"Post-Processing completed for: " + series + " " + dispiss )
    self._log(u"Post Processing SUCCESSFUL! ")
    if mylar.WEEKFOLDER:
        #if enabled, will *copy* the post-processed file to the weeklypull list folder for the given week.
        weeklypull.weekly_singlecopy(comicid,issuenum,str(nfilename+ext),dst)
    # retrieve/create the corresponding comic objects
    if mylar.ENABLE_EXTRA_SCRIPTS:
        folderp = str(dst) #folder location after move/rename
        nzbn = self.nzb_name #original nzb name
        filen = str(nfilename + ext) #new filename
        #name, comicyear, comicid , issueid, issueyear, issue, publisher
        #create the dic and send it.
        seriesmeta = []
        seriesmetadata = {}
        seriesmeta.append({
                    'name': series,
                    'comicyear': seriesyear,
                    'comicid': comicid,
                    'issueid': issueid,
                    'issueyear': issueyear,
                    'issue': issuenum,
                    'publisher': publisher
                    })
        seriesmetadata['seriesmeta'] = seriesmeta
        self._run_extra_scripts(nzbn, self.nzb_folder, filen, folderp, seriesmetadata )
    if ml is not None:
        #we only need to return self.log if it's a manual run and it's not a snatched torrent
        if snatchedtorrent:
            #manual run + snatched torrent
            # fall through so the notifiers below still fire.
            pass
        else:
            #manual run + not snatched torrent (or normal manual-run)
            return self.log
    if annchk == "no":
        prline = series + '(' + issueyear + ') - issue #' + issuenumOG
    else:
        prline = series + ' Annual (' + issueyear + ') - issue #' + issuenumOG
    prline2 = 'Mylar has downloaded and post-processed: ' + prline
    # fire each enabled notifier with the completion message.
    if mylar.PROWL_ENABLED:
        pushmessage = prline
        logger.info(u"Prowl request")
        prowl = notifiers.PROWL()
        prowl.notify(pushmessage,"Download and Postprocessing completed")
    if mylar.NMA_ENABLED:
        nma = notifiers.NMA()
        nma.notify(prline=prline, prline2=prline2)
    if mylar.PUSHOVER_ENABLED:
        logger.info(u"Pushover request")
        pushover = notifiers.PUSHOVER()
        pushover.notify(prline, "Download and Post-Processing completed")
    if mylar.BOXCAR_ENABLED:
        boxcar = notifiers.BOXCAR()
        boxcar.notify(prline=prline, prline2=prline2)
    if mylar.PUSHBULLET_ENABLED:
        pushbullet = notifiers.PUSHBULLET()
        pushbullet.notify(prline=prline, prline2=prline2)
    return self.log
def addComictoDB(comicid,mismatch=None,pullupd=None,imported=None,ogcname=None): # Putting this here to get around the circular import. Will try to use this to update images at later date. # from mylar import cache myDB = db.DBConnection() # We need the current minimal info in the database instantly # so we don't throw a 500 error when we redirect to the artistPage controlValueDict = {"ComicID": comicid} dbcomic = myDB.action('SELECT * FROM comics WHERE ComicID=?', [comicid]).fetchone() if dbcomic is None: newValueDict = {"ComicName": "Comic ID: %s" % (comicid), "Status": "Loading"} comlocation = None oldcomversion = None else: newValueDict = {"Status": "Loading"} comlocation = dbcomic['ComicLocation'] filechecker.validateAndCreateDirectory(comlocation, True) oldcomversion = dbcomic['ComicVersion'] #store the comicversion and chk if it exists before hammering. myDB.upsert("comics", newValueDict, controlValueDict) #run the re-sortorder here in order to properly display the page if pullupd is None: helpers.ComicSort(comicorder=mylar.COMICSORT, imported=comicid) # we need to lookup the info for the requested ComicID in full now comic = cv.getComic(comicid,'comic') #comic = myDB.action('SELECT * FROM comics WHERE ComicID=?', [comicid]).fetchone() if not comic: logger.warn("Error fetching comic. ID for : " + comicid) if dbcomic is None: newValueDict = {"ComicName": "Fetch failed, try refreshing. 
(%s)" % (comicid), "Status": "Active"} else: newValueDict = {"Status": "Active"} myDB.upsert("comics", newValueDict, controlValueDict) return if comic['ComicName'].startswith('The '): sortname = comic['ComicName'][4:] else: sortname = comic['ComicName'] logger.info(u"Now adding/updating: " + comic['ComicName']) #--Now that we know ComicName, let's try some scraping #--Start # gcd will return issue details (most importantly publishing date) if not mylar.CV_ONLY: if mismatch == "no" or mismatch is None: gcdinfo=parseit.GCDScraper(comic['ComicName'], comic['ComicYear'], comic['ComicIssues'], comicid) #print ("gcdinfo: " + str(gcdinfo)) mismatch_com = "no" if gcdinfo == "No Match": updater.no_searchresults(comicid) nomatch = "true" logger.info(u"There was an error when trying to add " + comic['ComicName'] + " (" + comic['ComicYear'] + ")" ) return nomatch else: mismatch_com = "yes" #print ("gcdinfo:" + str(gcdinfo)) elif mismatch == "yes": CV_EXcomicid = myDB.action("SELECT * from exceptions WHERE ComicID=?", [comicid]).fetchone() if CV_EXcomicid['variloop'] is None: pass else: vari_loop = CV_EXcomicid['variloop'] NewComicID = CV_EXcomicid['NewComicID'] gcomicid = CV_EXcomicid['GComicID'] resultURL = "/series/" + str(NewComicID) + "/" #print ("variloop" + str(CV_EXcomicid['variloop'])) #if vari_loop == '99': gcdinfo = parseit.GCDdetails(comseries=None, resultURL=resultURL, vari_loop=0, ComicID=comicid, TotalIssues=0, issvariation="no", resultPublished=None) logger.info(u"Sucessfully retrieved details for " + comic['ComicName'] ) # print ("Series Published" + parseit.resultPublished) CV_NoYearGiven = "no" #if the SeriesYear returned by CV is blank or none (0000), let's use the gcd one. if comic['ComicYear'] is None or comic['ComicYear'] == '0000': if mylar.CV_ONLY: #we'll defer this until later when we grab all the issues and then figure it out logger.info("Uh-oh. I can't find a Series Year for this series. 
I'm going to try analyzing deeper.") SeriesYear = cv.getComic(comicid,'firstissue',comic['FirstIssueID']) if SeriesYear == '0000': logger.info("Ok - I couldn't find a Series Year at all. Loading in the issue data now and will figure out the Series Year.") CV_NoYearGiven = "yes" issued = cv.getComic(comicid,'issue') SeriesYear = issued['firstdate'][:4] else: SeriesYear = gcdinfo['SeriesYear'] else: SeriesYear = comic['ComicYear'] #let's do the Annual check here. if mylar.ANNUALS_ON: annualcomicname = re.sub('[\,\:]', '', comic['ComicName']) annuals = comicbookdb.cbdb(annualcomicname, SeriesYear) print ("Number of Annuals returned: " + str(annuals['totalissues'])) nb = 0 while (nb <= int(annuals['totalissues'])): try: annualval = annuals['annualslist'][nb] except IndexError: break newCtrl = {"IssueID": str(annualval['AnnualIssue'] + annualval['AnnualDate'])} newVals = {"Issue_Number": annualval['AnnualIssue'], "IssueDate": annualval['AnnualDate'], "IssueName": annualval['AnnualTitle'], "ComicID": comicid, "Status": "Skipped"} myDB.upsert("annuals", newVals, newCtrl) nb+=1 #parseit.annualCheck(gcomicid=gcdinfo['GCDComicID'], comicid=comicid, comicname=comic['ComicName'], comicyear=SeriesYear) #comic book location on machine # setup default location here if comlocation is None: # let's remove the non-standard characters here. u_comicnm = comic['ComicName'] u_comicname = u_comicnm.encode('ascii', 'ignore').strip() if ':' in u_comicname or '/' in u_comicname or ',' in u_comicname or '?' in u_comicname: comicdir = u_comicname if ':' in comicdir: comicdir = comicdir.replace(':','') if '/' in comicdir: comicdir = comicdir.replace('/','-') if ',' in comicdir: comicdir = comicdir.replace(',','') if '?' in comicdir: comicdir = comicdir.replace('?','') else: comicdir = u_comicname series = comicdir publisher = re.sub('!','',comic['ComicPublisher']) # thanks Boom! 
year = SeriesYear comversion = comic['ComicVersion'] if comversion is None: comversion = 'None' #if comversion is None, remove it so it doesn't populate with 'None' if comversion == 'None': chunk_f_f = re.sub('\$VolumeN','',mylar.FILE_FORMAT) chunk_f = re.compile(r'\s+') mylar.FILE_FORMAT = chunk_f.sub(' ', chunk_f_f) #do work to generate folder path values = {'$Series': series, '$Publisher': publisher, '$Year': year, '$series': series.lower(), '$publisher': publisher.lower(), '$VolumeY': 'V' + str(year), '$VolumeN': comversion } #print mylar.FOLDER_FORMAT #print 'working dir:' #print helpers.replace_all(mylar.FOLDER_FORMAT, values) if mylar.FOLDER_FORMAT == '': comlocation = mylar.DESTINATION_DIR + "/" + comicdir + " (" + SeriesYear + ")" else: comlocation = mylar.DESTINATION_DIR + "/" + helpers.replace_all(mylar.FOLDER_FORMAT, values) #comlocation = mylar.DESTINATION_DIR + "/" + comicdir + " (" + comic['ComicYear'] + ")" if mylar.DESTINATION_DIR == "": logger.error(u"There is no general directory specified - please specify in Config/Post-Processing.") return if mylar.REPLACE_SPACES: #mylar.REPLACE_CHAR ...determines what to replace spaces with underscore or dot comlocation = comlocation.replace(' ', mylar.REPLACE_CHAR) #moved this out of the above loop so it will chk for existance of comlocation in case moved #if it doesn't exist - create it (otherwise will bugger up later on) if os.path.isdir(str(comlocation)): logger.info(u"Directory (" + str(comlocation) + ") already exists! 
Continuing...") else: #print ("Directory doesn't exist!") #try: # os.makedirs(str(comlocation)) # logger.info(u"Directory successfully created at: " + str(comlocation)) #except OSError: # logger.error(u"Could not create comicdir : " + str(comlocation)) filechecker.validateAndCreateDirectory(comlocation, True) #try to account for CV not updating new issues as fast as GCD #seems CV doesn't update total counts #comicIssues = gcdinfo['totalissues'] comicIssues = comic['ComicIssues'] if not mylar.CV_ONLY: if gcdinfo['gcdvariation'] == "cv": comicIssues = str(int(comic['ComicIssues']) + 1) #let's download the image... if os.path.exists(mylar.CACHE_DIR):pass else: #let's make the dir. try: os.makedirs(str(mylar.CACHE_DIR)) logger.info(u"Cache Directory successfully created at: " + str(mylar.CACHE_DIR)) except OSError: logger.error('Could not create cache dir. Check permissions of cache dir: ' + str(mylar.CACHE_DIR)) coverfile = os.path.join(mylar.CACHE_DIR, str(comicid) + ".jpg") #try: urllib.urlretrieve(str(comic['ComicImage']), str(coverfile)) try: with open(str(coverfile)) as f: ComicImage = os.path.join('cache',str(comicid) + ".jpg") #this is for Firefox when outside the LAN...it works, but I don't know how to implement it #without breaking the normal flow for inside the LAN (above) #ComicImage = "http://" + str(mylar.HTTP_HOST) + ":" + str(mylar.HTTP_PORT) + "/cache/" + str(comicid) + ".jpg" logger.info(u"Sucessfully retrieved cover for " + comic['ComicName']) #if the comic cover local is checked, save a cover.jpg to the series folder. if mylar.COMIC_COVER_LOCAL: comiclocal = os.path.join(str(comlocation) + "/cover.jpg") shutil.copy(ComicImage,comiclocal) except IOError as e: logger.error(u"Unable to save cover locally at this time.") if oldcomversion is None: if comic['ComicVersion'].isdigit(): comicVol = "v" + comic['ComicVersion'] else: comicVol = None else: comicVol = oldcomversion #for description ... 
#Cdesc = helpers.cleanhtml(comic['ComicDescription']) #cdes_find = Cdesc.find("Collected") #cdes_removed = Cdesc[:cdes_find] #print cdes_removed controlValueDict = {"ComicID": comicid} newValueDict = {"ComicName": comic['ComicName'], "ComicSortName": sortname, "ComicYear": SeriesYear, "ComicImage": ComicImage, "Total": comicIssues, "ComicVersion": comicVol, "ComicLocation": comlocation, "ComicPublisher": comic['ComicPublisher'], #"Description": Cdesc.decode('utf-8', 'replace'), "DetailURL": comic['ComicURL'], # "ComicPublished": gcdinfo['resultPublished'], "ComicPublished": 'Unknown', "DateAdded": helpers.today(), "Status": "Loading"} myDB.upsert("comics", newValueDict, controlValueDict) #comicsort here... #run the re-sortorder here in order to properly display the page if pullupd is None: helpers.ComicSort(sequence='update') if CV_NoYearGiven == 'no': #if set to 'no' then we haven't pulled down the issues, otherwise we did it already issued = cv.getComic(comicid,'issue') logger.info(u"Sucessfully retrieved issue details for " + comic['ComicName'] ) n = 0 iscnt = int(comicIssues) issid = [] issnum = [] issname = [] issdate = [] int_issnum = [] #let's start issue #'s at 0 -- thanks to DC for the new 52 reboot! 
:) latestiss = "0" latestdate = "0000-00-00" firstiss = "10000000" firstdate = "2099-00-00" #print ("total issues:" + str(iscnt)) #---removed NEW code here--- logger.info(u"Now adding/updating issues for " + comic['ComicName']) if not mylar.CV_ONLY: #fccnt = int(fc['comiccount']) #logger.info(u"Found " + str(fccnt) + "/" + str(iscnt) + " issues of " + comic['ComicName'] + "...verifying") #fcnew = [] if iscnt > 0: #if a series is brand new, it wont have any issues/details yet so skip this part while (n <= iscnt): #---NEW.code try: firstval = issued['issuechoice'][n] except IndexError: break cleanname = helpers.cleanName(firstval['Issue_Name']) issid = str(firstval['Issue_ID']) issnum = str(firstval['Issue_Number']) #print ("issnum: " + str(issnum)) issname = cleanname if '.' in str(issnum): issn_st = str(issnum).find('.') issn_b4dec = str(issnum)[:issn_st] #if the length of decimal is only 1 digit, assume it's a tenth dec_is = str(issnum)[issn_st + 1:] if len(dec_is) == 1: dec_nisval = int(dec_is) * 10 iss_naftdec = str(dec_nisval) if len(dec_is) == 2: dec_nisval = int(dec_is) iss_naftdec = str(dec_nisval) iss_issue = issn_b4dec + "." 
+ iss_naftdec issis = (int(issn_b4dec) * 1000) + dec_nisval elif 'au' in issnum.lower(): print ("au detected") stau = issnum.lower().find('au') issnum_au = issnum[:stau] print ("issnum_au: " + str(issnum_au)) #account for Age of Ultron mucked up numbering issis = str(int(issnum_au) * 1000) + 'AU' else: issis = int(issnum) * 1000 bb = 0 while (bb <= iscnt): try: gcdval = gcdinfo['gcdchoice'][bb] #print ("gcdval: " + str(gcdval)) except IndexError: #account for gcd variation here if gcdinfo['gcdvariation'] == 'gcd': #logger.fdebug("gcd-variation accounted for.") issdate = '0000-00-00' int_issnum = int ( issis / 1000 ) break if 'nn' in str(gcdval['GCDIssue']): #no number detected - GN, TP or the like logger.warn(u"Non Series detected (Graphic Novel, etc) - cannot proceed at this time.") updater.no_searchresults(comicid) return elif 'au' in gcdval['GCDIssue'].lower(): #account for Age of Ultron mucked up numbering - this is in format of 5AU.00 gstau = gcdval['GCDIssue'].lower().find('au') gcdis_au = gcdval['GCDIssue'][:gstau] gcdis = str(int(gcdis_au) * 1000) + 'AU' elif '.' in str(gcdval['GCDIssue']): #logger.fdebug("g-issue:" + str(gcdval['GCDIssue'])) issst = str(gcdval['GCDIssue']).find('.') #logger.fdebug("issst:" + str(issst)) issb4dec = str(gcdval['GCDIssue'])[:issst] #logger.fdebug("issb4dec:" + str(issb4dec)) #if the length of decimal is only 1 digit, assume it's a tenth decis = str(gcdval['GCDIssue'])[issst+1:] #logger.fdebug("decis:" + str(decis)) if len(decis) == 1: decisval = int(decis) * 10 issaftdec = str(decisval) if len(decis) == 2: decisval = int(decis) issaftdec = str(decisval) gcd_issue = issb4dec + "." + issaftdec #logger.fdebug("gcd_issue:" + str(gcd_issue)) try: gcdis = (int(issb4dec) * 1000) + decisval except ValueError: logger.error("This has no issue #'s for me to get - Either a Graphic Novel or one-shot. 
This feature to allow these will be added in the near future.") updater.no_searchresults(comicid) return else: gcdis = int(str(gcdval['GCDIssue'])) * 1000 if gcdis == issis: issdate = str(gcdval['GCDDate']) if str(issis).isdigit(): int_issnum = int( gcdis / 1000 ) else: if 'au' in issis.lower(): int_issnum = str(int(gcdis[:-2]) / 1000) + 'AU' else: logger.error("this has an alpha-numeric in the issue # which I cannot account for. Get on github and log the issue for evilhero.") return #get the latest issue / date using the date. if gcdval['GCDDate'] > latestdate: latestiss = str(issnum) latestdate = str(gcdval['GCDDate']) break #bb = iscnt bb+=1 #print("(" + str(n) + ") IssueID: " + str(issid) + " IssueNo: " + str(issnum) + " Date" + str(issdate)) #---END.NEW. # check if the issue already exists iss_exists = myDB.action('SELECT * from issues WHERE IssueID=?', [issid]).fetchone() # Only change the status & add DateAdded if the issue is already in the database if iss_exists is None: newValueDict['DateAdded'] = helpers.today() controlValueDict = {"IssueID": issid} newValueDict = {"ComicID": comicid, "ComicName": comic['ComicName'], "IssueName": issname, "Issue_Number": issnum, "IssueDate": issdate, "Int_IssueNumber": int_issnum } if mylar.AUTOWANT_ALL: newValueDict['Status'] = "Wanted" elif issdate > helpers.today() and mylar.AUTOWANT_UPCOMING: newValueDict['Status'] = "Wanted" else: newValueDict['Status'] = "Skipped" if iss_exists: #print ("Existing status : " + str(iss_exists['Status'])) newValueDict['Status'] = iss_exists['Status'] try: myDB.upsert("issues", newValueDict, controlValueDict) except sqlite3.InterfaceError, e: #raise sqlite3.InterfaceError(e) logger.error("MAJOR error trying to get issue data, this is most likey a MULTI-VOLUME series and you need to use the custom_exceptions.csv file.") myDB.action("DELETE FROM comics WHERE ComicID=?", [comicid]) return n+=1
else: mylar.SAFESTART = False if args.noweekly: mylar.NOWEEKLY = True else: mylar.NOWEEKLY = False # Try to create the DATA_DIR if it doesn't exist #if not os.path.exists(mylar.DATA_DIR): # try: # os.makedirs(mylar.DATA_DIR) # except OSError: # raise SystemExit('Could not create data directory: ' + mylar.DATA_DIR + '. Exiting....') filechecker.validateAndCreateDirectory(mylar.DATA_DIR, True) # Make sure the DATA_DIR is writeable if not os.access(mylar.DATA_DIR, os.W_OK): raise SystemExit('Cannot write to the data directory: ' + mylar.DATA_DIR + '. Exiting...') # Put the database in the DATA_DIR mylar.DB_FILE = os.path.join(mylar.DATA_DIR, 'mylar.db') # backup the db and configs before they load. if args.backup: print '[AUTO-BACKUP] Backing up .db and config.ini files for safety.' backupdir = os.path.join(mylar.DATA_DIR, 'backup') try: os.makedirs(backupdir)
if filename.lower().endswith(extensions): ofilename = filename path, ext = os.path.splitext(ofilename) if 'S' in sandwich: if mylar.STORYARCDIR: grdst = storyarcd else: grdst = mylar.DESTINATION_DIR else: if mylar.GRABBAG_DIR: grdst = mylar.GRABBAG_DIR else: grdst = mylar.DESTINATION_DIR filechecker.validateAndCreateDirectory(grdst, True) if 'S' in sandwich: #if from a StoryArc, check to see if we're appending the ReadingOrder to the filename if mylar.READ2FILENAME: issuearcid = re.sub('S', '', issueid) logger.fdebug('issuearcid:' + str(issuearcid)) arcdata = myDB.selectone("SELECT * FROM readinglist WHERE IssueArcID=?",[issuearcid]).fetchone() logger.fdebug('readingorder#: ' + str(arcdata['ReadingOrder'])) if int(arcdata['ReadingOrder']) < 10: readord = "00" + str(arcdata['ReadingOrder']) elif int(arcdata['ReadingOrder']) > 10 and int(arcdata['ReadingOrder']) < 99: readord = "0" + str(arcdata['ReadingOrder']) else: readord = str(arcdata['ReadingOrder']) dfilename = str(readord) + "-" + ofilename else: dfilename = ofilename grab_dst = os.path.join(grdst, dfilename)
def Process_next(self, comicid, issueid, issuenumOG, ml=None):
    """Post-process a single downloaded issue for a given comic.

    Looks the issue up in the DB (falling back to the annuals table),
    derives a zero-padded "pretty" issue number, optionally meta-tags the
    archive, renames it per the configured file format, moves it into the
    comic's library location, updates DB status, and fires notifications.

    Parameters:
        comicid     -- ComicID of the series being processed.
        issueid     -- IssueID of the issue being processed.
        issuenumOG  -- original (display) issue number, used for logging
                       and notifications.
        ml          -- manual-run record (dict-like with 'ComicLocation');
                       None for a normal (non-manual) post-process run.

    Returns:
        self.log on completion or on a corrupt-archive abort; bare return
        (None) on filename/move failures.
    """
    annchk = "no"
    extensions = ('.cbr', '.cbz')
    myDB = db.DBConnection()
    comicnzb = myDB.action("SELECT * from comics WHERE comicid=?", [comicid]).fetchone()
    issuenzb = myDB.action(
        "SELECT * from issues WHERE issueid=? AND comicid=? AND ComicName NOT NULL",
        [issueid, comicid]).fetchone()
    logger.fdebug('issueid: ' + str(issueid))
    logger.fdebug('issuenumOG: ' + str(issuenumOG))
    # Not in the issues table -> assume it's an annual.
    if issuenzb is None:
        issuenzb = myDB.action(
            "SELECT * from annuals WHERE issueid=? and comicid=?",
            [issueid, comicid]).fetchone()
        annchk = "yes"
    #issueno = str(issuenum).split('.')[0]
    #new CV API - removed all decimals...here we go AGAIN!
    issuenum = issuenzb['Issue_Number']
    # Strip known alpha suffixes (AU / AI / INH / NOW) off the issue number,
    # remembering them in issue_except so they can be re-appended later.
    issue_except = 'None'
    if 'au' in issuenum.lower() and issuenum[:1].isdigit():
        issuenum = re.sub("[^0-9]", "", issuenum)
        issue_except = ' AU'
    elif 'ai' in issuenum.lower() and issuenum[:1].isdigit():
        issuenum = re.sub("[^0-9]", "", issuenum)
        issue_except = ' AI'
    elif 'inh' in issuenum.lower() and issuenum[:1].isdigit():
        issuenum = re.sub("[^0-9]", "", issuenum)
        issue_except = '.INH'
    elif 'now' in issuenum.lower() and issuenum[:1].isdigit():
        if '!' in issuenum:
            issuenum = re.sub('\!', '', issuenum)
        issuenum = re.sub("[^0-9]", "", issuenum)
        issue_except = '.NOW'
    # Split a decimal issue number (e.g. "12.5") into its whole part
    # (iss_b4dec) and decimal part (iss_decval).
    if '.' in issuenum:
        iss_find = issuenum.find('.')
        iss_b4dec = issuenum[:iss_find]
        iss_decval = issuenum[iss_find + 1:]
        if int(iss_decval) == 0:
            # ".0" carries no information - treat as a whole number.
            iss = iss_b4dec
            issdec = int(iss_decval)
            issueno = str(iss)
            self._log("Issue Number: " + str(issueno), logger.DEBUG)
            logger.fdebug("Issue Number: " + str(issueno))
        else:
            if len(iss_decval) == 1:
                iss = iss_b4dec + "." + iss_decval
                issdec = int(iss_decval) * 10
            else:
                iss = iss_b4dec + "." + iss_decval.rstrip('0')
                issdec = int(iss_decval.rstrip('0')) * 10
            issueno = iss_b4dec
            self._log("Issue Number: " + str(iss), logger.DEBUG)
            logger.fdebug("Issue Number: " + str(iss))
    else:
        iss = issuenum
        issueno = str(iss)
    # issue zero-suppression here
    if mylar.ZERO_LEVEL == "0":
        zeroadd = ""
    else:
        if mylar.ZERO_LEVEL_N == "none":
            zeroadd = ""
        elif mylar.ZERO_LEVEL_N == "0x":
            zeroadd = "0"
        elif mylar.ZERO_LEVEL_N == "00x":
            zeroadd = "00"
    logger.fdebug("Zero Suppression set to : " + str(mylar.ZERO_LEVEL_N))
    # NOTE(review): this compares a str to an int, which is always True on
    # Python 2 (so the final "length error" fallback below is unreachable).
    # Probably meant len(issueno) > 1 - confirm before changing, since the
    # current behavior is what single-digit issues rely on.
    if str(len(issueno)) > 1:
        if int(issueno) < 10:
            self._log("issue detected less than 10", logger.DEBUG)
            if '.' in iss:
                if int(iss_decval) > 0:
                    issueno = str(iss)
                    prettycomiss = str(zeroadd) + str(iss)
                else:
                    prettycomiss = str(zeroadd) + str(int(issueno))
            else:
                prettycomiss = str(zeroadd) + str(iss)
            if issue_except != 'None':
                prettycomiss = str(prettycomiss) + issue_except
            self._log(
                "Zero level supplement set to " + str(mylar.ZERO_LEVEL_N) +
                ". Issue will be set as : " + str(prettycomiss), logger.DEBUG)
        elif int(issueno) >= 10 and int(issueno) < 100:
            self._log("issue detected greater than 10, but less than 100", logger.DEBUG)
            # 10-99 needs at most one pad digit regardless of 00x setting.
            if mylar.ZERO_LEVEL_N == "none":
                zeroadd = ""
            else:
                zeroadd = "0"
            if '.' in iss:
                if int(iss_decval) > 0:
                    issueno = str(iss)
                    prettycomiss = str(zeroadd) + str(iss)
                else:
                    prettycomiss = str(zeroadd) + str(int(issueno))
            else:
                prettycomiss = str(zeroadd) + str(iss)
            if issue_except != 'None':
                prettycomiss = str(prettycomiss) + issue_except
            self._log(
                "Zero level supplement set to " + str(mylar.ZERO_LEVEL_N) +
                ".Issue will be set as : " + str(prettycomiss), logger.DEBUG)
        else:
            # >= 100: no zero padding required.
            self._log("issue detected greater than 100", logger.DEBUG)
            if '.' in iss:
                if int(iss_decval) > 0:
                    issueno = str(iss)
            prettycomiss = str(issueno)
            if issue_except != 'None':
                prettycomiss = str(prettycomiss) + issue_except
            self._log(
                "Zero level supplement set to " + str(mylar.ZERO_LEVEL_N) +
                ". Issue will be set as : " + str(prettycomiss), logger.DEBUG)
    else:
        prettycomiss = str(issueno)
        self._log(
            "issue length error - cannot determine length. Defaulting to None: " +
            str(prettycomiss), logger.DEBUG)
    if annchk == "yes":
        self._log("Annual detected.")
    logger.fdebug("Pretty Comic Issue is : " + str(prettycomiss))
    # Pull rename tokens ($Year, $month, publisher, series, ...) from the DB rows.
    issueyear = issuenzb['IssueDate'][:4]
    self._log("Issue Year: " + str(issueyear), logger.DEBUG)
    logger.fdebug("Issue Year : " + str(issueyear))
    month = issuenzb['IssueDate'][5:7].replace('-', '').strip()
    month_name = helpers.fullmonth(month)
    # comicnzb= myDB.action("SELECT * from comics WHERE comicid=?", [comicid]).fetchone()
    publisher = comicnzb['ComicPublisher']
    self._log("Publisher: " + publisher, logger.DEBUG)
    logger.fdebug("Publisher: " + str(publisher))
    #we need to un-unicode this to make sure we can write the filenames properly for spec.chars
    series = comicnzb['ComicName'].encode('ascii', 'ignore').strip()
    self._log("Series: " + series, logger.DEBUG)
    logger.fdebug("Series: " + str(series))
    seriesyear = comicnzb['ComicYear']
    self._log("Year: " + seriesyear, logger.DEBUG)
    logger.fdebug("Year: " + str(seriesyear))
    comlocation = comicnzb['ComicLocation']
    self._log("Comic Location: " + comlocation, logger.DEBUG)
    logger.fdebug("Comic Location: " + str(comlocation))
    comversion = comicnzb['ComicVersion']
    self._log("Comic Version: " + str(comversion), logger.DEBUG)
    logger.fdebug("Comic Version: " + str(comversion))
    if comversion is None:
        comversion = 'None'
    #if comversion is None, remove it so it doesn't populate with 'None'
    if comversion == 'None':
        chunk_f_f = re.sub('\$VolumeN', '', mylar.FILE_FORMAT)
        chunk_f = re.compile(r'\s+')
        chunk_file_format = chunk_f.sub(' ', chunk_f_f)
        self._log(
            "No version # found for series - tag will not be available for renaming.",
            logger.DEBUG)
        logger.fdebug("No version # found for series, removing from filename")
        logger.fdebug("new format is now: " + str(chunk_file_format))
    else:
        chunk_file_format = mylar.FILE_FORMAT
    if annchk == "no":
        # Not an annual: drop the $Annual token from the rename format.
        chunk_f_f = re.sub('\$Annual', '', chunk_file_format)
        chunk_f = re.compile(r'\s+')
        chunk_file_format = chunk_f.sub(' ', chunk_f_f)
        logger.fdebug('not an annual - removing from filename paramaters')
        logger.fdebug('new format: ' + str(chunk_file_format))
    else:
        logger.fdebug('chunk_file_format is: ' + str(chunk_file_format))
        if '$Annual' not in chunk_file_format:
            #if it's an annual, but $Annual isn't specified in file_format, we need to
            #force it in there, by default in the format of $Annual $Issue
            prettycomiss = "Annual " + str(prettycomiss)
            logger.fdebug('prettycomiss: ' + str(prettycomiss))
    ofilename = None
    #if meta-tagging is not enabled, we need to declare the check as being fail
    #if meta-tagging is enabled, it gets changed just below to a default of pass
    pcheck = "fail"
    #tag the meta.
    if mylar.ENABLE_META:
        self._log("Metatagging enabled - proceeding...")
        logger.fdebug("Metatagging enabled - proceeding...")
        pcheck = "pass"
        try:
            import cmtagmylar
            if ml is None:
                pcheck = cmtagmylar.run(self.nzb_folder, issueid=issueid)
            else:
                pcheck = cmtagmylar.run(self.nzb_folder, issueid=issueid,
                                        manual="yes", filename=ml['ComicLocation'])
        except ImportError:
            logger.fdebug(
                "comictaggerlib not found on system. Ensure the ENTIRE lib directory is located within mylar/lib/comictaggerlib/")
            logger.fdebug("continuing with PostProcessing, but I'm not using metadata.")
            pcheck = "fail"
        if pcheck == "fail":
            self._log(
                "Unable to write metadata successfully - check mylar.log file. Attempting to continue without tagging...")
            logger.fdebug(
                "Unable to write metadata successfully - check mylar.log file. Attempting to continue without tagging...")
        elif pcheck == "unrar error":
            # Corrupt archive: abort this file so a different copy can be retried.
            self._log(
                "This is a corrupt archive - whether CRC errors or it's incomplete. Marking as BAD, and retrying a different copy.")
            logger.error(
                "This is a corrupt archive - whether CRC errors or it's incomplete. Marking as BAD, and retrying a different copy.")
            return self.log
        else:
            # pcheck holds the path to the tagged .cbz on success.
            otofilename = pcheck
            self._log("Sucessfully wrote metadata to .cbz - Continuing..")
            logger.fdebug("Sucessfully wrote metadata to .cbz (" + str(otofilename) + ") - Continuing..")
    #Run Pre-script
    if mylar.ENABLE_PRE_SCRIPTS:
        nzbn = self.nzb_name  #original nzb name
        nzbf = self.nzb_folder  #original nzb folder
        #name, comicyear, comicid , issueid, issueyear, issue, publisher
        #create the dic and send it.
        seriesmeta = []
        seriesmetadata = {}
        seriesmeta.append({
            'name': series,
            'comicyear': seriesyear,
            'comicid': comicid,
            'issueid': issueid,
            'issueyear': issueyear,
            'issue': issuenum,
            'publisher': publisher
        })
        seriesmetadata['seriesmeta'] = seriesmeta
        self._run_pre_scripts(nzbn, nzbf, seriesmetadata)
    #rename file and move to new path
    #nfilename = series + " " + issueno + " (" + seriesyear + ")"
    file_values = {
        '$Series': series,
        '$Issue': prettycomiss,
        '$Year': issueyear,
        '$series': series.lower(),
        '$Publisher': publisher,
        '$publisher': publisher.lower(),
        '$VolumeY': 'V' + str(seriesyear),
        '$VolumeN': comversion,
        '$monthname': month_name,
        '$month': month,
        '$Annual': 'Annual'
    }
    #if it's a Manual Run, use the ml['ComicLocation'] for the exact filename.
    if ml is None:
        # Normal run: locate the first .cbr/.cbz inside the download folder.
        for root, dirnames, filenames in os.walk(self.nzb_folder):
            for filename in filenames:
                if filename.lower().endswith(extensions):
                    ofilename = filename
                    path, ext = os.path.splitext(ofilename)
    else:
        # Manual run: the exact file path is known; use the tagged copy
        # (otofilename from meta-tagging) unless tagging failed.
        if pcheck == "fail":
            otofilename = ml['ComicLocation']
        logger.fdebug('otofilename:' + str(otofilename))
        odir, ofilename = os.path.split(otofilename)
        logger.fdebug('ofilename: ' + str(ofilename))
        path, ext = os.path.splitext(ofilename)
        logger.fdebug('path: ' + str(path))
        logger.fdebug('ext:' + str(ext))
    if ofilename is None:
        logger.error(
            u"Aborting PostProcessing - the filename doesn't exist in the location given. Make sure that " +
            str(self.nzb_folder) + " exists and is the correct location.")
        return
    self._log("Original Filename: " + ofilename, logger.DEBUG)
    self._log("Original Extension: " + ext, logger.DEBUG)
    logger.fdebug("Original Filname: " + str(ofilename))
    logger.fdebug("Original Extension: " + str(ext))
    if mylar.FILE_FORMAT == '' or not mylar.RENAME_FILES:
        self._log("Rename Files isn't enabled...keeping original filename.", logger.DEBUG)
        logger.fdebug("Rename Files isn't enabled - keeping original filename.")
        #check if extension is in nzb_name - will screw up otherwise
        if ofilename.lower().endswith(extensions):
            nfilename = ofilename[:-4]
        else:
            nfilename = ofilename
    else:
        nfilename = helpers.replace_all(chunk_file_format, file_values)
        if mylar.REPLACE_SPACES:
            #mylar.REPLACE_CHAR ...determines what to replace spaces with underscore or dot
            nfilename = nfilename.replace(' ', mylar.REPLACE_CHAR)
    # Strip characters that are unsafe in filenames; '/' becomes '-'.
    nfilename = re.sub('[\,\:\?]', '', nfilename)
    nfilename = re.sub('[\/]', '-', nfilename)
    self._log("New Filename: " + nfilename, logger.DEBUG)
    logger.fdebug("New Filename: " + str(nfilename))
    src = os.path.join(self.nzb_folder, ofilename)
    filechecker.validateAndCreateDirectory(comlocation, True)
    if mylar.LOWERCASE_FILENAMES:
        dst = (comlocation + "/" + nfilename + ext).lower()
    else:
        dst = comlocation + "/" + nfilename + ext.lower()
    self._log("Source:" + src, logger.DEBUG)
    self._log("Destination:" + dst, logger.DEBUG)
    logger.fdebug("Source: " + str(src))
    logger.fdebug("Destination: " + str(dst))
    if ml is None:
        #non-manual run moving/deleting...
        logger.fdebug('self.nzb_folder: ' + self.nzb_folder)
        logger.fdebug('ofilename:' + str(ofilename))
        logger.fdebug('nfilename:' + str(nfilename + ext))
        os.rename(os.path.join(self.nzb_folder, str(ofilename)),
                  os.path.join(self.nzb_folder, str(nfilename + ext)))
        src = os.path.join(self.nzb_folder, str(nfilename + ext))
        try:
            shutil.move(src, dst)
        except (OSError, IOError):
            self._log("Failed to move directory - check directories and manually re-run.", logger.DEBUG)
            self._log("Post-Processing ABORTED.", logger.DEBUG)
            return
        #tidyup old path
        try:
            shutil.rmtree(self.nzb_folder)
        except (OSError, IOError):
            self._log("Failed to remove temporary directory - check directory and manually re-run.", logger.DEBUG)
            self._log("Post-Processing ABORTED.", logger.DEBUG)
            return
        self._log("Removed temporary directory : " + str(self.nzb_folder), logger.DEBUG)
    else:
        #Manual Run, this is the portion.
        logger.fdebug("Renaming " + os.path.join(self.nzb_folder, str(ofilename)) +
                      " ..to.. " + os.path.join(self.nzb_folder, str(nfilename + ext)))
        os.rename(os.path.join(self.nzb_folder, str(ofilename)),
                  os.path.join(self.nzb_folder, str(nfilename + ext)))
        src = os.path.join(self.nzb_folder, str(nfilename + ext))
        logger.fdebug("Moving " + src + " ... to ... " + dst)
        try:
            shutil.move(src, dst)
        except (OSError, IOError):
            logger.fdebug("Failed to move directory - check directories and manually re-run.")
            logger.fdebug("Post-Processing ABORTED.")
            return
        logger.fdebug("Successfully moved to : " + dst)
        #tidyup old path
        #try:
        #    os.remove(os.path.join(self.nzb_folder, str(ofilename)))
        #    logger.fdebug("Deleting : " + os.path.join(self.nzb_folder, str(ofilename)))
        #except (OSError, IOError):
        #    logger.fdebug("Failed to remove temporary directory - check directory and manually re-run.")
        #    logger.fdebug("Post-Processing ABORTED.")
        #    return
        #logger.fdebug("Removed temporary directory : " + str(self.nzb_folder))
    #delete entry from nzblog table
    myDB.action('DELETE from nzblog WHERE issueid=?', [issueid])
    #update snatched table to change status to Downloaded
    if annchk == "no":
        updater.foundsearch(comicid, issueid, down='True')
        dispiss = 'issue: ' + str(issuenumOG)
    else:
        updater.foundsearch(comicid, issueid, mode='want_ann', down='True')
        dispiss = 'annual issue: ' + str(issuenumOG)
    #force rescan of files
    updater.forceRescan(comicid)
    logger.info(u"Post-Processing completed for: " + series + " " + dispiss)
    self._log(u"Post Processing SUCCESSFULL! ", logger.DEBUG)
    # retrieve/create the corresponding comic objects
    if mylar.ENABLE_EXTRA_SCRIPTS:
        folderp = str(dst)  #folder location after move/rename
        nzbn = self.nzb_name  #original nzb name
        filen = str(nfilename + ext)  #new filename
        #name, comicyear, comicid , issueid, issueyear, issue, publisher
        #create the dic and send it.
        seriesmeta = []
        seriesmetadata = {}
        seriesmeta.append({
            'name': series,
            'comicyear': seriesyear,
            'comicid': comicid,
            'issueid': issueid,
            'issueyear': issueyear,
            'issue': issuenum,
            'publisher': publisher
        })
        seriesmetadata['seriesmeta'] = seriesmeta
        self._run_extra_scripts(nzbn, self.nzb_folder, filen, folderp, seriesmetadata)
    if ml is not None:
        # Manual runs skip notifications; caller aggregates the log.
        return self.log
    else:
        if mylar.PROWL_ENABLED:
            pushmessage = series + '(' + issueyear + ') - issue #' + issuenumOG
            logger.info(u"Prowl request")
            prowl = notifiers.PROWL()
            prowl.notify(pushmessage, "Download and Postprocessing completed")
        if mylar.NMA_ENABLED:
            nma = notifiers.NMA()
            nma.notify(series, str(issueyear), str(issuenumOG))
        if mylar.PUSHOVER_ENABLED:
            pushmessage = series + ' (' + str(issueyear) + ') - issue #' + str(issuenumOG)
            logger.info(u"Pushover request")
            pushover = notifiers.PUSHOVER()
            pushover.notify(pushmessage, "Download and Post-Processing completed")
        if mylar.BOXCAR_ENABLED:
            boxcar = notifiers.BOXCAR()
            boxcar.notify(series, str(issueyear), str(issuenumOG))
        return self.log
def update_metadata(self):
    """Write/refresh a series.json metadata file for every comic in self.comiclist.

    For each ComicID: optionally refresh the series from the API, load any
    existing series.json to preserve its description, derive series status,
    volume, year and book type from the DB row, then dump a version 1.0.1
    series.json into the comic's folder and mark seriesjsonPresent in the DB.

    Returns: None.
    """
    for cid in self.comiclist:
        if self.refreshSeries is True:
            updater.dbupdate(cid, calledfrom='json_api')
        myDB = db.DBConnection()
        comic = myDB.selectone('SELECT * FROM comics WHERE ComicID=?', [cid]).fetchone()
        if comic:
            description_load = None
            # Create the series folder first if configured to do so;
            # skip this comic entirely if the folder can't be made.
            if not os.path.exists(
                    comic['ComicLocation']) and mylar.CONFIG.CREATE_FOLDERS is True:
                try:
                    checkdirectory = filechecker.validateAndCreateDirectory(
                        comic['ComicLocation'], True)
                except Exception as e:
                    logger.warn(
                        '[%s] Unable to create series directory @ %s. Aborting updating of series.json'
                        % (e, comic['ComicLocation']))
                    continue
                else:
                    if checkdirectory is False:
                        logger.warn(
                            'Unable to create series directory @ %s. Aborting updating of series.json'
                            % (comic['ComicLocation']))
                        continue
            # Try to carry over the description from an existing series.json,
            # probing the layouts of schema 1.0.1, then 1.0, oldest-style last.
            if os.path.exists(
                    os.path.join(comic['ComicLocation'], 'series.json')):
                try:
                    with open(
                            os.path.join(comic['ComicLocation'],
                                         'series.json')) as j_file:
                        metainfo = json.load(j_file)
                    logger.fdebug('metainfo_loaded: %s' % (metainfo, ))
                    try:
                        # series.json version 1.0.1
                        description_load = metainfo['metadata'][
                            'description_text']
                    except Exception as e:
                        try:
                            # series.json version 1.0
                            description_load = metainfo['metadata'][0][
                                'description_text']
                        except Exception as e:
                            description_load = metainfo['metadata'][0][
                                'description']
                except Exception as e:
                    # Fall back to the formatted-description fields.
                    try:
                        description_load = metainfo['metadata'][
                            'description_formatted']
                    except Exception as e:
                        try:
                            description_load = metainfo['metadata'][0][
                                'description_formatted']
                        except Exception as e:
                            logger.info(
                                'No description found in metadata. Reloading from dB if available.[error: %s]'
                                % e)
            # Series status: Continuing if still publishing, or if the latest
            # issue is recent (< 55 days); otherwise Ended.
            c_date = datetime.date(int(comic['LatestDate'][:4]),
                                   int(comic['LatestDate'][5:7]), 1)
            n_date = datetime.date.today()
            recentchk = (n_date - c_date).days
            # NOTE(review): identity check against True - if NewPublish is
            # stored as an int (0/1) in sqlite this branch never fires; verify.
            if comic['NewPublish'] is True:
                seriesStatus = 'Continuing'
            else:
                #do this just incase and as an extra measure of accuracy hopefully.
                if recentchk < 55:
                    seriesStatus = 'Continuing'
                else:
                    seriesStatus = 'Ended'
            clean_issue_list = None
            if comic['Collects'] != 'None':
                clean_issue_list = comic['Collects']
            # Choose the description source; SERIESJSON_FILE_PRIORITY flips
            # the precedence between the file contents and the DB edit.
            if mylar.CONFIG.SERIESJSON_FILE_PRIORITY is True:
                if description_load is not None:
                    cdes_removed = re.sub(r'\n', '', description_load).strip()
                    cdes_formatted = description_load
                elif comic['DescriptionEdit'] is not None:
                    cdes_removed = re.sub(
                        r'\n', ' ', comic['DescriptionEdit']).strip()
                    cdes_formatted = comic['DescriptionEdit']
                else:
                    if comic['Description'] is not None:
                        cdes_removed = re.sub(
                            r'\n', '', comic['Description']).strip()
                    else:
                        cdes_removed = comic['Description']
                        logger.warn(
                            'Series does not have a description. Not populating, but you might need to do a Refresh Series to fix this'
                        )
                    cdes_formatted = comic['Description']
            else:
                if comic['DescriptionEdit'] is not None:
                    cdes_removed = re.sub(
                        r'\n', ' ', comic['DescriptionEdit']).strip()
                    cdes_formatted = comic['DescriptionEdit']
                elif description_load is not None:
                    cdes_removed = re.sub(r'\n', '', description_load).strip()
                    cdes_formatted = description_load
                else:
                    if comic['Description'] is not None:
                        cdes_removed = re.sub(
                            r'\n', '', comic['Description']).strip()
                    else:
                        cdes_removed = comic['Description']
                        logger.warn(
                            'Series does not have a description. Not populating, but you might need to do a Refresh Series to fix this'
                        )
                    cdes_formatted = comic['Description']
            # Normalize volume to an int (or None / default 1 per config).
            comicVol = comic['ComicVersion']
            if all(
                    [mylar.CONFIG.SETDEFAULTVOLUME is True, comicVol is None]):
                comicVol = 1
            elif comicVol is not None:
                if comicVol.isdigit():
                    comicVol = int(comicVol)
                    logger.info('Updated version to :' + str(comicVol))
                    if all([
                            mylar.CONFIG.SETDEFAULTVOLUME is False,
                            comicVol == 'v1'
                    ]):
                        comicVol = None
                else:
                    comicVol = int(re.sub('[^0-9]', '', comicVol).strip())
            # NOTE(review): 'issued' is not defined in this scope - this
            # fallback raises NameError when ComicYear is missing/invalid;
            # confirm where issued['firstdate'] should come from.
            if any([
                    comic['ComicYear'] is None, comic['ComicYear'] == '0000',
                    comic['ComicYear'][-1:] == '-'
            ]):
                SeriesYear = int(issued['firstdate'][:4])
            else:
                SeriesYear = int(comic['ComicYear'])
            csyear = comic['Corrected_SeriesYear']
            # Implausible future year (or 2099 placeholder): use the manual correction.
            if any([
                    SeriesYear > int(datetime.datetime.now().year) + 1,
                    SeriesYear == 2099
            ]) and csyear is not None:
                logger.info(
                    'Corrected year of ' + str(SeriesYear) +
                    ' to corrected year for series that was manually entered previously of '
                    + str(csyear))
                SeriesYear = int(csyear)
            if all([
                    int(comic['Total']) == 1,
                    SeriesYear < int(helpers.today()[:4]),
                    comic['Type'] != 'One-Shot', comic['Type'] != 'TPB'
            ]):
                logger.info(
                    'Determined to be a one-shot issue. Forcing Edition to One-Shot'
                )
                booktype = 'One-Shot'
            else:
                booktype = comic['Type']
            # A manual type correction always wins.
            if comic['Corrected_Type'] and comic[
                    'Corrected_Type'] != booktype:
                booktype = comic['Corrected_Type']
            c_image = comic
            metadata = {}
            metadata['version'] = '1.0.1'
            metadata['metadata'] = ({
                'type': 'comicSeries',
                'publisher': comic['ComicPublisher'],
                'imprint': comic['PublisherImprint'],
                'name': comic['ComicName'],
                'cid': int(cid),
                'year': SeriesYear,
                'description_text': cdes_removed,
                'description_formatted': cdes_formatted,
                'volume': comicVol,
                'booktype': booktype,
                'age_rating': comic['AgeRating'],
                'collects': clean_issue_list,
                'ComicImage': comic['ComicImageURL'],
                'total_issues': comic['Total'],
                'publication_run': comic['ComicPublished'],
                'status': seriesStatus
            })
            try:
                with open(os.path.join(comic['ComicLocation'], 'series.json'),
                          'w', encoding='utf-8') as outfile:
                    json.dump(metadata, outfile, indent=4, ensure_ascii=False)
            except Exception as e:
                logger.error(
                    'Unable to write series.json to %s. Error returned: %s' %
                    (comic['ComicLocation'], e))
                continue
            else:
                logger.fdebug('Successfully written series.json file to %s' %
                              comic['ComicLocation'])
                myDB.upsert("comics", {"seriesjsonPresent": int(True)},
                            {"ComicID": cid})
    return
def Process_next(self, comicid, issueid, issuenumOG, ml=None):
    """Post-process a single downloaded issue (legacy variant).

    Looks the issue up in the DB (falling back to annuals), derives a
    zero-padded "pretty" issue number, optionally meta-tags the archive,
    renames it per the configured file format, moves it into the comic's
    library location, updates DB status, and fires notifications.

    Fixes vs. previous revision:
      * debug `print` statements replaced with logger.fdebug, matching the
        logger-based sibling implementation of this method;
      * `_run_extra_scripts` was called with undefined `nzbname` (NameError
        whenever ENABLE_EXTRA_SCRIPTS was on) - now uses `nzbn`.

    Parameters:
        comicid     -- ComicID of the series being processed.
        issueid     -- IssueID of the issue being processed.
        issuenumOG  -- original (display) issue number, for logging/notifies.
        ml          -- manual-run record (dict-like with 'ComicLocation');
                       None for a normal post-process run.

    Returns:
        self.log on full completion; bare return (None) on manual-run
        completion or on filename/move failures.
    """
    annchk = "no"
    extensions = ('.cbr', '.cbz')
    myDB = db.DBConnection()
    comicnzb = myDB.action("SELECT * from comics WHERE comicid=?", [comicid]).fetchone()
    issuenzb = myDB.action("SELECT * from issues WHERE issueid=? AND comicid=? AND ComicName NOT NULL", [issueid, comicid]).fetchone()
    logger.fdebug("issueid: " + str(issueid))
    logger.fdebug("issuenumOG: " + str(issuenumOG))
    # Not in the issues table -> assume it's an annual.
    if issuenzb is None:
        logger.fdebug("chk1")
        issuenzb = myDB.action("SELECT * from annuals WHERE issueid=? and comicid=?", [issueid, comicid]).fetchone()
        logger.fdebug("chk2")
        annchk = "yes"
        logger.fdebug(str(issuenzb))
    #issueno = str(issuenum).split('.')[0]
    #new CV API - removed all decimals...here we go AGAIN!
    issuenum = issuenzb['Issue_Number']
    # Strip known alpha suffixes (AU / AI), remembering them for later.
    issue_except = 'None'
    if 'au' in issuenum.lower():
        issuenum = re.sub("[^0-9]", "", issuenum)
        issue_except = ' AU'
    elif 'ai' in issuenum.lower():
        issuenum = re.sub("[^0-9]", "", issuenum)
        issue_except = ' AI'
    # Split decimal issue numbers (e.g. "12.5") into whole + decimal parts.
    if '.' in issuenum:
        iss_find = issuenum.find('.')
        iss_b4dec = issuenum[:iss_find]
        iss_decval = issuenum[iss_find + 1:]
        if int(iss_decval) == 0:
            # ".0" carries no information - treat as a whole number.
            iss = iss_b4dec
            issdec = int(iss_decval)
            issueno = str(iss)
            self._log("Issue Number: " + str(issueno), logger.DEBUG)
            logger.fdebug("Issue Number: " + str(issueno))
        else:
            if len(iss_decval) == 1:
                iss = iss_b4dec + "." + iss_decval
                issdec = int(iss_decval) * 10
            else:
                iss = iss_b4dec + "." + iss_decval.rstrip('0')
                issdec = int(iss_decval.rstrip('0')) * 10
            issueno = iss_b4dec
            self._log("Issue Number: " + str(iss), logger.DEBUG)
            logger.fdebug("Issue Number: " + str(iss))
    else:
        iss = issuenum
        issueno = str(iss)
    # issue zero-suppression here
    if mylar.ZERO_LEVEL == "0":
        zeroadd = ""
    else:
        if mylar.ZERO_LEVEL_N == "none":
            zeroadd = ""
        elif mylar.ZERO_LEVEL_N == "0x":
            zeroadd = "0"
        elif mylar.ZERO_LEVEL_N == "00x":
            zeroadd = "00"
    logger.fdebug("Zero Suppression set to : " + str(mylar.ZERO_LEVEL_N))
    # NOTE(review): str-vs-int comparison; always True on Python 2, so the
    # final "length error" fallback is unreachable. Kept as-is because
    # single-digit issues rely on the current behavior.
    if str(len(issueno)) > 1:
        if int(issueno) < 10:
            self._log("issue detected less than 10", logger.DEBUG)
            if '.' in iss:
                if int(iss_decval) > 0:
                    issueno = str(iss)
                    prettycomiss = str(zeroadd) + str(iss)
                else:
                    prettycomiss = str(zeroadd) + str(int(issueno))
            else:
                prettycomiss = str(zeroadd) + str(iss)
            if issue_except != 'None':
                prettycomiss = str(prettycomiss) + issue_except
            self._log("Zero level supplement set to " + str(mylar.ZERO_LEVEL_N) + ". Issue will be set as : " + str(prettycomiss), logger.DEBUG)
        elif int(issueno) >= 10 and int(issueno) < 100:
            self._log("issue detected greater than 10, but less than 100", logger.DEBUG)
            # 10-99 needs at most one pad digit regardless of 00x setting.
            if mylar.ZERO_LEVEL_N == "none":
                zeroadd = ""
            else:
                zeroadd = "0"
            if '.' in iss:
                if int(iss_decval) > 0:
                    issueno = str(iss)
                    prettycomiss = str(zeroadd) + str(iss)
                else:
                    prettycomiss = str(zeroadd) + str(int(issueno))
            else:
                prettycomiss = str(zeroadd) + str(iss)
            if issue_except != 'None':
                prettycomiss = str(prettycomiss) + issue_except
            self._log("Zero level supplement set to " + str(mylar.ZERO_LEVEL_N) + ".Issue will be set as : " + str(prettycomiss), logger.DEBUG)
        else:
            # >= 100: no zero padding required.
            self._log("issue detected greater than 100", logger.DEBUG)
            if '.' in iss:
                if int(iss_decval) > 0:
                    issueno = str(iss)
            prettycomiss = str(issueno)
            if issue_except != 'None':
                prettycomiss = str(prettycomiss) + issue_except
            self._log("Zero level supplement set to " + str(mylar.ZERO_LEVEL_N) + ". Issue will be set as : " + str(prettycomiss), logger.DEBUG)
    else:
        prettycomiss = str(issueno)
        self._log("issue length error - cannot determine length. Defaulting to None: " + str(prettycomiss), logger.DEBUG)
    if annchk == "yes":
        prettycomiss = "Annual " + str(prettycomiss)
        self._log("Annual detected.")
    logger.fdebug("Pretty Comic Issue is : " + str(prettycomiss))
    # Pull rename tokens from the DB rows.
    issueyear = issuenzb['IssueDate'][:4]
    self._log("Issue Year: " + str(issueyear), logger.DEBUG)
    logger.fdebug("Issue Year : " + str(issueyear))
    # comicnzb= myDB.action("SELECT * from comics WHERE comicid=?", [comicid]).fetchone()
    publisher = comicnzb['ComicPublisher']
    self._log("Publisher: " + publisher, logger.DEBUG)
    logger.fdebug("Publisher: " + str(publisher))
    #we need to un-unicode this to make sure we can write the filenames properly for spec.chars
    series = comicnzb['ComicName'].encode('ascii', 'ignore').strip()
    self._log("Series: " + series, logger.DEBUG)
    logger.fdebug("Series: " + str(series))
    seriesyear = comicnzb['ComicYear']
    self._log("Year: " + seriesyear, logger.DEBUG)
    logger.fdebug("Year: " + str(seriesyear))
    comlocation = comicnzb['ComicLocation']
    self._log("Comic Location: " + comlocation, logger.DEBUG)
    logger.fdebug("Comic Location: " + str(comlocation))
    comversion = comicnzb['ComicVersion']
    self._log("Comic Version: " + str(comversion), logger.DEBUG)
    logger.fdebug("Comic Version: " + str(comversion))
    if comversion is None:
        comversion = 'None'
    #if comversion is None, remove it so it doesn't populate with 'None'
    if comversion == 'None':
        chunk_f_f = re.sub('\$VolumeN', '', mylar.FILE_FORMAT)
        chunk_f = re.compile(r'\s+')
        chunk_file_format = chunk_f.sub(' ', chunk_f_f)
        self._log("No version # found for series - tag will not be available for renaming.", logger.DEBUG)
        logger.fdebug("No version # found for series, removing from filename")
        logger.fdebug("new format is now: " + str(chunk_file_format))
    else:
        chunk_file_format = mylar.FILE_FORMAT
    ofilename = None
    #if meta-tagging is not enabled, we need to declare the check as being fail
    #if meta-tagging is enabled, it gets changed just below to a default of pass
    pcheck = "fail"
    #tag the meta.
    if mylar.ENABLE_META:
        self._log("Metatagging enabled - proceeding...")
        logger.fdebug("Metatagging enabled - proceeding...")
        pcheck = "pass"
        try:
            import cmtagmylar
            if ml is None:
                pcheck = cmtagmylar.run(self.nzb_folder, issueid=issueid)
            else:
                pcheck = cmtagmylar.run(self.nzb_folder, issueid=issueid, manual="yes", filename=ml['ComicLocation'])
        except ImportError:
            logger.fdebug("comictaggerlib not found on system. Ensure the ENTIRE lib directory is located within mylar/lib/comictaggerlib/")
            logger.fdebug("continuing with PostProcessing, but I'm not using metadata.")
            pcheck = "fail"
        if pcheck == "fail":
            self._log("Unable to write metadata successfully - check mylar.log file. Attempting to continue without tagging...")
            logger.fdebug("Unable to write metadata successfully - check mylar.log file. Attempting to continue without tagging...")
        elif pcheck == "unrar error":
            # Corrupt archive: abort this file so a different copy can be retried.
            self._log("This is a corrupt archive - whether CRC errors or it's incomplete. Marking as BAD, and retrying a different copy.")
            logger.error("This is a corrupt archive - whether CRC errors or it's incomplete. Marking as BAD, and retrying a different copy.")
            return self.log
        else:
            # pcheck holds the path to the tagged .cbz on success.
            otofilename = pcheck
            self._log("Sucessfully wrote metadata to .cbz - Continuing..")
            logger.fdebug("Sucessfully wrote metadata to .cbz (" + str(otofilename) + ") - Continuing..")
    #Run Pre-script
    if mylar.ENABLE_PRE_SCRIPTS:
        nzbn = self.nzb_name  #original nzb name
        nzbf = self.nzb_folder  #original nzb folder
        #name, comicyear, comicid , issueid, issueyear, issue, publisher
        #create the dic and send it.
        seriesmeta = []
        seriesmetadata = {}
        seriesmeta.append({
            'name': series,
            'comicyear': seriesyear,
            'comicid': comicid,
            'issueid': issueid,
            'issueyear': issueyear,
            'issue': issuenum,
            'publisher': publisher
        })
        seriesmetadata['seriesmeta'] = seriesmeta
        self._run_pre_scripts(nzbn, nzbf, seriesmetadata)
    #rename file and move to new path
    #nfilename = series + " " + issueno + " (" + seriesyear + ")"
    file_values = {'$Series': series,
                   '$Issue': prettycomiss,
                   '$Year': issueyear,
                   '$series': series.lower(),
                   '$Publisher': publisher,
                   '$publisher': publisher.lower(),
                   '$VolumeY': 'V' + str(seriesyear),
                   '$VolumeN': comversion
                   }
    #if it's a Manual Run, use the ml['ComicLocation'] for the exact filename.
    if ml is None:
        # Normal run: locate the first .cbr/.cbz inside the download folder.
        for root, dirnames, filenames in os.walk(self.nzb_folder):
            for filename in filenames:
                if filename.lower().endswith(extensions):
                    ofilename = filename
                    path, ext = os.path.splitext(ofilename)
    else:
        # Manual run: exact file path is known; use the tagged copy unless
        # tagging failed.
        if pcheck == "fail":
            otofilename = ml['ComicLocation']
        logger.fdebug("otofilename:" + str(otofilename))
        odir, ofilename = os.path.split(otofilename)
        logger.fdebug("ofilename: " + str(ofilename))
        path, ext = os.path.splitext(ofilename)
        logger.fdebug("path: " + str(path))
        logger.fdebug("ext:" + str(ext))
    if ofilename is None:
        logger.error(u"Aborting PostProcessing - the filename doesn't exist in the location given. Make sure that " + str(self.nzb_folder) + " exists and is the correct location.")
        return
    self._log("Original Filename: " + ofilename, logger.DEBUG)
    self._log("Original Extension: " + ext, logger.DEBUG)
    logger.fdebug("Original Filname: " + str(ofilename))
    logger.fdebug("Original Extension: " + str(ext))
    if mylar.FILE_FORMAT == '' or not mylar.RENAME_FILES:
        self._log("Rename Files isn't enabled...keeping original filename.", logger.DEBUG)
        logger.fdebug("Rename Files isn't enabled - keeping original filename.")
        #check if extension is in nzb_name - will screw up otherwise
        if ofilename.lower().endswith(extensions):
            nfilename = ofilename[:-4]
        else:
            nfilename = ofilename
    else:
        nfilename = helpers.replace_all(chunk_file_format, file_values)
        if mylar.REPLACE_SPACES:
            #mylar.REPLACE_CHAR ...determines what to replace spaces with underscore or dot
            nfilename = nfilename.replace(' ', mylar.REPLACE_CHAR)
    # Strip characters that are unsafe in filenames.
    nfilename = re.sub('[\,\:\?]', '', nfilename)
    self._log("New Filename: " + nfilename, logger.DEBUG)
    logger.fdebug("New Filename: " + str(nfilename))
    src = os.path.join(self.nzb_folder, ofilename)
    filechecker.validateAndCreateDirectory(comlocation, True)
    if mylar.LOWERCASE_FILENAMES:
        dst = (comlocation + "/" + nfilename + ext).lower()
    else:
        dst = comlocation + "/" + nfilename + ext.lower()
    self._log("Source:" + src, logger.DEBUG)
    self._log("Destination:" + dst, logger.DEBUG)
    logger.fdebug("Source: " + str(src))
    logger.fdebug("Destination: " + str(dst))
    if ml is None:
        #non-manual run moving/deleting...
        os.rename(os.path.join(self.nzb_folder, str(ofilename)), os.path.join(self.nzb_folder, str(nfilename + ext)))
        src = os.path.join(self.nzb_folder, str(nfilename + ext))
        try:
            shutil.move(src, dst)
        except (OSError, IOError):
            self._log("Failed to move directory - check directories and manually re-run.", logger.DEBUG)
            self._log("Post-Processing ABORTED.", logger.DEBUG)
            return
        #tidyup old path
        try:
            shutil.rmtree(self.nzb_folder)
        except (OSError, IOError):
            self._log("Failed to remove temporary directory - check directory and manually re-run.", logger.DEBUG)
            self._log("Post-Processing ABORTED.", logger.DEBUG)
            return
        self._log("Removed temporary directory : " + str(self.nzb_folder), logger.DEBUG)
    else:
        #Manual Run, this is the portion.
        logger.fdebug("Renaming " + os.path.join(self.nzb_folder, str(ofilename)) + " ..to.. " + os.path.join(self.nzb_folder, str(nfilename + ext)))
        os.rename(os.path.join(self.nzb_folder, str(ofilename)), os.path.join(self.nzb_folder, str(nfilename + ext)))
        src = os.path.join(self.nzb_folder, str(nfilename + ext))
        logger.fdebug("Moving " + src + " ... to ... " + dst)
        try:
            shutil.move(src, dst)
        except (OSError, IOError):
            logger.fdebug("Failed to move directory - check directories and manually re-run.")
            logger.fdebug("Post-Processing ABORTED.")
            return
        logger.fdebug("Successfully moved to : " + dst)
        #tidyup old path
        #try:
        #    os.remove(os.path.join(self.nzb_folder, str(ofilename)))
        #    logger.fdebug("Deleting : " + os.path.join(self.nzb_folder, str(ofilename)))
        #except (OSError, IOError):
        #    logger.fdebug("Failed to remove temporary directory - check directory and manually re-run.")
        #    logger.fdebug("Post-Processing ABORTED.")
        #    return
        #logger.fdebug("Removed temporary directory : " + str(self.nzb_folder))
    #delete entry from nzblog table
    myDB.action('DELETE from nzblog WHERE issueid=?', [issueid])
    #update snatched table to change status to Downloaded
    if annchk == "no":
        updater.foundsearch(comicid, issueid, down='True')
    else:
        updater.foundsearch(comicid, issueid, mode='want_ann', down='True')
    #force rescan of files
    updater.forceRescan(comicid)
    logger.info(u"Post-Processing completed for: " + series + " issue: " + str(issuenumOG))
    self._log(u"Post Processing SUCCESSFULL! ", logger.DEBUG)
    if ml is not None:
        # Manual runs skip notifications and extra scripts.
        return
    else:
        if mylar.PROWL_ENABLED:
            pushmessage = series + '(' + issueyear + ') - issue #' + issuenumOG
            logger.info(u"Prowl request")
            prowl = notifiers.PROWL()
            prowl.notify(pushmessage, "Download and Postprocessing completed")
        if mylar.NMA_ENABLED:
            nma = notifiers.NMA()
            nma.notify(series, str(issueyear), str(issuenumOG))
        if mylar.PUSHOVER_ENABLED:
            pushmessage = series + ' (' + str(issueyear) + ') - issue #' + str(issuenumOG)
            logger.info(u"Pushover request")
            pushover = notifiers.PUSHOVER()
            pushover.notify(pushmessage, "Download and Post-Processing completed")
        if mylar.BOXCAR_ENABLED:
            boxcar = notifiers.BOXCAR()
            boxcar.notify(series, str(issueyear), str(issuenumOG))
    # retrieve/create the corresponding comic objects
    if mylar.ENABLE_EXTRA_SCRIPTS:
        folderp = str(dst)  #folder location after move/rename
        nzbn = self.nzb_name  #original nzb name
        filen = str(nfilename + ext)  #new filename
        #name, comicyear, comicid , issueid, issueyear, issue, publisher
        #create the dic and send it.
        seriesmeta = []
        seriesmetadata = {}
        seriesmeta.append({
            'name': series,
            'comicyear': seriesyear,
            'comicid': comicid,
            'issueid': issueid,
            'issueyear': issueyear,
            'issue': issuenum,
            'publisher': publisher
        })
        seriesmetadata['seriesmeta'] = seriesmeta
        # FIX: was _run_extra_scripts(nzbname, ...) - nzbname is undefined
        # here; the sibling implementation passes nzbn (self.nzb_name).
        self._run_extra_scripts(nzbn, self.nzb_folder, filen, folderp, seriesmetadata)
    return self.log
def main():
    """Entry point for the (SABnzbd-era) Mylar startup script.

    Resolves program paths, parses command-line arguments, initializes
    config/logging, starts the web server and background threads, then
    loops waiting for shutdown/restart signals.  Never returns normally
    while the app is running; exits via SystemExit on fatal setup errors.
    """
    # Fixed paths to mylar
    if hasattr(sys, 'frozen'):
        # Running as a frozen executable (e.g. py2exe) - use the exe path.
        mylar.FULL_PATH = os.path.abspath(sys.executable)
    else:
        mylar.FULL_PATH = os.path.abspath(__file__)

    mylar.PROG_DIR = os.path.dirname(mylar.FULL_PATH)
    mylar.ARGS = sys.argv[1:]

    # From sickbeard: detect the filesystem/console encoding.
    mylar.SYS_ENCODING = None

    try:
        locale.setlocale(locale.LC_ALL, "")
        mylar.SYS_ENCODING = locale.getpreferredencoding()
    except (locale.Error, IOError):
        pass

    # for OSes that are poorly configured I'll just force UTF-8
    if not mylar.SYS_ENCODING or mylar.SYS_ENCODING in ('ANSI_X3.4-1968', 'US-ASCII', 'ASCII'):
        mylar.SYS_ENCODING = 'UTF-8'

    # Set up and gather command line arguments
    parser = argparse.ArgumentParser(description='Comic Book add-on for SABnzbd+')

    parser.add_argument('-v', '--verbose', action='store_true', help='Increase console logging verbosity')
    parser.add_argument('-q', '--quiet', action='store_true', help='Turn off console logging')
    parser.add_argument('-d', '--daemon', action='store_true', help='Run as a daemon')
    parser.add_argument('-p', '--port', type=int, help='Force mylar to run on a specified port')
    parser.add_argument('--datadir', help='Specify a directory where to store your data files')
    parser.add_argument('--config', help='Specify a config file to use')
    parser.add_argument('--nolaunch', action='store_true', help='Prevent browser from launching on startup')
    parser.add_argument('--pidfile', help='Create a pid file (only relevant when running as a daemon)')

    args = parser.parse_args()

    if args.verbose:
        mylar.VERBOSE = 2
    elif args.quiet:
        mylar.VERBOSE = 0

    if args.daemon:
        mylar.DAEMON = True
        # Daemon mode implies no console output.
        mylar.VERBOSE = 0

    if args.pidfile:
        mylar.PIDFILE = args.pidfile

    if args.datadir:
        mylar.DATA_DIR = args.datadir
    else:
        mylar.DATA_DIR = mylar.PROG_DIR

    if args.config:
        mylar.CONFIG_FILE = args.config
    else:
        mylar.CONFIG_FILE = os.path.join(mylar.DATA_DIR, 'config.ini')

    # Try to create the DATA_DIR if it doesn't exist
    #if not os.path.exists(mylar.DATA_DIR):
    #    try:
    #        os.makedirs(mylar.DATA_DIR)
    #    except OSError:
    #        raise SystemExit('Could not create data directory: ' + mylar.DATA_DIR + '. Exiting....')
    filechecker.validateAndCreateDirectory(mylar.DATA_DIR, True)

    # Make sure the DATA_DIR is writeable
    if not os.access(mylar.DATA_DIR, os.W_OK):
        raise SystemExit('Cannot write to the data directory: ' + mylar.DATA_DIR + '. Exiting...')

    # Put the database in the DATA_DIR
    mylar.DB_FILE = os.path.join(mylar.DATA_DIR, 'mylar.db')

    mylar.CFG = ConfigObj(mylar.CONFIG_FILE, encoding='utf-8')

    # Read config & start logging
    mylar.initialize()

    if mylar.DAEMON:
        mylar.daemonize()

    # Force the http port if necessary
    if args.port:
        http_port = args.port
        # BUGFIX: log message read "foced port"
        logger.info('Starting Mylar on forced port: %i' % http_port)
    else:
        http_port = int(mylar.HTTP_PORT)

    # Try to start the server.
    webstart.initialize({
        'http_port': http_port,
        'http_host': mylar.HTTP_HOST,
        'http_root': mylar.HTTP_ROOT,
        'http_username': mylar.HTTP_USERNAME,
        'http_password': mylar.HTTP_PASSWORD,
    })

    logger.info('Starting Mylar on port: %i' % http_port)

    if mylar.LAUNCH_BROWSER and not args.nolaunch:
        mylar.launch_browser(mylar.HTTP_HOST, http_port, mylar.HTTP_ROOT)

    # Start the background threads
    mylar.start()

    # Main loop: poll once a second for a shutdown/restart/update signal.
    while True:
        if not mylar.SIGNAL:
            time.sleep(1)
        else:
            logger.info('Received signal: ' + mylar.SIGNAL)
            if mylar.SIGNAL == 'shutdown':
                mylar.shutdown()
            elif mylar.SIGNAL == 'restart':
                mylar.shutdown(restart=True)
            else:
                mylar.shutdown(restart=True, update=True)
            mylar.SIGNAL = None

    return
def main():
    """Entry point for the (maintenance-mode-era) Mylar startup script.

    Resolves program paths, parses command-line arguments (including the
    'maintenance' subcommand for db import/export/repair), optionally
    backs up the db/config, initializes the app, starts the web server
    and background threads, then loops waiting for signals.  Exits via
    sys.exit/SystemExit on fatal setup errors or after maintenance runs.
    """
    # Fixed paths to mylar
    if hasattr(sys, 'frozen'):
        # Running as a frozen executable - use the exe path.
        mylar.FULL_PATH = os.path.abspath(sys.executable)
    else:
        mylar.FULL_PATH = os.path.abspath(__file__)

    mylar.PROG_DIR = os.path.dirname(mylar.FULL_PATH)
    mylar.ARGS = sys.argv[1:]

    # From sickbeard: detect the filesystem/console encoding.
    mylar.SYS_ENCODING = None

    try:
        locale.setlocale(locale.LC_ALL, "")
        mylar.SYS_ENCODING = locale.getpreferredencoding()
    except (locale.Error, IOError):
        pass

    # for OSes that are poorly configured I'll just force UTF-8
    if not mylar.SYS_ENCODING or mylar.SYS_ENCODING in ('ANSI_X3.4-1968', 'US-ASCII', 'ASCII'):
        mylar.SYS_ENCODING = 'UTF-8'

    if not logger.LOG_LANG.startswith('en'):
        print(
            'language detected as non-English (%s). Forcing specific logging module - errors WILL NOT be captured in the logs'
            % logger.LOG_LANG)
    else:
        print('log language set to %s' % logger.LOG_LANG)

    # Set up and gather command line arguments
    parser = argparse.ArgumentParser(
        description='Automated Comic Book Downloader')
    subparsers = parser.add_subparsers(title='Subcommands', dest='maintenance')
    parser_maintenance = subparsers.add_parser(
        'maintenance',
        help=
        'Enter maintenance mode (no GUI). Additional commands are available (maintenance --help)'
    )

    #main parser
    parser.add_argument('-v', '--verbose', action='store_true',
                        help='Increase console logging verbosity')
    parser.add_argument('-q', '--quiet', action='store_true',
                        help='Turn off console logging')
    parser.add_argument('-d', '--daemon', action='store_true',
                        help='Run as a daemon')
    parser.add_argument('-p', '--port', type=int,
                        help='Force mylar to run on a specified port')
    parser.add_argument(
        '-b', '--backup', action='store_true',
        help=
        'Will automatically backup & keep the last 2 copies of the .db & ini files prior to startup'
    )
    parser.add_argument(
        '-w', '--noweekly', action='store_true',
        help=
        'Turn off weekly pull list check on startup (quicker boot sequence)')
    parser.add_argument(
        '--datadir',
        help='Specify a directory where to store your data files')
    parser.add_argument('--config', help='Specify a config file to use')
    parser.add_argument('--nolaunch', action='store_true',
                        help='Prevent browser from launching on startup')
    parser.add_argument(
        '--pidfile',
        help='Create a pid file (only relevant when running as a daemon)')
    parser.add_argument(
        '--safe', action='store_true',
        help=
        'redirect the startup page to point to the Manage Comics screen on startup'
    )

    parser_maintenance.add_argument(
        '-xj', '--exportjson', action='store',
        help='Export existing mylar.db to json file')
    parser_maintenance.add_argument('-id', '--importdatabase', action='store',
                                    help='Import a mylar.db into current db')
    parser_maintenance.add_argument(
        '-ij', '--importjson', action='store',
        help=
        'Import a specified json file containing just {"ComicID": "XXXXX"} into current db'
    )
    parser_maintenance.add_argument('-st', '--importstatus',
                                    action='store_true',
                                    help='Provide current maintenance status')
    parser_maintenance.add_argument(
        '-u', '--update', action='store_true',
        help='force mylar to perform an update as if in GUI')
    parser_maintenance.add_argument(
        '-fs', '--fixslashes', action='store_true',
        help='remove double-slashes from within paths in db')
    #parser_maintenance.add_argument('-it', '--importtext', action='store', help='Import a specified text file into current db')

    args = parser.parse_args()

    if args.maintenance:
        # Maintenance was requested but no actual subcommand option given.
        if all([
            args.exportjson is None, args.importdatabase is None,
            args.importjson is None, args.importstatus is False,
            args.update is False, args.fixslashes is False
        ]):
            # BUGFIX: message read "argumeent"
            print(
                'Expecting subcommand with the maintenance positional argument'
            )
            sys.exit()
        mylar.MAINTENANCE = True
    else:
        mylar.MAINTENANCE = False

    if args.verbose:
        print('Verbose/Debugging mode enabled...')
        mylar.LOG_LEVEL = 2
    elif args.quiet:
        mylar.QUIET = True
        print('Quiet logging mode enabled...')
        mylar.LOG_LEVEL = 0
    else:
        mylar.LOG_LEVEL = 1

    if args.daemon:
        if sys.platform == 'win32':
            print("Daemonize not supported under Windows, starting normally")
        else:
            mylar.DAEMON = True

    if args.pidfile:
        mylar.PIDFILE = str(args.pidfile)

        # If the pidfile already exists, mylar may still be running, so exit
        if os.path.exists(mylar.PIDFILE):
            sys.exit("PID file '" + mylar.PIDFILE +
                     "' already exists. Exiting.")

        # The pidfile is only useful in daemon mode, make sure we can write the file properly
        if mylar.DAEMON:
            mylar.CREATEPID = True
            try:
                open(mylar.PIDFILE, 'w').write("pid\n")
            except IOError as e:
                raise SystemExit("Unable to write PID file: %s [%d]" %
                                 (e.strerror, e.errno))
        else:
            print("Not running in daemon mode. PID file creation disabled.")

    if args.datadir:
        mylar.DATA_DIR = args.datadir
    else:
        mylar.DATA_DIR = mylar.PROG_DIR

    if args.config:
        mylar.CONFIG_FILE = args.config
    else:
        mylar.CONFIG_FILE = os.path.join(mylar.DATA_DIR, 'config.ini')

    if args.safe:
        mylar.SAFESTART = True
    else:
        mylar.SAFESTART = False

    if args.noweekly:
        mylar.NOWEEKLY = True
    else:
        mylar.NOWEEKLY = False

    # Put the database in the DATA_DIR
    mylar.DB_FILE = os.path.join(mylar.DATA_DIR, 'mylar.db')

    # Read config and start logging
    if mylar.MAINTENANCE is False:
        print('Initializing startup sequence....')

    #try:
    mylar.initialize(mylar.CONFIG_FILE)
    #except Exception as e:
    #    print e
    #    raise SystemExit('FATAL ERROR')

    if mylar.MAINTENANCE is False:
        filechecker.validateAndCreateDirectory(mylar.DATA_DIR, True)

        # Make sure the DATA_DIR is writeable
        if not os.access(mylar.DATA_DIR, os.W_OK):
            raise SystemExit('Cannot write to the data directory: ' +
                             mylar.DATA_DIR + '. Exiting...')

    # backup the db and configs before they load.
    if args.backup:
        print('[AUTO-BACKUP] Backing up .db and config.ini files for safety.')
        backupdir = os.path.join(mylar.DATA_DIR, 'backup')

        try:
            os.makedirs(backupdir)
            print(
                '[AUTO-BACKUP] Directory does not exist for backup - creating : '
                + backupdir)
        except OSError as exception:
            if exception.errno != errno.EEXIST:
                print('[AUTO-BACKUP] Directory already exists.')
                raise

        # i == 0 backs up the db, i == 1 backs up the config; each keeps
        # one rolling previous copy (.1).
        i = 0
        while (i < 2):
            if i == 0:
                ogfile = mylar.DB_FILE
                back = os.path.join(backupdir, 'mylar.db')
                back_1 = os.path.join(backupdir, 'mylar.db.1')
            else:
                ogfile = mylar.CONFIG_FILE
                back = os.path.join(backupdir, 'config.ini')
                back_1 = os.path.join(backupdir, 'config.ini.1')

            try:
                print('[AUTO-BACKUP] Now Backing up mylar.db file')
                if os.path.isfile(back_1):
                    print('[AUTO-BACKUP] ' + back_1 +
                          ' exists. Deleting and keeping new.')
                    os.remove(back_1)
                if os.path.isfile(back):
                    print('[AUTO-BACKUP] Now renaming ' + back + ' to ' +
                          back_1)
                    shutil.move(back, back_1)
                print('[AUTO-BACKUP] Now copying db file to ' + back)
                shutil.copy(ogfile, back)
            except OSError as exception:
                # BUGFIX: was errno.EXIST, which does not exist and raised
                # AttributeError whenever the copy/move failed.
                if exception.errno != errno.EEXIST:
                    raise

            i += 1

    # Rename the main thread
    threading.currentThread().name = "MAIN"

    if mylar.DAEMON:
        mylar.daemonize()

    if mylar.MAINTENANCE is True and any([
        args.exportjson, args.importjson, args.update is True,
        args.importstatus is True, args.fixslashes is True
    ]):
        loggermode = '[MAINTENANCE-MODE]'
        if args.importstatus:  #mylar.MAINTENANCE is True:
            cs = maintenance.Maintenance('status')
            cstat = cs.check_status()
        else:
            logger.info('%s Initializing maintenance mode' % loggermode)

            if args.update is True:
                logger.info(
                    '%s Attempting to update Mylar so things can work again...'
                    % loggermode)
                try:
                    mylar.shutdown(restart=True, update=True, maintenance=True)
                except Exception as e:
                    sys.exit('%s Mylar failed to update: %s' %
                             (loggermode, e))

            elif args.importdatabase:
                #for attempted db import.
                maintenance_path = args.importdatabase
                logger.info('%s db path accepted as %s' %
                            (loggermode, maintenance_path))
                di = maintenance.Maintenance('database-import',
                                             file=maintenance_path)
                d = di.database_import()
            elif args.importjson:
                #for attempted file re-import (json format)
                maintenance_path = args.importjson
                logger.info(
                    '%s file indicated as being in json format - path accepted as %s'
                    % (loggermode, maintenance_path))
                ij = maintenance.Maintenance('json-import',
                                             file=maintenance_path)
                j = ij.json_import()
            #elif args.importtext:
            #    #for attempted file re-import (list format)
            #    maintenance_path = args.importtext
            #    logger.info('%s file indicated as being in list format - path accepted as %s' % (loggermode, maintenance_path))
            #    it = maintenance.Maintenance('list-import', file=maintenance_path)
            #    t = it.list_import()
            elif args.exportjson:
                #for export of db comicid's in json format
                maintenance_path = args.exportjson
                logger.info(
                    '%s file indicated as being written to json format - destination accepted as %s'
                    % (loggermode, maintenance_path))
                ej = maintenance.Maintenance('json-export',
                                             output=maintenance_path)
                j = ej.json_export()
            elif args.fixslashes:
                #for running the fix slashes on the db manually
                logger.info('%s method indicated as fix slashes' % loggermode)
                fs = maintenance.Maintenance('fixslashes')
                j = fs.fix_slashes()
            else:
                # BUGFIX: was undefined name 'maintenance_info' (NameError);
                # log the subcommand that was actually given.
                logger.info('%s Not a valid command: %s' %
                            (loggermode, args.maintenance))
                sys.exit()
            logger.info('%s Exiting Maintenance mode' % (loggermode))

        #possible option to restart automatically after maintenance has completed...
        sys.exit()

    # Force the http port if necessary
    if args.port:
        http_port = args.port
        logger.info('Starting Mylar on forced port: %i' % http_port)
    else:
        http_port = int(mylar.CONFIG.HTTP_PORT)

    # Check if pyOpenSSL is installed. It is required for certificate generation
    # and for cherrypy.
    if mylar.CONFIG.ENABLE_HTTPS:
        try:
            import OpenSSL
        except ImportError:
            logger.warn("The pyOpenSSL module is missing. Install this "
                        "module to enable HTTPS. HTTPS will be disabled.")
            mylar.CONFIG.ENABLE_HTTPS = False

    # Try to start the server. Will exit here if address is already in use.
    web_config = {
        'http_port': http_port,
        'http_host': mylar.CONFIG.HTTP_HOST,
        'http_root': mylar.CONFIG.HTTP_ROOT,
        'enable_https': mylar.CONFIG.ENABLE_HTTPS,
        'https_cert': mylar.CONFIG.HTTPS_CERT,
        'https_key': mylar.CONFIG.HTTPS_KEY,
        'https_chain': mylar.CONFIG.HTTPS_CHAIN,
        'http_username': mylar.CONFIG.HTTP_USERNAME,
        'http_password': mylar.CONFIG.HTTP_PASSWORD,
        'authentication': mylar.CONFIG.AUTHENTICATION,
        'login_timeout': mylar.CONFIG.LOGIN_TIMEOUT,
        'opds_enable': mylar.CONFIG.OPDS_ENABLE,
        'opds_authentication': mylar.CONFIG.OPDS_AUTHENTICATION,
        'opds_username': mylar.CONFIG.OPDS_USERNAME,
        'opds_password': mylar.CONFIG.OPDS_PASSWORD,
        'opds_pagesize': mylar.CONFIG.OPDS_PAGESIZE,
    }

    # Try to start the server.
    webstart.initialize(web_config)

    #check for version here after web server initialized so it doesn't try to repeatidly hit github
    #for version info if it's already running
    versioncheck.versionload()

    if mylar.CONFIG.LAUNCH_BROWSER and not args.nolaunch:
        mylar.launch_browser(mylar.CONFIG.HTTP_HOST, http_port,
                             mylar.CONFIG.HTTP_ROOT)

    # Start the background threads
    mylar.start()

    signal.signal(signal.SIGTERM, handler_sigterm)

    # Main loop: poll once a second for a shutdown/restart/update signal.
    while True:
        if not mylar.SIGNAL:
            try:
                time.sleep(1)
            except KeyboardInterrupt:
                mylar.SIGNAL = 'shutdown'
        else:
            logger.info('Received signal: ' + mylar.SIGNAL)
            if mylar.SIGNAL == 'shutdown':
                mylar.shutdown()
            elif mylar.SIGNAL == 'restart':
                mylar.shutdown(restart=True)
            else:
                mylar.shutdown(restart=True, update=True)
            mylar.SIGNAL = None

    return
def Process(self):
        """Post-process a completed download.

        Looks the nzb up in the nzblog table to resolve the IssueID,
        handles one-off/story-arc grabs (move to grab-bag and return),
        otherwise computes the pretty issue number, runs any pre-scripts,
        renames/moves the file into the comic's library location, cleans
        up the temp folder, updates the db status, fires notifications
        and extra scripts.  Returns ``self.log`` on completion, or None
        on an aborted run.
        """
        self._log("nzb name: " + str(self.nzb_name), logger.DEBUG)
        self._log("nzb folder: " + str(self.nzb_folder), logger.DEBUG)
        logger.fdebug("nzb name: " + str(self.nzb_name))
        logger.fdebug("nzb folder: " + str(self.nzb_folder))
        if mylar.USE_SABNZBD == 0:
            logger.fdebug("Not using SABNzbd")
        else:
            # if the SAB Directory option is enabled, let's use that folder name and append the jobname.
            # BUGFIX: was "is not 'None'" - identity comparison against a str
            # literal; use equality.
            if mylar.SAB_DIRECTORY is not None and mylar.SAB_DIRECTORY != 'None' and len(mylar.SAB_DIRECTORY) > 4:
                self.nzb_folder = os.path.join(mylar.SAB_DIRECTORY, self.nzb_name).encode(mylar.SYS_ENCODING)

            #lookup nzb_name in nzblog table to get issueid

            #query SAB to find out if Replace Spaces enabled / not as well as Replace Decimals
            #http://localhost:8080/sabnzbd/api?mode=set_config&section=misc&keyword=dirscan_speed&value=5
            # BUGFIX: the query string had been mangled to "get_config§ion="
            # ('&sect' collapsed to the section-sign); restored '&section='.
            querysab = str(mylar.SAB_HOST) + "/api?mode=get_config&section=misc&output=xml&apikey=" + str(mylar.SAB_APIKEY)
            #logger.info("querysab_string:" + str(querysab))
            file = urllib2.urlopen(querysab)
            data = file.read()
            file.close()
            dom = parseString(data)

            try:
                sabreps = dom.getElementsByTagName('replace_spaces')[0].firstChild.wholeText
            except:
                errorm = dom.getElementsByTagName('error')[0].firstChild.wholeText
                logger.error(u"Error detected attempting to retrieve SAB data : " + errorm)
                return
            sabrepd = dom.getElementsByTagName('replace_dots')[0].firstChild.wholeText
            logger.fdebug("SAB Replace Spaces: " + str(sabreps))
            logger.fdebug("SAB Replace Dots: " + str(sabrepd))
        if mylar.USE_NZBGET == 1:
            logger.fdebug("Using NZBGET")
            logger.fdebug("NZB name as passed from NZBGet: " + self.nzb_name)
        myDB = db.DBConnection()

        nzbname = self.nzb_name
        #remove extensions from nzb_name if they somehow got through (Experimental most likely)
        extensions = ('.cbr', '.cbz')

        if nzbname.lower().endswith(extensions):
            fd, ext = os.path.splitext(nzbname)
            self._log("Removed extension from nzb: " + ext, logger.DEBUG)
            nzbname = re.sub(str(ext), '', str(nzbname))

        #replace spaces
        nzbname = re.sub(' ', '.', str(nzbname))
        nzbname = re.sub('[\,\:\?]', '', str(nzbname))
        nzbname = re.sub('[\&]', 'and', str(nzbname))

        logger.fdebug("After conversions, nzbname is : " + str(nzbname))
#        if mylar.USE_NZBGET==1:
#            nzbname=self.nzb_name
        self._log("nzbname: " + str(nzbname), logger.DEBUG)

        nzbiss = myDB.action("SELECT * from nzblog WHERE nzbname=?", [nzbname]).fetchone()

        if nzbiss is None:
            self._log("Failure - could not initially locate nzbfile in my database to rename.", logger.DEBUG)
            logger.fdebug("Failure - could not locate nzbfile initially.")
            # if failed on spaces, change it all to decimals and try again.
            nzbname = re.sub('_', '.', str(nzbname))
            self._log("trying again with this nzbname: " + str(nzbname), logger.DEBUG)
            logger.fdebug("trying again with nzbname of : " + str(nzbname))
            nzbiss = myDB.action("SELECT * from nzblog WHERE nzbname=?", [nzbname]).fetchone()
            if nzbiss is None:
                logger.error(u"Unable to locate downloaded file to rename. PostProcessing aborted.")
                return
            else:
                self._log("I corrected and found the nzb as : " + str(nzbname))
                logger.fdebug("auto-corrected and found the nzb as : " + str(nzbname))
                issueid = nzbiss['IssueID']
        else:
            issueid = nzbiss['IssueID']
            logger.fdebug("issueid:" + str(issueid))
            sarc = nzbiss['SARC']
            #use issueid to get publisher, series, year, issue number

        issuenzb = myDB.action("SELECT * from issues WHERE issueid=?", [issueid]).fetchone()
        if issuenzb is not None:
            if helpers.is_number(issueid):
                sandwich = int(issuenzb['IssueID'])
        else:
            #if it's non-numeric, it contains a 'G' at the beginning indicating it's a multi-volume
            #using GCD data. Set sandwich to 1 so it will bypass and continue post-processing.
            if 'S' in issueid:
                sandwich = issueid
            elif 'G' in issueid:
                sandwich = 1
        if helpers.is_number(sandwich):
            if sandwich < 900000:
                # if sandwich is less than 900000 it's a normal watchlist download. Bypass.
                pass
        else:
            if issuenzb is None or 'S' in sandwich or int(sandwich) >= 900000:
                # this has no issueID, therefore it's a one-off or a manual post-proc.
                # At this point, let's just drop it into the Comic Location folder and forget about it..
                if 'S' in sandwich:
                    self._log("One-off STORYARC mode enabled for Post-Processing for " + str(sarc))
                    logger.info("One-off STORYARC mode enabled for Post-Processing for " + str(sarc))
                    if mylar.STORYARCDIR:
                        storyarcd = os.path.join(mylar.DESTINATION_DIR, "StoryArcs", sarc)
                        self._log("StoryArc Directory set to : " + storyarcd, logger.DEBUG)
                    else:
                        self._log("Grab-Bag Directory set to : " + mylar.GRABBAG_DIR, logger.DEBUG)
                else:
                    self._log("One-off mode enabled for Post-Processing. All I'm doing is moving the file untouched into the Grab-bag directory.", logger.DEBUG)
                    logger.info("One-off mode enabled for Post-Processing. Will move into Grab-bag directory.")
                    self._log("Grab-Bag Directory set to : " + mylar.GRABBAG_DIR, logger.DEBUG)
                # find the downloaded archive in the temp folder
                for root, dirnames, filenames in os.walk(self.nzb_folder):
                    for filename in filenames:
                        if filename.lower().endswith(extensions):
                            ofilename = filename
                            path, ext = os.path.splitext(ofilename)

                if 'S' in sandwich:
                    if mylar.STORYARCDIR:
                        grdst = storyarcd
                    else:
                        grdst = mylar.DESTINATION_DIR
                else:
                    if mylar.GRABBAG_DIR:
                        grdst = mylar.GRABBAG_DIR
                    else:
                        grdst = mylar.DESTINATION_DIR

                filechecker.validateAndCreateDirectory(grdst, True)

                grab_dst = os.path.join(grdst, ofilename)
                self._log("Destination Path : " + grab_dst, logger.DEBUG)
                logger.info("Destination Path : " + grab_dst)
                grab_src = os.path.join(self.nzb_folder, ofilename)
                self._log("Source Path : " + grab_src, logger.DEBUG)
                logger.info("Source Path : " + grab_src)

                logger.info("Moving " + str(ofilename) + " into directory : " + str(grdst))

                try:
                    shutil.move(grab_src, grab_dst)
                except (OSError, IOError):
                    self._log("Failed to move directory - check directories and manually re-run.", logger.DEBUG)
                    logger.debug("Failed to move directory - check directories and manually re-run.")
                    return
                #tidyup old path
                try:
                    shutil.rmtree(self.nzb_folder)
                except (OSError, IOError):
                    self._log("Failed to remove temporary directory.", logger.DEBUG)
                    logger.debug("Failed to remove temporary directory - check directory and manually re-run.")
                    return

                logger.debug("Removed temporary directory : " + str(self.nzb_folder))
                self._log("Removed temporary directory : " + self.nzb_folder, logger.DEBUG)
                #delete entry from nzblog table
                myDB.action('DELETE from nzblog WHERE issueid=?', [issueid])

                if 'S' in issueid:
                    issuearcid = re.sub('S', '', issueid)
                    logger.info("IssueArcID is : " + str(issuearcid))
                    ctrlVal = {"IssueArcID": issuearcid}
                    newVal = {"Status": "Downloaded",
                              "Location": grab_dst}
                    myDB.upsert("readinglist", newVal, ctrlVal)
                    logger.info("updated status to Downloaded")
                return self.log

        comicid = issuenzb['ComicID']
        issuenumOG = issuenzb['Issue_Number']
        #issueno = str(issuenum).split('.')[0]

        #new CV API - removed all decimals...here we go AGAIN!
        issuenum = issuenumOG
        issue_except = 'None'
        if 'au' in issuenum.lower():
            # strip the 'AU' suffix for numeric handling; re-append later.
            issuenum = re.sub("[^0-9]", "", issuenum)
            issue_except = ' AU'
        if '.' in issuenum:
            iss_find = issuenum.find('.')
            iss_b4dec = issuenum[:iss_find]
            iss_decval = issuenum[iss_find+1:]
            if int(iss_decval) == 0:
                iss = iss_b4dec
                issdec = int(iss_decval)
                issueno = str(iss)
                self._log("Issue Number: " + str(issueno), logger.DEBUG)
                logger.fdebug("Issue Number: " + str(issueno))
            else:
                if len(iss_decval) == 1:
                    iss = iss_b4dec + "." + iss_decval
                    issdec = int(iss_decval) * 10
                else:
                    iss = iss_b4dec + "." + iss_decval.rstrip('0')
                    issdec = int(iss_decval.rstrip('0')) * 10
                issueno = iss_b4dec
                self._log("Issue Number: " + str(iss), logger.DEBUG)
                logger.fdebug("Issue Number: " + str(iss))
        else:
            iss = issuenum
            issueno = str(iss)

        # issue zero-suppression here
        if mylar.ZERO_LEVEL == "0":
            zeroadd = ""
        else:
            if mylar.ZERO_LEVEL_N == "none":
                zeroadd = ""
            elif mylar.ZERO_LEVEL_N == "0x":
                zeroadd = "0"
            elif mylar.ZERO_LEVEL_N == "00x":
                zeroadd = "00"

        logger.fdebug("Zero Suppression set to : " + str(mylar.ZERO_LEVEL_N))

        if str(len(issueno)) > 1:
            if int(issueno) < 10:
                self._log("issue detected less than 10", logger.DEBUG)
                if '.' in iss:
                    if int(iss_decval) > 0:
                        issueno = str(iss)
                        prettycomiss = str(zeroadd) + str(iss)
                    else:
                        prettycomiss = str(zeroadd) + str(int(issueno))
                else:
                    prettycomiss = str(zeroadd) + str(iss)
                if issue_except != 'None':
                    prettycomiss = str(prettycomiss) + issue_except
                self._log("Zero level supplement set to " + str(mylar.ZERO_LEVEL_N) + ". Issue will be set as : " + str(prettycomiss), logger.DEBUG)
            elif int(issueno) >= 10 and int(issueno) < 100:
                self._log("issue detected greater than 10, but less than 100", logger.DEBUG)
                if mylar.ZERO_LEVEL_N == "none":
                    zeroadd = ""
                else:
                    zeroadd = "0"
                if '.' in iss:
                    if int(iss_decval) > 0:
                        issueno = str(iss)
                        prettycomiss = str(zeroadd) + str(iss)
                    else:
                        prettycomiss = str(zeroadd) + str(int(issueno))
                else:
                    prettycomiss = str(zeroadd) + str(iss)
                if issue_except != 'None':
                    prettycomiss = str(prettycomiss) + issue_except
                self._log("Zero level supplement set to " + str(mylar.ZERO_LEVEL_N) + ".Issue will be set as : " + str(prettycomiss), logger.DEBUG)
            else:
                self._log("issue detected greater than 100", logger.DEBUG)
                if '.' in iss:
                    if int(iss_decval) > 0:
                        issueno = str(iss)
                prettycomiss = str(issueno)
                if issue_except != 'None':
                    prettycomiss = str(prettycomiss) + issue_except
                self._log("Zero level supplement set to " + str(mylar.ZERO_LEVEL_N) + ". Issue will be set as : " + str(prettycomiss), logger.DEBUG)
        else:
            prettycomiss = str(issueno)
            self._log("issue length error - cannot determine length. Defaulting to None: " + str(prettycomiss), logger.DEBUG)

        logger.fdebug("Pretty Comic Issue is : " + str(prettycomiss))
        issueyear = issuenzb['IssueDate'][:4]
        self._log("Issue Year: " + str(issueyear), logger.DEBUG)
        logger.fdebug("Issue Year : " + str(issueyear))
        comicnzb = myDB.action("SELECT * from comics WHERE comicid=?", [comicid]).fetchone()
        publisher = comicnzb['ComicPublisher']
        self._log("Publisher: " + publisher, logger.DEBUG)
        logger.fdebug("Publisher: " + str(publisher))
        #we need to un-unicode this to make sure we can write the filenames properly for spec.chars
        series = comicnzb['ComicName'].encode('ascii', 'ignore').strip()
        self._log("Series: " + series, logger.DEBUG)
        logger.fdebug("Series: " + str(series))
        seriesyear = comicnzb['ComicYear']
        self._log("Year: " + seriesyear, logger.DEBUG)
        logger.fdebug("Year: " + str(seriesyear))
        comlocation = comicnzb['ComicLocation']
        self._log("Comic Location: " + comlocation, logger.DEBUG)
        logger.fdebug("Comic Location: " + str(comlocation))
        comversion = comicnzb['ComicVersion']
        self._log("Comic Version: " + str(comversion), logger.DEBUG)
        logger.fdebug("Comic Version: " + str(comversion))
        if comversion is None:
            comversion = 'None'
        #if comversion is None, remove it so it doesn't populate with 'None'
        if comversion == 'None':
            chunk_f_f = re.sub('\$VolumeN', '', mylar.FILE_FORMAT)
            chunk_f = re.compile(r'\s+')
            chunk_file_format = chunk_f.sub(' ', chunk_f_f)
            self._log("No version # found for series - tag will not be available for renaming.", logger.DEBUG)
            logger.fdebug("No version # found for series, removing from filename")
            logger.fdebug("new format is now: " + str(chunk_file_format))
        else:
            chunk_file_format = mylar.FILE_FORMAT

        #Run Pre-script
        if mylar.ENABLE_PRE_SCRIPTS:
            nzbn = self.nzb_name #original nzb name
            nzbf = self.nzb_folder #original nzb folder
            #name, comicyear, comicid , issueid, issueyear, issue, publisher
            #create the dic and send it.
            seriesmeta = []
            seriesmetadata = {}
            seriesmeta.append({
                        'name': series,
                        'comicyear': seriesyear,
                        'comicid': comicid,
                        'issueid': issueid,
                        'issueyear': issueyear,
                        'issue': issuenum,
                        'publisher': publisher
                        })
            seriesmetadata['seriesmeta'] = seriesmeta
            self._run_pre_scripts(nzbn, nzbf, seriesmetadata)

        #rename file and move to new path
        #nfilename = series + " " + issueno + " (" + seriesyear + ")"

        file_values = {'$Series': series,
                       '$Issue': prettycomiss,
                       '$Year': issueyear,
                       '$series': series.lower(),
                       '$Publisher': publisher,
                       '$publisher': publisher.lower(),
                       '$VolumeY': 'V' + str(seriesyear),
                       '$VolumeN': comversion
                       }

        ofilename = None

        for root, dirnames, filenames in os.walk(self.nzb_folder):
            for filename in filenames:
                if filename.lower().endswith(extensions):
                    ofilename = filename
                    path, ext = os.path.splitext(ofilename)

        if ofilename is None:
            logger.error(u"Aborting PostProcessing - the filename doesn't exist in the location given. Make sure that " + str(self.nzb_folder) + " exists and is the correct location.")
            return
        self._log("Original Filename: " + ofilename, logger.DEBUG)
        self._log("Original Extension: " + ext, logger.DEBUG)
        logger.fdebug("Original Filname: " + str(ofilename))
        logger.fdebug("Original Extension: " + str(ext))

        if mylar.FILE_FORMAT == '' or not mylar.RENAME_FILES:
            self._log("Rename Files isn't enabled...keeping original filename.", logger.DEBUG)
            logger.fdebug("Rename Files isn't enabled - keeping original filename.")
            #check if extension is in nzb_name - will screw up otherwise
            if ofilename.lower().endswith(extensions):
                nfilename = ofilename[:-4]
            else:
                nfilename = ofilename
        else:
            nfilename = helpers.replace_all(chunk_file_format, file_values)
            if mylar.REPLACE_SPACES:
                #mylar.REPLACE_CHAR ...determines what to replace spaces with underscore or dot
                nfilename = nfilename.replace(' ', mylar.REPLACE_CHAR)
        nfilename = re.sub('[\,\:\?]', '', nfilename)
        self._log("New Filename: " + nfilename, logger.DEBUG)
        logger.fdebug("New Filename: " + str(nfilename))

        src = os.path.join(self.nzb_folder, ofilename)

        filechecker.validateAndCreateDirectory(comlocation, True)

        if mylar.LOWERCASE_FILENAMES:
            dst = (comlocation + "/" + nfilename + ext).lower()
        else:
            dst = comlocation + "/" + nfilename + ext.lower()
        self._log("Source:" + src, logger.DEBUG)
        self._log("Destination:" + dst, logger.DEBUG)
        logger.fdebug("Source: " + str(src))
        logger.fdebug("Destination: " + str(dst))
        # rename in place first, then move the renamed file to the library.
        os.rename(os.path.join(self.nzb_folder, str(ofilename)), os.path.join(self.nzb_folder, str(nfilename + ext)))
        src = os.path.join(self.nzb_folder, str(nfilename + ext))
        try:
            shutil.move(src, dst)
        except (OSError, IOError):
            self._log("Failed to move directory - check directories and manually re-run.", logger.DEBUG)
            self._log("Post-Processing ABORTED.", logger.DEBUG)
            return
        #tidyup old path
        try:
            shutil.rmtree(self.nzb_folder)
        except (OSError, IOError):
            self._log("Failed to remove temporary directory - check directory and manually re-run.", logger.DEBUG)
            self._log("Post-Processing ABORTED.", logger.DEBUG)
            return

        self._log("Removed temporary directory : " + str(self.nzb_folder), logger.DEBUG)
        #delete entry from nzblog table
        myDB.action('DELETE from nzblog WHERE issueid=?', [issueid])
        #update snatched table to change status to Downloaded
        updater.foundsearch(comicid, issueid, down='True')
        #force rescan of files
        updater.forceRescan(comicid)
        logger.info(u"Post-Processing completed for: " + series + " issue: " + str(issuenumOG))
        self._log(u"Post Processing SUCCESSFULL! ", logger.DEBUG)

        if mylar.PROWL_ENABLED:
            pushmessage = series + '(' + issueyear + ') - issue #' + issuenumOG
            logger.info(u"Prowl request")
            prowl = notifiers.PROWL()
            prowl.notify(pushmessage, "Download and Postprocessing completed")

        if mylar.NMA_ENABLED:
            nma = notifiers.NMA()
            nma.notify(series, str(issueyear), str(issuenumOG))

        if mylar.PUSHOVER_ENABLED:
            pushmessage = series + ' (' + str(issueyear) + ') - issue #' + str(issuenumOG)
            logger.info(u"Pushover request")
            pushover = notifiers.PUSHOVER()
            pushover.notify(pushmessage, "Download and Post-Processing completed")

        # retrieve/create the corresponding comic objects
        if mylar.ENABLE_EXTRA_SCRIPTS:
            folderp = str(dst) #folder location after move/rename
            nzbn = self.nzb_name #original nzb name
            filen = str(nfilename + ext) #new filename
            #name, comicyear, comicid , issueid, issueyear, issue, publisher
            #create the dic and send it.
            seriesmeta = []
            seriesmetadata = {}
            seriesmeta.append({
                        'name': series,
                        'comicyear': seriesyear,
                        'comicid': comicid,
                        'issueid': issueid,
                        'issueyear': issueyear,
                        'issue': issuenum,
                        'publisher': publisher
                        })
            seriesmetadata['seriesmeta'] = seriesmeta
            self._run_extra_scripts(nzbname, self.nzb_folder, filen, folderp, seriesmetadata)

        return self.log
def main():
    """Entry point for Mylar: parse CLI args, initialize config/logging,
    optionally run maintenance mode, then start the web server and the
    main signal loop until shutdown/restart is requested."""
    # Fixed paths to mylar
    if hasattr(sys, 'frozen'):
        mylar.FULL_PATH = os.path.abspath(sys.executable)
    else:
        mylar.FULL_PATH = os.path.abspath(__file__)

    mylar.PROG_DIR = os.path.dirname(mylar.FULL_PATH)
    mylar.ARGS = sys.argv[1:]

    # From sickbeard
    mylar.SYS_ENCODING = None
    try:
        locale.setlocale(locale.LC_ALL, "")
        mylar.SYS_ENCODING = locale.getpreferredencoding()
    except (locale.Error, IOError):
        pass

    # for OSes that are poorly configured I'll just force UTF-8
    if not mylar.SYS_ENCODING or mylar.SYS_ENCODING in ('ANSI_X3.4-1968', 'US-ASCII', 'ASCII'):
        mylar.SYS_ENCODING = 'UTF-8'

    if not logger.LOG_LANG.startswith('en'):
        print(
            'language detected as non-English (%s). Forcing specific logging module - errors WILL NOT be captured in the logs'
            % logger.LOG_LANG)
    else:
        print('log language set to %s' % logger.LOG_LANG)

    # Set up and gather command line arguments
    parser = argparse.ArgumentParser(
        description='Automated Comic Book Downloader')
    subparsers = parser.add_subparsers(title='Subcommands', dest='maintenance')

    # main parser
    parser.add_argument('-v', '--verbose', action='store_true', default=False,
                        help='Increase console logging verbosity')
    parser.add_argument('-q', '--quiet', action='store_true', default=False,
                        help='Turn off console logging')
    parser.add_argument('-d', '--daemon', action='store_true', default=False,
                        help='Run as a daemon')
    parser.add_argument('-p', '--port', type=int, default=0,
                        help='Force mylar to run on a specified port')
    parser.add_argument(
        '-b', '--backup', nargs='?', const='both',
        help='Will automatically backup & keep the last 4 rolling copies.')
    parser.add_argument(
        '-w', '--noweekly', action='store_true', default=False,
        help='Turn off weekly pull list check on startup (quicker boot sequence)')
    parser.add_argument(
        '-iu', '--ignoreupdate', action='store_true', default=False,
        help='Do not update db if required (for problem bypass)')
    parser.add_argument(
        '--datadir', default=None,
        help='Specify a directory where to store your data files')
    parser.add_argument('--config', default=None,
                        help='Specify a config file to use')
    parser.add_argument('--nolaunch', action='store_true', default=False,
                        help='Prevent browser from launching on startup')
    parser.add_argument(
        '--pidfile', default=None,
        help='Create a pid file (only relevant when running as a daemon)')
    parser.add_argument(
        '--safe', action='store_true', default=False,
        help='redirect the startup page to point to the Manage Comics screen on startup')

    parser_maintenance = subparsers.add_parser(
        'maintenance',
        help='Enter maintenance mode (no GUI). Additional commands are available (maintenance --help)')
    parser_maintenance.add_argument(
        '-xj', '--exportjson', default=None, action='store',
        help='Export existing mylar.db to json file')  #, default=argparse.SUPPRESS)
    parser_maintenance.add_argument(
        '-id', '--importdatabase', default=None, action='store',
        help='Import a mylar.db into current db')  # , default=argparse.SUPPRESS)
    parser_maintenance.add_argument(
        '-ij', '--importjson', default=None, action='store',
        help='Import a specified json file containing just {"ComicID": "XXXXX"} into current db')  #, default=argparse.SUPPRESS)
    parser_maintenance.add_argument(
        '-st', '--importstatus', default=False, action='store_true',
        help='Provide current maintenance status')  #, default=argparse.SUPPRESS)
    parser_maintenance.add_argument(
        '-u', '--update', default=False, action='store_true',
        help='force mylar to perform an update as if in GUI')  #, default=argparse.SUPPRESS)
    parser_maintenance.add_argument(
        '-fs', '--fixslashes', default=False, action='store_true',
        help='remove double-slashes from within paths in db')  #, default=argparse.SUPPRESS)
    parser_maintenance.add_argument(
        '-cp', '--clearprovidertable', default=False, action='store_true',
        help='clear out the provider_searches table in db')  #, default=argparse.SUPPRESS)
    #parser_maintenance.add_argument('-it', '--importtext', action='store', help='Import a specified text file into current db')

    args = vars(parser.parse_args())

    # these need to be set for things to register
    args_exportjson = args.get('exportjson')
    args_importdatabase = args.get('importdatabase')
    args_importjson = args.get('importjson')
    args_importstatus = args.get('importstatus')
    args_update = args.get('update')
    args_fixslashes = args.get('fixslashes')
    args_clearprovidertable = args.get('clearprovidertable')
    args_maintenance = args.get('maintenance')
    args_verbose = args.get('verbose')
    args_quiet = args.get('quiet')
    args_ignoreupdate = args.get('ignoreupdate')
    args_daemon = args.get('daemon')
    args_pidfile = args.get('pidfile')
    args_datadir = args.get('datadir')
    args_config = args.get('config')
    args_safe = args.get('safe')
    args_noweekly = args.get('noweekly')
    args_port = args.get('port')
    args_nolaunch = args.get('nolaunch')
    args_backup = args.get('backup')

    # only the three recognized backup modes are honoured; anything else disables backup
    if not any(
            [args_backup == 'ini', args_backup == 'db', args_backup == 'both']):
        args_backup = False

    if args_maintenance:
        # maintenance positional given with no actionable subcommand is an error
        if all([
                args_exportjson is None, args_importdatabase is None,
                args_importjson is None, args_importstatus is False,
                args_update is False, args_fixslashes is False,
                args_clearprovidertable is False
        ]):
            # FIX: corrected typo in user-facing message ('argumeent')
            print(
                'Expecting subcommand with the maintenance positional argument'
            )
            sys.exit()
        mylar.MAINTENANCE = True
    else:
        mylar.MAINTENANCE = False

    if args_verbose:
        print('Verbose/Debugging mode enabled...')
        mylar.LOG_LEVEL = 2
    elif args_quiet:
        mylar.QUIET = True
        print('Quiet logging mode enabled...')
        mylar.LOG_LEVEL = 0
    else:
        mylar.LOG_LEVEL = None

    if args_ignoreupdate:
        mylar.MAINTENANCE = False

    if args_daemon:
        if sys.platform == 'win32':
            print("Daemonize not supported under Windows, starting normally")
        else:
            mylar.DAEMON = True

    if args_pidfile:
        mylar.PIDFILE = str(args_pidfile)

        # If the pidfile already exists, mylar may still be running, so exit
        if os.path.exists(mylar.PIDFILE):
            if check_stale_pidfile(mylar.PIDFILE):
                os.unlink(mylar.PIDFILE)
            else:
                sys.exit("PID file '" + mylar.PIDFILE + "' already exists. Exiting.")

        # The pidfile is only useful in daemon mode, make sure we can write the file properly
        if mylar.DAEMON:
            mylar.CREATEPID = True
            curpid = os.getpid()
            try:
                # FIX: use a context manager so the pid file handle is closed
                # (original used open(...).write(...) leaving the handle open)
                with open(mylar.PIDFILE, 'w') as pidf:
                    pidf.write(f"{curpid}\n")
            except IOError as e:
                raise SystemExit("Unable to write PID file: %s [%d]" %
                                 (e.strerror, e.errno))
        else:
            print("Not running in daemon mode. PID file creation disabled.")

    if args_datadir:
        mylar.DATA_DIR = args_datadir
    else:
        mylar.DATA_DIR = mylar.PROG_DIR

    if args_config:
        mylar.CONFIG_FILE = args_config
    else:
        mylar.CONFIG_FILE = os.path.join(mylar.DATA_DIR, 'config.ini')

    if args_safe:
        mylar.SAFESTART = True
    else:
        mylar.SAFESTART = False

    if args_noweekly:
        mylar.NOWEEKLY = True
    else:
        mylar.NOWEEKLY = False

    # decide which of config.ini / mylar.db get backed up on this start
    try:
        backup = False
        backup_db = False
        backup_cfg = False
        if args_backup:
            backup = True
            if args_backup == 'ini':
                backup_cfg = True
            elif args_backup == 'db':
                backup_db = True
            elif args_backup == 'both':
                backup_cfg = True
                backup_db = True
            else:
                backup = False
    except Exception as e:
        backup = False

    # Put the database in the DATA_DIR
    mylar.DB_FILE = os.path.join(mylar.DATA_DIR, 'mylar.db')

    # Read config and start logging
    if mylar.MAINTENANCE is False:
        print('Initializing startup sequence....')

    #try:
    mylar.initialize(mylar.CONFIG_FILE)
    #except Exception as e:
    #    print e
    #    raise SystemExit('FATAL ERROR')

    # check for clearprovidertable value after ini load
    if mylar.CONFIG.CLEAR_PROVIDER_TABLE is True:
        logger.info(
            '[CLEAR_PROVIDER_TABLE] forcing over-ride value from config.ini')
        args_clearprovidertable = True
        mylar.MAINTENANCE = True

    if mylar.MAINTENANCE is False:
        filechecker.validateAndCreateDirectory(mylar.DATA_DIR, True, dmode='DATA')

        # Make sure the DATA_DIR is writeable
        if not os.access(mylar.DATA_DIR, os.W_OK):
            raise SystemExit('Cannot write to the data directory: ' +
                             mylar.DATA_DIR + '. Exiting...')

        # backup the db and configs before they load.
        if (backup is True and any([backup_cfg is True, backup_db is True
                                    ])) or mylar.CONFIG.BACKUP_ON_START:
            if mylar.CONFIG.BACKUP_ON_START:
                backup_cfg = True
                backup_db = True
            if mylar.CONFIG.BACKUP_ON_START or all(
                    [backup_cfg is True, backup_db is True]):
                logger.info(
                    '[AUTO-BACKUP] Backing up mylar.db & config.ini files for safety.')
            elif backup_cfg is True:
                logger.info('[AUTO-BACKUP] Backing up config.ini file for safety.')
            elif backup_db is True:
                logger.info('[AUTO-BACKUP] Backing up mylar.db file for safety.')

            mm = maintenance.Maintenance('backup')
            back_check = mm.backup_files(cfg=backup_cfg, dbs=backup_db)
            failures = [
                re.sub('mylar database', 'mylar.db', x['file'])
                for x in back_check if x['status'] == 'failure'
            ]
            successes = [
                re.sub('mylar database', 'mylar.db', x['file'])
                for x in back_check if x['status'] == 'success'
            ]
            if failures:
                logger.warn('[AUTO-BACKUP] Failure backing up %s files [%s]' %
                            (len(failures), failures))
            if successes:
                logger.info('[AUTO-BACKUP] Successful backup of %s files [%s]' %
                            (len(successes), successes))

    # Rename the main thread
    threading.current_thread().name = "MAIN"

    if mylar.DAEMON:
        mylar.daemonize()

    #print('mylar.MAINTENANCE: %s'% mylar.MAINTENANCE)
    #print('mylar.MAINTENANCE_TOTAL: %s'% mylar.MAINTENANCE_DB_TOTAL)
    if mylar.MAINTENANCE is True and (mylar.MAINTENANCE_UPDATE or any([
            args_exportjson, args_importjson, args_update is True,
            args_importstatus is True, args_fixslashes is True,
            args_clearprovidertable is True
    ])):
        # Start up a temporary maintenance server for GUI display only.
        maint_config = {
            'http_port': int(mylar.CONFIG.HTTP_PORT),
            'http_host': mylar.CONFIG.HTTP_HOST,
            'http_root': mylar.CONFIG.HTTP_ROOT,
            'enable_https': mylar.CONFIG.ENABLE_HTTPS,
            'https_cert': mylar.CONFIG.HTTPS_CERT,
            'https_key': mylar.CONFIG.HTTPS_KEY,
            'https_chain': mylar.CONFIG.HTTPS_CHAIN,
            'http_username': mylar.CONFIG.HTTP_USERNAME,
            'http_password': mylar.CONFIG.HTTP_PASSWORD,
            'authentication': mylar.CONFIG.AUTHENTICATION,
            'login_timeout': mylar.CONFIG.LOGIN_TIMEOUT
        }

        # Try to start the server.
        maintenance_webstart.initialize(maint_config)
        versioncheck.versionload()
        print("started")

        loggermode = '[MAINTENANCE-MODE]'
        restart_method = True  # True will restart, False will shutdown.

        if mylar.MAINTENANCE_UPDATE:
            ur = maintenance.Maintenance('db update')
            restart_method = ur.update_db()
            if restart_method is None:
                restart_method = True
        elif args_importstatus:
            cs = maintenance.Maintenance('status')
            cstat = cs.check_status()
        else:
            logger.info('%s Initializing maintenance mode' % loggermode)

            if args_update is True:
                logger.info(
                    '%s Attempting to update Mylar so things can work again...'
                    % loggermode)
                try:
                    mylar.shutdown(restart=True, update=True, maintenance=True)
                except Exception as e:
                    sys.exit('%s Mylar failed to update: %s' % (loggermode, e))
            elif args_importdatabase:
                # for attempted db import.
                maintenance_path = args_importdatabase
                logger.info('%s db path accepted as %s' %
                            (loggermode, maintenance_path))
                di = maintenance.Maintenance('database-import',
                                             file=maintenance_path)
                d = di.database_import()
            elif args_importjson:
                # for attempted file re-import (json format)
                maintenance_path = args_importjson
                logger.info(
                    '%s file indicated as being in json format - path accepted as %s'
                    % (loggermode, maintenance_path))
                ij = maintenance.Maintenance('json-import',
                                             file=maintenance_path)
                j = ij.json_import()
            #elif args_importtext:
            #    #for attempted file re-import (list format)
            #    maintenance_path = args_importtext
            #    logger.info('%s file indicated as being in list format - path accepted as %s' % (loggermode, maintenance_path))
            #    it = maintenance.Maintenance('list-import', file=maintenance_path)
            #    t = it.list_import()
            elif args_exportjson:
                # for export of db comicid's in json format
                maintenance_path = args_exportjson
                logger.info(
                    '%s file indicated as being written to json format - destination accepted as %s'
                    % (loggermode, maintenance_path))
                ej = maintenance.Maintenance('json-export',
                                             output=maintenance_path)
                j = ej.json_export()
            elif args_fixslashes:
                # for running the fix slashes on the db manually
                logger.info('%s method indicated as fix slashes' % loggermode)
                fs = maintenance.Maintenance('fixslashes')
                j = fs.fix_slashes()
            elif args_clearprovidertable:
                # for running the clearprovidertable on the db manually
                logger.info('%s method indicated as fix clearprovidertable'
                            % loggermode)
                fs = maintenance.Maintenance('clearprovidertable')
                j = fs.clear_provider_table()
            else:
                logger.info('%s Not a valid command: %s' %
                            (loggermode, args_maintenance))
                sys.exit()

            logger.info('%s Exiting Maintenance mode' % (loggermode))

        # restart automatically after maintenance has completed...
        maintenance_webstart.shutdown()
        logger.info('%s Maintenance webserver has been shut down.' % (loggermode))
        mylar.shutdown(restart=restart_method, maintenance=True)

    # Force the http port if neccessary
    if args_port > 0:
        http_port = args_port
        logger.info('Starting Mylar on forced port: %i' % http_port)
    else:
        http_port = int(mylar.CONFIG.HTTP_PORT)

    # Check if pyOpenSSL is installed. It is required for certificate generation
    # and for cherrypy.
    if mylar.CONFIG.ENABLE_HTTPS:
        try:
            import OpenSSL
        except ImportError:
            logger.warn("The pyOpenSSL module is missing. Install this " \
                        "module to enable HTTPS. HTTPS will be disabled.")
            mylar.CONFIG.ENABLE_HTTPS = False

    # Try to start the server. Will exit here is address is already in use.
    web_config = {
        'http_port': http_port,
        'http_host': mylar.CONFIG.HTTP_HOST,
        'http_root': mylar.CONFIG.HTTP_ROOT,
        'enable_https': mylar.CONFIG.ENABLE_HTTPS,
        'https_cert': mylar.CONFIG.HTTPS_CERT,
        'https_key': mylar.CONFIG.HTTPS_KEY,
        'https_chain': mylar.CONFIG.HTTPS_CHAIN,
        'http_username': mylar.CONFIG.HTTP_USERNAME,
        'http_password': mylar.CONFIG.HTTP_PASSWORD,
        'authentication': mylar.CONFIG.AUTHENTICATION,
        'login_timeout': mylar.CONFIG.LOGIN_TIMEOUT,
        'cherrypy_logging': mylar.CONFIG.CHERRYPY_LOGGING,
        'opds_enable': mylar.CONFIG.OPDS_ENABLE,
        'opds_authentication': mylar.CONFIG.OPDS_AUTHENTICATION,
        'opds_username': mylar.CONFIG.OPDS_USERNAME,
        'opds_password': mylar.CONFIG.OPDS_PASSWORD,
        'opds_pagesize': mylar.CONFIG.OPDS_PAGESIZE,
    }

    # Try to start the server.
    webstart.initialize(web_config)

    # check for version here after web server initialized so it doesn't try to repeatidly hit github
    # for version info if it's already running
    versioncheck.versionload()

    if mylar.CONFIG.LAUNCH_BROWSER and not args_nolaunch:
        mylar.launch_browser(mylar.CONFIG.HTTP_HOST, http_port,
                             mylar.CONFIG.HTTP_ROOT)

    # Start the background threads
    mylar.start()

    signal.signal(signal.SIGTERM, handler_sigterm)

    # main wait loop: sleep until a shutdown/restart/update signal is posted
    while True:
        if not mylar.SIGNAL:
            try:
                time.sleep(1)
            except KeyboardInterrupt:
                mylar.GLOBAL_MESSAGES = {
                    'status': 'success',
                    'event': 'shutdown',
                    'message': 'Now shutting down system.'
                }
                time.sleep(1)
                mylar.SIGNAL = 'shutdown'
        else:
            logger.info('Received signal: ' + mylar.SIGNAL)
            if mylar.SIGNAL == 'shutdown':
                mylar.GLOBAL_MESSAGES = {
                    'status': 'success',
                    'event': 'shutdown',
                    'message': 'Now shutting down system.'
                }
                time.sleep(2)
                mylar.shutdown()
            elif mylar.SIGNAL == 'restart':
                mylar.shutdown(restart=True)
            else:
                mylar.shutdown(restart=True, update=True)
            mylar.SIGNAL = None

    return
def GCDimport(gcomicid, pullupd=None, imported=None, ogcname=None):
    """Import/refresh a series using GCD data only (no ComicVine).

    Used when volume spanning is discovered for a Comic (and it can't be
    added using CV). Issue counts are wrong (and can't be added).
    Because a Comicvine ComicID and a GCD ComicID could be identical at
    some random point, they are distinguished by prefix:
    CV = comicid, GCD = gcomicid (ie. CV=2740, GCD=G3719).

    Returns "true" when no GCD match was found, otherwise None.
    """
    gcdcomicid = gcomicid
    myDB = db.DBConnection()

    # We need the current minimal info in the database instantly
    # so we don't throw a 500 error when we redirect to the artistPage
    controlValueDict = {"ComicID": gcdcomicid}
    comic = myDB.action(
        'SELECT ComicName, ComicYear, Total, ComicPublished, ComicImage, ComicLocation, ComicPublisher FROM comics WHERE ComicID=?',
        [gcomicid]).fetchone()

    # FIX: the failure branch originally referenced an undefined name
    # ('dbcomic') and ran *after* comic[0] indexing, so it could never be
    # reached on a missing row. Guard before unpacking instead.
    if not comic:
        logger.warn("Error fetching comic. ID for : " + gcdcomicid)
        newValueDict = {
            "ComicName": "Fetch failed, try refreshing. (%s)" % (gcdcomicid),
            "Status": "Active"
        }
        myDB.upsert("comics", newValueDict, controlValueDict)
        return

    ComicName = comic[0]
    ComicYear = comic[1]
    ComicIssues = comic[2]
    ComicPublished = comic[3]
    comlocation = comic[5]
    ComicPublisher = comic[6]
    #ComicImage = comic[4]
    #print ("Comic:" + str(ComicName))

    newValueDict = {"Status": "Loading"}
    myDB.upsert("comics", newValueDict, controlValueDict)

    # we need to lookup the info for the requested ComicID in full now
    #comic = cv.getComic(comicid,'comic')

    # run the re-sortorder here in order to properly display the page
    if pullupd is None:
        helpers.ComicSort(comicorder=mylar.COMICSORT, imported=gcomicid)

    if ComicName.startswith('The '):
        sortname = ComicName[4:]
    else:
        sortname = ComicName

    logger.info(u"Now adding/updating: " + ComicName)
    #--Now that we know ComicName, let's try some scraping
    #--Start
    # gcd will return issue details (most importantly publishing date)
    comicid = gcomicid[1:]
    resultURL = "/series/" + str(comicid) + "/"
    gcdinfo = parseit.GCDdetails(comseries=None,
                                 resultURL=resultURL,
                                 vari_loop=0,
                                 ComicID=gcdcomicid,
                                 TotalIssues=ComicIssues,
                                 issvariation=None,
                                 resultPublished=None)
    if gcdinfo == "No Match":
        logger.warn("No matching result found for " + ComicName + " (" +
                    ComicYear + ")")
        updater.no_searchresults(gcomicid)
        nomatch = "true"
        return nomatch
    logger.info(u"Sucessfully retrieved details for " + ComicName)
    # print ("Series Published" + parseit.resultPublished)
    #--End

    ComicImage = gcdinfo['ComicImage']

    # comic book location on machine
    # setup default location here
    if comlocation is None:
        # let's remove the non-standard characters here.
        u_comicnm = ComicName
        u_comicname = u_comicnm.encode('ascii', 'ignore').strip()
        if ':' in u_comicname or '/' in u_comicname or ',' in u_comicname:
            comicdir = u_comicname
            if ':' in comicdir:
                comicdir = comicdir.replace(':', '')
            if '/' in comicdir:
                comicdir = comicdir.replace('/', '-')
            if ',' in comicdir:
                comicdir = comicdir.replace(',', '')
        else:
            comicdir = u_comicname

        series = comicdir
        publisher = ComicPublisher
        year = ComicYear

        # do work to generate folder path
        values = {
            '$Series': series,
            '$Publisher': publisher,
            '$Year': year,
            '$series': series.lower(),
            '$publisher': publisher.lower(),
            '$Volume': year
        }
        if mylar.FOLDER_FORMAT == '':
            comlocation = mylar.DESTINATION_DIR + "/" + comicdir + " (" + comic[
                'ComicYear'] + ")"
        else:
            comlocation = mylar.DESTINATION_DIR + "/" + helpers.replace_all(
                mylar.FOLDER_FORMAT, values)

        #comlocation = mylar.DESTINATION_DIR + "/" + comicdir + " (" + ComicYear + ")"
        if mylar.DESTINATION_DIR == "":
            logger.error(
                u"There is no general directory specified - please specify in Config/Post-Processing."
            )
            return
        if mylar.REPLACE_SPACES:
            #mylar.REPLACE_CHAR ...determines what to replace spaces with underscore or dot
            comlocation = comlocation.replace(' ', mylar.REPLACE_CHAR)

    # if it doesn't exist - create it (otherwise will bugger up later on)
    if os.path.isdir(str(comlocation)):
        logger.info(u"Directory (" + str(comlocation) +
                    ") already exists! Continuing...")
    else:
        #print ("Directory doesn't exist!")
        #try:
        #    os.makedirs(str(comlocation))
        #    logger.info(u"Directory successfully created at: " + str(comlocation))
        #except OSError:
        #    logger.error(u"Could not create comicdir : " + str(comlocation))
        filechecker.validateAndCreateDirectory(comlocation, True)

    comicIssues = gcdinfo['totalissues']

    # let's download the image...
    if os.path.exists(mylar.CACHE_DIR):
        pass
    else:
        # let's make the dir.
        try:
            os.makedirs(str(mylar.CACHE_DIR))
            logger.info(u"Cache Directory successfully created at: " +
                        str(mylar.CACHE_DIR))
        except OSError:
            logger.error(u"Could not create cache dir : " +
                         str(mylar.CACHE_DIR))

    coverfile = os.path.join(mylar.CACHE_DIR, str(gcomicid) + ".jpg")
    #try: urllib.urlretrieve(str(ComicImage), str(coverfile))
    try:
        with open(str(coverfile)) as f:
            ComicImage = os.path.join('cache', str(gcomicid) + ".jpg")
            #this is for Firefox when outside the LAN...it works, but I don't know how to implement it
            #without breaking the normal flow for inside the LAN (above)
            #ComicImage = "http://" + str(mylar.HTTP_HOST) + ":" + str(mylar.HTTP_PORT) + "/cache/" + str(comi$
            logger.info(u"Sucessfully retrieved cover for " + ComicName)
            # if the comic cover local is checked, save a cover.jpg to the series folder.
            if mylar.COMIC_COVER_LOCAL:
                comiclocal = os.path.join(str(comlocation) + "/cover.jpg")
                shutil.copy(ComicImage, comiclocal)
    except IOError as e:
        logger.error(u"Unable to save cover locally at this time.")

    #if comic['ComicVersion'].isdigit():
    #    comicVol = "v" + comic['ComicVersion']
    #else:
    #    comicVol = None

    controlValueDict = {"ComicID": gcomicid}
    newValueDict = {
        "ComicName": ComicName,
        "ComicSortName": sortname,
        "ComicYear": ComicYear,
        "Total": comicIssues,
        "ComicLocation": comlocation,
        #"ComicVersion": comicVol,
        "ComicImage": ComicImage,
        #"ComicPublisher": comic['ComicPublisher'],
        #"ComicPublished": comicPublished,
        "DateAdded": helpers.today(),
        "Status": "Loading"
    }
    myDB.upsert("comics", newValueDict, controlValueDict)

    # comicsort here...
    # run the re-sortorder here in order to properly display the page
    if pullupd is None:
        helpers.ComicSort(sequence='update')

    logger.info(u"Sucessfully retrieved issue details for " + ComicName)
    n = 0
    iscnt = int(comicIssues)
    issnum = []
    issname = []
    issdate = []
    int_issnum = []
    # let's start issue #'s at 0 -- thanks to DC for the new 52 reboot! :)
    latestiss = "0"
    latestdate = "0000-00-00"
    #print ("total issues:" + str(iscnt))
    #---removed NEW code here---
    logger.info(u"Now adding/updating issues for " + ComicName)
    bb = 0
    while (bb <= iscnt):
        #---NEW.code
        try:
            gcdval = gcdinfo['gcdchoice'][bb]
            #print ("gcdval: " + str(gcdval))
        except IndexError:
            # account for gcd variation here
            if gcdinfo['gcdvariation'] == 'gcd':
                #print ("gcd-variation accounted for.")
                issdate = '0000-00-00'
                # NOTE(review): 'issis' is not defined anywhere in this
                # function (copied from addComictoDB) - this line will raise
                # NameError if reached. Left as-is pending the intended value.
                int_issnum = int(issis / 1000)
            break

        if 'nn' in str(gcdval['GCDIssue']):
            # no number detected - GN, TP or the like
            logger.warn(
                u"Non Series detected (Graphic Novel, etc) - cannot proceed at this time."
            )
            updater.no_searchresults(comicid)
            return
        elif '.' in str(gcdval['GCDIssue']):
            issst = str(gcdval['GCDIssue']).find('.')
            issb4dec = str(gcdval['GCDIssue'])[:issst]
            # if the length of decimal is only 1 digit, assume it's a tenth
            decis = str(gcdval['GCDIssue'])[issst + 1:]
            if len(decis) == 1:
                decisval = int(decis) * 10
                issaftdec = str(decisval)
            if len(decis) == 2:
                decisval = int(decis)
                issaftdec = str(decisval)
            if int(issaftdec) == 0:
                issaftdec = "00"
            gcd_issue = issb4dec + "." + issaftdec
            gcdis = (int(issb4dec) * 1000) + decisval
        else:
            gcdis = int(str(gcdval['GCDIssue'])) * 1000
            gcd_issue = str(gcdval['GCDIssue'])

        # get the latest issue / date using the date.
        int_issnum = int(gcdis / 1000)
        issdate = str(gcdval['GCDDate'])
        issid = "G" + str(gcdval['IssueID'])
        if gcdval['GCDDate'] > latestdate:
            latestiss = str(gcd_issue)
            latestdate = str(gcdval['GCDDate'])
        #print("(" + str(bb) + ") IssueID: " + str(issid) + " IssueNo: " + str(gcd_issue) + " Date" + str(issdate) )
        #---END.NEW.

        # check if the issue already exists
        iss_exists = myDB.action('SELECT * from issues WHERE IssueID=?',
                                 [issid]).fetchone()

        # adjust for inconsistencies in GCD date format - some dates have ? which borks up things.
        if "?" in str(issdate):
            issdate = "0000-00-00"

        controlValueDict = {"IssueID": issid}
        newValueDict = {
            "ComicID": gcomicid,
            "ComicName": ComicName,
            "Issue_Number": gcd_issue,
            "IssueDate": issdate,
            "Int_IssueNumber": int_issnum
        }
        #print ("issueid:" + str(controlValueDict))
        #print ("values:" + str(newValueDict))

        if mylar.AUTOWANT_ALL:
            newValueDict['Status'] = "Wanted"
        elif issdate > helpers.today() and mylar.AUTOWANT_UPCOMING:
            newValueDict['Status'] = "Wanted"
        else:
            newValueDict['Status'] = "Skipped"

        if iss_exists:
            #print ("Existing status : " + str(iss_exists['Status']))
            newValueDict['Status'] = iss_exists['Status']
        else:
            # FIX: only stamp DateAdded for issues not already in the database.
            # The original set this on newValueDict *before* rebuilding the
            # dict above, so the value was always discarded.
            newValueDict['DateAdded'] = helpers.today()

        myDB.upsert("issues", newValueDict, controlValueDict)
        bb += 1

    #        logger.debug(u"Updating comic cache for " + ComicName)
    #        cache.getThumb(ComicID=issue['issueid'])
    #        logger.debug(u"Updating cache for: " + ComicName)
    #        cache.getThumb(ComicIDcomicid)

    controlValueStat = {"ComicID": gcomicid}
    newValueStat = {
        "Status": "Active",
        "LatestIssue": latestiss,
        "LatestDate": latestdate,
        "LastUpdated": helpers.now()
    }
    myDB.upsert("comics", newValueStat, controlValueStat)

    if mylar.CVINFO:
        if not os.path.exists(comlocation + "/cvinfo"):
            with open(comlocation + "/cvinfo", "w") as text_file:
                text_file.write("http://www.comicvine.com/volume/49-" +
                                str(comicid))

    logger.info(u"Updating complete for: " + ComicName)

    # move the files...if imported is not empty (meaning it's not from the mass importer.)
    if imported is None or imported == 'None':
        pass
    else:
        if mylar.IMP_MOVE:
            logger.info("Mass import - Move files")
            moveit.movefiles(gcomicid, comlocation, ogcname)
        else:
            logger.info(
                "Mass import - Moving not Enabled. Setting Archived Status for import."
            )
            moveit.archivefiles(gcomicid, ogcname)

    # check for existing files...
    updater.forceRescan(gcomicid)

    if pullupd is None:
        # lets' check the pullist for anyting at this time as well since we're here.
        if mylar.AUTOWANT_UPCOMING and 'Present' in ComicPublished:
            logger.info(u"Checking this week's pullist for new issues of " +
                        ComicName)
            updater.newpullcheck(comic['ComicName'], gcomicid)

        # here we grab issues that have been marked as wanted above...
        results = myDB.select(
            "SELECT * FROM issues where ComicID=? AND Status='Wanted'",
            [gcomicid])
        if results:
            logger.info(u"Attempting to grab wanted issues for : " + ComicName)
            for result in results:
                foundNZB = "none"
                if (mylar.NZBSU or mylar.DOGNZB or mylar.EXPERIMENTAL
                        or mylar.NEWZNAB or mylar.NZBX) and (mylar.SAB_HOST):
                    foundNZB = search.searchforissue(result['IssueID'])
                    if foundNZB == "yes":
                        updater.foundsearch(result['ComicID'],
                                            result['IssueID'])
        else:
            logger.info(u"No issues marked as wanted for " + ComicName)

        logger.info(u"Finished grabbing what I could.")
def addComictoDB(comicid, mismatch=None, pullupd=None, imported=None, ogcname=None): # Putting this here to get around the circular import. Will try to use this to update images at later date. # from mylar import cache myDB = db.DBConnection() # We need the current minimal info in the database instantly # so we don't throw a 500 error when we redirect to the artistPage controlValueDict = {"ComicID": comicid} dbcomic = myDB.action('SELECT * FROM comics WHERE ComicID=?', [comicid]).fetchone() if dbcomic is None: newValueDict = { "ComicName": "Comic ID: %s" % (comicid), "Status": "Loading" } comlocation = None oldcomversion = None else: newValueDict = {"Status": "Loading"} comlocation = dbcomic['ComicLocation'] filechecker.validateAndCreateDirectory(comlocation, True) oldcomversion = dbcomic[ 'ComicVersion'] #store the comicversion and chk if it exists before hammering. myDB.upsert("comics", newValueDict, controlValueDict) #run the re-sortorder here in order to properly display the page if pullupd is None: helpers.ComicSort(comicorder=mylar.COMICSORT, imported=comicid) # we need to lookup the info for the requested ComicID in full now comic = cv.getComic(comicid, 'comic') #comic = myDB.action('SELECT * FROM comics WHERE ComicID=?', [comicid]).fetchone() if not comic: logger.warn("Error fetching comic. ID for : " + comicid) if dbcomic is None: newValueDict = { "ComicName": "Fetch failed, try refreshing. 
(%s)" % (comicid), "Status": "Active" } else: newValueDict = {"Status": "Active"} myDB.upsert("comics", newValueDict, controlValueDict) return if comic['ComicName'].startswith('The '): sortname = comic['ComicName'][4:] else: sortname = comic['ComicName'] logger.info(u"Now adding/updating: " + comic['ComicName']) #--Now that we know ComicName, let's try some scraping #--Start # gcd will return issue details (most importantly publishing date) if not mylar.CV_ONLY: if mismatch == "no" or mismatch is None: gcdinfo = parseit.GCDScraper(comic['ComicName'], comic['ComicYear'], comic['ComicIssues'], comicid) #print ("gcdinfo: " + str(gcdinfo)) mismatch_com = "no" if gcdinfo == "No Match": updater.no_searchresults(comicid) nomatch = "true" logger.info(u"There was an error when trying to add " + comic['ComicName'] + " (" + comic['ComicYear'] + ")") return nomatch else: mismatch_com = "yes" #print ("gcdinfo:" + str(gcdinfo)) elif mismatch == "yes": CV_EXcomicid = myDB.action( "SELECT * from exceptions WHERE ComicID=?", [comicid]).fetchone() if CV_EXcomicid['variloop'] is None: pass else: vari_loop = CV_EXcomicid['variloop'] NewComicID = CV_EXcomicid['NewComicID'] gcomicid = CV_EXcomicid['GComicID'] resultURL = "/series/" + str(NewComicID) + "/" #print ("variloop" + str(CV_EXcomicid['variloop'])) #if vari_loop == '99': gcdinfo = parseit.GCDdetails(comseries=None, resultURL=resultURL, vari_loop=0, ComicID=comicid, TotalIssues=0, issvariation="no", resultPublished=None) logger.info(u"Sucessfully retrieved details for " + comic['ComicName']) # print ("Series Published" + parseit.resultPublished) CV_NoYearGiven = "no" #if the SeriesYear returned by CV is blank or none (0000), let's use the gcd one. if comic['ComicYear'] is None or comic['ComicYear'] == '0000': if mylar.CV_ONLY: #we'll defer this until later when we grab all the issues and then figure it out logger.info( "Uh-oh. I can't find a Series Year for this series. I'm going to try analyzing deeper." 
) SeriesYear = cv.getComic(comicid, 'firstissue', comic['FirstIssueID']) if SeriesYear == '0000': logger.info( "Ok - I couldn't find a Series Year at all. Loading in the issue data now and will figure out the Series Year." ) CV_NoYearGiven = "yes" issued = cv.getComic(comicid, 'issue') SeriesYear = issued['firstdate'][:4] else: SeriesYear = gcdinfo['SeriesYear'] else: SeriesYear = comic['ComicYear'] #let's do the Annual check here. if mylar.ANNUALS_ON: annualcomicname = re.sub('[\,\:]', '', comic['ComicName']) annuals = comicbookdb.cbdb(annualcomicname, SeriesYear) print("Number of Annuals returned: " + str(annuals['totalissues'])) nb = 0 while (nb <= int(annuals['totalissues'])): try: annualval = annuals['annualslist'][nb] except IndexError: break newCtrl = { "IssueID": str(annualval['AnnualIssue'] + annualval['AnnualDate']) } newVals = { "Issue_Number": annualval['AnnualIssue'], "IssueDate": annualval['AnnualDate'], "IssueName": annualval['AnnualTitle'], "ComicID": comicid, "Status": "Skipped" } myDB.upsert("annuals", newVals, newCtrl) nb += 1 #parseit.annualCheck(gcomicid=gcdinfo['GCDComicID'], comicid=comicid, comicname=comic['ComicName'], comicyear=SeriesYear) #comic book location on machine # setup default location here if comlocation is None: # let's remove the non-standard characters here. u_comicnm = comic['ComicName'] u_comicname = u_comicnm.encode('ascii', 'ignore').strip() if ':' in u_comicname or '/' in u_comicname or ',' in u_comicname or '?' in u_comicname: comicdir = u_comicname if ':' in comicdir: comicdir = comicdir.replace(':', '') if '/' in comicdir: comicdir = comicdir.replace('/', '-') if ',' in comicdir: comicdir = comicdir.replace(',', '') if '?' in comicdir: comicdir = comicdir.replace('?', '') else: comicdir = u_comicname series = comicdir publisher = re.sub('!', '', comic['ComicPublisher']) # thanks Boom! 
year = SeriesYear comversion = comic['ComicVersion'] if comversion is None: comversion = 'None' #if comversion is None, remove it so it doesn't populate with 'None' if comversion == 'None': chunk_f_f = re.sub('\$VolumeN', '', mylar.FILE_FORMAT) chunk_f = re.compile(r'\s+') mylar.FILE_FORMAT = chunk_f.sub(' ', chunk_f_f) #do work to generate folder path values = { '$Series': series, '$Publisher': publisher, '$Year': year, '$series': series.lower(), '$publisher': publisher.lower(), '$VolumeY': 'V' + str(year), '$VolumeN': comversion } #print mylar.FOLDER_FORMAT #print 'working dir:' #print helpers.replace_all(mylar.FOLDER_FORMAT, values) if mylar.FOLDER_FORMAT == '': comlocation = mylar.DESTINATION_DIR + "/" + comicdir + " (" + SeriesYear + ")" else: comlocation = mylar.DESTINATION_DIR + "/" + helpers.replace_all( mylar.FOLDER_FORMAT, values) #comlocation = mylar.DESTINATION_DIR + "/" + comicdir + " (" + comic['ComicYear'] + ")" if mylar.DESTINATION_DIR == "": logger.error( u"There is no general directory specified - please specify in Config/Post-Processing." ) return if mylar.REPLACE_SPACES: #mylar.REPLACE_CHAR ...determines what to replace spaces with underscore or dot comlocation = comlocation.replace(' ', mylar.REPLACE_CHAR) #moved this out of the above loop so it will chk for existance of comlocation in case moved #if it doesn't exist - create it (otherwise will bugger up later on) if os.path.isdir(str(comlocation)): logger.info(u"Directory (" + str(comlocation) + ") already exists! 
Continuing...") else: #print ("Directory doesn't exist!") #try: # os.makedirs(str(comlocation)) # logger.info(u"Directory successfully created at: " + str(comlocation)) #except OSError: # logger.error(u"Could not create comicdir : " + str(comlocation)) filechecker.validateAndCreateDirectory(comlocation, True) #try to account for CV not updating new issues as fast as GCD #seems CV doesn't update total counts #comicIssues = gcdinfo['totalissues'] comicIssues = comic['ComicIssues'] if not mylar.CV_ONLY: if gcdinfo['gcdvariation'] == "cv": comicIssues = str(int(comic['ComicIssues']) + 1) #let's download the image... if os.path.exists(mylar.CACHE_DIR): pass else: #let's make the dir. try: os.makedirs(str(mylar.CACHE_DIR)) logger.info(u"Cache Directory successfully created at: " + str(mylar.CACHE_DIR)) except OSError: logger.error( 'Could not create cache dir. Check permissions of cache dir: ' + str(mylar.CACHE_DIR)) coverfile = os.path.join(mylar.CACHE_DIR, str(comicid) + ".jpg") #try: urllib.urlretrieve(str(comic['ComicImage']), str(coverfile)) try: with open(str(coverfile)) as f: ComicImage = os.path.join('cache', str(comicid) + ".jpg") #this is for Firefox when outside the LAN...it works, but I don't know how to implement it #without breaking the normal flow for inside the LAN (above) #ComicImage = "http://" + str(mylar.HTTP_HOST) + ":" + str(mylar.HTTP_PORT) + "/cache/" + str(comicid) + ".jpg" logger.info(u"Sucessfully retrieved cover for " + comic['ComicName']) #if the comic cover local is checked, save a cover.jpg to the series folder. if mylar.COMIC_COVER_LOCAL: comiclocal = os.path.join(str(comlocation) + "/cover.jpg") shutil.copy(ComicImage, comiclocal) except IOError as e: logger.error(u"Unable to save cover locally at this time.") if oldcomversion is None: if comic['ComicVersion'].isdigit(): comicVol = "v" + comic['ComicVersion'] else: comicVol = None else: comicVol = oldcomversion #for description ... 
#Cdesc = helpers.cleanhtml(comic['ComicDescription']) #cdes_find = Cdesc.find("Collected") #cdes_removed = Cdesc[:cdes_find] #print cdes_removed controlValueDict = {"ComicID": comicid} newValueDict = { "ComicName": comic['ComicName'], "ComicSortName": sortname, "ComicYear": SeriesYear, "ComicImage": ComicImage, "Total": comicIssues, "ComicVersion": comicVol, "ComicLocation": comlocation, "ComicPublisher": comic['ComicPublisher'], #"Description": Cdesc.decode('utf-8', 'replace'), "DetailURL": comic['ComicURL'], # "ComicPublished": gcdinfo['resultPublished'], "ComicPublished": 'Unknown', "DateAdded": helpers.today(), "Status": "Loading" } myDB.upsert("comics", newValueDict, controlValueDict) #comicsort here... #run the re-sortorder here in order to properly display the page if pullupd is None: helpers.ComicSort(sequence='update') if CV_NoYearGiven == 'no': #if set to 'no' then we haven't pulled down the issues, otherwise we did it already issued = cv.getComic(comicid, 'issue') logger.info(u"Sucessfully retrieved issue details for " + comic['ComicName']) n = 0 iscnt = int(comicIssues) issid = [] issnum = [] issname = [] issdate = [] int_issnum = [] #let's start issue #'s at 0 -- thanks to DC for the new 52 reboot! 
:) latestiss = "0" latestdate = "0000-00-00" firstiss = "10000000" firstdate = "2099-00-00" #print ("total issues:" + str(iscnt)) #---removed NEW code here--- logger.info(u"Now adding/updating issues for " + comic['ComicName']) if not mylar.CV_ONLY: #fccnt = int(fc['comiccount']) #logger.info(u"Found " + str(fccnt) + "/" + str(iscnt) + " issues of " + comic['ComicName'] + "...verifying") #fcnew = [] if iscnt > 0: #if a series is brand new, it wont have any issues/details yet so skip this part while (n <= iscnt): #---NEW.code try: firstval = issued['issuechoice'][n] except IndexError: break cleanname = helpers.cleanName(firstval['Issue_Name']) issid = str(firstval['Issue_ID']) issnum = str(firstval['Issue_Number']) #print ("issnum: " + str(issnum)) issname = cleanname if '.' in str(issnum): issn_st = str(issnum).find('.') issn_b4dec = str(issnum)[:issn_st] #if the length of decimal is only 1 digit, assume it's a tenth dec_is = str(issnum)[issn_st + 1:] if len(dec_is) == 1: dec_nisval = int(dec_is) * 10 iss_naftdec = str(dec_nisval) if len(dec_is) == 2: dec_nisval = int(dec_is) iss_naftdec = str(dec_nisval) iss_issue = issn_b4dec + "." + iss_naftdec issis = (int(issn_b4dec) * 1000) + dec_nisval elif 'au' in issnum.lower(): print("au detected") stau = issnum.lower().find('au') issnum_au = issnum[:stau] print("issnum_au: " + str(issnum_au)) #account for Age of Ultron mucked up numbering issis = str(int(issnum_au) * 1000) + 'AU' else: issis = int(issnum) * 1000 bb = 0 while (bb <= iscnt): try: gcdval = gcdinfo['gcdchoice'][bb] #print ("gcdval: " + str(gcdval)) except IndexError: #account for gcd variation here if gcdinfo['gcdvariation'] == 'gcd': #logger.fdebug("gcd-variation accounted for.") issdate = '0000-00-00' int_issnum = int(issis / 1000) break if 'nn' in str(gcdval['GCDIssue']): #no number detected - GN, TP or the like logger.warn( u"Non Series detected (Graphic Novel, etc) - cannot proceed at this time." 
) updater.no_searchresults(comicid) return elif 'au' in gcdval['GCDIssue'].lower(): #account for Age of Ultron mucked up numbering - this is in format of 5AU.00 gstau = gcdval['GCDIssue'].lower().find('au') gcdis_au = gcdval['GCDIssue'][:gstau] gcdis = str(int(gcdis_au) * 1000) + 'AU' elif '.' in str(gcdval['GCDIssue']): #logger.fdebug("g-issue:" + str(gcdval['GCDIssue'])) issst = str(gcdval['GCDIssue']).find('.') #logger.fdebug("issst:" + str(issst)) issb4dec = str(gcdval['GCDIssue'])[:issst] #logger.fdebug("issb4dec:" + str(issb4dec)) #if the length of decimal is only 1 digit, assume it's a tenth decis = str(gcdval['GCDIssue'])[issst + 1:] #logger.fdebug("decis:" + str(decis)) if len(decis) == 1: decisval = int(decis) * 10 issaftdec = str(decisval) if len(decis) == 2: decisval = int(decis) issaftdec = str(decisval) gcd_issue = issb4dec + "." + issaftdec #logger.fdebug("gcd_issue:" + str(gcd_issue)) try: gcdis = (int(issb4dec) * 1000) + decisval except ValueError: logger.error( "This has no issue #'s for me to get - Either a Graphic Novel or one-shot. This feature to allow these will be added in the near future." ) updater.no_searchresults(comicid) return else: gcdis = int(str(gcdval['GCDIssue'])) * 1000 if gcdis == issis: issdate = str(gcdval['GCDDate']) if str(issis).isdigit(): int_issnum = int(gcdis / 1000) else: if 'au' in issis.lower(): int_issnum = str(int(gcdis[:-2]) / 1000) + 'AU' else: logger.error( "this has an alpha-numeric in the issue # which I cannot account for. Get on github and log the issue for evilhero." ) return #get the latest issue / date using the date. if gcdval['GCDDate'] > latestdate: latestiss = str(issnum) latestdate = str(gcdval['GCDDate']) break #bb = iscnt bb += 1 #print("(" + str(n) + ") IssueID: " + str(issid) + " IssueNo: " + str(issnum) + " Date" + str(issdate)) #---END.NEW. 
# check if the issue already exists iss_exists = myDB.action( 'SELECT * from issues WHERE IssueID=?', [issid]).fetchone() # Only change the status & add DateAdded if the issue is already in the database if iss_exists is None: newValueDict['DateAdded'] = helpers.today() controlValueDict = {"IssueID": issid} newValueDict = { "ComicID": comicid, "ComicName": comic['ComicName'], "IssueName": issname, "Issue_Number": issnum, "IssueDate": issdate, "Int_IssueNumber": int_issnum } if mylar.AUTOWANT_ALL: newValueDict['Status'] = "Wanted" elif issdate > helpers.today() and mylar.AUTOWANT_UPCOMING: newValueDict['Status'] = "Wanted" else: newValueDict['Status'] = "Skipped" if iss_exists: #print ("Existing status : " + str(iss_exists['Status'])) newValueDict['Status'] = iss_exists['Status'] try: myDB.upsert("issues", newValueDict, controlValueDict) except sqlite3.InterfaceError, e: #raise sqlite3.InterfaceError(e) logger.error( "MAJOR error trying to get issue data, this is most likey a MULTI-VOLUME series and you need to use the custom_exceptions.csv file." ) myDB.action("DELETE FROM comics WHERE ComicID=?", [comicid]) return n += 1
def GCDimport(gcomicid, pullupd=None, imported=None, ogcname=None):
    """Add/refresh a comic series using GCD data only (no ComicVine).

    Used when volume spanning is discovered for a Comic (and can't be added
    using CV); Issue Counts are wrong (and can't be added).

    gcomicid  -- GCD-prefixed ComicID (ie. 'G3719'; CV ids are bare ints like 2740).
                 The 'G' prefix distinguishes GCD ids from potentially identical CV ids.
    pullupd   -- when not None, skips the re-sort and the wanted-issue search pass.
    imported  -- mass-importer payload; when set, files are moved/archived at the end.
    ogcname   -- original comic name, passed through to moveit for the mass importer.
    """
    gcdcomicid = gcomicid
    myDB = db.DBConnection()

    # We need the current minimal info in the database instantly
    # so we don't throw a 500 error when we redirect to the artistPage
    controlValueDict = {"ComicID": gcdcomicid}

    comic = myDB.action('SELECT ComicName, ComicYear, Total, ComicPublished, ComicImage, ComicLocation, ComicPublisher FROM comics WHERE ComicID=?', [gcomicid]).fetchone()

    # BUGFIX: this guard must run BEFORE indexing into comic (a missing row
    # returns None and comic[0] would raise), and the original referenced an
    # undefined name 'dbcomic' here, which raised NameError on the error path.
    if comic is None:
        logger.warn("Error fetching comic. ID for : " + gcdcomicid)
        newValueDict = {"ComicName": "Fetch failed, try refreshing. (%s)" % (gcdcomicid),
                        "Status": "Active"}
        myDB.upsert("comics", newValueDict, controlValueDict)
        return

    ComicName = comic[0]
    ComicYear = comic[1]
    ComicIssues = comic[2]
    ComicPublished = comic[3]
    comlocation = comic[5]
    ComicPublisher = comic[6]
    #ComicImage = comic[4]
    #print ("Comic:" + str(ComicName))

    newValueDict = {"Status": "Loading"}
    myDB.upsert("comics", newValueDict, controlValueDict)

    #run the re-sortorder here in order to properly display the page
    if pullupd is None:
        helpers.ComicSort(comicorder=mylar.COMICSORT, imported=gcomicid)

    # strip the leading article for sorting purposes only.
    if ComicName.startswith('The '):
        sortname = ComicName[4:]
    else:
        sortname = ComicName

    logger.info(u"Now adding/updating: " + ComicName)

    #--Now that we know ComicName, let's try some scraping
    #--Start
    # gcd will return issue details (most importantly publishing date)
    comicid = gcomicid[1:]
    resultURL = "/series/" + str(comicid) + "/"
    gcdinfo = parseit.GCDdetails(comseries=None, resultURL=resultURL, vari_loop=0, ComicID=gcdcomicid, TotalIssues=ComicIssues, issvariation=None, resultPublished=None)
    if gcdinfo == "No Match":
        logger.warn("No matching result found for " + ComicName + " (" + ComicYear + ")")
        updater.no_searchresults(gcomicid)
        nomatch = "true"
        return nomatch
    logger.info(u"Sucessfully retrieved details for " + ComicName)
    # print ("Series Published" + parseit.resultPublished)
    #--End

    ComicImage = gcdinfo['ComicImage']

    #comic book location on machine
    # setup default location here
    if comlocation is None:
        # let's remove the non-standard characters here.
        u_comicnm = ComicName
        u_comicname = u_comicnm.encode('ascii', 'ignore').strip()
        if ':' in u_comicname or '/' in u_comicname or ',' in u_comicname:
            comicdir = u_comicname
            if ':' in comicdir:
                comicdir = comicdir.replace(':', '')
            if '/' in comicdir:
                comicdir = comicdir.replace('/', '-')
            if ',' in comicdir:
                comicdir = comicdir.replace(',', '')
        else:
            comicdir = u_comicname

        series = comicdir
        publisher = ComicPublisher
        year = ComicYear

        #do work to generate folder path
        values = {'$Series': series,
                  '$Publisher': publisher,
                  '$Year': year,
                  '$series': series.lower(),
                  '$publisher': publisher.lower(),
                  '$Volume': year
                  }

        if mylar.FOLDER_FORMAT == '':
            comlocation = mylar.DESTINATION_DIR + "/" + comicdir + " (" + comic['ComicYear'] + ")"
        else:
            comlocation = mylar.DESTINATION_DIR + "/" + helpers.replace_all(mylar.FOLDER_FORMAT, values)

        #comlocation = mylar.DESTINATION_DIR + "/" + comicdir + " (" + ComicYear + ")"
        if mylar.DESTINATION_DIR == "":
            logger.error(u"There is no general directory specified - please specify in Config/Post-Processing.")
            return
        if mylar.REPLACE_SPACES:
            #mylar.REPLACE_CHAR ...determines what to replace spaces with underscore or dot
            comlocation = comlocation.replace(' ', mylar.REPLACE_CHAR)

    #if it doesn't exist - create it (otherwise will bugger up later on)
    if os.path.isdir(str(comlocation)):
        logger.info(u"Directory (" + str(comlocation) + ") already exists! Continuing...")
    else:
        filechecker.validateAndCreateDirectory(comlocation, True)

    comicIssues = gcdinfo['totalissues']

    #let's download the image...
    if not os.path.exists(mylar.CACHE_DIR):
        #let's make the dir.
        try:
            os.makedirs(str(mylar.CACHE_DIR))
            logger.info(u"Cache Directory successfully created at: " + str(mylar.CACHE_DIR))
        except OSError:
            logger.error(u"Could not create cache dir : " + str(mylar.CACHE_DIR))

    coverfile = os.path.join(mylar.CACHE_DIR, str(gcomicid) + ".jpg")

    urllib.urlretrieve(str(ComicImage), str(coverfile))
    try:
        with open(str(coverfile)) as f:
            ComicImage = os.path.join('cache', str(gcomicid) + ".jpg")
            #this is for Firefox when outside the LAN...it works, but I don't know how to implement it
            #without breaking the normal flow for inside the LAN (above)
            logger.info(u"Sucessfully retrieved cover for " + ComicName)
            #if the comic cover local is checked, save a cover.jpg to the series folder.
            if mylar.COMIC_COVER_LOCAL:
                comiclocal = os.path.join(str(comlocation) + "/cover.jpg")
                shutil.copy(ComicImage, comiclocal)
    except IOError as e:
        logger.error(u"Unable to save cover locally at this time.")

    controlValueDict = {"ComicID": gcomicid}
    newValueDict = {"ComicName": ComicName,
                    "ComicSortName": sortname,
                    "ComicYear": ComicYear,
                    "Total": comicIssues,
                    "ComicLocation": comlocation,
                    "ComicImage": ComicImage,
                    "DateAdded": helpers.today(),
                    "Status": "Loading"}
    myDB.upsert("comics", newValueDict, controlValueDict)

    #comicsort here...
    #run the re-sortorder here in order to properly display the page
    if pullupd is None:
        helpers.ComicSort(sequence='update')

    logger.info(u"Sucessfully retrieved issue details for " + ComicName)
    n = 0
    iscnt = int(comicIssues)
    issnum = []
    issname = []
    issdate = []
    int_issnum = []
    #let's start issue #'s at 0 -- thanks to DC for the new 52 reboot! :)
    latestiss = "0"
    latestdate = "0000-00-00"
    logger.info(u"Now adding/updating issues for " + ComicName)

    bb = 0
    while (bb <= iscnt):
        try:
            gcdval = gcdinfo['gcdchoice'][bb]
        except IndexError:
            #account for gcd variation here
            if gcdinfo['gcdvariation'] == 'gcd':
                issdate = '0000-00-00'
                # NOTE(review): 'issis' is never defined in this function - this
                # branch raises NameError if it ever fires. Left unchanged; verify
                # intended value (likely the last computed gcdis) before fixing.
                int_issnum = int(issis / 1000)
            break
        if 'nn' in str(gcdval['GCDIssue']):
            #no number detected - GN, TP or the like
            logger.warn(u"Non Series detected (Graphic Novel, etc) - cannot proceed at this time.")
            updater.no_searchresults(comicid)
            return
        elif '.' in str(gcdval['GCDIssue']):
            issst = str(gcdval['GCDIssue']).find('.')
            issb4dec = str(gcdval['GCDIssue'])[:issst]
            #if the length of decimal is only 1 digit, assume it's a tenth
            decis = str(gcdval['GCDIssue'])[issst + 1:]
            if len(decis) == 1:
                decisval = int(decis) * 10
                issaftdec = str(decisval)
            if len(decis) == 2:
                decisval = int(decis)
                issaftdec = str(decisval)
            if int(issaftdec) == 0:
                issaftdec = "00"
            gcd_issue = issb4dec + "." + issaftdec
            gcdis = (int(issb4dec) * 1000) + decisval
        else:
            gcdis = int(str(gcdval['GCDIssue'])) * 1000
            gcd_issue = str(gcdval['GCDIssue'])

        #get the latest issue / date using the date.
        int_issnum = int(gcdis / 1000)
        issdate = str(gcdval['GCDDate'])
        issid = "G" + str(gcdval['IssueID'])
        if gcdval['GCDDate'] > latestdate:
            latestiss = str(gcd_issue)
            latestdate = str(gcdval['GCDDate'])

        # check if the issue already exists
        iss_exists = myDB.action('SELECT * from issues WHERE IssueID=?', [issid]).fetchone()

        #adjust for inconsistencies in GCD date format - some dates have ? which borks up things.
        if "?" in str(issdate):
            issdate = "0000-00-00"

        controlValueDict = {"IssueID": issid}
        newValueDict = {"ComicID": gcomicid,
                        "ComicName": ComicName,
                        "Issue_Number": gcd_issue,
                        "IssueDate": issdate,
                        "Int_IssueNumber": int_issnum
                        }

        # Only change the status & add DateAdded if the issue is not already in the database.
        # BUGFIX: DateAdded was previously set on the old dict and then discarded
        # when newValueDict was rebuilt above, so new issues never got a DateAdded.
        if iss_exists is None:
            newValueDict['DateAdded'] = helpers.today()

        if mylar.AUTOWANT_ALL:
            newValueDict['Status'] = "Wanted"
        elif issdate > helpers.today() and mylar.AUTOWANT_UPCOMING:
            newValueDict['Status'] = "Wanted"
        else:
            newValueDict['Status'] = "Skipped"

        if iss_exists:
            #print ("Existing status : " + str(iss_exists['Status']))
            newValueDict['Status'] = iss_exists['Status']

        myDB.upsert("issues", newValueDict, controlValueDict)
        bb += 1

    controlValueStat = {"ComicID": gcomicid}
    newValueStat = {"Status": "Active",
                    "LatestIssue": latestiss,
                    "LatestDate": latestdate,
                    "LastUpdated": helpers.now()
                    }
    myDB.upsert("comics", newValueStat, controlValueStat)

    if mylar.CVINFO:
        if not os.path.exists(comlocation + "/cvinfo"):
            with open(comlocation + "/cvinfo", "w") as text_file:
                text_file.write("http://www.comicvine.com/volume/49-" + str(comicid))

    logger.info(u"Updating complete for: " + ComicName)

    #move the files...if imported is not empty (meaning it's not from the mass importer.)
    if imported is None or imported == 'None':
        pass
    else:
        if mylar.IMP_MOVE:
            logger.info("Mass import - Move files")
            moveit.movefiles(gcomicid, comlocation, ogcname)
        else:
            logger.info("Mass import - Moving not Enabled. Setting Archived Status for import.")
            moveit.archivefiles(gcomicid, ogcname)

    #check for existing files...
    updater.forceRescan(gcomicid)

    if pullupd is None:
        # lets' check the pullist for anyting at this time as well since we're here.
        if mylar.AUTOWANT_UPCOMING and 'Present' in ComicPublished:
            logger.info(u"Checking this week's pullist for new issues of " + ComicName)
            updater.newpullcheck(comic['ComicName'], gcomicid)

        #here we grab issues that have been marked as wanted above...
        results = myDB.select("SELECT * FROM issues where ComicID=? AND Status='Wanted'", [gcomicid])
        if results:
            logger.info(u"Attempting to grab wanted issues for : " + ComicName)
            for result in results:
                foundNZB = "none"
                if (mylar.NZBSU or mylar.DOGNZB or mylar.EXPERIMENTAL or mylar.NEWZNAB or mylar.NZBX) and (mylar.SAB_HOST):
                    foundNZB = search.searchforissue(result['IssueID'])
                    if foundNZB == "yes":
                        updater.foundsearch(result['ComicID'], result['IssueID'])
        else:
            logger.info(u"No issues marked as wanted for " + ComicName)

        logger.info(u"Finished grabbing what I could.")
def Process(self): self._log("nzb name: " + str(self.nzb_name), logger.DEBUG) self._log("nzb folder: " + str(self.nzb_folder), logger.DEBUG) logger.fdebug("nzb name: " + str(self.nzb_name)) logger.fdebug("nzb folder: " + str(self.nzb_folder)) if mylar.USE_SABNZBD==0: logger.fdebug("Not using SABNzbd") else: # if the SAB Directory option is enabled, let's use that folder name and append the jobname. if mylar.SAB_DIRECTORY is not None and mylar.SAB_DIRECTORY is not 'None' and len(mylar.SAB_DIRECTORY) > 4: self.nzb_folder = os.path.join(mylar.SAB_DIRECTORY, self.nzb_name).encode(mylar.SYS_ENCODING) #lookup nzb_name in nzblog table to get issueid #query SAB to find out if Replace Spaces enabled / not as well as Replace Decimals #http://localhost:8080/sabnzbd/api?mode=set_config§ion=misc&keyword=dirscan_speed&value=5 querysab = str(mylar.SAB_HOST) + "/api?mode=get_config§ion=misc&output=xml&apikey=" + str(mylar.SAB_APIKEY) #logger.info("querysab_string:" + str(querysab)) file = urllib2.urlopen(querysab) data = file.read() file.close() dom = parseString(data) sabreps = dom.getElementsByTagName('replace_spaces')[0].firstChild.wholeText sabrepd = dom.getElementsByTagName('replace_dots')[0].firstChild.wholeText logger.fdebug("SAB Replace Spaces: " + str(sabreps)) logger.fdebug("SAB Replace Dots: " + str(sabrepd)) if mylar.USE_NZBGET==1: logger.fdebug("Using NZBGET") logger.fdebug("NZB name as passed from NZBGet: " + self.nzb_name) myDB = db.DBConnection() nzbname = self.nzb_name #remove extensions from nzb_name if they somehow got through (Experimental most likely) extensions = ('.cbr', '.cbz') if nzbname.lower().endswith(extensions): fd, ext = os.path.splitext(nzbname) self._log("Removed extension from nzb: " + ext, logger.DEBUG) nzbname = re.sub(str(ext), '', str(nzbname)) #replace spaces nzbname = re.sub(' ', '.', str(nzbname)) nzbname = re.sub('[\,\:\?]', '', str(nzbname)) nzbname = re.sub('[\&]', 'and', str(nzbname)) logger.fdebug("After conversions, nzbname is : " + 
str(nzbname)) # if mylar.USE_NZBGET==1: # nzbname=self.nzb_name self._log("nzbname: " + str(nzbname), logger.DEBUG) nzbiss = myDB.action("SELECT * from nzblog WHERE nzbname=?", [nzbname]).fetchone() if nzbiss is None: self._log("Failure - could not initially locate nzbfile in my database to rename.", logger.DEBUG) logger.fdebug("Failure - could not locate nzbfile initially.") # if failed on spaces, change it all to decimals and try again. nzbname = re.sub('_', '.', str(nzbname)) self._log("trying again with this nzbname: " + str(nzbname), logger.DEBUG) logger.fdebug("trying again with nzbname of : " + str(nzbname)) nzbiss = myDB.action("SELECT * from nzblog WHERE nzbname=?", [nzbname]).fetchone() if nzbiss is None: logger.error(u"Unable to locate downloaded file to rename. PostProcessing aborted.") return else: self._log("I corrected and found the nzb as : " + str(nzbname)) logger.fdebug("auto-corrected and found the nzb as : " + str(nzbname)) issueid = nzbiss['IssueID'] else: issueid = nzbiss['IssueID'] print "issueid:" + str(issueid) #use issueid to get publisher, series, year, issue number issuenzb = myDB.action("SELECT * from issues WHERE issueid=?", [issueid]).fetchone() if helpers.is_number(issueid): sandwich = int(issuenzb['IssueID']) else: #if it's non-numeric, it contains a 'G' at the beginning indicating it's a multi-volume #using GCD data. Set sandwich to 1 so it will bypass and continue post-processing. sandwich = 1 if issuenzb is None or sandwich >= 900000: # this has no issueID, therefore it's a one-off or a manual post-proc. # At this point, let's just drop it into the Comic Location folder and forget about it.. self._log("One-off mode enabled for Post-Processing. All I'm doing is moving the file untouched into the Grab-bag directory.", logger.DEBUG) logger.info("One-off mode enabled for Post-Processing. 
Will move into Grab-bag directory.") self._log("Grab-Bag Directory set to : " + mylar.GRABBAG_DIR, logger.DEBUG) for root, dirnames, filenames in os.walk(self.nzb_folder): for filename in filenames: if filename.lower().endswith(extensions): ofilename = filename path, ext = os.path.splitext(ofilename) if mylar.GRABBAG_DIR: grdst = mylar.GRABBAG_DIR else: grdst = mylar.DESTINATION_DIR grab_dst = os.path.join(grdst, ofilename) self._log("Destination Path : " + grab_dst, logger.DEBUG) grab_src = os.path.join(self.nzb_folder, ofilename) self._log("Source Path : " + grab_src, logger.DEBUG) logger.info("Moving " + str(ofilename) + " into grab-bag directory : " + str(grdst)) try: shutil.move(grab_src, grab_dst) except (OSError, IOError): self.log("Failed to move directory - check directories and manually re-run.", logger.DEBUG) logger.debug("Failed to move directory - check directories and manually re-run.") return self.log #tidyup old path try: shutil.rmtree(self.nzb_folder) except (OSError, IOError): self._log("Failed to remove temporary directory.", logger.DEBUG) logger.debug("Failed to remove temporary directory - check directory and manually re-run.") return self.log logger.debug("Removed temporary directory : " + str(self.nzb_folder)) self._log("Removed temporary directory : " + self.nzb_folder, logger.DEBUG) #delete entry from nzblog table myDB.action('DELETE from nzblog WHERE issueid=?', [issueid]) return self.log comicid = issuenzb['ComicID'] issuenumOG = issuenzb['Issue_Number'] #issueno = str(issuenum).split('.')[0] #new CV API - removed all decimals...here we go AGAIN! issuenum = issuenumOG issue_except = 'None' if 'au' in issuenum.lower(): issuenum = re.sub("[^0-9]", "", issuenum) issue_except = ' AU' if '.' 
in issuenum: iss_find = issuenum.find('.') iss_b4dec = issuenum[:iss_find] iss_decval = issuenum[iss_find+1:] if int(iss_decval) == 0: iss = iss_b4dec issdec = int(iss_decval) issueno = str(iss) self._log("Issue Number: " + str(issueno), logger.DEBUG) logger.fdebug("Issue Number: " + str(issueno)) else: if len(iss_decval) == 1: iss = iss_b4dec + "." + iss_decval issdec = int(iss_decval) * 10 else: iss = iss_b4dec + "." + iss_decval.rstrip('0') issdec = int(iss_decval.rstrip('0')) * 10 issueno = iss_b4dec self._log("Issue Number: " + str(iss), logger.DEBUG) logger.fdebug("Issue Number: " + str(iss)) else: iss = issuenum issueno = str(iss) # issue zero-suppression here if mylar.ZERO_LEVEL == "0": zeroadd = "" else: if mylar.ZERO_LEVEL_N == "none": zeroadd = "" elif mylar.ZERO_LEVEL_N == "0x": zeroadd = "0" elif mylar.ZERO_LEVEL_N == "00x": zeroadd = "00" logger.fdebug("Zero Suppression set to : " + str(mylar.ZERO_LEVEL_N)) if str(len(issueno)) > 1: if int(issueno) < 10: self._log("issue detected less than 10", logger.DEBUG) if '.' in iss: if int(iss_decval) > 0: issueno = str(iss) prettycomiss = str(zeroadd) + str(iss) else: prettycomiss = str(zeroadd) + str(int(issueno)) else: prettycomiss = str(zeroadd) + str(iss) if issue_except != 'None': prettycomiss = str(prettycomiss) + issue_except self._log("Zero level supplement set to " + str(mylar.ZERO_LEVEL_N) + ". Issue will be set as : " + str(prettycomiss), logger.DEBUG) elif int(issueno) >= 10 and int(issueno) < 100: self._log("issue detected greater than 10, but less than 100", logger.DEBUG) if mylar.ZERO_LEVEL_N == "none": zeroadd = "" else: zeroadd = "0" if '.' 
in iss: if int(iss_decval) > 0: issueno = str(iss) prettycomiss = str(zeroadd) + str(iss) else: prettycomiss = str(zeroadd) + str(int(issueno)) else: prettycomiss = str(zeroadd) + str(iss) if issue_except != 'None': prettycomiss = str(prettycomiss) + issue_except self._log("Zero level supplement set to " + str(mylar.ZERO_LEVEL_N) + ".Issue will be set as : " + str(prettycomiss), logger.DEBUG) else: self._log("issue detected greater than 100", logger.DEBUG) if '.' in iss: if int(iss_decval) > 0: issueno = str(iss) prettycomiss = str(issueno) if issue_except != 'None': prettycomiss = str(prettycomiss) + issue_except self._log("Zero level supplement set to " + str(mylar.ZERO_LEVEL_N) + ". Issue will be set as : " + str(prettycomiss), logger.DEBUG) else: prettycomiss = str(issueno) self._log("issue length error - cannot determine length. Defaulting to None: " + str(prettycomiss), logger.DEBUG) logger.fdebug("Pretty Comic Issue is : " + str(prettycomiss)) issueyear = issuenzb['IssueDate'][:4] self._log("Issue Year: " + str(issueyear), logger.DEBUG) logger.fdebug("Issue Year : " + str(issueyear)) comicnzb= myDB.action("SELECT * from comics WHERE comicid=?", [comicid]).fetchone() publisher = comicnzb['ComicPublisher'] self._log("Publisher: " + publisher, logger.DEBUG) logger.fdebug("Publisher: " + str(publisher)) #we need to un-unicode this to make sure we can write the filenames properly for spec.chars series = comicnzb['ComicName'].encode('ascii', 'ignore').strip() self._log("Series: " + series, logger.DEBUG) logger.fdebug("Series: " + str(series)) seriesyear = comicnzb['ComicYear'] self._log("Year: " + seriesyear, logger.DEBUG) logger.fdebug("Year: " + str(seriesyear)) comlocation = comicnzb['ComicLocation'] self._log("Comic Location: " + comlocation, logger.DEBUG) logger.fdebug("Comic Location: " + str(comlocation)) comversion = comicnzb['ComicVersion'] self._log("Comic Version: " + str(comversion), logger.DEBUG) logger.fdebug("Comic Version: " + str(comversion)) if 
comversion is None: comversion = 'None' #if comversion is None, remove it so it doesn't populate with 'None' if comversion == 'None': chunk_f_f = re.sub('\$VolumeN','',mylar.FILE_FORMAT) chunk_f = re.compile(r'\s+') chunk_file_format = chunk_f.sub(' ', chunk_f_f) self._log("No version # found for series - tag will not be available for renaming.", logger.DEBUG) logger.fdebug("No version # found for series, removing from filename") logger.fdebug("new format is now: " + str(chunk_file_format)) else: chunk_file_format = mylar.FILE_FORMAT #Run Pre-script if mylar.ENABLE_PRE_SCRIPTS: nzbn = self.nzb_name #original nzb name nzbf = self.nzb_folder #original nzb folder #name, comicyear, comicid , issueid, issueyear, issue, publisher #create the dic and send it. seriesmeta = [] seriesmetadata = {} seriesmeta.append({ 'name': series, 'comicyear': seriesyear, 'comicid': comicid, 'issueid': issueid, 'issueyear': issueyear, 'issue': issuenum, 'publisher': publisher }) seriesmetadata['seriesmeta'] = seriesmeta self._run_pre_scripts(nzbn, nzbf, seriesmetadata ) #rename file and move to new path #nfilename = series + " " + issueno + " (" + seriesyear + ")" file_values = {'$Series': series, '$Issue': prettycomiss, '$Year': issueyear, '$series': series.lower(), '$Publisher': publisher, '$publisher': publisher.lower(), '$VolumeY': 'V' + str(seriesyear), '$VolumeN': comversion } for root, dirnames, filenames in os.walk(self.nzb_folder): for filename in filenames: if filename.lower().endswith(extensions): ofilename = filename path, ext = os.path.splitext(ofilename) self._log("Original Filename: " + ofilename, logger.DEBUG) self._log("Original Extension: " + ext, logger.DEBUG) logger.fdebug("Original Filname: " + str(ofilename)) logger.fdebug("Original Extension: " + str(ext)) if mylar.FILE_FORMAT == '' or not mylar.RENAME_FILES: self._log("Rename Files isn't enabled...keeping original filename.", logger.DEBUG) logger.fdebug("Rename Files isn't enabled - keeping original filename.") 
#check if extension is in nzb_name - will screw up otherwise if ofilename.lower().endswith(extensions): nfilename = ofilename[:-4] else: nfilename = ofilename else: nfilename = helpers.replace_all(chunk_file_format, file_values) if mylar.REPLACE_SPACES: #mylar.REPLACE_CHAR ...determines what to replace spaces with underscore or dot nfilename = nfilename.replace(' ', mylar.REPLACE_CHAR) nfilename = re.sub('[\,\:\?]', '', nfilename) self._log("New Filename: " + nfilename, logger.DEBUG) logger.fdebug("New Filename: " + str(nfilename)) src = os.path.join(self.nzb_folder, ofilename) filechecker.validateAndCreateDirectory(comlocation, True) if mylar.LOWERCASE_FILENAMES: dst = (comlocation + "/" + nfilename + ext).lower() else: dst = comlocation + "/" + nfilename + ext.lower() self._log("Source:" + src, logger.DEBUG) self._log("Destination:" + dst, logger.DEBUG) logger.fdebug("Source: " + str(src)) logger.fdebug("Destination: " + str(dst)) os.rename(os.path.join(self.nzb_folder, str(ofilename)), os.path.join(self.nzb_folder,str(nfilename + ext))) src = os.path.join(self.nzb_folder, str(nfilename + ext)) try: shutil.move(src, dst) except (OSError, IOError): self._log("Failed to move directory - check directories and manually re-run.", logger.DEBUG) self._log("Post-Processing ABORTED.", logger.DEBUG) return #tidyup old path try: shutil.rmtree(self.nzb_folder) except (OSError, IOError): self._log("Failed to remove temporary directory - check directory and manually re-run.", logger.DEBUG) self._log("Post-Processing ABORTED.", logger.DEBUG) return self._log("Removed temporary directory : " + str(self.nzb_folder), logger.DEBUG) #delete entry from nzblog table myDB.action('DELETE from nzblog WHERE issueid=?', [issueid]) #force rescan of files updater.forceRescan(comicid) logger.info(u"Post-Processing completed for: " + series + " issue: " + str(issuenumOG) ) self._log(u"Post Processing SUCCESSFULL! 
", logger.DEBUG) if mylar.PROWL_ENABLED: pushmessage = series + '(' + issueyear + ') - issue #' + issuenumOG logger.info(u"Prowl request") prowl = notifiers.PROWL() prowl.notify(pushmessage,"Download and Postprocessing completed") if mylar.NMA_ENABLED: nma = notifiers.NMA() nma.notify(series, str(issueyear), str(issuenumOG)) if mylar.PUSHOVER_ENABLED: pushmessage = series + ' (' + str(issueyear) + ') - issue #' + str(issuenumOG) logger.info(u"Pushover request") pushover = notifiers.PUSHOVER() pushover.notify(pushmessage, "Download and Post-Processing completed") # retrieve/create the corresponding comic objects if mylar.ENABLE_EXTRA_SCRIPTS: folderp = str(dst) #folder location after move/rename nzbn = self.nzb_name #original nzb name filen = str(nfilename + ext) #new filename #name, comicyear, comicid , issueid, issueyear, issue, publisher #create the dic and send it. seriesmeta = [] seriesmetadata = {} seriesmeta.append({ 'name': series, 'comicyear': seriesyear, 'comicid': comicid, 'issueid': issueid, 'issueyear': issueyear, 'issue': issuenum, 'publisher': publisher }) seriesmetadata['seriesmeta'] = seriesmeta self._run_extra_scripts(nzbname, self.nzb_folder, filen, folderp, seriesmetadata ) return self.log
def movefiles(comicid, comlocation, imported):
    """Move (and optionally rename) imported comic files into a series folder.

    comicid     -- ComicID of the destination series (used for renaming).
    comlocation -- destination directory for the series.
    imported    -- dict (or its repr() string) with keys 'ComicName',
                   'filelisting' and 'srid' describing the files to import.

    Each successfully moved file is marked as 'Imported' in the
    importresults table.  Returns None.
    """
    files_moved = []
    # 'imported' may arrive as a stringified dict - convert it back if so.
    try:
        imported = ast.literal_eval(imported)
    except ValueError:
        pass

    myDB = db.DBConnection()
    logger.fdebug('comlocation is : ' + comlocation)
    logger.fdebug('original comicname is : ' + imported['ComicName'])
    impres = imported['filelisting']
    if impres is not None:
        # Only create the destination here when automatic folder creation is
        # disabled and the directory does not already exist.
        if all([mylar.CONFIG.CREATE_FOLDERS is False, not os.path.isdir(comlocation)]):
            checkdirectory = filechecker.validateAndCreateDirectory(comlocation, True)
            if not checkdirectory:
                logger.warn('Error trying to validate/create directory. Aborting this process at this time.')
                return

        for impr in impres:
            srcimp = impr['comiclocation']
            orig_filename = impr['comicfilename']
            # Before moving, check to see if Rename to Mylar structure is enabled.
            if mylar.CONFIG.IMP_RENAME and mylar.CONFIG.FILE_FORMAT != '':
                logger.fdebug("Renaming files according to configuration details : " + str(mylar.CONFIG.FILE_FORMAT))
                renameit = helpers.rename_param(comicid, imported['ComicName'], impr['issuenumber'], orig_filename)
                nfilename = renameit['nfilename']
                dstimp = os.path.join(comlocation, nfilename)
            else:
                logger.fdebug("Renaming files not enabled, keeping original filename(s)")
                dstimp = os.path.join(comlocation, orig_filename)

            logger.info("moving " + srcimp + " ... to " + dstimp)
            try:
                shutil.move(srcimp, dstimp)
                files_moved.append({'srid': imported['srid'],
                                    'filename': impr['comicfilename']})
            except (OSError, IOError):
                # Best-effort: log the failure and continue with remaining files.
                logger.error("Failed to move files - check directories and manually re-run.")

        logger.fdebug("all files moved.")

    # Now that it's moved / renamed, mark the entries as completed in
    # importresults (or create a basic match for older imports).
    if len(files_moved) > 0:
        logger.info('files_moved: ' + str(files_moved))
        for result in files_moved:
            # Was a bare 'except:'; narrowed to KeyError, the only failure
            # expected when 'import_id' is absent from the record.
            # NOTE(review): the dicts appended to files_moved above only carry
            # 'srid'/'filename', so this branch always takes the KeyError path
            # here - confirm whether 'import_id' should be included in the
            # append for newer imports.
            try:
                import_id = result['import_id']
            except KeyError:
                # If it's an 'older' import that wasn't imported, just make it
                # a basic match so things can move and update properly.
                controlValue = {"ComicFilename": result['filename'],
                                "SRID": result['srid']}
                newValue = {"Status": "Imported",
                            "ComicID": comicid}
            else:
                controlValue = {"impID": import_id,
                                "ComicFilename": result['filename']}
                newValue = {"Status": "Imported",
                            "SRID": result['srid'],
                            "ComicID": comicid}
            myDB.upsert("importresults", newValue, controlValue)
    return
def main():
    """Program entry point.

    Resolves install/data paths, detects the system encoding, parses command
    line arguments, initializes config/logging, starts the web server and
    background threads, then idles until a shutdown/restart signal is set on
    mylar.SIGNAL.
    """
    # Fixed paths to mylar
    if hasattr(sys, 'frozen'):
        # Running as a frozen executable - __file__ is not meaningful there.
        mylar.FULL_PATH = os.path.abspath(sys.executable)
    else:
        mylar.FULL_PATH = os.path.abspath(__file__)

    mylar.PROG_DIR = os.path.dirname(mylar.FULL_PATH)
    mylar.ARGS = sys.argv[1:]

    # From sickbeard: detect the system's preferred encoding.
    mylar.SYS_ENCODING = None
    try:
        locale.setlocale(locale.LC_ALL, "")
        mylar.SYS_ENCODING = locale.getpreferredencoding()
    except (locale.Error, IOError):
        pass

    # for OSes that are poorly configured I'll just force UTF-8
    if not mylar.SYS_ENCODING or mylar.SYS_ENCODING in ('ANSI_X3.4-1968', 'US-ASCII', 'ASCII'):
        mylar.SYS_ENCODING = 'UTF-8'

    # Set up and gather command line arguments
    parser = argparse.ArgumentParser(
        description='Comic Book add-on for SABnzbd+')
    parser.add_argument('-v', '--verbose', action='store_true',
                        help='Increase console logging verbosity')
    parser.add_argument('-q', '--quiet', action='store_true',
                        help='Turn off console logging')
    parser.add_argument('-d', '--daemon', action='store_true',
                        help='Run as a daemon')
    parser.add_argument('-p', '--port', type=int,
                        help='Force mylar to run on a specified port')
    parser.add_argument(
        '--datadir', help='Specify a directory where to store your data files')
    parser.add_argument('--config', help='Specify a config file to use')
    parser.add_argument('--nolaunch', action='store_true',
                        help='Prevent browser from launching on startup')
    parser.add_argument(
        '--pidfile',
        help='Create a pid file (only relevant when running as a daemon)')
    args = parser.parse_args()

    if args.verbose:
        mylar.VERBOSE = 2
    elif args.quiet:
        mylar.VERBOSE = 0

    if args.daemon:
        # Daemon mode implies no console logging.
        mylar.DAEMON = True
        mylar.VERBOSE = 0

    if args.pidfile:
        mylar.PIDFILE = args.pidfile

    if args.datadir:
        mylar.DATA_DIR = args.datadir
    else:
        mylar.DATA_DIR = mylar.PROG_DIR

    if args.config:
        mylar.CONFIG_FILE = args.config
    else:
        mylar.CONFIG_FILE = os.path.join(mylar.DATA_DIR, 'config.ini')

    # Create the DATA_DIR if it doesn't exist.
    filechecker.validateAndCreateDirectory(mylar.DATA_DIR, True)

    # Make sure the DATA_DIR is writeable
    if not os.access(mylar.DATA_DIR, os.W_OK):
        raise SystemExit('Cannot write to the data directory: ' +
                         mylar.DATA_DIR + '. Exiting...')

    # Put the database in the DATA_DIR
    mylar.DB_FILE = os.path.join(mylar.DATA_DIR, 'mylar.db')

    mylar.CFG = ConfigObj(mylar.CONFIG_FILE, encoding='utf-8')

    # Read config & start logging
    mylar.initialize()

    if mylar.DAEMON:
        mylar.daemonize()

    # Force the http port if necessary
    if args.port:
        http_port = args.port
        # typo fix in log output: 'foced' -> 'forced'
        logger.info('Starting Mylar on forced port: %i' % http_port)
    else:
        http_port = int(mylar.HTTP_PORT)

    # Try to start the server.
    webstart.initialize({
        'http_port': http_port,
        'http_host': mylar.HTTP_HOST,
        'http_root': mylar.HTTP_ROOT,
        'http_username': mylar.HTTP_USERNAME,
        'http_password': mylar.HTTP_PASSWORD,
    })
    logger.info('Starting Mylar on port: %i' % http_port)

    if mylar.LAUNCH_BROWSER and not args.nolaunch:
        mylar.launch_browser(mylar.HTTP_HOST, http_port, mylar.HTTP_ROOT)

    # Start the background threads
    mylar.start()

    # Main loop: idle until a signal arrives, then act on it.
    while True:
        if not mylar.SIGNAL:
            time.sleep(1)
        else:
            logger.info('Received signal: ' + mylar.SIGNAL)
            if mylar.SIGNAL == 'shutdown':
                mylar.shutdown()
            elif mylar.SIGNAL == 'restart':
                mylar.shutdown(restart=True)
            else:
                # Any other signal value means restart with an update.
                mylar.shutdown(restart=True, update=True)
            mylar.SIGNAL = None
    return
# Put the database in the DATA_DIR mylar.DB_FILE = os.path.join(mylar.DATA_DIR, 'mylar.db') # Read config and start logging if mylar.MAINTENANCE is False: print('Initializing startup sequence....') #try: mylar.initialize(mylar.CONFIG_FILE) #except Exception as e: # print e # raise SystemExit('FATAL ERROR') if mylar.MAINTENANCE is False: filechecker.validateAndCreateDirectory(mylar.DATA_DIR, True) # Make sure the DATA_DIR is writeable if not os.access(mylar.DATA_DIR, os.W_OK): raise SystemExit('Cannot write to the data directory: ' + mylar.DATA_DIR + '. Exiting...') # backup the db and configs before they load. if args.backup: print '[AUTO-BACKUP] Backing up .db and config.ini files for safety.' backupdir = os.path.join(mylar.DATA_DIR, 'backup') try: os.makedirs(backupdir) print '[AUTO-BACKUP] Directory does not exist for backup - creating : ' + backupdir except OSError as exception:
if filename.lower().endswith(extensions): ofilename = filename path, ext = os.path.splitext(ofilename) if 'S' in sandwich: if mylar.STORYARCDIR: grdst = storyarcd else: grdst = mylar.DESTINATION_DIR else: if mylar.GRABBAG_DIR: grdst = mylar.GRABBAG_DIR else: grdst = mylar.DESTINATION_DIR filechecker.validateAndCreateDirectory(grdst, True) if 'S' in sandwich: #if from a StoryArc, check to see if we're appending the ReadingOrder to the filename if mylar.READ2FILENAME: issuearcid = re.sub('S', '', issueid) logger.fdebug('issuearcid:' + str(issuearcid)) arcdata = myDB.action( "SELECT * FROM readinglist WHERE IssueArcID=?", [issuearcid]).fetchone() logger.fdebug('readingorder#: ' + str(arcdata['ReadingOrder'])) if int(arcdata['ReadingOrder']) < 10: readord = "00" + str(arcdata['ReadingOrder']) elif int(arcdata['ReadingOrder']) > 10 and int( arcdata['ReadingOrder']) < 99: