def run(xsea, tdex, config):
    """ TheTVDB.com (TVDB) Scraper.

    Looks up the series for tdex[@xsea] on theTVDb (honoring a per-series
    'tvdb_id' override if present), merges the fetched metadata into the
    tdex entry, and returns True on success, False if no match was found.
    """
    xstitle = tdex[xsea]['title']
    logthis("Retrieving series information from theTVDb for", suffix=xstitle, loglevel=LL.VERBOSE)
    # Use the override ID if set; otherwise search theTVDb by title
    tvdb_id = tdex[xsea].get('tvdb_id', tvdb_get_id(xstitle, config))
    if tvdb_id:
        # dict.has_key() is Python 2-only; the `in` operator works everywhere
        if 'tvdb_id' in tdex[xsea]:
            logthis("tvdb_id set from override", loglevel=LL.VERBOSE)
            # the override is consumed here; remove it so it is not re-applied
            del tdex[xsea]['tvdb_id']
        logthis("Got theTVDb Series ID#", suffix=tvdb_id, loglevel=LL.VERBOSE)
        # Retrieve entry from TVDB and merge into the series index entry
        tvdb_info = tvdb_get_info(tvdb_id, config)
        tdex[xsea].update(tvdb_process(tvdb_info, config, xsea))
        logthis("theTVDb info:", suffix=tvdb_info, loglevel=LL.DEBUG2)
        return True
    else:
        logthis("No results in theTVDb found for series", suffix=xstitle, loglevel=LL.WARNING)
        return False
def dumpSub(vfile, trackid, outfile):
    """
    Extract subtitle track @trackid from container @vfile into @outfile
    using mkvextract. Aborts the whole program via failwith() if the
    extraction fails or the expected output file never appears.
    """
    track_spec = "%d:%s" % (trackid, outfile)
    try:
        subprocess.check_output([bpath.mepath, 'tracks', vfile, track_spec])
    except subprocess.CalledProcessError as e:
        logexc(e, "mkvextract failed")
        failwith(ER.PROCFAIL, "Sub extraction failed. Unable to continue. Aborting")
    # mkvextract may exit zero without producing output; verify the file exists
    if not os.path.exists(outfile):
        logthis("Expected output sub file, but not found:", suffix=outfile, loglevel=LL.ERROR)
        failwith(ER.PROCFAIL, "Sub extraction failed. Unable to continue. Aborting")
    logthis("Extracted subtitle track successfully:", suffix=outfile, loglevel=LL.VERBOSE)
def scanrunner(in_q, out_q):
    """ Queue runner: consume scan jobs from @in_q, push results to @out_q.

    Runs forever until a job carrying an 'EOF' marker arrives, then drains
    the output queue and hard-exits the worker process.
    """
    me = multiprocessing.current_process()
    setproctitle("xbake: scanrunner")
    while True:
        # blocks until the next job (or the end-of-queue marker) is available
        job = in_q.get()
        if job.get('EOF') is not None:
            logthis("Got end-of-queue marker; terminating; pid =", suffix=me.pid, loglevel=LL.DEBUG)
            # wait until the master process has pulled all of our results
            while out_q.qsize() > 0:
                time.sleep(0.1)
            os._exit(0)
        result = scanfile(**job)
        if isinstance(result, dict):
            # single file, single song
            out_q.put((job['infile'], result))
        elif isinstance(result, list):
            # single file with subsongs (cue sheet or FLAC w/ embedded cue)
            for sub in result:
                key = "{}#!{}".format(job['infile'], sub['subsong']['index'])
                out_q.put((key, sub))
def getgroup(xgid):
    """Convert a numeric GID to a group name; return None if not found.

    The bare `except:` was narrowed to KeyError, which is what
    grp.getgrgid() raises for an unknown GID; a bare except also swallowed
    KeyboardInterrupt/SystemExit and programming errors like TypeError.
    """
    try:
        return grp.getgrgid(xgid).gr_name
    except KeyError:
        logthis("GID lookup failed; gid =", suffix=xgid, loglevel=LL.WARNING)
        return None
def dumpFonts(vfile, moveto=None):
    """
    Use `ffmpeg -dump_attachment` to dump attached font files (for baking
    subs) into the current working directory.

    @vfile: input video container
    @moveto: optional directory to move the dumped fonts into
    Returns the list of newly dumped font filenames.
    """
    prelist = os.listdir(".")
    try:
        subprocess.check_output([bpath.ffpath, '-y', '-dump_attachment:t', '', '-i', vfile],
                                stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError:
        # ffmpeg exits non-zero for -dump_attachment even on success (known quirk)
        logthis("FFmpeg returned non-zero, but dump_attachment is buggy, so it's OK.", loglevel=LL.VERBOSE)
    # any file that appeared since the pre-scan is a dumped font
    postlist = os.listdir(".")
    fontlist = list(set(postlist).difference(prelist))
    if not fontlist:  # idiomatic emptiness check (was `if not len(fontlist)`)
        logthis("Warning: No (new) fonts were dumped", loglevel=LL.WARNING)
    else:
        logthis("New fonts dumped:", suffix=len(fontlist), loglevel=LL.VERBOSE)
        logthis("Fonts:", suffix=fontlist, loglevel=LL.DEBUG)
        # move fonts to another directory, if enabled
        if moveto:
            destdir = os.path.expanduser(moveto)
            for fname in fontlist:
                # os.path.join replaces manual '/'-concatenation (and the rstrip('/') dance)
                shutil.move(os.path.realpath(fname), os.path.realpath(os.path.join(destdir, fname)))
            logthis("Moved fonts to new location:", suffix=os.path.realpath(destdir), loglevel=LL.VERBOSE)
    return fontlist
def getuser(xuid):
    """Convert a numeric UID to a username; return None if not found.

    The bare `except:` was narrowed to KeyError, which is what
    pwd.getpwuid() raises for an unknown UID; a bare except also swallowed
    KeyboardInterrupt/SystemExit and programming errors like TypeError.
    """
    try:
        return pwd.getpwuid(xuid).pw_name
    except KeyError:
        logthis("UID lookup failed; uid =", suffix=xuid, loglevel=LL.WARNING)
        return None
def get_all_tags_once(tagdata):
    """Return a dict mapping each tag name to its first value.

    BUGFIX: when a tag carried an empty value list, the original code set
    the *accumulator* itself to None (`tout = None`), discarding every
    previously collected tag and making any later `tout[dtag] = ...`
    assignment fail (the failure was then silently swallowed by the broad
    except). Now only that single tag is recorded as None.
    """
    tout = {}
    for ttag, tval in dict(tagdata).items():
        try:
            dtag = ttag.decode('utf8')
        except Exception as e:
            logthis("Encountered bad tag; ignoring:", suffix=str(e), loglevel=LL.WARNING)
            continue
        try:
            if hasattr(tval, '__iter__'):
                if len(list(tval)) > 0:
                    tout[dtag] = get_true_value(tval[0])
                else:
                    logthis("Encountered null tag data for", suffix=dtag, loglevel=LL.VERBOSE)
                    tout[dtag] = None  # was `tout = None`, which clobbered the whole result
            else:
                tout[dtag] = get_true_value(tval)
        except Exception as e:
            logexc(e, "Failed to convert %s tag for serialization [type=%s]" % (dtag, type(tval)))
    return tout
def getxml(uribase, qget=None, qauth=None):
    """ Issue an HTTP GET request and decode the XML response body.

    Returns {'status': <http status code>, 'ok': <bool>,
             'answer': <parsed XML dict or None>}.
    """
    # hoist the duplicate os.uname() calls into one
    uname = os.uname()
    useragent = "Mozilla/5.0 (compatible; XBake/" + __version__ + " +https://ycnrg.org/); " + uname[0] + " " + uname[4]
    rstat = {'status': None, 'ok': False, 'answer': None}
    rqheaders = {'User-Agent': useragent}
    # Perform request
    logthis("Performing HTTP request to:", suffix=uribase, loglevel=LL.DEBUG)
    r = requests.get(uribase, params=qget, auth=qauth, headers=rqheaders)
    # Process response
    logthis("Got response status:", suffix=str(r.status_code) + ' ' + r.reason, loglevel=LL.DEBUG)
    rstat['status'] = r.status_code
    # only parse the body on a 200; anything else leaves answer=None, ok=False
    if r.status_code == 200:
        rstat['answer'] = xmltodict.parse(r.text)
        rstat['ok'] = True
    return rstat
def date2time(dstr, fstr="%Y-%m-%d"):
    """ Convert date string @dstr (format @fstr) to integer UNIX epoch time.

    Returns None if the string does not match the format. The Python 2-only
    long() was replaced with int(), which auto-promotes in Py2 and is the
    only integer type in Py3.
    """
    try:
        return int(time.mktime(time.strptime(dstr, fstr)))
    except Exception as e:
        logthis("strptime() conversion failed:", suffix=e, loglevel=LL.VERBOSE)
        return None
def dfork():
    """Fork into the background (first fork of the daemonization sequence).

    Aborts via failwith() if os.fork() fails.
    """
    logthis("Forking...", loglevel=LL.DEBUG)
    try:
        # first fork
        pid = os.fork()
    except OSError as e:  # `except OSError, e` is Python 2-only syntax
        logthis("os.fork() failed:", suffix=e, loglevel=LL.ERROR)
        failwith(ER.PROCFAIL, "Failed to fork into background. Aborting.")
    # NOTE(review): pid is unused in this view; a full daemonize usually exits
    # the parent when pid > 0 -- confirm whether the rest of the sequence lives elsewhere
def tvdb_process(indata, config, tdex_id):
    """ TVDB: Process series data and enumerate artwork assets.

    @indata: parsed TVDB response (expects 'Series' and 'Episode' keys)
    @tdex_id: series index key used to generate the series _id
    Returns the assembled series metadata dict.
    """
    txc = {'tv': {}, 'xrefs': {}, 'synopsis': {}, 'xref': {}, 'artwork': {}, 'episodes': []}
    # NOTE(review): 'xref' (singular) above appears unused -- possibly vestigial; kept for compatibility
    iser = indata.get('Series', {})
    logthis("Processing info from theTVDb; enumerating artwork assets", loglevel=LL.VERBOSE)
    # Get list of genres; filter out any empty entries
    txc['genre'] = safe_split(iser.get('Genre', ''))
    # Get attributes
    txc['ctitle'] = iser.get('SeriesName', None)
    txc['xrefs']['tvdb'] = iser.get('id', None)
    txc['xrefs']['imdb'] = iser.get('IMDB_ID', None)
    # int() replaces Python 2-only long(); ints auto-promote in Py2
    txc['lastupdated'] = int(iser.get('lastupdated', time.time()))
    txc['tv']['network'] = iser.get('Network', None)
    txc['tv']['dayslot'] = iser.get('Airs_DayOfWeek', None)
    txc['tv']['timeslot'] = iser.get('Airs_Time', None)
    txc['tv']['debut'] = date2time(iser.get('FirstAired', None))
    txc['synopsis']['tvdb'] = iser.get('Overview', None)
    txc['default_synopsis'] = 'tvdb'
    txc['status'] = iser.get('Status', 'unknown').lower()
    txc['fetched'] = int(time.time())
    # Generate series ID
    txc['_id'] = mkid_series(tdex_id, txc)
    # Get artwork defaults
    bandefs = {}
    bandefs['banners'] = iser.get('banner', None)
    bandefs['fanart'] = iser.get('fanart', None)
    bandefs['poster'] = iser.get('poster', None)
    # Get Artwork
    txc['artwork'] = tvdb_get_artwork(txc['xrefs']['tvdb'], config, bandefs)
    # Add Episode information
    txc['episodes'] = tvdb_process_episodes(indata.get('Episode', []), txc['_id'])
    logthis("Series metadata set.", loglevel=LL.VERBOSE)
    return txc
def scp(src, dest):
    """
    Copy @src to @dest by executing scp; either argument may be a
    host:path spec. Returns True on success, False if scp exits non-zero.
    """
    cmd = ['/usr/bin/scp', '-B', '-r', src, dest]
    try:
        subprocess.check_output(cmd)
    except subprocess.CalledProcessError as e:
        logthis("Error: scp returned non-zero.", suffix=e, loglevel=LL.ERROR)
        return False
    return True
def get_info_safe(obj, attr, default=None):
    """Safely retrieve attribute @attr from @obj, falling back to @default.

    Uses the builtin getattr() instead of calling __getattribute__ directly,
    and narrows the former bare `except:` to AttributeError (what a plain
    attribute lookup raises when the attribute is missing).
    NOTE(review): a property getter that raises some other exception will
    now propagate instead of being silently replaced by @default -- confirm
    no caller relied on that.
    """
    try:
        oval = getattr(obj, attr)
    except AttributeError:
        logthis("Attribute does not exist, using default", prefix=attr, suffix=default, loglevel=LL.WARNING)
        oval = default
    return oval
def save_checksums(fname, chksums):
    """ Persist md5/ed2k/crc32 checksums to @fname's extended attributes """
    attribs = {
        'checksum.md5': chksums.get('md5', ''),
        'checksum.ed2k': chksums.get('ed2k', ''),
        'checksum.crc32': chksums.get('crc32', ''),
    }
    logthis("Setting checksum xattribs:", suffix=attribs, loglevel=LL.DEBUG)
    fsutil.xattr_set(fname, attribs)
def unsetter(xconfig):
    """ Remove all xattrib overrides for a file or directory; returns 0 """
    target = xconfig.run['infile']
    # every existing xattr key on the target is treated as an override
    keys = list(fsutil.xattr_get(target))
    logthis("Removing overrides:\n", suffix=print_r(keys), loglevel=LL.VERBOSE)
    fsutil.xattr_del(target, keys)
    logthis("Overrides cleared.", ccode=C.GRN, loglevel=LL.INFO)
    return 0
def optexpand(iop):
    """
    Expand dotted CLI options like "xcode.scale" from a flat dict into a
    nested dict (outrc['xcode']['scale']).
    """
    outrc = {}
    for okey in iop:
        dsec, dkey = okey.split(".")
        # dict.has_key() is Python 2-only; setdefault covers both branches
        outrc.setdefault(dsec, {})[dkey] = iop[okey]
    logthis("Expanded cli optdex:", suffix=outrc, loglevel=LL.DEBUG2)
    return outrc
def start(xconfig, qname="xcode"):
    """ Fork a queue runner for queue @qname.

    Records the parent PID in the module-global dadpid before forking.
    Aborts via failwith() if os.fork() fails.
    """
    global rdx, mdx, dadpid, handlers, hmetrics, xprofiles, config
    # Fork into its own process
    logthis("Forking...", loglevel=LL.DEBUG)
    dadpid = os.getpid()
    try:
        pid = os.fork()
    except OSError as e:  # `except OSError, e` is Python 2-only syntax
        logthis("os.fork() failed:", suffix=e, loglevel=LL.ERROR)
        failwith(ER.PROCFAIL, "Failed to fork worker. Aborting.")
def rcList(xtraConf=None):
    """ Build the ordered list of candidate config files to parse.

    An explicitly supplied @xtraConf (from the command line) comes first,
    followed by any of the built-in default locations that exist.
    """
    global rcfiles
    candidates = []
    if xtraConf:
        xcf = os.path.expanduser(xtraConf)
        if os.path.exists(xcf):
            candidates.append(xcf)
            logthis("Added rcfile candidate (from command line):", suffix=xcf, loglevel=LL.DEBUG)
        else:
            logthis("Specified rcfile does not exist:", suffix=xcf, loglevel=LL.ERROR)
    for entry in rcfiles:
        path = os.path.expanduser(entry)
        logthis("Checking for rcfile candidate", suffix=path, loglevel=LL.DEBUG2)
        if os.path.exists(path):
            candidates.append(path)
            logthis("Got rcfile candidate", suffix=path, loglevel=LL.DEBUG2)
    return candidates
def xattr_del(xfile, xsetter):
    """
    Remove extended file attributes from @xfile.
    @xsetter is a list of attribute names WITHOUT the 'user.' namespace
    prefix. Returns True on success, False on the first failure.
    """
    for name in xsetter:
        try:
            xattr.removexattr(xfile, 'user.'+str(name))
        except Exception as e:
            logthis("Failed to remove extended file attributes for", suffix=xfile, loglevel=LL.WARNING)
            logthis("xattr:", suffix=e, loglevel=LL.WARNING)
            return False
    return True
def xattr_set(xfile, xsetter):
    """
    Set extended file attributes on @xfile.
    @xsetter is a dict of attrib names (WITHOUT the 'user.' namespace
    prefix) mapped to values. Returns True on success, False on the first
    failure.
    """
    # iteritems() is Python 2-only; items() works everywhere
    for k, v in xsetter.items():
        try:
            xattr.setxattr(xfile, 'user.'+str(k), str(v))
        except Exception as e:
            logthis("Failed to set extended file attributes for", suffix=xfile, loglevel=LL.WARNING)
            logthis("xattr:", suffix=e, loglevel=LL.WARNING)
            return False
    return True
def vdataInsert(xvid):
    """ Insert or update video data in the MongoDB 'videos' collection.

    Mode is taken from vinfo.mxmode: INSERT writes a new document; UPDATE
    replaces only this version's sub-document on the existing record.
    """
    global monjer
    logthis("Inserting data into Mongo...", loglevel=LL.INFO)
    if vinfo.mxmode == MXM.INSERT:
        monjer.insert('videos', xvid)
    elif vinfo.mxmode == MXM.UPDATE:
        # only replace this particular version's sub-document
        verkey = 'versions.' + vinfo.vername
        monjer.update_set('videos', vinfo.id, {verkey: xvid['versions'][vinfo.vername]})
def parse_album_date(tagdata):
    """Parse assorted date/year tag formats; return an Arrow date or None.

    Tries the known date tags in priority order and returns the first one
    that parses; parse failures are logged and the next tag is tried.
    """
    for tag in ('TDRC', 'TDAT', 'TYER', 'TRDA', 'DATE', 'YEAR', 'WM/Year'):
        if tag not in tagdata:
            continue
        try:
            raw = get_true_value(tagdata[tag][0])
            return arrow.get(raw, ["YYYY-MM-DD", "YYYY-MM", "YYYY"])
        except Exception as e:
            logthis("Failed to parse tag:", prefix=tag, suffix=str(e), loglevel=LL.WARNING)
    return None
def xattr_get(xfile):
    """
    Get extended file attributes for @xfile.
    Returns a dict with the 'user.' portion stripped from keys, or None
    on failure (e.g. unsupported filesystem or missing file).
    """
    xout = {}
    try:
        # iteritems() is Python 2-only; items() works everywhere
        # (xattr.xattr exposes the standard mapping protocol)
        for k, v in xattr.xattr(xfile).items():
            xout[k.replace('user.', '')] = v
    except Exception as e:
        logthis("Failed to get extended file attributes for", suffix=xfile, loglevel=LL.WARNING)
        logthis("xattr:", suffix=e, loglevel=LL.WARNING)
        return None
    return xout
def enqueue(qname, jid, fid, opts, silent=False):
    """
    Push a new job onto Redis queue @qname.
    @jid: job ID, @fid: file ID, @opts: job options dict
    @silent: suppress the enqueue log line when True
    """
    global rdx
    payload = json.dumps({'id': jid, 'fid': fid, 'opts': opts})
    rdx.lpush("queue_" + qname, payload)
    if not silent:
        logthis("Enqueued job# %s in queue:" % (jid), suffix=qname, loglevel=LL.VERBOSE)
def precheck(rheaders=False, require_ctype=True):
    """
    Perform authorization and content-type checks before passing along a
    request to the destination route.

    @rheaders: if True, return (body_dict, status_line) tuples suitable for
               an HTTP response instead of plain booleans
    @require_ctype: if True, require a JSON Content-Type header
    Returns True/False (or response tuples when @rheaders is set).
    """
    # Check for proper Content-Type
    if require_ctype:
        try:
            ctype = request.headers['Content-Type']
        except KeyError:
            ctype = None
        # BUGFIX: a missing Content-Type left ctype = None, and
        # re.match(pattern, None) raised TypeError; treat missing as mismatch
        if ctype is None or not re.match(r'^(application\/json|text\/x-json)', ctype, re.I):
            logthis("Content-Type mismatch. Not acceptable:", suffix=ctype, loglevel=LL.WARNING)
            if rheaders:
                return ({
                    'status': "error",
                    'error': "json_required",
                    'message': "Content-Type must be application/json"
                }, "417 Content Mismatch")
            else:
                return False
    # Check authentication: WWW-Authenticate header must match the shared key
    try:
        wauth = request.headers['WWW-Authenticate']
    except KeyError:
        wauth = None
    skey = config.srv['shared_key']
    if wauth:
        if wauth == skey:
            logthis("Authentication passed", loglevel=LL.VERBOSE)
            if rheaders:
                return ({'status': "ok"}, "212 Login Validated")
            else:
                return True
        else:
            logthis("Authentication failed; invalid credentials", loglevel=LL.WARNING)
            if rheaders:
                return ({
                    'status': "error",
                    'error': "auth_fail",
                    'message': "Authentication failed"
                }, "401 Unauthorized")
            else:
                return False
    else:
        logthis("Authentication failed; WWW-Authenticate header missing from request", loglevel=LL.WARNING)
        if rheaders:
            return ({
                'status': "error",
                'error': "www_authenticate_header_missing",
                'message': "Must include WWW-Authenticate header"
            }, "400 Bad Request")
        else:
            return False
def parse_xattr_overrides(xpath):
    """ Parse per-file overrides from extended file attributes.

    Returns a dict mapping the override names in xaov_map to their values.
    Also fixes a crash: fsutil.xattr_get() returns None on failure, and the
    original code called .iteritems() on it unconditionally; a failed xattr
    read now simply yields no overrides.
    """
    xrides = {}
    xatr = fsutil.xattr_get(xpath)
    if not xatr:
        return xrides
    # iteritems()/has_key() are Python 2-only; items()/`in` work everywhere
    for xk, xv in xatr.items():
        if xk in xaov_map:
            logthis("Got override from xattrib [user.%s]: %s ->" % (xk, xaov_map[xk]), suffix=xv, loglevel=LL.VERBOSE)
            xrides[xaov_map[xk]] = xv
    return xrides
def series_add(sname, ovrx=None):
    """ Add series @sname to the global tdex (or bump its count if present).

    @ovrx: optional override dict; tvdb_id/mal_id entries are copied onto
           the series entry.
    Returns the normalized series key.
    """
    snamex = normalize(sname)
    # dict.has_key() is Python 2-only; the `in` operator works everywhere
    if snamex in tdex:
        logthis("inc count for series:", suffix=snamex, loglevel=LL.DEBUG)
        tdex[snamex]['count'] += 1
    else:
        logthis("New series found:", suffix=snamex, loglevel=LL.DEBUG)
        tdex[snamex] = {'title': sname, 'count': 1}
    # apply ID overrides (idempotent for repeated files of the same series)
    if ovrx is not None:
        if 'tvdb_id' in ovrx:
            tdex[snamex]['tvdb_id'] = ovrx['tvdb_id']
        if 'mal_id' in ovrx:
            tdex[snamex]['mal_id'] = ovrx['mal_id']
    return snamex
def check_file_xfer(freal, fsize):
    """
    Determine whether file @freal has been fully transferred.
    Returns True only if the file exists and matches the expected @fsize
    in bytes; False otherwise.
    """
    # guard clause: a missing file is simply "not transferred yet"
    if not os.path.exists(freal):
        return False
    truesize = dstat(freal)['size']
    if truesize != fsize:
        logthis("%s - Size mismatch: Expected %d bytes, got" % (freal, fsize), suffix=truesize, loglevel=LL.ERROR)
        return False
    return True
def get_single_tag(tagdata, tagname):
    """Return the first value of @tagname from @tagdata, or None.

    String values are stripped of surrounding whitespace. The bare
    `except:` (which also swallowed KeyboardInterrupt/SystemExit) was
    narrowed to `except Exception`, and the unicode-or-str pair was
    replaced with the Python 2 idiom basestring (the common base of both).
    """
    tg = tagdata.get(tagname)
    if tg is None:
        return None
    try:
        tout = get_true_value(tg[0])
    except Exception:
        logthis("Encountered null tag data for", suffix=tagname, loglevel=LL.WARNING)
        return None
    if isinstance(tout, basestring):
        return tout.strip()
    return tout
def load_metrics():
    """ Load host metrics from the [hosts] section of the RC file.

    Returns a dict of hostname (dots replaced by underscores) -> int metric.
    Lines whose value does not parse as an integer are logged and skipped;
    the bare `except:` was narrowed to (ValueError, TypeError), the errors
    int() actually raises on bad input.
    """
    global config
    mets = config.hosts if 'hosts' in config else {}
    mets2 = {}
    for tm in mets:
        try:
            mets2[tm.replace('.', '_')] = int(mets[tm])
        except (ValueError, TypeError):
            logthis("Bad hostline for", suffix=tm, loglevel=LL.ERROR)
    logthis("Host metric list:\n", suffix=print_r(mets2), loglevel=LL.DEBUG)
    return mets2