Example no. 1
def get_nzoid(inputName):
    nzoid = None
    logger.debug("Searching for nzoid from SAbnzbd ...")
    if "http" in core.SABNZBDHOST:
        baseURL = "%s:%s/api" % (core.SABNZBDHOST, core.SABNZBDPORT)
    else:
        baseURL = "http://%s:%s/api" % (core.SABNZBDHOST, core.SABNZBDPORT)
    url = baseURL
    params = {}
    params['apikey'] = core.SABNZBDAPIKEY
    params['mode'] = "queue"
    params['output'] = 'json'
    try:
        r = requests.get(url, params=params, verify=False, timeout=(30, 120))
    except requests.ConnectionError:
        logger.error("Unable to open URL")
        return nzoid  # failure
    try:
        result = r.json()
        cleanName = os.path.splitext(os.path.split(inputName)[1])[0]
        for slot in result['queue']['slots']:
            if slot['filename'] in [inputName, cleanName]:
                nzoid = slot['nzo_id']
                logger.debug("Found nzoid: %s" % nzoid)
                break
    except:
        logger.warning("Data from SABnzbd could not be parsed")
    return nzoid
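A minimal sketch of the matching step above, run against a hand-built response dict. The queue/slots/filename/nzo_id shape is inferred from the code itself rather than taken from the SABnzbd documentation, and the sample values are placeholders.

import os

def find_nzoid(result, input_name):
    # Strip the path and extension the same way get_nzoid() does before comparing.
    clean_name = os.path.splitext(os.path.split(input_name)[1])[0]
    for slot in result.get('queue', {}).get('slots', []):
        if slot.get('filename') in (input_name, clean_name):
            return slot.get('nzo_id')
    return None

fake_response = {'queue': {'slots': [{'nzo_id': 'SABnzbd_nzo_x1', 'filename': 'Some.Show.S01E01'}]}}
print(find_nzoid(fake_response, 'Some.Show.S01E01.nzb'))  # -> 'SABnzbd_nzo_x1'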
Example no. 2
def check_python():
    """Check End-of-Life status for Python version."""
    # Raise if end of life
    eol.check()

    # Warn if within grace period
    grace_period = 365  # days
    eol.warn_for_status(grace_period=-grace_period)

    # Log warning if within grace period
    days_left = eol.lifetime()
    if days_left > 0:
        logger.info(
            'Python v{major}.{minor} will reach end of life in {x} days.'.
            format(
                major=sys.version_info[0],
                minor=sys.version_info[1],
                x=days_left,
            ), )
    else:
        logger.info(
            'Python v{major}.{minor} reached end of life {x} days ago.'.format(
                major=sys.version_info[0],
                minor=sys.version_info[1],
                x=-days_left,
            ), )
    if days_left <= grace_period:
        logger.warning('Please upgrade to a more recent Python version.')
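The eol helper's API is not shown in this excerpt; purely to illustrate the sign convention of days_left, a standalone calculation against a hard-coded end-of-life date could look like the sketch below (the date is a placeholder, not an authoritative EOL schedule).

import datetime

def days_until(eol_date):
    # Positive while the version is still supported, negative once it is past end of life.
    return (eol_date - datetime.date.today()).days

days_left = days_until(datetime.date(2027, 6, 30))  # placeholder date
if days_left > 0:
    print('reaches end of life in {0} days'.format(days_left))
else:
    print('reached end of life {0} days ago'.format(-days_left))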
Example no. 3
    def loadTrainedChars(self):
        options = self.options
        chars = {}

        chars_dir = os.path.join(options.mod_dir, "char")

        if not os.path.exists(chars_dir):
            Logger.error("It seems that you did not train this module", True)
            return None
        else:
            Logger.info("\nLoading trained characters")

        # Character folder names: digits 0-9 and lowercase letters a-z
        char_folders = list(range(10)) + list(string.ascii_lowercase)
        for i in char_folders:
            char_dir = os.path.join(chars_dir, str(i))
            if not os.path.exists(char_dir):
                Logger.warning("It seems that there is no folder for char {}".format(i))
            else:
                chars[i] = []

                # Read and load letters
                for img in os.listdir(char_dir):
                    (root, ext) = os.path.splitext(img)

                    # Work only with gifs
                    if ext == ".gif":
                        chars[i].append(self.imageTo2DBinaryList(Image.open(os.path.join(char_dir, img))))

        return chars
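imageTo2DBinaryList() is not included in this excerpt; a plausible stand-in (an assumption about the helper, not the project's actual implementation) converts a PIL image into a nested list of 0/1 values, which is the kind of structure the chars dictionary appears to hold.

from PIL import Image

def image_to_2d_binary_list(img):
    # Threshold to a 1-bit image, then map every pixel to 0 (black) or 1 (white).
    bw = img.convert('1')
    width, height = bw.size
    return [[1 if bw.getpixel((x, y)) else 0 for x in range(width)]
            for y in range(height)]

# Usage sketch: image_to_2d_binary_list(Image.open('char/0/sample.gif'))  # placeholder path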
Example no. 4
 def show_batch_results(self, ideal_result, real_result):
   if ideal_result == real_result:
     Logger.success("{}/{} - 100%".format(ideal_result, real_result), True)
   elif ideal_result == 0:
     Logger.error("0/{} - 0%".format(real_result), True)
   else:
     Logger.warning("{}/{} - {}%".format(ideal_result, real_result, round(100.0 * ideal_result / real_result, 2)), True)
Example no. 5
def on_msg(msg):
    msg_type = msg.type
    from_user = msg.member.name
    now = msg.create_time
    # Handle recalled (revoked) messages
    if msg_type == wxpy.NOTE:
        revoked = ETree.fromstring(msg.raw['Content'].replace(
            '&lt;', '<').replace('&gt;', '>')).find('revokemsg')
        if revoked is not None:
            # Use the msgid from the revoke notice to find the original message in bot.messages
            revoked_msgs = bot.messages.search(
                id=int(revoked.find('msgid').text))
            if not revoked_msgs:
                return
            revoked_msg = revoked_msgs[0]
            with locks[from_user]:
                session = Session()
                title_or_thinking = get_column(revoked_msg.type)
                sharing = session.query(Sharing).filter(
                    Sharing.name == from_user).filter(
                        getattr(Sharing, title_or_thinking) ==
                        revoked_msg.text).first()
                if sharing is not None:
                    setattr(sharing, title_or_thinking, None)
    else:
        with locks[from_user]:
            c = get_column(msg_type)
            min_length = getattr(settings, 'min_thinking_len', 1)
            if c == 'title' or len(msg.text) >= min_length:
                upsert(from_user, c, msg.text, now)
            else:
                logger.warning(msg.text + ' of length %d less than %d' %
                               (len(msg.text), min_length))
Example no. 7
def monitor():
    """
    Sniffs/monitors given capturing interface
    """

    logger.info("running...")

    print_status()
    
    try:
        for _cap in _caps:
            reader_and_decoder_thread = ReaderAndDecoderThread(_cap)
            reader_and_decoder_thread.daemon = True
            reader_and_decoder_thread.start()

        while _caps and not reader_end_of_file.is_set():
            time.sleep(1)

        logger.info("all capturing interfaces closed")
        
    except SystemError as ex:
        if "error return without" in str(ex):
            logger.warning("stopping (Ctrl-C pressed)")
        else:
            raise
Example no. 8
    def processEpisode(self, section, dirName, inputName=None, status=0, clientAgent="manual", inputCategory=None):
        if int(status) != 0:
            logger.warning("FAILED DOWNLOAD DETECTED, nothing to process.", section)
            return [1, "{0}: Failed to post-process. {1} does not support failed downloads".format(section, section)]

        cfg = dict(core.CFG[section][inputCategory])

        host = cfg["host"]
        port = cfg["port"]
        username = cfg["username"]
        password = cfg["password"]
        ssl = int(cfg.get("ssl", 0))
        web_root = cfg.get("web_root", "")
        remote_path = int(cfg.get("remote_path", 0))
        protocol = "https://" if ssl else "http://"

        url = "{0}{1}:{2}{3}/post_process".format(protocol, host, port, web_root)
        if not server_responding(url):
            logger.error("Server did not respond. Exiting", section)
            return [1, "{0}: Failed to post-process - {1} did not respond.".format(section, section)]

        inputName, dirName = convert_to_ascii(inputName, dirName)
        clean_name, ext = os.path.splitext(inputName)
        if len(ext) == 4:  # we assume this was a standard extension.
            inputName = clean_name

        params = {"nzb_folder": remoteDir(dirName) if remote_path else dirName}

        if inputName is not None:
            params["nzb_name"] = inputName

        success = False

        logger.debug("Opening URL: {0}".format(url), section)
        try:
            r = requests.get(
                url, auth=(username, password), params=params, stream=True, verify=False, timeout=(30, 300)
            )
        except requests.ConnectionError:
            logger.error("Unable to open URL", section)
            return [1, "{0}: Failed to post-process - Unable to connect to {1}".format(section, section)]
        for line in r.iter_lines():
            if line:
                logger.postprocess("{0}".format(line), section)
            if "Post Processing SUCCESSFUL" in line:
                success = True

        if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]:
            logger.error("Server returned status {0}".format(r.status_code), section)
            return [1, "{0}: Failed to post-process - Server returned status {1}".format(section, r.status_code)]

        if success:
            logger.postprocess("SUCCESS: This issue has been processed successfully", section)
            return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
        else:
            logger.warning("The issue does not appear to have successfully processed. Please check your Logs", section)
            return [
                1,
                "{0}: Failed to post-process - Returned log from {1} was not as expected.".format(section, section),
            ]
Example no. 9
def upsert(user, title_or_thinking, text, time):
    # ensure unique
    today = time.date()
    tomorrow = today + timedelta(days=1)
    session = Session()
    count = session.query(func.count(Sharing.id))\
        .filter(
            (Sharing.time >= today) &
            (Sharing.time < tomorrow) &
            (Sharing.name == user) &
            (Sharing.title.isnot(None)) &
            (Sharing.thinking.isnot(None))
        ).scalar()
    if count >= settings.max_daka_per_day:
        return

    sharing = session.query(Sharing).filter(
        (Sharing.time >= today) & (Sharing.time < tomorrow)
        & (Sharing.name == user)
        & (getattr(Sharing, title_or_thinking) == None)).first()
    if sharing is None:
        sharing = Sharing(**{'name': user})
        session.add(sharing)
    sharing.time = time
    setattr(sharing, title_or_thinking, text)
    try:
        session.commit()
    except IntegrityError as e:
        logger.error(str(e))
        logger.warning('conflict %s from %s: %s' %
                       (title_or_thinking, user, text))
    except StaleDataError as e:
        logger.error(str(e))
        logger.warning('conflict %s from %s: %s' %
                       (title_or_thinking, user, text))
Example no. 10
def launcher(process, name):
    try:
        mod = import_module(process)
        setproctitle("vision:" + name)
        mod.main()
    except Exception as e:
        logger.warning("process %s[%s] failed to launch %s" %
                       (name, process, e))
        pass
Example no. 11
def copy_link(src, targetLink, useLink):
    logger.info("MEDIAFILE: [%s]" % (os.path.basename(targetLink)), 'COPYLINK')
    logger.info("SOURCE FOLDER: [%s]" % (os.path.dirname(src)), 'COPYLINK')
    logger.info("TARGET FOLDER: [%s]" % (os.path.dirname(targetLink)),
                'COPYLINK')

    if src != targetLink and os.path.exists(targetLink):
        logger.info(
            "MEDIAFILE already exists in the TARGET folder, skipping ...",
            'COPYLINK')
        return True
    elif src == targetLink and os.path.isfile(targetLink) and os.path.isfile(
            src):
        logger.info("SOURCE AND TARGET files are the same, skipping ...",
                    'COPYLINK')
        return True
    elif src == os.path.dirname(targetLink):
        logger.info("SOURCE AND TARGET folders are the same, skipping ...",
                    'COPYLINK')
        return True

    makeDir(os.path.dirname(targetLink))
    try:
        if useLink == 'dir':
            logger.info("Directory linking SOURCE FOLDER -> TARGET FOLDER",
                        'COPYLINK')
            linktastic.dirlink(src, targetLink)
            return True
        if useLink == 'junction':
            logger.info(
                "Directory junction linking SOURCE FOLDER -> TARGET FOLDER",
                'COPYLINK')
            linktastic.dirlink(src, targetLink)
            return True
        elif useLink == "hard":
            logger.info("Hard linking SOURCE MEDIAFILE -> TARGET FOLDER",
                        'COPYLINK')
            linktastic.link(src, targetLink)
            return True
        elif useLink == "sym":
            logger.info("Sym linking SOURCE MEDIAFILE -> TARGET FOLDER",
                        'COPYLINK')
            linktastic.symlink(src, targetLink)
            return True
        elif useLink == "move-sym":
            logger.info("Sym linking SOURCE MEDIAFILE -> TARGET FOLDER",
                        'COPYLINK')
            shutil.move(src, targetLink)
            linktastic.symlink(targetLink, src)
            return True
        elif useLink == "move":
            logger.info("Moving SOURCE MEDIAFILE -> TARGET FOLDER", 'COPYLINK')
            shutil.move(src, targetLink)
            return True
    except Exception, e:
        logger.warning("Error: %s, copying instead ... " % (e), 'COPYLINK')

    logger.info("Copying SOURCE MEDIAFILE -> TARGET FOLDER", 'COPYLINK')
    shutil.copy(src, targetLink)

    return True
Example no. 12
def dl_inst(i, prov_name, recent):
    dp = get_provider(prov_name)
    try:
        if prov_name in i.contract_data:
            dp.download_instrument(i, recent=recent)
    except ConnectionException as e:
        raise e
    except Exception as e:
        logger.warning(e)
        logger.warning('Contract download error, ignoring')
Example no. 13
def dl_spot(s, prov_name):
    dp = get_provider(prov_name)
    try:
        if prov_name in s.price_data:
            dp.download_spot(s)
    except ConnectionException as e:
        raise e
    except Exception as e:
        logger.warning(e)
        logger.warning('Spot price download error, ignoring')
Example no. 14
def dl_cur(c, prov_name):
    dp = get_provider(prov_name)
    try:
        if prov_name in c.currency_data:
            dp.download_currency(c)
    except ConnectionException as e:
        raise e
    except Exception as e:
        logger.warning(e)
        logger.warning('Currency download error, ignoring')
Example no. 15
def rmReadOnly(filename):
    if os.path.isfile(filename):
        #check first the read-only attribute
        file_attribute = os.stat(filename)[0]
        if (not file_attribute & stat.S_IWRITE):
            # File is read-only, so make it writeable
            logger.debug('Read only mode on file ' + filename + ' Will try to make it writeable')
            try:
                os.chmod(filename, stat.S_IWRITE)
            except:
                logger.warning('Cannot change permissions of ' + filename, logger.WARNING)
Example no. 17
def rmReadOnly(filename):
    if os.path.isfile(filename):
        # check first the read-only attribute
        file_attribute = os.stat(filename)[0]
        if not file_attribute & stat.S_IWRITE:
            # File is read-only, so make it writeable
            logger.debug('Read only mode on file {name}. Attempting to make it writeable'.format
                         (name=filename))
            try:
                os.chmod(filename, stat.S_IWRITE)
            except:
                logger.warning('Cannot change permissions of {file}'.format(file=filename), logger.WARNING)
Example no. 18
    async def on_connect(self, ws):
        log.info('Successfully connected to id %s WebSocket' % self.vk_id)
        self.task = asyncio.ensure_future(self.task_listener(ws))

        self.alive = True
        async for msg in ws:
            if msg.type == aiohttp.WSMsgType.TEXT:
                await self.process_message(ws, msg.data)
            elif msg.type == aiohttp.WSMsgType.ERROR:
                log.warning('Got ws error: %s' % msg.data)
                self.alive = False
                return
Example no. 19
def resume_torrent(clientAgent, inputHash, inputID, inputName):
    logger.debug("Starting torrent %s in %s" % (inputName, clientAgent))
    try:
        if clientAgent == 'utorrent' and core.TORRENT_CLASS != "":
            core.TORRENT_CLASS.start(inputHash)
        if clientAgent == 'transmission' and core.TORRENT_CLASS != "":
            core.TORRENT_CLASS.start_torrent(inputID)
        if clientAgent == 'deluge' and core.TORRENT_CLASS != "":
            core.TORRENT_CLASS.core.resume_torrent([inputID])
        time.sleep(5)
    except:
        logger.warning("Failed to start torrent %s in %s" % (inputName, clientAgent))
Example no. 21
def main():
    print(figlet)
    
    logger.info("%s (sensor) #v%s" % (NAME, VERSION))

    parser = optparse.OptionParser(version=VERSION)
    parser.add_option("-c", dest="config_file", default=CONFIG_FILE, help="configuration file (default: '%s')" % os.path.split(CONFIG_FILE)[-1])
    parser.add_option("-i", dest="pcap_file", help="open pcap file for offline analysis")
    parser.add_option("--console", dest="console", action="store_true", help="print events to console (too)")
    parser.add_option("--no-updates", dest="no_updates", action="store_true", help="disable (online) trail updates")
    parser.add_option("--debug", dest="debug", action="store_true", help=optparse.SUPPRESS_HELP)
    options, _ = parser.parse_args()

    if not check_sudo():
        exit("[!] please run '%s' with sudo/Administrator privileges" % __file__)

    read_config(options.config_file)

    if options.debug:
        config.console = True
        config.SHOW_DEBUG = True
    
    create_log_directory(config.LOG_DIR)

    logger.init_file_loggers()

    config.plugins = DEFAULT_PLUGINS
    
    if config.PLUGINS:
        config.plugins += re.split(r"[,;]", config.PLUGINS)

    config.triggers = []
    
    if config.TRIGGERS:
        config.triggers += re.split(r"[,;]", config.TRIGGERS)

    for option in dir(options):
        if isinstance(getattr(options, option), (basestring, bool)) and not option.startswith('_'):
            config[option] = getattr(options, option)

    if options.pcap_file:
        if options.pcap_file == '-':
            logger.info("using STDIN")
        elif not os.path.isfile(options.pcap_file):
            exit("missing pcap file '%s'" % options.pcap_file)
        else:
            logger.info("using pcap file '%s'" % options.pcap_file)

    try:
        init()
        monitor()
    except KeyboardInterrupt:
        logger.warning("stopping (Ctrl-C pressed)")
Example no. 22
def pause_torrent(clientAgent, inputHash, inputID, inputName):
    logger.debug("Stopping torrent %s in %s while processing" % (inputName, clientAgent))
    try:
        if clientAgent == 'utorrent' and core.TORRENT_CLASS != "":
            core.TORRENT_CLASS.stop(inputHash)
        if clientAgent == 'transmission' and core.TORRENT_CLASS != "":
            core.TORRENT_CLASS.stop_torrent(inputID)
        if clientAgent == 'deluge' and core.TORRENT_CLASS != "":
            core.TORRENT_CLASS.core.pause_torrent([inputID])
        time.sleep(5)
    except:
        logger.warning("Failed to stop torrent %s in %s" % (inputName, clientAgent))
Example no. 24
    def getCodeFromString(self, str):
        codes = re.findall(r"\[(.*)\]", str)
        code = None

        if len(codes) == 0:
            Logger.error("No code found in image name: " + str)
        elif len(codes) == 1:
            code = codes[0]
        else:
            Logger.warning("Found more than one code in image name: " + str)
            code = codes[0]

        return code
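A quick check of the bracket-extracting pattern used above, with nothing beyond the standard re module. Note that the greedy ".*" spans from the first "[" to the last "]" when a name contains several bracket pairs, so findall still returns a single (combined) match in that case.

import re

print(re.findall(r"\[(.*)\]", "captcha [A3F9].gif"))        # ['A3F9']
print(re.findall(r"\[(.*)\]", "img [A3F9] copy [B7].gif"))  # ['A3F9] copy [B7']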
Example no. 25
def resume_torrent(clientAgent, inputHash, inputID, inputName):
    logger.debug("Starting torrent %s in %s" % (inputName, clientAgent))
    try:
        if clientAgent == 'utorrent' and core.TORRENT_CLASS != "":
            core.TORRENT_CLASS.start(inputHash)
        if clientAgent == 'transmission' and core.TORRENT_CLASS != "":
            core.TORRENT_CLASS.start_torrent(inputID)
        if clientAgent == 'deluge' and core.TORRENT_CLASS != "":
            core.TORRENT_CLASS.core.resume_torrent([inputID])
        time.sleep(5)
    except:
        logger.warning("Failed to start torrent %s in %s" %
                       (inputName, clientAgent))
Example no. 26
def pause_torrent(client_agent, input_hash, input_id, input_name):
    logger.debug('Stopping torrent {0} in {1} while processing'.format(input_name, client_agent))
    try:
        if client_agent == 'utorrent' and core.TORRENT_CLASS != '':
            core.TORRENT_CLASS.stop(input_hash)
        if client_agent == 'transmission' and core.TORRENT_CLASS != '':
            core.TORRENT_CLASS.stop_torrent(input_id)
        if client_agent == 'deluge' and core.TORRENT_CLASS != '':
            core.TORRENT_CLASS.core.pause_torrent([input_id])
        if client_agent == 'qbittorrent' and core.TORRENT_CLASS != '':
            core.TORRENT_CLASS.pause(input_hash)
        time.sleep(5)
    except Exception:
        logger.warning('Failed to stop torrent {0} in {1}'.format(input_name, client_agent))
Example no. 27
def resume_torrent(clientAgent, inputHash, inputID, inputName):
    if not core.TORRENT_RESUME == 1:
        return
    logger.debug("Starting torrent {0} in {1}".format(inputName, clientAgent))
    try:
        if clientAgent == 'utorrent' and core.TORRENT_CLASS != "":
            core.TORRENT_CLASS.start(inputHash)
        if clientAgent == 'transmission' and core.TORRENT_CLASS != "":
            core.TORRENT_CLASS.start_torrent(inputID)
        if clientAgent == 'deluge' and core.TORRENT_CLASS != "":
            core.TORRENT_CLASS.core.resume_torrent([inputID])
        time.sleep(5)
    except:
        logger.warning("Failed to start torrent {0} in {1}".format(inputName, clientAgent))
Example no. 28
def replace_filename(filename, dirname, name):
    head, fileExtension = os.path.splitext(os.path.basename(filename))
    if media_pattern.search(os.path.basename(dirname).replace(' ','.')) is not None: 
        newname = os.path.basename(dirname).replace(' ', '.')
        logger.debug("Replacing file name %s with directory name %s" % (head, newname), "EXCEPTION")
    elif media_pattern.search(name.replace(' ','.').lower()) is not None:
        newname = name.replace(' ', '.')
        logger.debug("Replacing file name %s with download name %s" % (head, newname), "EXCEPTION")
    else:
        logger.warning("No name replacement determined for %s" % (head), "EXCEPTION")
        newname = name 
    newfile = newname + fileExtension
    newfilePath = os.path.join(dirname, newfile)
    return newfilePath
Example no. 30
def copy_link(src, targetLink, useLink):
    logger.info("MEDIAFILE: [{0}]".format(os.path.basename(targetLink)), 'COPYLINK')
    logger.info("SOURCE FOLDER: [{0}]".format(os.path.dirname(src)), 'COPYLINK')
    logger.info("TARGET FOLDER: [{0}]".format(os.path.dirname(targetLink)), 'COPYLINK')

    if src != targetLink and os.path.exists(targetLink):
        logger.info("MEDIAFILE already exists in the TARGET folder, skipping ...", 'COPYLINK')
        return True
    elif src == targetLink and os.path.isfile(targetLink) and os.path.isfile(src):
        logger.info("SOURCE AND TARGET files are the same, skipping ...", 'COPYLINK')
        return True
    elif src == os.path.dirname(targetLink):
        logger.info("SOURCE AND TARGET folders are the same, skipping ...", 'COPYLINK')
        return True

    makeDir(os.path.dirname(targetLink))
    try:
        if useLink == 'dir':
            logger.info("Directory linking SOURCE FOLDER -> TARGET FOLDER", 'COPYLINK')
            linktastic.dirlink(src, targetLink)
            return True
        if useLink == 'junction':
            logger.info("Directory junction linking SOURCE FOLDER -> TARGET FOLDER", 'COPYLINK')
            linktastic.dirlink(src, targetLink)
            return True
        elif useLink == "hard":
            logger.info("Hard linking SOURCE MEDIAFILE -> TARGET FOLDER", 'COPYLINK')
            linktastic.link(src, targetLink)
            return True
        elif useLink == "sym":
            logger.info("Sym linking SOURCE MEDIAFILE -> TARGET FOLDER", 'COPYLINK')
            linktastic.symlink(src, targetLink)
            return True
        elif useLink == "move-sym":
            logger.info("Sym linking SOURCE MEDIAFILE -> TARGET FOLDER", 'COPYLINK')
            shutil.move(src, targetLink)
            linktastic.symlink(targetLink, src)
            return True
        elif useLink == "move":
            logger.info("Moving SOURCE MEDIAFILE -> TARGET FOLDER", 'COPYLINK')
            shutil.move(src, targetLink)
            return True
    except Exception as e:
        logger.warning("Error: {0}, copying instead ... ".format(e), 'COPYLINK')

    logger.info("Copying SOURCE MEDIAFILE -> TARGET FOLDER", 'COPYLINK')
    shutil.copy(src, targetLink)

    return True
Example no. 31
def copy_link(src, target_link, use_link):
    logger.info('MEDIAFILE: [{0}]'.format(os.path.basename(target_link)), 'COPYLINK')
    logger.info('SOURCE FOLDER: [{0}]'.format(os.path.dirname(src)), 'COPYLINK')
    logger.info('TARGET FOLDER: [{0}]'.format(os.path.dirname(target_link)), 'COPYLINK')

    if src != target_link and os.path.exists(target_link):
        logger.info('MEDIAFILE already exists in the TARGET folder, skipping ...', 'COPYLINK')
        return True
    elif src == target_link and os.path.isfile(target_link) and os.path.isfile(src):
        logger.info('SOURCE AND TARGET files are the same, skipping ...', 'COPYLINK')
        return True
    elif src == os.path.dirname(target_link):
        logger.info('SOURCE AND TARGET folders are the same, skipping ...', 'COPYLINK')
        return True

    make_dir(os.path.dirname(target_link))
    try:
        if use_link == 'dir':
            logger.info('Directory linking SOURCE FOLDER -> TARGET FOLDER', 'COPYLINK')
            linktastic.dirlink(src, target_link)
            return True
        if use_link == 'junction':
            logger.info('Directory junction linking SOURCE FOLDER -> TARGET FOLDER', 'COPYLINK')
            linktastic.dirlink(src, target_link)
            return True
        elif use_link == 'hard':
            logger.info('Hard linking SOURCE MEDIAFILE -> TARGET FOLDER', 'COPYLINK')
            linktastic.link(src, target_link)
            return True
        elif use_link == 'sym':
            logger.info('Sym linking SOURCE MEDIAFILE -> TARGET FOLDER', 'COPYLINK')
            linktastic.symlink(src, target_link)
            return True
        elif use_link == 'move-sym':
            logger.info('Sym linking SOURCE MEDIAFILE -> TARGET FOLDER', 'COPYLINK')
            shutil.move(src, target_link)
            linktastic.symlink(target_link, src)
            return True
        elif use_link == 'move':
            logger.info('Moving SOURCE MEDIAFILE -> TARGET FOLDER', 'COPYLINK')
            shutil.move(src, target_link)
            return True
    except Exception as e:
        logger.warning('Error: {0}, copying instead ... '.format(e), 'COPYLINK')

    logger.info('Copying SOURCE MEDIAFILE -> TARGET FOLDER', 'COPYLINK')
    shutil.copy(src, target_link)

    return True
Example no. 32
def _get_data(library, q_type, database, symbol, **kwargs):
    """
    General method to get quotes data from storage
    :param library: storage library name (usually corresponds to a data provider name)
    :param q_type: one of 'futures' | 'currency' | 'others'
    :param database: local storage database name
    :param symbol: local storage symbol name
    :return: pd.DataFrame or None in case of error
    """
    try:
        return Store(library, q_type, database + '_' + symbol).get()
    except Exception as e:
        logger.warning("Something went wrong on symbol %s_%s request from storage: %s" %
                       (database, symbol, e))
        return None
Example no. 33
def connect(broker, port, user, pwd):
    while running:
        try:
            logger.info("connecting to %s:%s" % (broker, port))
            client = mqtt.Client(client_id="visionreporter")
            if user and pwd:
                client.username_pw_set(user, pwd)

            client.connect(broker, port, 60)

            return client
        except Exception as e:
            logger.warning("unable to connect %s:%s: %s" % (broker, port, e))
            logger.warning("retrying in 10 seconds")
            sleep(10)
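A short usage sketch for a client built this way, assuming the mqtt module above is paho-mqtt 1.x; the broker address, topic and payload are placeholders.

import paho.mqtt.client as mqtt

client = mqtt.Client(client_id="visionreporter")
client.connect("localhost", 1883, 60)           # placeholder broker
client.loop_start()                             # background network thread
client.publish("vision/report", "online")       # placeholder topic and payload
client.loop_stop()
client.disconnect()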
Example no. 34
def replace_filename(filename, dirname, name):
    head, fileExtension = os.path.splitext(os.path.basename(filename))
    if media_pattern.search(os.path.basename(dirname).replace(' ', '.')) is not None:
        newname = os.path.basename(dirname).replace(' ', '.')
        logger.debug("Replacing file name {old} with directory name {new}".format(old=head, new=newname), "EXCEPTION")
    elif media_pattern.search(name.replace(' ', '.').lower()) is not None:
        newname = name.replace(' ', '.')
        logger.debug("Replacing file name {old} with download name {new}".format
                     (old=head, new=newname), "EXCEPTION")
    else:
        logger.warning("No name replacement determined for {name}".format(name=head), "EXCEPTION")
        newname = name
    newfile = newname + fileExtension
    newfilePath = os.path.join(dirname, newfile)
    return newfilePath
Example no. 35
def replace_filename(filename, dirname, name):
    head, file_extension = os.path.splitext(os.path.basename(filename))
    if media_pattern.search(os.path.basename(dirname).replace(' ', '.')) is not None:
        newname = os.path.basename(dirname).replace(' ', '.')
        logger.debug('Replacing file name {old} with directory name {new}'.format(old=head, new=newname), 'EXCEPTION')
    elif media_pattern.search(name.replace(' ', '.').lower()) is not None:
        newname = name.replace(' ', '.')
        logger.debug('Replacing file name {old} with download name {new}'.format
                     (old=head, new=newname), 'EXCEPTION')
    else:
        logger.warning('No name replacement determined for {name}'.format(name=head), 'EXCEPTION')
        newname = name
    newfile = newname + file_extension
    newfile_path = os.path.join(dirname, newfile)
    return newfile_path
Example no. 36
def clean(segment_dir, threshold):
    cleaned = 0
    for segment in sorted(list_abs(segment_dir), key=getmtime):
        if getmtime(segment) >= threshold:
            break

        try:
            logger.warning("segment %s reached retention period" % segment)
            remove(segment)
            logger.info("segment %s was removed" % segment)
            cleaned += 1
        except Exception as e:
            logger.error("unable to clean segment %s: %s" % (segment, e))

    return cleaned
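list_abs() is not defined in this excerpt; a plausible stand-in (an assumption, not the project's implementation) yields absolute paths of the files in a directory, and the threshold argument would typically be computed as "now minus the retention period", expressed as an mtime cutoff.

import os
import time

def list_abs(directory):
    # Absolute paths of the regular files directly inside the directory.
    for name in os.listdir(directory):
        path = os.path.join(directory, name)
        if os.path.isfile(path):
            yield path

retention_days = 7                                 # placeholder retention period
threshold = time.time() - retention_days * 86400   # cutoff expressed as a UNIX mtime
# clean('/var/segments/cam1', threshold)           # placeholder segment directory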
Example no. 37
def resume_torrent(client_agent, input_hash, input_id, input_name):
    if not core.TORRENT_RESUME == 1:
        return
    logger.debug('Starting torrent {0} in {1}'.format(input_name, client_agent))
    try:
        if client_agent == 'utorrent' and core.TORRENT_CLASS != '':
            core.TORRENT_CLASS.start(input_hash)
        if client_agent == 'transmission' and core.TORRENT_CLASS != '':
            core.TORRENT_CLASS.start_torrent(input_id)
        if client_agent == 'deluge' and core.TORRENT_CLASS != '':
            core.TORRENT_CLASS.core.resume_torrent([input_id])
        if client_agent == 'qbittorrent' and core.TORRENT_CLASS != '':
            core.TORRENT_CLASS.resume(input_hash)
        time.sleep(5)
    except Exception:
        logger.warning('Failed to start torrent {0} in {1}'.format(input_name, client_agent))
Example no. 38
def remove_torrent(clientAgent, inputHash, inputID, inputName):
    if core.DELETE_ORIGINAL == 1 or core.USELINK == 'move':
        logger.debug("Deleting torrent %s from %s" % (inputName, clientAgent))
        try:
            if clientAgent == 'utorrent' and core.TORRENT_CLASS != "":
                core.TORRENT_CLASS.removedata(inputHash)
                core.TORRENT_CLASS.remove(inputHash)
            if clientAgent == 'transmission' and core.TORRENT_CLASS != "":
                core.TORRENT_CLASS.remove_torrent(inputID, True)
            if clientAgent == 'deluge' and core.TORRENT_CLASS != "":
                core.TORRENT_CLASS.core.remove_torrent(inputID, True)
            time.sleep(5)
        except:
            logger.warning("Failed to delete torrent %s in %s" % (inputName, clientAgent))
    else:    
        resume_torrent(clientAgent, inputHash, inputID, inputName)
Example no. 41
def WakeUp():
    host = core.CFG["WakeOnLan"]["host"]
    port = int(core.CFG["WakeOnLan"]["port"])
    mac = core.CFG["WakeOnLan"]["mac"]

    i = 1
    while TestCon(host, port) == "Down" and i < 4:
        logger.info(("Sending WakeOnLan Magic Packet for mac: %s" % (mac)))
        WakeOnLan(mac)
        time.sleep(20)
        i = i + 1

    if TestCon(host, port) == "Down":  # final check.
        logger.warning("System with mac: %s has not woken after 3 attempts. Continuing with the rest of the script." % (
        mac))
    else:
        logger.info("System with mac: %s has been woken. Continuing with the rest of the script." % (mac))
Example no. 43
def replace_links(link, max_depth=10):
    link_depth = 0
    target = link

    for attempt in range(0, max_depth):
        if not islink(target):
            break
        target = readlink(target)
        link_depth = attempt

    if not link_depth:
        logger.debug('{0} is not a link'.format(link))
    elif link_depth > max_depth or (link_depth == max_depth and islink(target)):
        logger.warning('Exceeded maximum depth {0} while following link {1}'.format(max_depth, link))
    else:
        logger.info('Changing sym-link: {0} to point directly to file: {1}'.format(link, target), 'COPYLINK')
        os.unlink(link)
        linktastic.symlink(target, link)
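For comparison, the standard library can resolve an arbitrary chain of symlinks in a single call; a minimal sketch of the same relinking idea built on os.path.realpath, without the manual depth counter:

import os

def relink_to_target(link):
    # Only act on symlinks; realpath() follows every level of indirection.
    if os.path.islink(link):
        target = os.path.realpath(link)
        os.unlink(link)
        os.symlink(target, link)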
Example no. 44
def remove_torrent(client_agent, input_hash, input_id, input_name):
    if core.DELETE_ORIGINAL == 1 or core.USELINK == 'move':
        logger.debug('Deleting torrent {0} from {1}'.format(input_name, client_agent))
        try:
            if client_agent == 'utorrent' and core.TORRENT_CLASS != '':
                core.TORRENT_CLASS.removedata(input_hash)
                core.TORRENT_CLASS.remove(input_hash)
            if client_agent == 'transmission' and core.TORRENT_CLASS != '':
                core.TORRENT_CLASS.remove_torrent(input_id, True)
            if client_agent == 'deluge' and core.TORRENT_CLASS != '':
                core.TORRENT_CLASS.core.remove_torrent(input_id, True)
            if client_agent == 'qbittorrent' and core.TORRENT_CLASS != '':
                core.TORRENT_CLASS.delete_permanently(input_hash)
            time.sleep(5)
        except Exception:
            logger.warning('Failed to delete torrent {0} in {1}'.format(input_name, client_agent))
    else:
        resume_torrent(client_agent, input_hash, input_id, input_name)
Example no. 45
def remove_torrent(client_agent, input_hash, input_id, input_name):
    if core.DELETE_ORIGINAL == 1 or core.USE_LINK == 'move':
        logger.debug('Deleting torrent {0} from {1}'.format(input_name, client_agent))
        try:
            if client_agent == 'utorrent' and core.TORRENT_CLASS != '':
                core.TORRENT_CLASS.removedata(input_hash)
                core.TORRENT_CLASS.remove(input_hash)
            if client_agent == 'transmission' and core.TORRENT_CLASS != '':
                core.TORRENT_CLASS.remove_torrent(input_id, True)
            if client_agent == 'deluge' and core.TORRENT_CLASS != '':
                core.TORRENT_CLASS.core.remove_torrent(input_id, True)
            if client_agent == 'qbittorrent' and core.TORRENT_CLASS != '':
                core.TORRENT_CLASS.delete_permanently(input_hash)
            time.sleep(5)
        except Exception:
            logger.warning('Failed to delete torrent {0} in {1}'.format(input_name, client_agent))
    else:
        resume_torrent(client_agent, input_hash, input_id, input_name)
Example no. 46
def start():
    logger.info("starting recorder[pid=%s]" % common.PID)
    config = common.load_config()
    segment_dir = "%s/%s" % (config["output"], PROCESS_NAME)
    config = config["cameras"][PROCESS_NAME]
    logger.info("saving segments of camera %s in directory %s" %
                (PROCESS_NAME, segment_dir))
    duration = int(config["duration"])
    command = [
        "ffmpeg", "-rtsp_transport", "tcp", "-i", config["rtsp.url"], "-an",
        "-sn", "-b:v", "132k", "-bufsize", "132k", "-c:v", "copy", "-r",
        str(config["fps"]), "-bsf:v", "h264_mp4toannexb", "-map", "0",
        "-shortest", "-strftime", "1", "-f", "segment", "-segment_time",
        str(duration), "-segment_format", "mp4",
        "%s/%s-%s.mp4" % (segment_dir, PROCESS_NAME, "%Y%m%d%H%M%S")
    ]
    url = (config["rtsp.ip"], config["rtsp.port"])
    request_command = bytes(
        "OPTIONS rtsp://%s:%s RTSP/1.0\r\nCSeq: 1\r\nUser-Agent: python\r\nAccept: application/sdp\r\n\r\n"
        % (url[0], str(url[1])), "utf-8")
    del config, segment_dir
    process = None
    try:
        while 1:
            if not is_reachable(url, request_command):
                logger.warning("destination %s:%s is not reachable" %
                               (url[0], str(url[1])))
                logger.info("waiting for camera[%s:%s] to be available" %
                            (url[0], str(url[1])))
                while not is_reachable(url, request_command):
                    sleep(1)

                close(process)
                process = None

            if not is_running(process):
                close(process)
                process = launch(command)
            else:
                sleep(duration)
    finally:
        logger.info("stopping recorder[pid=%s]" % common.PID)
        close(process)
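is_reachable() is not shown in this excerpt; a minimal sketch under the assumption that it simply opens a TCP connection to the camera's RTSP port, sends the prepared OPTIONS request and treats any RTSP reply as "reachable":

import socket

def is_reachable(url, request_command, timeout=3):
    host, port = url[0], int(url[1])
    try:
        with socket.create_connection((host, port), timeout=timeout) as sock:
            sock.sendall(request_command)
            return sock.recv(1024).startswith(b"RTSP/1.0")
    except OSError:
        return False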
Example no. 47
def check_python():
    """Check End-of-Life status for Python version."""
    # Raise if end of life
    eol.check()

    # Warn if within grace period
    grace_period = 365  # days
    eol.warn_for_status(grace_period=-grace_period)

    # Log warning if within grace period
    days_left = eol.lifetime()
    logger.info(
        'Python v{major}.{minor} will reach end of life in {x} days.'.format(
            major=sys.version_info[0],
            minor=sys.version_info[1],
            x=days_left,
        )
    )
    if days_left <= grace_period:
        logger.warning('Please upgrade to a more recent Python version.')
Example no. 48
def loop(conn, segment_dirs, loop_interval, output, threshold):
    inc = storage.inc
    delete_total = storage.get_int(conn, WATCHER_DELETED_TOTAL)
    if not delete_total:
        delete_total = 0

    delete_since_start = 0
    global running
    running = True
    while running:
        if usage_percentage(output) > threshold:
            logger.warning("filesystem has reached max size threshold")
            logger.info("cleaning old segments")
            for segment_dir in segment_dirs:
                if clean(segment_dir):
                    delete_total = inc(conn, WATCHER_DELETED_TOTAL, delete_total)
                    delete_since_start = inc(conn, WATCHER_DELETED_SINCE_START, delete_since_start)
                    if usage_percentage(output) < threshold:
                        break

        sleep(loop_interval)
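usage_percentage() is not defined in this excerpt; a minimal stand-in (an assumption, not the project's implementation) based on shutil.disk_usage would be:

import shutil

def usage_percentage(path):
    # Percentage of the filesystem holding `path` that is currently in use.
    usage = shutil.disk_usage(path)
    return 100.0 * usage.used / usage.total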
Example no. 49
def wake_up():
    wol = core.CFG['WakeOnLan']
    host = wol['host']
    port = int(wol['port'])
    mac = wol['mac']
    max_attempts = 4

    logger.info('Trying to wake On lan.')

    for attempt in range(0, max_attempts):
        logger.info('Attempt {0} of {1}'.format(attempt + 1, max_attempts))
        if test_connection(host, port) == 'Up':
            logger.info('System with mac: {0} has been woken.'.format(mac))
            break
        wake_on_lan(mac)
        time.sleep(20)
    else:
        if test_connection(host, port) == 'Down':  # final check.
            msg = 'System with mac: {0} has not woken after {1} attempts.'
            logger.warning(msg.format(mac, max_attempts))

    logger.info('Continuing with the rest of the script.')
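wake_on_lan() itself is not included in this excerpt; a compact sketch of the standard magic-packet broadcast (six 0xFF bytes followed by the MAC address repeated sixteen times) sent over UDP, with the usual "discard" port as the target:

import socket

def wake_on_lan(mac):
    # Accepts MACs like '00:11:22:aa:bb:cc' or '00-11-22-aa-bb-cc'.
    mac_bytes = bytes.fromhex(mac.replace(':', '').replace('-', ''))
    packet = b'\xff' * 6 + mac_bytes * 16
    with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as sock:
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
        sock.sendto(packet, ('<broadcast>', 9))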
Example no. 50
def get_nzoid(inputName):
    nzoid = None
    slots = []
    logger.debug("Searching for nzoid from SAbnzbd ...")
    if "http" in core.SABNZBDHOST:
        baseURL = "{0}:{1}/api".format(core.SABNZBDHOST, core.SABNZBDPORT)
    else:
        baseURL = "http://{0}:{1}/api".format(core.SABNZBDHOST, core.SABNZBDPORT)
    url = baseURL
    params = {
        'apikey': core.SABNZBDAPIKEY,
        'mode': "queue",
        'output': 'json',
    }
    try:
        r = requests.get(url, params=params, verify=False, timeout=(30, 120))
    except requests.ConnectionError:
        logger.error("Unable to open URL")
        return nzoid  # failure
    try:
        result = r.json()
        cleanName = os.path.splitext(os.path.split(inputName)[1])[0]
        slots.extend([(slot['nzo_id'], slot['filename']) for slot in result['queue']['slots']])
    except:
        logger.warning("Data from SABnzbd queue could not be parsed")
    params['mode'] = "history"
    try:
        r = requests.get(url, params=params, verify=False, timeout=(30, 120))
    except requests.ConnectionError:
        logger.error("Unable to open URL")
        return nzoid  # failure
    try:
        result = r.json()
        cleanName = os.path.splitext(os.path.split(inputName)[1])[0]
        slots.extend([(slot['nzo_id'], slot['name']) for slot in result['history']['slots']])
    except:
        logger.warning("Data from SABnzbd history could not be parsed")
    try:
        for nzo_id, name in slots:
            if name in [inputName, cleanName]:
                nzoid = nzo_id
                logger.debug("Found nzoid: {0}".format(nzoid))
                break
    except:
        logger.warning("Data from SABnzbd could not be parsed")
    return nzoid
Example no. 51
def get_nzoid(input_name):
    nzoid = None
    slots = []
    logger.debug('Searching for nzoid from SABnzbd ...')
    if 'http' in core.SABNZBD_HOST:
        base_url = '{0}:{1}/api'.format(core.SABNZBD_HOST, core.SABNZBD_PORT)
    else:
        base_url = 'http://{0}:{1}/api'.format(core.SABNZBD_HOST, core.SABNZBD_PORT)
    url = base_url
    params = {
        'apikey': core.SABNZBD_APIKEY,
        'mode': 'queue',
        'output': 'json',
    }
    try:
        r = requests.get(url, params=params, verify=False, timeout=(30, 120))
    except requests.ConnectionError:
        logger.error('Unable to open URL')
        return nzoid  # failure
    try:
        result = r.json()
        clean_name = os.path.splitext(os.path.split(input_name)[1])[0]
        slots.extend([(slot['nzo_id'], slot['filename']) for slot in result['queue']['slots']])
    except Exception:
        logger.warning('Data from SABnzbd queue could not be parsed')
    params['mode'] = 'history'
    try:
        r = requests.get(url, params=params, verify=False, timeout=(30, 120))
    except requests.ConnectionError:
        logger.error('Unable to open URL')
        return nzoid  # failure
    try:
        result = r.json()
        clean_name = os.path.splitext(os.path.split(input_name)[1])[0]
        slots.extend([(slot['nzo_id'], slot['name']) for slot in result['history']['slots']])
    except Exception:
        logger.warning('Data from SABnzbd history could not be parsed')
    try:
        for nzo_id, name in slots:
            if name in [input_name, clean_name]:
                nzoid = nzo_id
                logger.debug('Found nzoid: {0}'.format(nzoid))
                break
    except Exception:
        logger.warning('Data from SABnzbd could not be parsed')
    return nzoid
Example no. 52
def main(args, section=None):
    # Initialize the config
    core.initialize(section)

    # clientAgent for NZBs
    clientAgent = core.NZB_CLIENTAGENT

    logger.info("#########################################################")
    logger.info("## ..::[%s]::.. ##" % os.path.basename(__file__))
    logger.info("#########################################################")

    # debug command line options
    logger.debug("Options passed into nzbToMedia: %s" % args)

    # Post-Processing Result
    result = [0, ""]
    status = 0

    # NZBGet
    if os.environ.has_key('NZBOP_SCRIPTDIR'):
        # Check if the script is called from nzbget 11.0 or later
        if os.environ['NZBOP_VERSION'][0:5] < '11.0':
            logger.error("NZBGet Version %s is not supported. Please update NZBGet." %(str(os.environ['NZBOP_VERSION'])))
            sys.exit(core.NZBGET_POSTPROCESS_ERROR)

        logger.info("Script triggered from NZBGet Version %s." %(str(os.environ['NZBOP_VERSION'])))

        # Check if the script is called from nzbget 13.0 or later
        if os.environ.has_key('NZBPP_TOTALSTATUS'):
            if not os.environ['NZBPP_TOTALSTATUS'] == 'SUCCESS':
                logger.info("Download failed with status %s." %(os.environ['NZBPP_STATUS']))
                status = 1

        else:
            # Check par status
            if os.environ['NZBPP_PARSTATUS'] == '1' or os.environ['NZBPP_PARSTATUS'] == '4':
                logger.warning("Par-repair failed, setting status \"failed\"")
                status = 1

            # Check unpack status
            if os.environ['NZBPP_UNPACKSTATUS'] == '1':
                logger.warning("Unpack failed, setting status \"failed\"")
                status = 1

            if os.environ['NZBPP_UNPACKSTATUS'] == '0' and os.environ['NZBPP_PARSTATUS'] == '0':
                # Unpack was skipped due to nzb-file properties or due to errors during par-check

                if os.environ['NZBPP_HEALTH'] < 1000:
                    logger.warning(
                        "Download health is compromised and Par-check/repair disabled or no .par2 files found. Setting status \"failed\"")
                    logger.info("Please check your Par-check/repair settings for future downloads.")
                    status = 1

                else:
                    logger.info(
                        "Par-check/repair disabled or no .par2 files found, and Unpack not required. Health is ok so handle as though download successful")
                    logger.info("Please check your Par-check/repair settings for future downloads.")

        # Check for download_id to pass to CouchPotato
        download_id = ""
        failureLink = None
        if os.environ.has_key('NZBPR_COUCHPOTATO'):
            download_id = os.environ['NZBPR_COUCHPOTATO']
        elif os.environ.has_key('NZBPR_DRONE'):
            download_id = os.environ['NZBPR_DRONE']
        elif os.environ.has_key('NZBPR_SONARR'):
            download_id = os.environ['NZBPR_SONARR']
        if os.environ.has_key('NZBPR__DNZB_FAILURE'):
            failureLink = os.environ['NZBPR__DNZB_FAILURE']

        # All checks done, now launching the script.
        clientAgent = 'nzbget'
        result = process(os.environ['NZBPP_DIRECTORY'], inputName=os.environ['NZBPP_NZBNAME'], status=status,
                         clientAgent=clientAgent, download_id=download_id, inputCategory=os.environ['NZBPP_CATEGORY'],
                         failureLink=failureLink)
    # SABnzbd Pre 0.7.17
    elif len(args) == core.SABNZB_NO_OF_ARGUMENTS:
        # SABnzbd argv:
        # 1 The final directory of the job (full path)
        # 2 The original name of the NZB file
        # 3 Clean version of the job name (no path info and ".nzb" removed)
        # 4 Indexer's report number (if supported)
        # 5 User-defined category
        # 6 Group that the NZB was posted in e.g. alt.binaries.x
        # 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
        clientAgent = 'sabnzbd'
        logger.info("Script triggered from SABnzbd")
        result = process(args[1], inputName=args[2], status=args[7], inputCategory=args[5], clientAgent=clientAgent,
                         download_id='')
    # SABnzbd 0.7.17+
    elif len(args) >= core.SABNZB_0717_NO_OF_ARGUMENTS:
        # SABnzbd argv:
        # 1 The final directory of the job (full path)
        # 2 The original name of the NZB file
        # 3 Clean version of the job name (no path info and ".nzb" removed)
        # 4 Indexer's report number (if supported)
        # 5 User-defined category
        # 6 Group that the NZB was posted in e.g. alt.binaries.x
        # 7 Status of post processing. 0 = OK, 1=failed verification, 2=failed unpack, 3=1+2
        # 8 Failure URL
        clientAgent = 'sabnzbd'
        logger.info("Script triggered from SABnzbd 0.7.17+")
        result = process(args[1], inputName=args[2], status=args[7], inputCategory=args[5], clientAgent=clientAgent,
                        download_id='', failureLink=''.join(args[8:]))
    else:
        # Perform Manual Post-Processing
        logger.warning("Invalid number of arguments received from client, Switching to manual run mode ...")

        for section, subsections in core.SECTIONS.items():
            for subsection in subsections:
                if not core.CFG[section][subsection].isenabled():
                    continue
                for dirName in getDirs(section, subsection, link = 'move'):
                    logger.info("Starting manual run for %s:%s - Folder:%s" % (section, subsection, dirName))

                    logger.info("Checking database for download info for %s ..." % (os.path.basename(dirName)))
                    core.DOWNLOADINFO = get_downloadInfo(os.path.basename(dirName), 0)
                    if core.DOWNLOADINFO:
                        logger.info(
                            "Found download info for %s, setting variables now ..." % (os.path.basename(dirName)))
                    else:
                        logger.info(
                            'Unable to locate download info for %s, continuing to try and process this release ...' % (
                                os.path.basename(dirName))
                        )

                    try:
                        clientAgent = str(core.DOWNLOADINFO[0]['client_agent'])
                    except:
                        clientAgent = 'manual'
                    try:
                        download_id = str(core.DOWNLOADINFO[0]['input_id'])
                    except:
                        download_id = None

                    if clientAgent.lower() not in core.NZB_CLIENTS and clientAgent != 'manual':
                        continue

                    try:
                        dirName = dirName.encode(core.SYS_ENCODING)
                    except: pass
                    inputName = os.path.basename(dirName)
                    try:
                        inputName = inputName.encode(core.SYS_ENCODING)
                    except: pass

                    results = process(dirName, inputName, 0, clientAgent=clientAgent,
                                      download_id=download_id, inputCategory=subsection)
                    if results[0] != 0:
                        logger.error("A problem was reported when trying to perform a manual run for %s:%s." % (
                        section, subsection))
                        result = results

    if result[0] == 0:
        logger.info("The %s script completed successfully." % args[0])
        if result[1]:
            print result[1] + "!"  # For SABnzbd Status display.
        if os.environ.has_key('NZBOP_SCRIPTDIR'):  # return code for nzbget v11
            del core.MYAPP
            return (core.NZBGET_POSTPROCESS_SUCCESS)
    else:
        logger.error("A problem was reported in the %s script." % args[0])
        if result[1]:
            print result[1] + "!"  # For SABnzbd Status display.
        if os.environ.has_key('NZBOP_SCRIPTDIR'):  # return code for nzbget v11
            del core.MYAPP
            return (core.NZBGET_POSTPROCESS_ERROR)
    del core.MYAPP
    return (result[0])
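
The fragment above prints result[1] so SABnzbd can show it in its status display, and under NZBGet it returns that downloader's post-processing codes instead of a plain exit status. A minimal, self-contained sketch of that mapping (93/94/95 are NZBGet's documented POSTPROCESS return codes; the exit_with helper itself is illustrative, not the project's code):

import os
import sys

# NZBGet's documented post-processing return codes.
NZBGET_POSTPROCESS_SUCCESS = 93
NZBGET_POSTPROCESS_ERROR = 94
NZBGET_POSTPROCESS_NONE = 95

def exit_with(result):
    """Map a [status, message] result to an exit code for the active downloader."""
    status, message = result[0], result[1]
    if message:
        print(message + "!")  # SABnzbd displays the script output in its status column
    if 'NZBOP_SCRIPTDIR' in os.environ:  # running under NZBGet
        sys.exit(NZBGET_POSTPROCESS_SUCCESS if status == 0 else NZBGET_POSTPROCESS_ERROR)
    sys.exit(status)

exit_with([0, "Successfully post-processed example.nzb"])
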
Exemplo n.º 53
0
    def processEpisode(self, section, dirName, inputName=None, status=0, clientAgent='manual', inputCategory=None):
        if int(status) != 0:
            logger.warning("FAILED DOWNLOAD DETECTED, nothing to process.",section)
            return [1, "%s: Failed to post-process. %s does not support failed downloads" % (section, section) ]

        host = core.CFG[section][inputCategory]["host"]
        port = core.CFG[section][inputCategory]["port"]
        username = core.CFG[section][inputCategory]["username"]
        password = core.CFG[section][inputCategory]["password"]
        try:
            ssl = int(core.CFG[section][inputCategory]["ssl"])
        except:
            ssl = 0
        try:
            web_root = core.CFG[section][inputCategory]["web_root"]
        except:
            web_root = ""
        try:
            remote_path = int(core.CFG[section][inputCategory]["remote_path"])
        except:
            remote_path = 0

        if ssl:
            protocol = "https://"
        else:
            protocol = "http://"

        url = "%s%s:%s%s/post_process" % (protocol, host, port, web_root)
        if not server_responding(url):
            logger.error("Server did not respond. Exiting", section)
            return [1, "%s: Failed to post-process - %s did not respond." % (section, section) ]

        inputName, dirName = convert_to_ascii(inputName, dirName)
        clean_name, ext = os.path.splitext(inputName)
        if len(ext) == 4:  # we assume this was a standard extension.
            inputName = clean_name

        params = {}
        params['nzb_folder'] = dirName

        if remote_path:
            params['nzb_folder'] = remoteDir(dirName)

        if inputName != None:
            params['nzb_name'] = inputName

        success = False

        logger.debug("Opening URL: %s" % (url), section)
        try:
            r = requests.get(url, auth=(username, password), params=params, stream=True, verify=False, timeout=(30, 300))
        except requests.ConnectionError:
            logger.error("Unable to open URL", section)
            return [1, "%s: Failed to post-process - Unable to connect to %s" % (section, section) ]
        for line in r.iter_lines():
            if line: logger.postprocess("%s" % (line), section)
            if ("Post Processing SUCCESSFUL!" or "Post Processing SUCCESSFULL!")in line: success = True

        if not r.status_code in [requests.codes.ok, requests.codes.created, requests.codes.accepted]:
            logger.error("Server returned status %s" % (str(r.status_code)), section)
            return [1, "%s: Failed to post-process - Server returned status %s" % (section, str(r.status_code)) ]

        if success:
            logger.postprocess("SUCCESS: This issue has been processed successfully",section)
            return [0, "%s: Successfully post-processed %s" % (section, inputName) ]
        else:
            logger.warning("The issue does not appear to have successfully processed. Please check your Logs",section)
            return [1, "%s: Failed to post-process - Returned log from %s was not as expected." % (section, section) ]
Exemplo n.º 54
0
    def processEpisode(self, section, dirName, inputName=None, failed=False, clientAgent="manual", download_id=None, inputCategory=None, failureLink=None):

        cfg = dict(core.CFG[section][inputCategory])

        host = cfg["host"]
        port = cfg["port"]
        ssl = int(cfg.get("ssl", 0))
        web_root = cfg.get("web_root", "")
        protocol = "https://" if ssl else "http://"
        username = cfg.get("username", "")
        password = cfg.get("password", "")
        apikey = cfg.get("apikey", "")

        if server_responding("{0}{1}:{2}{3}".format(protocol, host, port, web_root)):
            # auto-detect correct fork
            fork, fork_params = autoFork(section, inputCategory)
        elif not username and not apikey:
            logger.info('No SickBeard username or Sonarr apikey entered. Performing transcoder functions only')
            fork, fork_params = "None", {}
        else:
            logger.error("Server did not respond. Exiting", section)
            return [1, "{0}: Failed to post-process - {1} did not respond.".format(section, section)]

        delete_failed = int(cfg.get("delete_failed", 0))
        nzbExtractionBy = cfg.get("nzbExtractionBy", "Downloader")
        process_method = cfg.get("process_method")
        if clientAgent == core.TORRENT_CLIENTAGENT and core.USELINK == "move-sym":
            process_method = "symlink"
        remote_path = int(cfg.get("remote_path", 0))
        wait_for = int(cfg.get("wait_for", 2))
        force = int(cfg.get("force", 0))
        delete_on = int(cfg.get("delete_on", 0))
        ignore_subs = int(cfg.get("ignore_subs", 0))
        status = int(failed)
        if status > 0 and core.NOEXTRACTFAILED:
            extract = 0
        else:
            extract = int(cfg.get("extract", 0))
        # get importMode, default to "Move" for consistency with legacy
        importMode = cfg.get("importMode", "Move")

        if not os.path.isdir(dirName) and os.path.isfile(dirName):  # If the input directory is a file, assume single file download and split dir/name.
            dirName = os.path.split(os.path.normpath(dirName))[0]

        SpecificPath = os.path.join(dirName, str(inputName))
        cleanName = os.path.splitext(SpecificPath)
        if cleanName[1] == ".nzb":
            SpecificPath = cleanName[0]
        if os.path.isdir(SpecificPath):
            dirName = SpecificPath

        # Attempt to create the directory if it doesn't exist and ignore any
        # error stating that it already exists. This fixes a bug where SickRage
        # won't process the directory because it doesn't exist.
        try:
            os.makedirs(dirName)  # Attempt to create the directory
        except OSError as e:
            # Re-raise the error if it wasn't about the directory not existing
            if e.errno != errno.EEXIST:
                raise

        if 'process_method' not in fork_params or (clientAgent in ['nzbget', 'sabnzbd'] and nzbExtractionBy != "Destination"):
            if inputName:
                process_all_exceptions(inputName, dirName)
                inputName, dirName = convert_to_ascii(inputName, dirName)

            # Now check if tv files exist in destination. 
            if not listMediaFiles(dirName, media=True, audio=False, meta=False, archives=False):
                if listMediaFiles(dirName, media=False, audio=False, meta=False, archives=True) and extract:
                    logger.debug('Checking for archives to extract in directory: {0}'.format(dirName))
                    core.extractFiles(dirName)
                    inputName, dirName = convert_to_ascii(inputName, dirName)

            if listMediaFiles(dirName, media=True, audio=False, meta=False, archives=False):  # Check that a video exists. if not, assume failed.
                flatten(dirName)

        # Check video files for corruption
        good_files = 0
        num_files = 0
        for video in listMediaFiles(dirName, media=True, audio=False, meta=False, archives=False):
            num_files += 1
            if transcoder.isVideoGood(video, status):
                good_files += 1
                import_subs(video)
        if num_files > 0:
            if good_files == num_files and not status == 0:
                logger.info('Found Valid Videos. Setting status Success')
                status = 0
                failed = 0
            if good_files < num_files and status == 0:
                logger.info('Found corrupt videos. Setting status Failed')
                status = 1
                failed = 1
                if 'NZBOP_VERSION' in os.environ and os.environ['NZBOP_VERSION'][0:5] >= '14.0':
                    print('[NZB] MARK=BAD')
                if failureLink:
                    failureLink += '&corrupt=true'
        elif clientAgent == "manual":
            logger.warning("No media files found in directory {0} to manually process.".format(dirName), section)
            return [0, ""]  # Success (as far as this script is concerned)
        elif nzbExtractionBy == "Destination":
            logger.info("Check for media files ignored because nzbExtractionBy is set to Destination.")
            if int(failed) == 0:
                logger.info("Setting Status Success.")
                status = 0
                failed = 0
            else:
                logger.info("Downloader reported an error during download or verification. Processing this as a failed download.")
                status = 1
                failed = 1
        else:
            logger.warning("No media files found in directory {0}. Processing this as a failed download".format(dirName), section)
            status = 1
            failed = 1
            if 'NZBOP_VERSION' in os.environ and os.environ['NZBOP_VERSION'][0:5] >= '14.0':
                print('[NZB] MARK=BAD')

        if status == 0 and core.TRANSCODE == 1:  # only transcode successful downloads
            result, newDirName = transcoder.Transcode_directory(dirName)
            if result == 0:
                logger.debug("SUCCESS: Transcoding succeeded for files in {0}".format(dirName), section)
                dirName = newDirName

                chmod_directory = int(str(cfg.get("chmodDirectory", "0")), 8)
                logger.debug("Config setting 'chmodDirectory' currently set to {0}".format(oct(chmod_directory)), section)
                if chmod_directory:
                    logger.info("Attempting to set the octal permission of '{0}' on directory '{1}'".format(oct(chmod_directory), dirName), section)
                    core.rchmod(dirName, chmod_directory)
            else:
                logger.error("FAILED: Transcoding failed for files in {0}".format(dirName), section)
                return [1, "{0}: Failed to post-process - Transcoding failed".format(section)]

        # configure SB params to pass
        fork_params['quiet'] = 1
        fork_params['proc_type'] = 'manual'
        if inputName is not None:
            fork_params['nzbName'] = inputName

        for param in copy.copy(fork_params):
            if param == "failed":
                fork_params[param] = failed
                del fork_params['proc_type']
                if "type" in fork_params:
                    del fork_params['type']

            if param == "return_data":
                fork_params[param] = 0
                del fork_params['quiet']

            if param == "type":
                fork_params[param] = 'manual'
                if "proc_type" in fork_params:
                    del fork_params['proc_type']

            if param in ["dirName", "dir", "proc_dir", "process_directory", "path"]:
                fork_params[param] = dirName
                if remote_path:
                    fork_params[param] = remoteDir(dirName)

            if param == "process_method":
                if process_method:
                    fork_params[param] = process_method
                else:
                    del fork_params[param]

            if param in ["force", "force_replace"]:
                if force:
                    fork_params[param] = force
                else:
                    del fork_params[param]

            if param in ["delete_on", "delete"]:
                if delete_on:
                    fork_params[param] = delete_on
                else:
                    del fork_params[param]

            if param == "ignore_subs":
                if ignore_subs:
                    fork_params[param] = ignore_subs
                else:
                    del fork_params[param]

            if param == "force_next":
                fork_params[param] = 1

        # delete any unused params so we don't pass them to SB by mistake
        [fork_params.pop(k) for k, v in fork_params.items() if v is None]

        if status == 0:
            if section == "NzbDrone" and not apikey:
                logger.info('No Sonarr apikey entered. Processing completed.')
                return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
            logger.postprocess("SUCCESS: The download succeeded, sending a post-process request", section)
        else:
            core.FAILED = True
            if failureLink:
                reportNzb(failureLink, clientAgent)
            if 'failed' in fork_params:
                logger.postprocess("FAILED: The download failed. Sending 'failed' process request to {0} branch".format(fork), section)
            elif section == "NzbDrone":
                logger.postprocess("FAILED: The download failed. Sending failed download to {0} for CDH processing".format(fork), section)
                return [1, "{0}: Download Failed. Sending back to {1}".format(section, section)]  # Return as failed to flag this in the downloader.
            else:
                logger.postprocess("FAILED: The download failed. {0} branch does not handle failed downloads. Nothing to process".format(fork), section)
                if delete_failed and os.path.isdir(dirName) and not os.path.dirname(dirName) == dirName:
                    logger.postprocess("Deleting failed files and folder {0}".format(dirName), section)
                    rmDir(dirName)
                return [1, "{0}: Failed to post-process. {1} does not support failed downloads".format(section, section)]  # Return as failed to flag this in the downloader.

        url = None
        if section == "SickBeard":
            if apikey:
                url = "{0}{1}:{2}{3}/api/{4}/?cmd=postprocess".format(protocol, host, port, web_root, apikey)
            else:
                url = "{0}{1}:{2}{3}/home/postprocess/processEpisode".format(protocol, host, port, web_root)
        elif section == "NzbDrone":
            url = "{0}{1}:{2}{3}/api/command".format(protocol, host, port, web_root)
            url2 = "{0}{1}:{2}{3}/api/config/downloadClient".format(protocol, host, port, web_root)
            headers = {"X-Api-Key": apikey}
            # params = {'sortKey': 'series.title', 'page': 1, 'pageSize': 1, 'sortDir': 'asc'}
            if remote_path:
                logger.debug("remote_path: {0}".format(remoteDir(dirName)), section)
                data = {"name": "DownloadedEpisodesScan", "path": remoteDir(dirName), "downloadClientId": download_id, "importMode": importMode}
            else:
                logger.debug("path: {0}".format(dirName), section)
                data = {"name": "DownloadedEpisodesScan", "path": dirName, "downloadClientId": download_id, "importMode": importMode}
            if not download_id:
                data.pop("downloadClientId")
            data = json.dumps(data)

        try:
            if section == "SickBeard":
                logger.debug("Opening URL: {0} with params: {1}".format(url, fork_params), section)
                s = requests.Session()
                if not apikey and username and password:
                    login = "******".format(protocol, host, port, web_root)
                    login_params = {'username': username, 'password': password}
                    r = s.get(login, verify=False, timeout=(30,60))
                    if r.status_code == 401 and r.cookies.get('_xsrf'):
                        login_params['_xsrf'] = r.cookies.get('_xsrf')
                    s.post(login, data=login_params, stream=True, verify=False, timeout=(30, 60))
                r = s.get(url, auth=(username, password), params=fork_params, stream=True, verify=False, timeout=(30, 1800))
            elif section == "NzbDrone":
                logger.debug("Opening URL: {0} with data: {1}".format(url, data), section)
                r = requests.post(url, data=data, headers=headers, stream=True, verify=False, timeout=(30, 1800))
        except requests.ConnectionError:
            logger.error("Unable to open URL: {0}".format(url), section)
            return [1, "{0}: Failed to post-process - Unable to connect to {1}".format(section, section)]

        if r.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.accepted]:
            logger.error("Server returned status {0}".format(r.status_code), section)
            return [1, "{0}: Failed to post-process - Server returned status {1}".format(section, r.status_code)]

        Success = False
        Queued = False
        Started = False
        if section == "SickBeard":
            if apikey:
                if r.json()['result'] == 'success':
                    Success = True
            else:
                for line in r.iter_lines():
                    if line:
                        logger.postprocess("{0}".format(line), section)
                        if "Moving file from" in line:
                            inputName = os.path.split(line)[1]
                        if "added to the queue" in line:
                            Queued = True
                        if "Processing succeeded" in line or "Successfully processed" in line:
                            Success = True

            if Queued:
                time.sleep(60)
        elif section == "NzbDrone":
            try:
                res = json.loads(r.content)
                scan_id = int(res['id'])
                logger.debug("Scan started with id: {0}".format(scan_id), section)
                Started = True
            except Exception as e:
                logger.warning("No scan id was returned due to: {0}".format(e), section)
                scan_id = None
                Started = False

        if status != 0 and delete_failed and not os.path.dirname(dirName) == dirName:
            logger.postprocess("Deleting failed files and folder {0}".format(dirName), section)
            rmDir(dirName)

        if Success:
            return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
        elif section == "NzbDrone" and Started:
            n = 0
            params = {}
            url = "{0}/{1}".format(url, scan_id)
            while n < 6:  # set up wait_for minutes to see if command completes..
                time.sleep(10 * wait_for)
                command_status = self.command_complete(url, params, headers, section)
                if command_status and command_status in ['completed', 'failed']:
                    break
                n += 1
            if command_status:
                logger.debug("The Scan command return status: {0}".format(command_status), section)
            if not os.path.exists(dirName):
                logger.debug("The directory {0} has been removed. Renaming was successful.".format(dirName), section)
                return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
            elif command_status and command_status in ['completed']:
                logger.debug("The Scan command has completed successfully. Renaming was successful.", section)
                return [0, "{0}: Successfully post-processed {1}".format(section, inputName)]
            elif command_status and command_status in ['failed']:
                logger.debug("The Scan command has failed. Renaming was not successful.", section)
                # return [1, "%s: Failed to post-process %s" % (section, inputName) ]
            if self.CDH(url2, headers, section=section):
                logger.debug("The Scan command did not return status completed, but complete Download Handling is enabled. Passing back to {0}.".format(section), section)
                return [status, "{0}: Complete DownLoad Handling is enabled. Passing back to {1}".format(section, section)]
            else:
                logger.warning("The Scan command did not return a valid status. Renaming was not successful.", section)
                return [1, "{0}: Failed to post-process {1}".format(section, inputName)]
        else:
            return [1, "{0}: Failed to post-process - Returned log from {1} was not as expected.".format(section, section)]  # We did not receive Success confirmation.
Exemplo n.º 55
0
    def process(self, section, dirName, inputName=None, status=0, clientAgent="manual", inputCategory=None):
        status = int(status)

        host = core.CFG[section][inputCategory]["host"]
        port = core.CFG[section][inputCategory]["port"]
        apikey = core.CFG[section][inputCategory]["apikey"]
        wait_for = int(core.CFG[section][inputCategory]["wait_for"])

        try:
            ssl = int(core.CFG[section][inputCategory]["ssl"])
        except:
            ssl = 0
        try:
            web_root = core.CFG[section][inputCategory]["web_root"]
        except:
            web_root = ""
        try:
            remote_path = int(core.CFG[section][inputCategory]["remote_path"])
        except:
            remote_path = 0
        try:
            extract = int(core.CFG[section][inputCategory]["extract"])
        except:
            extract = 0

        if ssl:
            protocol = "https://"
        else:
            protocol = "http://"


        url = "%s%s:%s%s/api" % (protocol,host,port,web_root)
        if not server_responding(url):
            logger.error("Server did not respond. Exiting", section)
            return [1, "%s: Failed to post-process - %s did not respond." % (section, section) ]

        if not os.path.isdir(dirName) and os.path.isfile(dirName): # If the input directory is a file, assume single file download and split dir/name.
            dirName = os.path.split(os.path.normpath(dirName))[0]

        SpecificPath = os.path.join(dirName, str(inputName))
        cleanName = os.path.splitext(SpecificPath)
        if cleanName[1] == ".nzb":
            SpecificPath = cleanName[0]
        if os.path.isdir(SpecificPath):
            dirName = SpecificPath

        process_all_exceptions(inputName, dirName)
        inputName, dirName = convert_to_ascii(inputName, dirName)

        if not listMediaFiles(dirName, media=False, audio=True, meta=False, archives=False) and listMediaFiles(dirName, media=False, audio=False, meta=False, archives=True) and extract:
            logger.debug('Checking for archives to extract in directory: %s' % (dirName))
            core.extractFiles(dirName)
            inputName, dirName = convert_to_ascii(inputName, dirName)

        if listMediaFiles(dirName, media=False, audio=True, meta=False, archives=False) and status:
            logger.info("Status shown as failed from Downloader, but %s valid video files found. Setting as successful." % (str(good_files)), section)
            status = 0

        if status == 0:

            params = {}
            params['apikey'] = apikey
            params['cmd'] = "forceProcess"

            params['dir'] = os.path.dirname(dirName)
            if remote_path:
                params['dir'] = remoteDir(os.path.dirname(dirName))

            release_status = self.get_status(url, apikey, dirName)
            if not release_status:
                logger.error("Could not find a status for %s, is it in the wanted list ?" % (inputName),section)

            logger.debug("Opening URL: %s with PARAMS: %s" % (url, params), section)

            try:
                r = requests.get(url, params=params, verify=False, timeout=(30, 600))
            except requests.ConnectionError:
                logger.error("Unable to open URL %s" % (url) ,section)
                return [1, "%s: Failed to post-process - Unable to connect to %s" % (section, section) ]

            logger.debug("Result: %s" % (r.text),section)

            if not r.status_code in [requests.codes.ok, requests.codes.created, requests.codes.accepted]:
                logger.error("Server returned status %s" % (str(r.status_code)), section)
                return [1, "%s: Failed to post-process - Server returned status %s" % (section, str(r.status_code)) ]
            elif r.text == "OK":
                logger.postprocess("SUCCESS: Post-Processing started for %s in folder %s ..." % (inputName, dirName),section) 
            else:
                logger.error("FAILED: Post-Processing has NOT started for %s in folder %s. exiting!" % (inputName, dirName),section)
                return [1, "%s: Failed to post-process - Returned log from %s was not as expected." % (section, section) ]

        else:
            logger.warning("FAILED DOWNLOAD DETECTED", section)
            return [1, "%s: Failed to post-process. %s does not support failed downloads" % (section, section) ]

        # we will now wait for this album to be processed before returning to TorrentToMedia and unpausing.
        timeout = time.time() + 60 * wait_for
        while (time.time() < timeout):
            current_status = self.get_status(url, apikey, dirName)
            if current_status is not None and current_status != release_status:  # Something has changed. CPS must have processed this movie.
                logger.postprocess("SUCCESS: This release is now marked as status [%s]" % (current_status),section)
                return [0, "%s: Successfully post-processed %s" % (section, inputName) ]
            if not os.path.isdir(dirName):
                logger.postprocess("SUCCESS: The input directory %s has been removed Processing must have finished." % (dirName),section)
                return [0, "%s: Successfully post-processed %s" % (section, inputName) ]
            time.sleep(10 * wait_for)

        # The status hasn't changed. uTorrent can resume seeding now.
        logger.warning("The music album does not appear to have changed status after %s minutes. Please check your Logs" % (wait_for),section)
        return [1, "%s: Failed to post-process - No change in wanted status" % (section) ]
Exemplo n.º 56
0
    def process(self, section, dirName, inputName=None, status=0, clientAgent="manual", download_id="", inputCategory=None, failureLink=None):

        host = core.CFG[section][inputCategory]["host"]
        port = core.CFG[section][inputCategory]["port"]
        apikey = core.CFG[section][inputCategory]["apikey"]
        method = core.CFG[section][inputCategory]["method"]
        delete_failed = int(core.CFG[section][inputCategory]["delete_failed"])
        wait_for = int(core.CFG[section][inputCategory]["wait_for"])

        try:
            ssl = int(core.CFG[section][inputCategory]["ssl"])
        except:
            ssl = 0
        try:
            web_root = core.CFG[section][inputCategory]["web_root"]
        except:
            web_root = ""
        try:
            remote_path = int(core.CFG[section][inputCategory]["remote_path"])
        except:
            remote_path = 0
        try:
            extract = int(core.CFG[section][inputCategory]["extract"])
        except:
            extract = 0

        if ssl:
            protocol = "https://"
        else:
            protocol = "http://"

        baseURL = "%s%s:%s%s/api/%s" % (protocol, host, port, web_root, apikey)
        if not server_responding(baseURL):
            logger.error("Server did not respond. Exiting", section)
            return [1, "%s: Failed to post-process - %s did not respond." % (section, section) ]

        imdbid = find_imdbid(dirName, inputName)
        release = self.get_release(baseURL, imdbid, download_id)

        # pull info from release found if available
        release_id = None
        media_id = None
        downloader = None
        release_status_old = None
        if release and imdbid:
            try:
                release_id = release.keys()[0]
                media_id = release[release_id]['media_id']
                download_id = release[release_id]['download_info']['id']
                downloader = release[release_id]['download_info']['downloader']
                release_status_old = release[release_id]['status']
            except:
                pass

        if not os.path.isdir(dirName) and os.path.isfile(dirName): # If the input directory is a file, assume single file download and split dir/name.
            dirName = os.path.split(os.path.normpath(dirName))[0]

        SpecificPath = os.path.join(dirName, str(inputName))
        cleanName = os.path.splitext(SpecificPath)
        if cleanName[1] == ".nzb":
            SpecificPath = cleanName[0]
        if os.path.isdir(SpecificPath):
            dirName = SpecificPath

        process_all_exceptions(inputName, dirName)
        inputName, dirName = convert_to_ascii(inputName, dirName)

        if not listMediaFiles(dirName, media=True, audio=False, meta=False, archives=False) and listMediaFiles(dirName, media=False, audio=False, meta=False, archives=True) and extract:
            logger.debug('Checking for archives to extract in directory: %s' % (dirName))
            core.extractFiles(dirName)
            inputName, dirName = convert_to_ascii(inputName, dirName)

        good_files = 0
        num_files = 0
        # Check video files for corruption
        status = int(status)
        for video in listMediaFiles(dirName, media=True, audio=False, meta=False, archives=False):
            num_files += 1
            if transcoder.isVideoGood(video, status):
                import_subs(video)
                good_files += 1
        if num_files > 0 and good_files == num_files:
            if status:
                logger.info("Status shown as failed from Downloader, but %s valid video files found. Setting as success." % (str(good_files)), section)
                status = 0
        elif num_files > 0 and good_files < num_files:
            logger.info("Status shown as success from Downloader, but corrupt video files found. Setting as failed.", section)
            if os.environ.has_key('NZBOP_VERSION') and os.environ['NZBOP_VERSION'][0:5] >= '14.0':
                print('[NZB] MARK=BAD')
            if failureLink:
                failureLink = failureLink + '&corrupt=true'
            status = 1
        elif clientAgent == "manual":
            logger.warning("No media files found in directory %s to manually process." % (dirName), section)
            return [0, ""]  # Success (as far as this script is concerned)
        else:
            logger.warning("No media files found in directory %s. Processing this as a failed download" % (dirName), section)
            status = 1
            if os.environ.has_key('NZBOP_VERSION') and os.environ['NZBOP_VERSION'][0:5] >= '14.0':
                print('[NZB] MARK=BAD')

        if status == 0:
            if core.TRANSCODE == 1:
                result, newDirName = transcoder.Transcode_directory(dirName)
                if result == 0:
                    logger.debug("Transcoding succeeded for files in %s" % (dirName), section)
                    dirName = newDirName
                else:
                    logger.error("Transcoding failed for files in %s" % (dirName), section)
                    return [1, "%s: Failed to post-process - Transcoding failed" % (section) ]
            for video in listMediaFiles(dirName, media=True, audio=False, meta=False, archives=False): 
                if not release and not ".cp(tt" in video and imdbid:
                    videoName, videoExt = os.path.splitext(video)
                    video2 = "%s.cp(%s)%s" % (videoName, imdbid, videoExt)
                    logger.debug('Renaming: %s to: %s' % (video, video2))
                    os.rename(video, video2)        

            params = {}
            if download_id:
                params['downloader'] = downloader or clientAgent
                params['download_id'] = download_id

            params['media_folder'] = dirName
            if remote_path:
                params['media_folder'] = remoteDir(dirName)

            if method == "manage":
                command = "/manage.update"
                params = {}
            else:
                command = "/renamer.scan"

            url = "%s%s" % (baseURL, command)

            logger.debug("Opening URL: %s with PARAMS: %s" % (url, params), section)

            logger.postprocess("Starting %s scan for %s" % (method, inputName), section)

            try:
                r = requests.get(url, params=params, verify=False, timeout=(30, 1800))
            except requests.ConnectionError:
                logger.error("Unable to open URL", section)
                return [1, "%s: Failed to post-process - Unable to connect to %s" % (section, section) ]

            result = r.json()
            if not r.status_code in [requests.codes.ok, requests.codes.created, requests.codes.accepted]:
                logger.error("Server returned status %s" % (str(r.status_code)), section)
                return [1, "%s: Failed to post-process - Server returned status %s" % (section, str(r.status_code)) ]
            elif result['success']:
                logger.postprocess("SUCCESS: Finished %s scan for folder %s" % (method, dirName), section)
                if method == "manage":
                    return [0, "%s: Successfully post-processed %s" % (section, inputName) ]
            else:
                logger.error("FAILED: %s scan was unable to finish for folder %s. exiting!" % (method, dirName),
                             section)
                return [1, "%s: Failed to post-process - Server did not return success" % (section) ]

        else:
            logger.postprocess("FAILED DOWNLOAD DETECTED FOR %s" % (inputName), section)
            if failureLink:
                reportNzb(failureLink, clientAgent)

            if delete_failed and os.path.isdir(dirName) and not os.path.dirname(dirName) == dirName:
                logger.postprocess("Deleting failed files and folder %s" % dirName, section)
                rmDir(dirName)

            if not release_id and not media_id:
                logger.error("Could not find a downloaded movie in the database matching %s, exiting!" % inputName,
                             section)
                return [1, "%s: Failed to post-process - Failed download not found in %s" % (section, section) ]

            if release_id:
                logger.postprocess("Setting failed release %s to ignored ..." % (inputName), section)

                url = baseURL + "/release.ignore"
                params = {'id': release_id}

                logger.debug("Opening URL: %s with PARAMS: %s" % (url, params), section)

                try:
                    r = requests.get(url, params=params, verify=False, timeout=(30, 120))
                except requests.ConnectionError:
                    logger.error("Unable to open URL %s" % (url), section)
                    return [1, "%s: Failed to post-process - Unable to connect to %s" % (section, section) ]

                result = r.json()
                if not r.status_code in [requests.codes.ok, requests.codes.created, requests.codes.accepted]:
                    logger.error("Server returned status %s" % (str(r.status_code)), section)
                    return [1, "%s: Failed to post-process - Server returned status %s" % (section, str(r.status_code)) ]
                elif result['success']:
                    logger.postprocess("SUCCESS: %s has been set to ignored ..." % (inputName), section)
                else:
                    logger.warning("FAILED: Unable to set %s to ignored!" % (inputName), section)
                    return [1, "%s: Failed to post-process - Unable to set %s to ignored" % (section, inputName) ]

            logger.postprocess("Trying to snatch the next highest ranked release.", section)

            url = "%s/movie.searcher.try_next" % (baseURL)
            logger.debug("Opening URL: %s" % (url), section)

            try:
                r = requests.get(url, params={'media_id': media_id}, verify=False, timeout=(30, 600))
            except requests.ConnectionError:
                logger.error("Unable to open URL %s" % (url), section)
                return [1, "%s: Failed to post-process - Unable to connect to %s" % (section, section) ]

            result = r.json()
            if not r.status_code in [requests.codes.ok, requests.codes.created, requests.codes.accepted]:
                logger.error("Server returned status %s" % (str(r.status_code)), section)
                return [1, "%s: Failed to post-process - Server returned status %s" % (section, str(r.status_code)) ]
            elif result['success']:
                logger.postprocess("SUCCESS: Snatched the next highest release ...", section)
                return [0, "%s: Successfully snatched next highest release" % (section) ]
            else:
                logger.postprocess("SUCCESS: Unable to find a new release to snatch now. CP will keep searching!", section)
                return [0, "%s: No new release found now. %s will keep searching" % (section, section) ]

        # We added a release that was not in the wanted list, so confirm the rename was successful by finding this movie in media.list.
        if not release:
            download_id = None  # we don't want to filter new releases based on this.

        # we will now check to see if CPS has finished renaming before returning to TorrentToMedia and unpausing.
        timeout = time.time() + 60 * wait_for
        while (time.time() < timeout):  # only wait 2 (default) minutes, then return.
            logger.postprocess("Checking for status change, please stand by ...", section)
            release = self.get_release(baseURL, imdbid, download_id, release_id)
            if release:
                try:
                    if release_id is None and release_status_old is None:  # we didn't have a release before, but now we do.
                        logger.postprocess("SUCCESS: Movie %s has now been added to CouchPotato" % (imdbid), section)
                        return [0, "%s: Successfully post-processed %s" % (section, inputName) ]

                    release_status_new = release[release_id]['status']
                    if release_status_new != release_status_old:
                        logger.postprocess("SUCCESS: Release %s has now been marked with a status of [%s]" % (
                            inputName, str(release_status_new).upper()), section)
                        return [0, "%s: Successfully post-processed %s" % (section, inputName) ]
                except:
                    pass
            if not os.path.isdir(dirName):
                logger.postprocess("SUCCESS: Input Directory [%s] has been processed and removed" % (
                    dirName), section)
                return [0, "%s: Successfully post-processed %s" % (section, inputName) ]

            elif not listMediaFiles(dirName, media=True, audio=False, meta=False, archives=True):
                logger.postprocess("SUCCESS: Input Directory [%s] has no remaining media files. This has been fully processed." % (
                    dirName), section)
                return [0, "%s: Successfully post-processed %s" % (section, inputName) ]

            # pause and let CouchPotatoServer catch its breath
            time.sleep(10 * wait_for)

        # The status hasn't changed. We have waited wait_for (default 2) minutes, which is more than enough. uTorrent can resume seeding now.
        logger.warning(
            "%s does not appear to have changed status after %s minutes, Please check your logs." % (inputName, wait_for),
            section)
        return [1, "%s: Failed to post-process - No change in status" % (section) ]
Exemplo n.º 57
0
def autoFork(section, inputCategory):
    # auto-detect correct fork
    # config settings
    try:
        host = core.CFG[section][inputCategory]["host"]
        port = core.CFG[section][inputCategory]["port"]
    except:
        host = None
        port = None

    try:
        username = core.CFG[section][inputCategory]["username"]
        password = core.CFG[section][inputCategory]["password"]
    except:
        username = None
        password = None

    try:
        apikey = core.CFG[section][inputCategory]["apikey"]
    except:
        apikey = None

    try:
        ssl = int(core.CFG[section][inputCategory]["ssl"])
    except:
        ssl = 0

    try:
        web_root = core.CFG[section][inputCategory]["web_root"]
    except:
        web_root = ""

    try:
        fork = core.FORKS.items()[core.FORKS.keys().index(core.CFG[section][inputCategory]["fork"])]
    except:
        fork = "auto"

    if ssl:
        protocol = "https://"
    else:
        protocol = "http://"

    detected = False
    if section == "NzbDrone":
        logger.info("Attempting to verify %s fork" % inputCategory)
        url = "%s%s:%s%s/api/rootfolder" % (protocol, host, port, web_root)
        headers = {"X-Api-Key": apikey}
        try:
            r = requests.get(url, headers=headers, stream=True, verify=False)
        except requests.ConnectionError:
            logger.warning("Could not connect to %s:%s to verify fork!" % (section, inputCategory))

        if not r.ok:
            logger.warning("Connection to %s:%s failed! Check your configuration" % (section, inputCategory))

        fork = ["default", {}]

    elif fork == "auto":
        params = dict(core.ALL_FORKS)  # work on a copy so the shared fork defaults are not mutated below
        rem_params = []
        logger.info("Attempting to auto-detect %s fork" % inputCategory)
        # define the order to test. Default must be first since the default fork doesn't reject parameters.
        # then in order of most unique parameters.
        url = "%s%s:%s%s/home/postprocess/" % (protocol, host, port, web_root)
        # attempting to auto-detect fork
        try:
            if username and password:
                s = requests.Session()
                login = "******" % (protocol, host, port, web_root)
                login_params = {"username": username, "password": password}
                s.post(login, data=login_params, stream=True, verify=False)
                r = s.get(url, auth=(username, password), verify=False)
            else:
                r = requests.get(url, verify=False)
        except requests.ConnectionError:
            logger.info("Could not connect to %s:%s to perform auto-fork detection!" % (section, inputCategory))
            r = []
        if r and r.ok:
            for param in params:
                if not 'name="%s"' % (param) in r.text:
                    rem_params.append(param)
            for param in rem_params:
                params.pop(param)
            for fork in sorted(core.FORKS.iteritems(), reverse=False):
                if params == fork[1]:
                    detected = True
                    break
        if detected:
            logger.info("%s:%s fork auto-detection successful ..." % (section, inputCategory))
        elif rem_params:
            logger.info("%s:%s fork auto-detection found custom params %s" % (section, inputCategory, params))
            fork = ["custom", params]
        else:
            logger.info("%s:%s fork auto-detection failed" % (section, inputCategory))
            fork = core.FORKS.items()[core.FORKS.keys().index(core.FORK_DEFAULT)]

    logger.info("%s:%s fork set to %s" % (section, inputCategory, fork[0]))
    return fork[0], fork[1]
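
The auto-detection above works by fetching the /home/postprocess/ page and discarding every candidate parameter whose name="..." form field does not appear in the returned HTML; whatever set remains is then matched against the known forks. A minimal sketch of that probing step, assuming an unauthenticated endpoint (the candidate list and function name are illustrative, not core.ALL_FORKS):

import requests

# Illustrative candidates; the real list lives in core.ALL_FORKS.
CANDIDATE_PARAMS = ["quiet", "nzbName", "dir", "process_method", "force", "type"]

def detect_supported_params(url):
    """Return the candidate parameters whose form fields appear on the postprocess page."""
    try:
        r = requests.get(url, verify=False, timeout=(30, 60))
    except requests.ConnectionError:
        return None  # could not probe; the caller should fall back to the default fork
    if not r.ok:
        return None
    return [p for p in CANDIDATE_PARAMS if 'name="%s"' % p in r.text]
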
Exemplo n.º 58
0
def initialize(section=None):
    global NZBGET_POSTPROCESS_ERROR, NZBGET_POSTPROCESS_NONE, NZBGET_POSTPROCESS_PARCHECK, NZBGET_POSTPROCESS_SUCCESS, \
        NZBTOMEDIA_TIMEOUT, FORKS, FORK_DEFAULT, FORK_FAILED_TORRENT, FORK_FAILED, NOEXTRACTFAILED, \
        NZBTOMEDIA_BRANCH, NZBTOMEDIA_VERSION, NEWEST_VERSION, NEWEST_VERSION_STRING, VERSION_NOTIFY, SYS_ARGV, CFG, \
        SABNZB_NO_OF_ARGUMENTS, SABNZB_0717_NO_OF_ARGUMENTS, CATEGORIES, TORRENT_CLIENTAGENT, USELINK, OUTPUTDIRECTORY, \
        NOFLATTEN, UTORRENTPWD, UTORRENTUSR, UTORRENTWEBUI, DELUGEHOST, DELUGEPORT, DELUGEUSR, DELUGEPWD, VLEVEL, \
        TRANSMISSIONHOST, TRANSMISSIONPORT, TRANSMISSIONPWD, TRANSMISSIONUSR, COMPRESSEDCONTAINER, MEDIACONTAINER, \
        METACONTAINER, SECTIONS, ALL_FORKS, TEST_FILE, GENERALOPTS, LOG_GIT, GROUPS, SEVENZIP, CONCAT, VCRF, \
        __INITIALIZED__, AUTO_UPDATE, APP_FILENAME, USER_DELAY, APP_NAME, TRANSCODE, DEFAULTS, GIT_PATH, GIT_USER, \
        GIT_BRANCH, GIT_REPO, SYS_ENCODING, NZB_CLIENTAGENT, SABNZBDHOST, SABNZBDPORT, SABNZBDAPIKEY, \
        DUPLICATE, IGNOREEXTENSIONS, VEXTENSION, OUTPUTVIDEOPATH, PROCESSOUTPUT, VCODEC, VCODEC_ALLOW, VPRESET, \
        VFRAMERATE, LOG_DB, VBITRATE, VRESOLUTION, ALANGUAGE, AINCLUDE, ACODEC, ACODEC_ALLOW, ABITRATE, FAILED, \
        ACODEC2, ACODEC2_ALLOW, ABITRATE2, ACODEC3, ACODEC3_ALLOW, ABITRATE3, ALLOWSUBS, SEXTRACT, SEMBED, SLANGUAGES, \
        SINCLUDE, SUBSDIR, SCODEC, OUTPUTFASTSTART, OUTPUTQUALITYPERCENT, BURN, GETSUBS, HWACCEL, LOG_DIR, LOG_FILE, \
        NICENESS, LOG_DEBUG, FORCE_CLEAN, FFMPEG_PATH, FFMPEG, FFPROBE, AUDIOCONTAINER, EXTCONTAINER, TORRENT_CLASS, \
        DELETE_ORIGINAL, TORRENT_CHMOD_DIRECTORY, PASSWORDSFILE, USER_DELAY, USER_SCRIPT, USER_SCRIPT_CLEAN, USER_SCRIPT_MEDIAEXTENSIONS, \
        USER_SCRIPT_PARAM, USER_SCRIPT_RUNONCE, USER_SCRIPT_SUCCESSCODES, DOWNLOADINFO, CHECK_MEDIA, SAFE_MODE, \
        TORRENT_DEFAULTDIR, TORRENT_RESUME_ON_FAILURE, NZB_DEFAULTDIR, REMOTEPATHS, LOG_ENV, PID_FILE, MYAPP, ACHANNELS, ACHANNELS2, ACHANNELS3, \
        PLEXSSL, PLEXHOST, PLEXPORT, PLEXTOKEN, PLEXSEC, TORRENT_RESUME

    if __INITIALIZED__:
        return False

    if 'NTM_LOGFILE' in os.environ:
        LOG_FILE = os.environ['NTM_LOGFILE']
        LOG_DIR = os.path.split(LOG_FILE)[0]

    if not makeDir(LOG_DIR):
        print("No log folder, logging to screen only")

    MYAPP = RunningProcess()
    while MYAPP.alreadyrunning():
        print("Waiting for existing session to end")
        time.sleep(30)

    try:
        locale.setlocale(locale.LC_ALL, "")
        SYS_ENCODING = locale.getpreferredencoding()
    except (locale.Error, IOError):
        pass

    # For OSes that are poorly configured I'll just randomly force UTF-8
    if not SYS_ENCODING or SYS_ENCODING in ('ANSI_X3.4-1968', 'US-ASCII', 'ASCII'):
        SYS_ENCODING = 'UTF-8'

    if not hasattr(sys, "setdefaultencoding"):
        reload_module(sys)

    try:
        # pylint: disable=E1101
        # On non-unicode builds this will raise an AttributeError, if encoding type is not valid it throws a LookupError
        sys.setdefaultencoding(SYS_ENCODING)
    except:
        print('Sorry, you MUST add the nzbToMedia folder to the PYTHONPATH environment variable'
              '\nor find another way to force Python to use {codec} for string encoding.'.format
              (codec=SYS_ENCODING))
        if 'NZBOP_SCRIPTDIR' in os.environ:
            sys.exit(NZBGET_POSTPROCESS_ERROR)
        else:
            sys.exit(1)

    # init logging
    logger.ntm_log_instance.initLogging()

    # run migrate to convert old cfg to new style cfg plus fix any cfg missing values/options.
    if not config.migrate():
        logger.error("Unable to migrate config file {0}, exiting ...".format(CONFIG_FILE))
        if 'NZBOP_SCRIPTDIR' in os.environ:
            pass  # We will try and read config from Environment.
        else:
            sys.exit(-1)

    # run migrate to convert NzbGet data from old cfg style to new cfg style
    if 'NZBOP_SCRIPTDIR' in os.environ:
        CFG = config.addnzbget()

    else:  # load newly migrated config
        logger.info("Loading config from [{0}]".format(CONFIG_FILE))
        CFG = config()

    # Enable/Disable DEBUG Logging
    LOG_DEBUG = int(CFG['General']['log_debug'])
    LOG_DB = int(CFG['General']['log_db'])
    LOG_ENV = int(CFG['General']['log_env'])
    LOG_GIT = int(CFG['General']['log_git'])

    if LOG_ENV:
        for item in os.environ:
            logger.info("{0}: {1}".format(item, os.environ[item]), "ENVIRONMENT")

    # initialize the main SB database
    nzbToMediaDB.upgradeDatabase(nzbToMediaDB.DBConnection(), mainDB.InitialSchema)

    # Set Version and GIT variables
    NZBTOMEDIA_VERSION = '11.02'
    VERSION_NOTIFY = int(CFG['General']['version_notify'])
    AUTO_UPDATE = int(CFG['General']['auto_update'])
    GIT_REPO = 'nzbToMedia'
    GIT_PATH = CFG['General']['git_path']
    GIT_USER = CFG['General']['git_user'] or 'clinton-hall'
    GIT_BRANCH = CFG['General']['git_branch'] or 'master'
    FORCE_CLEAN = int(CFG["General"]["force_clean"])
    FFMPEG_PATH = CFG["General"]["ffmpeg_path"]
    CHECK_MEDIA = int(CFG["General"]["check_media"])
    SAFE_MODE = int(CFG["General"]["safe_mode"])
    NOEXTRACTFAILED = int(CFG["General"]["no_extract_failed"])

    # Check for updates via GitHUB
    if versionCheck.CheckVersion().check_for_new_version():
        if AUTO_UPDATE == 1:
            logger.info("Auto-Updating nzbToMedia, Please wait ...")
            updated = versionCheck.CheckVersion().update()
            if updated:
                # restart nzbToMedia
                try:
                    del MYAPP
                except:
                    pass
                restart()
            else:
                logger.error("Update wasn't successful, not restarting. Check your log for more information.")

    # Set Current Version
    logger.info('nzbToMedia Version:{version} Branch:{branch} ({system} {release})'.format
                (version=NZBTOMEDIA_VERSION, branch=GIT_BRANCH,
                 system=platform.system(), release=platform.release()))

    if int(CFG["WakeOnLan"]["wake"]) == 1:
        WakeUp()

    NZB_CLIENTAGENT = CFG["Nzb"]["clientAgent"]  # sabnzbd
    SABNZBDHOST = CFG["Nzb"]["sabnzbd_host"]
    SABNZBDPORT = int(CFG["Nzb"]["sabnzbd_port"] or 8080)  # defaults to accommodate NzbGet
    SABNZBDAPIKEY = CFG["Nzb"]["sabnzbd_apikey"]
    NZB_DEFAULTDIR = CFG["Nzb"]["default_downloadDirectory"]
    GROUPS = CFG["Custom"]["remove_group"]
    if isinstance(GROUPS, str):
        GROUPS = GROUPS.split(',')
    if GROUPS == ['']:
        GROUPS = None

    TORRENT_CLIENTAGENT = CFG["Torrent"]["clientAgent"]  # utorrent | deluge | transmission | rtorrent | vuze |other
    USELINK = CFG["Torrent"]["useLink"]  # no | hard | sym
    OUTPUTDIRECTORY = CFG["Torrent"]["outputDirectory"]  # /abs/path/to/complete/
    TORRENT_DEFAULTDIR = CFG["Torrent"]["default_downloadDirectory"]
    CATEGORIES = (CFG["Torrent"]["categories"])  # music,music_videos,pictures,software
    NOFLATTEN = (CFG["Torrent"]["noFlatten"])
    if isinstance(NOFLATTEN, str):
        NOFLATTEN = NOFLATTEN.split(',')
    if isinstance(CATEGORIES, str):
        CATEGORIES = CATEGORIES.split(',')
    DELETE_ORIGINAL = int(CFG["Torrent"]["deleteOriginal"])
    TORRENT_CHMOD_DIRECTORY = int(str(CFG["Torrent"]["chmodDirectory"]), 8)
    TORRENT_RESUME_ON_FAILURE = int(CFG["Torrent"]["resumeOnFailure"])
    TORRENT_RESUME = int(CFG["Torrent"]["resume"])
    UTORRENTWEBUI = CFG["Torrent"]["uTorrentWEBui"]  # http://localhost:8090/gui/
    UTORRENTUSR = CFG["Torrent"]["uTorrentUSR"]  # mysecretusr
    UTORRENTPWD = CFG["Torrent"]["uTorrentPWD"]  # mysecretpwr

    TRANSMISSIONHOST = CFG["Torrent"]["TransmissionHost"]  # localhost
    TRANSMISSIONPORT = int(CFG["Torrent"]["TransmissionPort"])
    TRANSMISSIONUSR = CFG["Torrent"]["TransmissionUSR"]  # mysecretusr
    TRANSMISSIONPWD = CFG["Torrent"]["TransmissionPWD"]  # mysecretpwr

    DELUGEHOST = CFG["Torrent"]["DelugeHost"]  # localhost
    DELUGEPORT = int(CFG["Torrent"]["DelugePort"])  # 8084
    DELUGEUSR = CFG["Torrent"]["DelugeUSR"]  # mysecretusr
    DELUGEPWD = CFG["Torrent"]["DelugePWD"]  # mysecretpwr

    REMOTEPATHS = CFG["Network"]["mount_points"] or []
    if REMOTEPATHS:
        if isinstance(REMOTEPATHS, list):
            REMOTEPATHS = ','.join(REMOTEPATHS)  # fix in case this imported as list.
        REMOTEPATHS = [tuple(item.split(',')) for item in
                       REMOTEPATHS.split('|')]  # /volume1/Public/,E:\|/volume2/share/,\\NAS\
        REMOTEPATHS = [(local.strip(), remote.strip()) for local, remote in
                       REMOTEPATHS]  # strip trailing and leading whitespaces
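        # Illustration: a mount_points value such as "/volume1/Public/,E:\|/volume2/share/,\\NAS\"
        # ends up as the (local, remote) pairs ("/volume1/Public/", "E:\") and ("/volume2/share/", "\\NAS\")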

    PLEXSSL = int(CFG["Plex"]["plex_ssl"])
    PLEXHOST = CFG["Plex"]["plex_host"]
    PLEXPORT = CFG["Plex"]["plex_port"]
    PLEXTOKEN = CFG["Plex"]["plex_token"]
    PLEXSEC = CFG["Plex"]["plex_sections"] or []
    if PLEXSEC:
        if isinstance(PLEXSEC, list):
            PLEXSEC = ','.join(PLEXSEC)  # fix in case this imported as list.
        PLEXSEC = [tuple(item.split(',')) for item in PLEXSEC.split('|')]

    devnull = open(os.devnull, 'w')
    try:
        subprocess.Popen(["nice"], stdout=devnull, stderr=devnull).communicate()
        NICENESS.extend(['nice', '-n{0}'.format(int(CFG["Posix"]["niceness"]))])
    except:
        pass
    try:
        subprocess.Popen(["ionice"], stdout=devnull, stderr=devnull).communicate()
        try:
            NICENESS.extend(['ionice', '-c{0}'.format(int(CFG["Posix"]["ionice_class"]))])
        except:
            pass
        try:
            if 'ionice' in NICENESS:
                NICENESS.extend(['-n{0}'.format(int(CFG["Posix"]["ionice_classdata"]))])
            else:
                NICENESS.extend(['ionice', '-n{0}'.format(int(CFG["Posix"]["ionice_classdata"]))])
        except:
            pass
    except:
        pass
    devnull.close()

    COMPRESSEDCONTAINER = [re.compile('.r\d{2}$', re.I),
                           re.compile('.part\d+.rar$', re.I),
                           re.compile('.rar$', re.I)]
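    # the built-in patterns above match split-archive names such as "file.r01", "file.part02.rar" and "file.rar"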
    COMPRESSEDCONTAINER += [re.compile('{0}$'.format(ext), re.I) for ext in CFG["Extensions"]["compressedExtensions"]]
    MEDIACONTAINER = CFG["Extensions"]["mediaExtensions"]
    AUDIOCONTAINER = CFG["Extensions"]["audioExtensions"]
    METACONTAINER = CFG["Extensions"]["metaExtensions"]  # .nfo,.sub,.srt
    if isinstance(COMPRESSEDCONTAINER, str):
        COMPRESSEDCONTAINER = COMPRESSEDCONTAINER.split(',')
    if isinstance(MEDIACONTAINER, str):
        MEDIACONTAINER = MEDIACONTAINER.split(',')
    if isinstance(AUDIOCONTAINER, str):
        AUDIOCONTAINER = AUDIOCONTAINER.split(',')
    if isinstance(METACONTAINER, str):
        METACONTAINER = METACONTAINER.split(',')

    GETSUBS = int(CFG["Transcoder"]["getSubs"])
    TRANSCODE = int(CFG["Transcoder"]["transcode"])
    DUPLICATE = int(CFG["Transcoder"]["duplicate"])
    CONCAT = int(CFG["Transcoder"]["concat"])
    IGNOREEXTENSIONS = (CFG["Transcoder"]["ignoreExtensions"])
    if isinstance(IGNOREEXTENSIONS, str):
        IGNOREEXTENSIONS = IGNOREEXTENSIONS.split(',')
    OUTPUTFASTSTART = int(CFG["Transcoder"]["outputFastStart"])
    GENERALOPTS = (CFG["Transcoder"]["generalOptions"])
    if isinstance(GENERALOPTS, str):
        GENERALOPTS = GENERALOPTS.split(',')
    if GENERALOPTS == ['']:
        GENERALOPTS = []
    if '-fflags' not in GENERALOPTS:
        GENERALOPTS.append('-fflags')
    if '+genpts' not in GENERALOPTS:
        GENERALOPTS.append('+genpts')
    try:
        OUTPUTQUALITYPERCENT = int(CFG["Transcoder"]["outputQualityPercent"])
    except:
        pass
    OUTPUTVIDEOPATH = CFG["Transcoder"]["outputVideoPath"]
    PROCESSOUTPUT = int(CFG["Transcoder"]["processOutput"])
    ALANGUAGE = CFG["Transcoder"]["audioLanguage"]
    AINCLUDE = int(CFG["Transcoder"]["allAudioLanguages"])
    SLANGUAGES = CFG["Transcoder"]["subLanguages"]
    if isinstance(SLANGUAGES, str):
        SLANGUAGES = SLANGUAGES.split(',')
    if SLANGUAGES == ['']:
        SLANGUAGES = []
    SINCLUDE = int(CFG["Transcoder"]["allSubLanguages"])
    SEXTRACT = int(CFG["Transcoder"]["extractSubs"])
    SEMBED = int(CFG["Transcoder"]["embedSubs"])
    SUBSDIR = CFG["Transcoder"]["externalSubDir"]
    VEXTENSION = CFG["Transcoder"]["outputVideoExtension"].strip()
    VCODEC = CFG["Transcoder"]["outputVideoCodec"].strip()
    VCODEC_ALLOW = CFG["Transcoder"]["VideoCodecAllow"].strip()
    if isinstance(VCODEC_ALLOW, str):
        VCODEC_ALLOW = VCODEC_ALLOW.split(',')
    if VCODEC_ALLOW == ['']:
        VCODEC_ALLOW = []
    VPRESET = CFG["Transcoder"]["outputVideoPreset"].strip()
    try:
        VFRAMERATE = float(CFG["Transcoder"]["outputVideoFramerate"].strip())
    except:
        pass
    try:
        VCRF = int(CFG["Transcoder"]["outputVideoCRF"].strip())
    except:
        pass
    try:
        VLEVEL = CFG["Transcoder"]["outputVideoLevel"].strip()
    except:
        pass
    try:
        VBITRATE = int((CFG["Transcoder"]["outputVideoBitrate"].strip()).replace('k', '000'))
    except:
        pass
    VRESOLUTION = CFG["Transcoder"]["outputVideoResolution"]
    ACODEC = CFG["Transcoder"]["outputAudioCodec"].strip()
    ACODEC_ALLOW = CFG["Transcoder"]["AudioCodecAllow"].strip()
    if isinstance(ACODEC_ALLOW, str):
        ACODEC_ALLOW = ACODEC_ALLOW.split(',')
    if ACODEC_ALLOW == ['']:
        ACODEC_ALLOW = []
    try:
        ACHANNELS = int(CFG["Transcoder"]["outputAudioChannels"].strip())
    except:
        pass
    try:
        ABITRATE = int((CFG["Transcoder"]["outputAudioBitrate"].strip()).replace('k', '000'))
    except:
        pass
    ACODEC2 = CFG["Transcoder"]["outputAudioTrack2Codec"].strip()
    ACODEC2_ALLOW = CFG["Transcoder"]["AudioCodec2Allow"].strip()
    if isinstance(ACODEC2_ALLOW, str):
        ACODEC2_ALLOW = ACODEC2_ALLOW.split(',')
    if ACODEC2_ALLOW == ['']:
        ACODEC2_ALLOW = []
    try:
        ACHANNELS2 = int(CFG["Transcoder"]["outputAudioTrack2Channels"].strip())
    except:
        pass
    try:
        ABITRATE2 = int((CFG["Transcoder"]["outputAudioTrack2Bitrate"].strip()).replace('k', '000'))
    except:
        pass
    ACODEC3 = CFG["Transcoder"]["outputAudioOtherCodec"].strip()
    ACODEC3_ALLOW = CFG["Transcoder"]["AudioOtherCodecAllow"].strip()
    if isinstance(ACODEC3_ALLOW, str):
        ACODEC3_ALLOW = ACODEC3_ALLOW.split(',')
    if ACODEC3_ALLOW == ['']:
        ACODEC3_ALLOW = []
    try:
        ACHANNELS3 = int(CFG["Transcoder"]["outputAudioOtherChannels"].strip())
    except:
        pass
    try:
        ABITRATE3 = int((CFG["Transcoder"]["outputAudioOtherBitrate"].strip()).replace('k', '000'))
    except:
        pass
    SCODEC = CFG["Transcoder"]["outputSubtitleCodec"].strip()
    BURN = int(CFG["Transcoder"]["burnInSubtitle"].strip())
    DEFAULTS = CFG["Transcoder"]["outputDefault"].strip()
    HWACCEL = int(CFG["Transcoder"]["hwAccel"])

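    # Containers that support embedded subtitles, codec name aliases used to expand
    # the *_ALLOW lists, and per-device transcode presets selectable via outputDefault.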
    allow_subs = ['.mkv', '.mp4', '.m4v', '.asf', '.wma', '.wmv']
    codec_alias = {
        'libx264': ['libx264', 'h264', 'h.264', 'AVC', 'MPEG-4'],
        'libmp3lame': ['libmp3lame', 'mp3'],
        'libfaac': ['libfaac', 'aac', 'faac']
    }
    transcode_defaults = {
        'iPad':{
            'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None,
            'VRESOLUTION':None,'VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
            'ACODEC':'aac','ACODEC_ALLOW':['libfaac'],'ABITRATE':None, 'ACHANNELS':2,
            'ACODEC2':'ac3','ACODEC2_ALLOW':['ac3'],'ABITRATE2':None, 'ACHANNELS2':6,
            'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None,
            'SCODEC':'mov_text'
            },
        'iPad-1080p':{
            'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None,
            'VRESOLUTION':'1920:1080','VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
            'ACODEC':'aac','ACODEC_ALLOW':['libfaac'],'ABITRATE':None, 'ACHANNELS':2,
            'ACODEC2':'ac3','ACODEC2_ALLOW':['ac3'],'ABITRATE2':None, 'ACHANNELS2':6,
            'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None,
            'SCODEC':'mov_text'
            },
        'iPad-720p':{
            'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None,
            'VRESOLUTION':'1280:720','VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
            'ACODEC':'aac','ACODEC_ALLOW':['libfaac'],'ABITRATE':None, 'ACHANNELS':2,
            'ACODEC2':'ac3','ACODEC2_ALLOW':['ac3'],'ABITRATE2':None, 'ACHANNELS2':6,
            'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None,
            'SCODEC':'mov_text'
            },
        'Apple-TV':{
            'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None,
            'VRESOLUTION':'1280:720','VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
            'ACODEC':'ac3','ACODEC_ALLOW':['ac3'],'ABITRATE':None, 'ACHANNELS':6,
            'ACODEC2':'aac','ACODEC2_ALLOW':['libfaac'],'ABITRATE2':None, 'ACHANNELS2':2,
            'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None,
            'SCODEC':'mov_text'
            },
        'iPod':{
            'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None,
            'VRESOLUTION':'1280:720','VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
            'ACODEC':'aac','ACODEC_ALLOW':['libfaac'],'ABITRATE':128000, 'ACHANNELS':2,
            'ACODEC2':None,'ACODEC2_ALLOW':[],'ABITRATE2':None, 'ACHANNELS2':None,
            'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None,
            'SCODEC':'mov_text'
            },
        'iPhone':{
            'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None,
            'VRESOLUTION':'460:320','VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
            'ACODEC':'aac','ACODEC_ALLOW':['libfaac'],'ABITRATE':128000, 'ACHANNELS':2,
            'ACODEC2':None,'ACODEC2_ALLOW':[],'ABITRATE2':None, 'ACHANNELS2':None,
            'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None,
            'SCODEC':'mov_text'
            },
        'PS3':{
            'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None,
            'VRESOLUTION':None,'VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
            'ACODEC':'ac3','ACODEC_ALLOW':['ac3'],'ABITRATE':None, 'ACHANNELS':6,
            'ACODEC2':'aac','ACODEC2_ALLOW':['libfaac'],'ABITRATE2':None, 'ACHANNELS2':2,
            'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None,
            'SCODEC':'mov_text'
            },
        'xbox':{
            'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None,
            'VRESOLUTION':None,'VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
            'ACODEC':'ac3','ACODEC_ALLOW':['ac3'],'ABITRATE':None, 'ACHANNELS':6,
            'ACODEC2':None,'ACODEC2_ALLOW':[],'ABITRATE2':None, 'ACHANNELS2':None,
            'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None,
            'SCODEC':'mov_text'
            },
        'Roku-480p':{
            'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None,
            'VRESOLUTION':None,'VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
            'ACODEC':'aac','ACODEC_ALLOW':['libfaac'],'ABITRATE':128000, 'ACHANNELS':2,
            'ACODEC2':'ac3','ACODEC2_ALLOW':['ac3'],'ABITRATE2':None, 'ACHANNELS2':6,
            'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None,
            'SCODEC':'mov_text'
            },
        'Roku-720p':{
            'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None,
            'VRESOLUTION':None,'VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
            'ACODEC':'aac','ACODEC_ALLOW':['libfaac'],'ABITRATE':128000, 'ACHANNELS':2,
            'ACODEC2':'ac3','ACODEC2_ALLOW':['ac3'],'ABITRATE2':None, 'ACHANNELS2':6,
            'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None,
            'SCODEC':'mov_text'
            },
        'Roku-1080p':{
            'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None,
            'VRESOLUTION':None,'VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
            'ACODEC':'aac','ACODEC_ALLOW':['libfaac'],'ABITRATE':160000, 'ACHANNELS':2,
            'ACODEC2':'ac3','ACODEC2_ALLOW':['ac3'],'ABITRATE2':None, 'ACHANNELS2':6,
            'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None,
            'SCODEC':'mov_text'
            },
        'mkv':{
            'VEXTENSION':'.mkv','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':None,'VLEVEL':None,
            'VRESOLUTION':None,'VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4', 'mpeg2video'],
            'ACODEC':'dts','ACODEC_ALLOW':['libfaac', 'dts', 'ac3', 'mp2', 'mp3'],'ABITRATE':None, 'ACHANNELS':8,
            'ACODEC2':None,'ACODEC2_ALLOW':[],'ABITRATE2':None, 'ACHANNELS2':None,
            'ACODEC3':'ac3','ACODEC3_ALLOW':['libfaac', 'dts', 'ac3', 'mp2', 'mp3'],'ABITRATE3':None, 'ACHANNELS3':8,
            'SCODEC':'mov_text'
            },
        'mp4-scene-release':{
            'VEXTENSION':'.mp4','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':None,'VCRF':19,'VLEVEL':'3.1',
            'VRESOLUTION':None,'VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4', 'mpeg2video'],
            'ACODEC':'dts','ACODEC_ALLOW':['libfaac', 'dts', 'ac3', 'mp2', 'mp3'],'ABITRATE':None, 'ACHANNELS':8,
            'ACODEC2':None,'ACODEC2_ALLOW':[],'ABITRATE2':None, 'ACHANNELS2':None,
            'ACODEC3':'ac3','ACODEC3_ALLOW':['libfaac', 'dts', 'ac3', 'mp2', 'mp3'],'ABITRATE3':None, 'ACHANNELS3':8,
            'SCODEC':'mov_text'
            },
        'MKV-SD':{
            'VEXTENSION':'.mkv','VCODEC':'libx264','VPRESET':None,'VFRAMERATE':None,'VBITRATE':'1200k','VCRF':None,'VLEVEL':None,
            'VRESOLUTION':'720:-1','VCODEC_ALLOW':['libx264', 'h264', 'h.264', 'AVC', 'avc', 'mpeg4', 'msmpeg4', 'MPEG-4'],
            'ACODEC':'aac','ACODEC_ALLOW':['libfaac'],'ABITRATE':128000, 'ACHANNELS':2,
            'ACODEC2':'ac3','ACODEC2_ALLOW':['ac3'],'ABITRATE2':None, 'ACHANNELS2':6,
            'ACODEC3':None,'ACODEC3_ALLOW':[],'ABITRATE3':None, 'ACHANNELS3':None,
            'SCODEC':'mov_text'
            }
    }
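    # If a known preset was selected, it overrides the individual settings read above.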
    if DEFAULTS and DEFAULTS in transcode_defaults:
        VEXTENSION = transcode_defaults[DEFAULTS]['VEXTENSION']
        VCODEC = transcode_defaults[DEFAULTS]['VCODEC']
        VPRESET = transcode_defaults[DEFAULTS]['VPRESET']
        VFRAMERATE = transcode_defaults[DEFAULTS]['VFRAMERATE']
        VBITRATE = transcode_defaults[DEFAULTS]['VBITRATE']
        VRESOLUTION = transcode_defaults[DEFAULTS]['VRESOLUTION']
        VCRF = transcode_defaults[DEFAULTS]['VCRF']
        VLEVEL = transcode_defaults[DEFAULTS]['VLEVEL']
        VCODEC_ALLOW = transcode_defaults[DEFAULTS]['VCODEC_ALLOW']
        ACODEC = transcode_defaults[DEFAULTS]['ACODEC']
        ACODEC_ALLOW = transcode_defaults[DEFAULTS]['ACODEC_ALLOW']
        ACHANNELS = transcode_defaults[DEFAULTS]['ACHANNELS']
        ABITRATE = transcode_defaults[DEFAULTS]['ABITRATE']
        ACODEC2 = transcode_defaults[DEFAULTS]['ACODEC2']
        ACODEC2_ALLOW = transcode_defaults[DEFAULTS]['ACODEC2_ALLOW']
        ACHANNELS2 = transcode_defaults[DEFAULTS]['ACHANNELS2']
        ABITRATE2 = transcode_defaults[DEFAULTS]['ABITRATE2']
        ACODEC3 = transcode_defaults[DEFAULTS]['ACODEC3']
        ACODEC3_ALLOW = transcode_defaults[DEFAULTS]['ACODEC3_ALLOW']
        ACHANNELS3 = transcode_defaults[DEFAULTS]['ACHANNELS3']
        ABITRATE3 = transcode_defaults[DEFAULTS]['ABITRATE3']
        SCODEC = transcode_defaults[DEFAULTS]['SCODEC']
    transcode_defaults = {}  # clear memory
    if DEFAULTS in ['mp4-scene-release'] and not OUTPUTQUALITYPERCENT:
        OUTPUTQUALITYPERCENT = 100

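    # Flag subtitle-capable containers and expand each codec allow-list with known
    # aliases so alternate names for the same codec still match.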
    if VEXTENSION in allow_subs:
        ALLOWSUBS = 1
    if not VCODEC_ALLOW and VCODEC:
        VCODEC_ALLOW.extend([VCODEC])
    for codec in VCODEC_ALLOW:
        if codec in codec_alias:
            extra = [item for item in codec_alias[codec] if item not in VCODEC_ALLOW]
            VCODEC_ALLOW.extend(extra)
    if not ACODEC_ALLOW and ACODEC:
        ACODEC_ALLOW.extend([ACODEC])
    for codec in ACODEC_ALLOW:
        if codec in codec_alias:
            extra = [item for item in codec_alias[codec] if item not in ACODEC_ALLOW]
            ACODEC_ALLOW.extend(extra)
    if not ACODEC2_ALLOW and ACODEC2:
        ACODEC2_ALLOW.extend([ACODEC2])
    for codec in ACODEC2_ALLOW:
        if codec in codec_alias:
            extra = [item for item in codec_alias[codec] if item not in ACODEC2_ALLOW]
            ACODEC2_ALLOW.extend(extra)
    if not ACODEC3_ALLOW and ACODEC3:
        ACODEC3_ALLOW.extend([ACODEC3])
    for codec in ACODEC3_ALLOW:
        if codec in codec_alias:
            extra = [item for item in codec_alias[codec] if item not in ACODEC3_ALLOW]
            ACODEC3_ALLOW.extend(extra)
    codec_alias = {}  # clear memory

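    # Optional password list, presumably used when extracting protected archives.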
    PASSWORDSFILE = CFG["passwords"]["PassWordFile"]

    # Setup FFMPEG, FFPROBE and SEVENZIP locations
    if platform.system() == 'Windows':
        FFMPEG = os.path.join(FFMPEG_PATH, 'ffmpeg.exe')
        FFPROBE = os.path.join(FFMPEG_PATH, 'ffprobe.exe')
        SEVENZIP = os.path.join(PROGRAM_DIR, 'core', 'extractor', 'bin', platform.machine(), '7z.exe')

        if not os.path.isfile(FFMPEG):  # ffmpeg.exe not found; disable transcoding
            FFMPEG = None
            logger.warning("Failed to locate ffmpeg.exe. Transcoding disabled!")
            logger.warning("Install ffmpeg with x264 support to enable this feature  ...")

        if not os.path.isfile(FFPROBE):
            FFPROBE = None
            if CHECK_MEDIA:
                logger.warning("Failed to locate ffprobe.exe. Video corruption detection disabled!")
                logger.warning("Install ffmpeg with x264 support to enable this feature  ...")

    else:
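        # Non-Windows: locate 7z, ffmpeg/avconv and ffprobe/avprobe, preferring
        # binaries under FFMPEG_PATH and falling back to whatever is on the PATH.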
        try:
            SEVENZIP = subprocess.Popen(['which', '7z'], stdout=subprocess.PIPE).communicate()[0].strip()
        except:
            pass
        if not SEVENZIP:
            try:
                SEVENZIP = subprocess.Popen(['which', '7zr'], stdout=subprocess.PIPE).communicate()[0].strip()
            except:
                pass
        if not SEVENZIP:
            try:
                SEVENZIP = subprocess.Popen(['which', '7za'], stdout=subprocess.PIPE).communicate()[0].strip()
            except:
                pass
        if not SEVENZIP:
            SEVENZIP = None
            logger.warning(
                "Failed to locate 7zip. Transcoding of disk images and extraction of .7z files will not be possible!")
        if os.path.isfile(os.path.join(FFMPEG_PATH, 'ffmpeg')) or os.access(os.path.join(FFMPEG_PATH, 'ffmpeg'),
                                                                            os.X_OK):
            FFMPEG = os.path.join(FFMPEG_PATH, 'ffmpeg')
        elif os.path.isfile(os.path.join(FFMPEG_PATH, 'avconv')) or os.access(os.path.join(FFMPEG_PATH, 'avconv'),
                                                                              os.X_OK):
            FFMPEG = os.path.join(FFMPEG_PATH, 'avconv')
        else:
            try:
                FFMPEG = subprocess.Popen(['which', 'ffmpeg'], stdout=subprocess.PIPE).communicate()[0].strip()
            except:
                pass
            if not FFMPEG:
                try:
                    FFMPEG = subprocess.Popen(['which', 'avconv'], stdout=subprocess.PIPE).communicate()[0].strip()
                except:
                    pass
        if not FFMPEG:
            FFMPEG = None
            logger.warning("Failed to locate ffmpeg. Transcoding disabled!")
            logger.warning("Install ffmpeg with x264 support to enable this feature  ...")

        if os.path.isfile(os.path.join(FFMPEG_PATH, 'ffprobe')) or os.access(os.path.join(FFMPEG_PATH, 'ffprobe'),
                                                                             os.X_OK):
            FFPROBE = os.path.join(FFMPEG_PATH, 'ffprobe')
        elif os.path.isfile(os.path.join(FFMPEG_PATH, 'avprobe')) or os.access(os.path.join(FFMPEG_PATH, 'avprobe'),
                                                                               os.X_OK):
            FFPROBE = os.path.join(FFMPEG_PATH, 'avprobe')
        else:
            try:
                FFPROBE = subprocess.Popen(['which', 'ffprobe'], stdout=subprocess.PIPE).communicate()[0].strip()
            except:
                pass
            if not FFPROBE:
                try:
                    FFPROBE = subprocess.Popen(['which', 'avprobe'], stdout=subprocess.PIPE).communicate()[0].strip()
                except:
                    pass
        if not FFPROBE:
            FFPROBE = None
            if CHECK_MEDIA:
                logger.warning("Failed to locate ffprobe. Video corruption detection disabled!")
                logger.warning("Install ffmpeg with x264 support to enable this feature  ...")

    # check for a script-defined section; if none was given, allow all enabled sections
    SECTIONS = CFG[tuple(x for x in CFG if CFG[x].sections and CFG[x].isenabled()) if not section else (section,)]
    for section, subsections in SECTIONS.items():
        CATEGORIES.extend([subsection for subsection in subsections if CFG[section][subsection].isenabled()])
    CATEGORIES = list(set(CATEGORIES))

    # create torrent class
    TORRENT_CLASS = create_torrent_class(TORRENT_CLIENTAGENT)

    # finished initializing
    return True