def configure():
    if not common.is_cmd_installed("docker"):
        common.msg("System", "docker is not installed", "warn")
        return False
    common.msg("Perform ", "docker config")
    proxy_dir = '/etc/systemd/system/docker.service.d/'
    http_proxy_file = 'http-proxy.conf'
    https_proxy_file = 'https-proxy.conf'

    proxy = apps['docker']
    if proxy['use_proxy']:
        comment = ""
    else:
        comment = "#"

    # Data (build the drop-in contents without the source indentation leaking into the files)
    http_proxy_content = ('[Service]\n'
                          '{1}Environment="HTTP_PROXY={0}"\n').format(
                              proxy['http_proxy_target'], comment)

    https_proxy_content = ('[Service]\n'
                           '{1}Environment="HTTPS_PROXY={0}"\n').format(
                               proxy['https_proxy_target'], comment)

    # action
    common.create_dir(proxy_dir)
    common.create_file(proxy_dir, http_proxy_file, http_proxy_content)
    common.create_file(proxy_dir, https_proxy_file, https_proxy_content)
    call(["service", "docker", "restart"])
    call(["systemctl", "daemon-reload"])
Example n. 3
def check():

    showUnlockDialog = False
    
    # see if our marker is present
    match = re.search(overridemarkerPattern,
                      sys.argv[2] if len(sys.argv) > 2 else "")  # never pass None to re.search
    if match:
        sys.argv[2] = re.sub(overridemarkerPattern, '', sys.argv[2])  # strip marker
        showUnlockDialog = True

    unlockWindow = 5*60
    if time.time() - getUnlockedTime() < unlockWindow:
        return #early return, we're in unlock window so we don't wrap

    # see if we're an adult plugin
    thisAddonId = xbmcaddon.Addon().getAddonInfo('id')
    showUnlockDialog = showUnlockDialog or thisAddonId in common.getXbmcAdultIds()

    if showUnlockDialog:
        if codeui.unlockUI():
            setUnlockedTime(int(time.time()))
            common.msg("Unlocked for 5 minutes")
            xbmc.executebuiltin('Container.Update(' + sys.argv[0] + sys.argv[2] + ')')
        #else incorrect code, abort navigation with exit()
        exit()

    __builtins__['__import__'] = wrapper_import
Example n. 4
    def build_list_of_files(file_list):
        verbose_msg("Building list of files from", file_list)
        # Check that runlist does not have duplicates
        unique_file_list = set(file_list)
        if len(file_list) != len(unique_file_list):
            # for i in file_list
            fatal_msg("Runlist has duplicated entries, fix runlist!",
                      len(unique_file_list), "unique files, while got",
                      len(file_list), "files")
        file_status = {
            "Does not exist": [],
            "Cannot be open": [],
            "Was recovered": [],
            "Is Ok": []
        }
        if check_input_file_integrity:  # Check that input files can be open
            for i in file_list:
                verbose_msg("Checking that TFile", i.strip(),
                            "can be processed")
                file_status[is_root_file_sane(i)].append(i)  # append to the status bucket instead of overwriting the list
        recovered_files = file_status["Was recovered"]
        not_readable = []
        for i in file_status:
            if i == "Is Ok":
                continue
            not_readable += file_status[i]
        if len(recovered_files) > 0:
            msg("Recovered", len(recovered_files), "files:\n",
                "\n".join(recovered_files))
        if len(not_readable) > 0:
            warning_msg(len(not_readable), "over", len(file_list),
                        "files cannot be read and will be skipped")
            for i in not_readable:
                if i not in file_list:
                    warning_msg("did not find file to remove", f"'{i}'")
                file_list.remove(i)

        files_per_batch = []
        iter_file_list = iter(file_list)
        for i in range(0, len(file_list)):
            sub_set = list(islice(iter_file_list, batch_size))
            if len(sub_set) <= 0:
                continue
            files_per_batch.append(sub_set)
        run_list = []
        if len(files_per_batch) > 0:
            for i, lines in enumerate(files_per_batch):
                p = os.path.join(out_path, f"{i}")
                if not os.path.isdir(p):
                    os.makedirs(p)
                run_list.append(os.path.join(p,
                                             f"ListForRun5Analysis.{i}.txt"))
                with open(run_list[-1], "w") as f:
                    for j in lines:
                        f.write(j.strip() + "\n")
        msg("Number of runs:", len(run_list))
        return run_list
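The batching above works because islice keeps consuming the same iterator, so each call yields the next batch_size file names. A self-contained sketch of the idiom (the helper name and sample values are illustrative only):

from itertools import islice

def batch_files(file_list, batch_size):
    # Consume the shared iterator batch_size items at a time,
    # mirroring the files_per_batch loop above.
    it = iter(file_list)
    batches = []
    while True:
        chunk = list(islice(it, batch_size))
        if not chunk:
            break
        batches.append(chunk)
    return batches

# batch_files(["a.root", "b.root", "c.root"], 2) -> [["a.root", "b.root"], ["c.root"]]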
Example n. 6
def print_timestamps():
    msg("Found", len(timestamps), "paths on CCDB")
    for i in timestamps:
        t = timestamps[i]
        msg("Found", len(t), i, "objects.", "First:", t[0],
            convert_timestamp(t[0][list(t[0].keys())[0]]), ".", "Last", t[-1],
            convert_timestamp(t[-1][list(t[0].keys())[0]]))
Example n. 7
def checkProtection():
    plugins = common.getProtectedPlugins()
    for plugin in plugins:
        state = hook.getPluginHookState(plugin)
        if (not state['hooked']) or not (state['uptodate']):
            p = hook.hookPlugin(plugin)
            common.msg("Re-protecting plugin " + p['name'])
Example n. 8
def download_objects(input_file="t.root",
                     out_path="/tmp/ccdbtest2/",
                     host="http://ccdb-test.cern.ch:8080",
                     overwrite=False):
    msg("Downloading CCDB objects from input file", input_file)
    out_path = os.path.normpath(out_path)
    f = TFile(input_file, "READ")
    lk = f.GetListOfKeys()
    obj_done = []
    for i in lk:
        name = i.GetName()
        cycle = i.GetCycle()
        if name in obj_done:
            continue
        obj_done.append(name)
        obj = f.Get(f"{name};{cycle}")
        name = name.replace("--", "/")
        limits = [int(obj.GetPointY(j)) for j in range(obj.GetN())]
        verbose_msg(name, len(limits), "First", limits[0],
                    convert_timestamp(limits[0]), "Last", limits[-1],
                    convert_timestamp(limits[-1]))
        for j in limits:
            get_ccdb_obj(name,
                         j,
                         out_path=out_path,
                         host=host,
                         show=False,
                         verbose=True,
                         tag=True,
                         overwrite_preexisting=overwrite)
    f.Close()
    for i in obj_downloaded:
        msg("Downloaded", obj_downloaded[i], i)
Example n. 9
def check(instream):
    input = json.load(instream)

    # create consul_instance
    consul_instance = consulate.Consul(host=input['source']['host'],
                                       port=input['source'].get('port', 443),
                                       scheme=input['source'].get(
                                           'scheme', 'https'),
                                       token=input['source']['token'])

    # see which key we need to monitor
    key = input['source']['key']
    if not key or len(key) <= 0:
        common.msg(
            "[check] consul singlekey resource expected a non-empty key name")
        exit(1)

    value = consul_instance.kv[key] if key in consul_instance.kv else ""
    common.msg("[check] consul singlekey resource {0} = {1}".format(
        key, value))

    # see if the same as previous version, or different
    version = input.get('version')
    valueOld = version.get('value', "") if version is not None else ""
    if valueOld is None or value == valueOld:
        return [{'value': value}]

    return [{'value': valueOld}, {'value': value}]
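For reference, a hedged sketch of the stdin payload this check script expects; the field names follow the lookups above, and the host, token and key values are hypothetical:

example_input = {
    "source": {
        "host": "consul.example.com",        # hypothetical Consul host
        "port": 443,                         # optional, defaults to 443
        "scheme": "https",                   # optional, defaults to "https"
        "token": "<acl-token>",              # hypothetical ACL token
        "key": "config/feature-flag",        # hypothetical key to watch
    },
    "version": {"value": "previous-value"},  # may be absent on the first check
}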
Example n. 10
    def build_list_of_files(file_list):
        if len(file_list) != len(set(file_list)):  # Check that runlist does not have duplicates
            fatal_msg("Runlist has duplicated entries, fix runlist!")
        not_readable = []
        for i in file_list:  # Check that input files can be open
            f = TFile(i.strip(), "READ")
            if not f.IsOpen():
                verbose_msg("Cannot open AOD file:", i, color=bcolors.WARNING)
                not_readable.append(i)
            else:
                f.Close()  # close readable files so handles are not left open
        if len(not_readable) > 0:
            warning_msg(len(not_readable),
                        "files cannot be read and will be skipped")
            for i in not_readable:
                file_list.remove(i)

        files_per_batch = []
        iter_file_list = iter(file_list)
        for i in range(0, len(file_list)):
            sub_set = list(islice(iter_file_list, batch_size))
            if len(sub_set) <= 0:
                continue
            files_per_batch.append(sub_set)
        run_list = []
        if len(files_per_batch) > 0:
            for i, lines in enumerate(files_per_batch):
                p = os.path.join(out_path, f"{i}")
                if not os.path.isdir(p):
                    os.makedirs(p)
                run_list.append(os.path.join(
                    p, f"ListForRun5Analysis.{i}.txt"))
                with open(run_list[-1], "w") as f:
                    for j in lines:
                        f.write(j.strip() + "\n")
        msg("Number of runs:", len(run_list))
        return run_list
Example n. 11
def run_merge(input_list_name):
    out_aod = bunched_aod_names[input_list_name]["out_aod"]
    file_index = bunched_aod_names[input_list_name]["file_index"]
    total_files = bunched_aod_names[input_list_name]["total_files"]
    input_size = bunched_aod_names[input_list_name]["input_size"]
    run_cmd(f"o2-aod-merger --input {input_list_name} --output {out_aod}",
            time_it=True,
            print_output=False)
    msg(f"Merged #{file_index}/{total_files-1} ({input_size} MB) to", out_aod,
        os.path.getsize(out_aod) * 1E-6, "MB")
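run_merge reads its inputs from the module-level bunched_aod_names dictionary. A hedged sketch of one entry, with key names taken from the lookups above and purely hypothetical values:

bunched_aod_names = {
    "bunch_0/input_list.txt": {                 # hypothetical input list path
        "out_aod": "bunch_0/AO2D_merged.root",  # hypothetical merged output file
        "file_index": 0,
        "total_files": 10,
        "input_size": 512,                      # MB, as reported in the message above
    },
}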
Example n. 12
def update_record(filee, collection):
  with open(filee, 'r') as f:
    try:
      record = json.load(f)
      if '_id' in record.keys() and len(list(collection.find({'_id': ObjectId(record['_id'])}))) > 0:
        id = record['_id']
        del record['_id']
        msg('''record {}''', record)
        collection.find_one_and_update({'_id': ObjectId(id)}, {'$set': record}, upsert=True)  ##pymongo.errors.WriteError: After applying the update, the (immutable) field '_id' was found to have been altered to _id: "5e76a3617be93507462b81b9"
      else: 
        collection.insert_one(record)
    except Exception:
      raise  # re-raise the original error instead of masking it with a bare Exception
Example n. 13
def process_run(run_number):
    processing_time = time.time()
    verbose_msg("> starting run", run_number)
    run_cmd(f"bash runner{run_number}.sh")
    aod_name = f"AODRun5.{run_number}.root"
    if not os.path.isfile(aod_name):
        msg(f"++ something went wrong for run {run_number}, no output AOD file {aod_name} found.",
            f"Please check: 'AODRun5.{run_number}.log'",
            color=bcolors.FAIL)
    verbose_msg("< complete run", run_number)
    processing_time = time.time() - processing_time
    verbose_msg(f"-- took {processing_time} seconds --",
                color=bcolors.BOKGREEN)
def configure():
    common.msg("Perform ", "shell config")
    proxy = apps['shell']

    if proxy['use_proxy']:
        #call(["export","HTTP_PROXY={0} ".format(proxy['http_proxy_target'])])
        #call(["export","HTTPS_PROXY={0}".format(proxy['https_proxy_target'])])
        os.environ["HTTP_PROXY"] = "{0}".format(proxy['http_proxy_target'])
        os.environ["HTTPS_PROXY"] = "{0}".format(proxy['https_proxy_target'])

    else:
        # "unset" is a shell builtin, not an executable, so call() cannot unset
        # variables; drop them from this process environment instead.
        os.environ.pop("HTTP_PROXY", None)
        os.environ.pop("HTTPS_PROXY", None)
Example n. 15
def proceed(handle_exit=True):
    msg(f"Downloading '{toget}'", color=bcolors.OKGREEN)
    print_now()
    if Version == 0:
        cpycmd = "alien_cp -v {} file:{}".format(toget, todir)
    else:
        cpycmd = "alien_cp -v {} file://{}".format(toget, todir)
    verbose_msg("Running command", cpycmd)
    if handle_exit:
        try:
            run_cmd(cpycmd)
        except KeyboardInterrupt:
            return False
    else:
        run_cmd(cpycmd)
        return True
Esempio n. 16
0
def wrapper_Player_play(self, item = None, listitem = None, windowed = False):
    
    if (listitem and hasattr(listitem,"_obj")):
        wrappeditem = listitem
        rating=getMpaaRating(wrappeditem.infoLabels)
        if not common.allowed(rating):
            blockedRating = rating or "Unknown Rating"
            if not codeui.unlockUI("Blocked (%s)" % blockedRating):
                return None
            setUnlockedTime(int(time.time()))
            common.msg("Unlocked for 5 minutes")
        #unwrap before delegating
        listitem=listitem._obj
    if (item and hasattr(item,"_obj")):
        #unwrap before delegating
        item = item._obj
    return self._obj.play(item, listitem, windowed)
Example n. 17
def wrapper_Player_play(self, item=None, listitem=None, windowed=False):

    if (listitem and hasattr(listitem, "_obj")):
        wrappeditem = listitem
        rating = getMpaaRating(wrappeditem.infoLabels)
        if not common.allowed(rating):
            blockedRating = rating or "Unknown Rating"
            if not codeui.unlockUI("Blocked (%s)" % blockedRating):
                return None
            setUnlockedTime(int(time.time()))
            common.msg("Unlocked for 5 minutes")
        #unwrap before delegating
        listitem = listitem._obj
    if (item and hasattr(item, "_obj")):
        #unwrap before delegating
        item = item._obj
    return self._obj.play(item, listitem, windowed)
Example n. 18
def check(instream):
    input = json.load(instream)

    # take snapshot of consul key/values
    consul_instance = consulate.Consul(host=input['source']['host'],
                                       port=input['source'].get('port', 443),
                                       scheme=input['source'].get(
                                           'scheme', 'https'),
                                       token=input['source']['token'])

    # collect all keys from all given prefixes
    prefixes = input['source']['prefixes']
    if not isinstance(prefixes, (list, )):
        common.msg(
            "[check] consul resource expected a list of prefixes, but it's not a list"
        )
        exit(1)

    prefixStr = ""
    for prefix in prefixes:
        if len(prefixStr) > 0:
            prefixStr += " "
        prefixStr += prefix

    common.msg("[check] consul resource searching under {0}".format(prefixStr))

    result = {}
    for prefix in prefixes:
        found = consul_instance.kv.find(prefix)
        if found is not None:
            for k, v in found.items():  # items() works on both Python 2 and 3
                if v is not None:
                    result[k] = v
                else:
                    result[k] = ""

    common.msg("[check] consul resource found {0} key/values under {1}".format(
        len(result), prefixStr))

    # hash values from all keys
    hash = hashlib.sha224()
    for k, v in sorted(result.items()):
        hash.update(v.encode("utf-8"))
    hashNew = hash.hexdigest()
    common.msg("[check] consul resource value hash under {0}: {1}".format(
        prefixStr, hashNew))

    # see if the same as previous version, or different
    version = input.get('version')
    hashOld = version.get('hash', "") if version is not None else ""
    if hashOld is None or hashNew == hashOld:
        return [{'hash': hashNew}]

    return [{'hash': hashOld}, {'hash': hashNew}]
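For reference, a hedged sketch of the stdin payload this check script expects; the field names follow the lookups above, and the host, token and prefixes are hypothetical:

example_input = {
    "source": {
        "host": "consul.example.com",                  # hypothetical Consul host
        "token": "<acl-token>",                        # hypothetical ACL token
        "prefixes": ["config/app1/", "config/app2/"],  # hypothetical prefixes to hash
    },
    "version": {"hash": "<previous sha224 hex digest>"},  # may be absent on the first check
}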
Example n. 19
def merge_aod(in_path="",
              out_path="./",
              input_file="AO2D.root",
              must_have="ctf",
              bunch_size=50,
              skip_already_existing=True):
    in_path = os.path.normpath(in_path)
    out_path = os.path.normpath(out_path)
    file_list = []
    for root, dirs, files in os.walk(in_path):
        for file in files:
            if file == input_file:
                to_merge = os.path.abspath(os.path.join(root, file))
                print(to_merge)
                if must_have is not None and must_have in to_merge:
                    file_list.append(to_merge)
    verbose_msg("Found", len(file_list), "files called", input_file)
    # Divide it in bunches
    file_list = [
        file_list[i:i + bunch_size]
        for i in range(0, len(file_list), bunch_size)
    ]
    for bunch_index, bunch in enumerate(file_list):
        bunch_bytes = 0  # total size of this bunch in bytes (do not clobber the bunch_size parameter)
        with open("inputfile.txt", "w") as f:
            for j in bunch:
                f.write(f"{j}\n")
                bunch_bytes += os.path.getsize(j)
        out_aod = os.path.join(out_path, f"AO2D_{bunch_index}.root")
        verbose_msg("Merging bunch of", len(bunch), "files. I.e.",
                    bunch_bytes * 1e-6, "MB")
        if skip_already_existing and os.path.isfile(out_aod):
            verbose_msg(out_aod, "already existing, skipping")
            continue
        tmp_aod = os.path.join(out_path, "MergedAOD.root")
        run_cmd(
            f"o2-aod-merger --input inputfile.txt --output {tmp_aod} --skip-non-existing-files",
            comment=f"Merging AODs into {out_aod}")
        os.rename(tmp_aod, out_aod)
        merged_size = os.path.getsize(out_aod)
        msg("Produced a merged file of", merged_size * 1e-6, "MB from",
            bunch_bytes * 1e-6, "MB, compression:", merged_size / bunch_bytes)
def configure():
    if not common.is_cmd_installed("npm"):
        common.msg("System", "npm is not installed", "warn")
        return False

    common.msg("Perform ", "npm config")
    proxy = apps['npm']

    if proxy['use_proxy']:
        call([
            "npm", "config", "set", "proxy",
            "{0}".format(proxy['http_proxy_target'])
        ])
        call([
            "npm", "config", "set", "https-proxy",
            "{0}".format(proxy['https_proxy_target'])
        ])
    else:
        call(["npm", "config", "rm", "proxy"])
        call(["npm", "config", "rm", "https-proxy"])
Example n. 21
def load_config():
    config_file = os.path.expanduser('~/.proxx.ini')
    if not os.path.isfile(config_file):
        return
    config = configparser.ConfigParser(allow_no_value=True)
    config.read(config_file)
    try:
        for key in apps:
            if config.has_section(key):
                for sub_key in proxy_var:
                    if config.has_option(key, sub_key):
                        #print (key,sub_key)
                        if sub_key == 'use_proxy':
                            apps[key][sub_key] = config[key].getboolean(sub_key)
                        else:
                            apps[key][sub_key] = config[key][sub_key]
            set_targets(key)
    except Exception as ex:
        common.msg("Loading config Err:", ex, "fail")
def in_(destdir, instream):
    input = json.load(instream)

    # see which value we need to fetch
    version = input.get('version')
    if not version or 'value' not in version:
        common.msg(
            "[in] consul singlekey resource didn't receive which value to fetch, exiting..."
        )
        exit(1)

    # put on a file system
    value = version['value']
    common.msg(
        "[in] consul singlekey resource, getting value '{0}' and storing in directory {1}"
        .format(value, destdir))
    with safe_open(os.path.join(destdir, "value"), 'w') as f:
        f.write(value.encode("utf-8"))

    return {'version': {'value': value}}
Example n. 23
def run(payload, dest='.'):
    msg('''IN
  Payload: {}
  ls: {}''', payload, os.listdir(dest))
    source, uri = get_payload_data(payload)

    connection = pm.MongoClient(uri)
    msg('Connection {}', connection)
    try:
        # The ismaster command is cheap and does not require auth.
        connection.admin.command('ismaster')
    except ConnectionFailure:
        msg("Server not available")
    db = connection[source['db']]

    collection = db[source['collection']]

    concourse_input = payload['version']['version']
    find = {'_id': ObjectId(concourse_input)}
    results = list(collection.find(find))
    for result in results:
        result['_id'] = str(result['_id'])
        filename = join(dest, concourse_input)
        with open('{}.json'.format(filename), 'w') as f:
            json.dump(result, f)
    return {"version": {"version": concourse_input}}
Example n. 24
def copied(fname="", extra_msg="", last_time=None, check_root_files=True):
    """Checks if how many files of a text list were correctly copied from grid to the PC"""
    verbose_msg("Checking how many files were copied from from list", fname)
    fname = fname.strip()
    f = open(fname, "r")
    n_to_copy = 0
    n_copied = 0
    not_sane = []
    for line in f:
        if "%" in line:
            break
        if "#" in line:
            continue
        line = path.normpath("./" + line.strip())
        n_to_copy += 1
        if path.isfile(line):
            n_copied += 1
            if check_root_files:
                if not check_root_file(line):
                    msg(f"'{line}' downloaded but with issues",
                        color=bcolors.WARNING)
                    not_sane.append(line)
        else:
            msg(f"'{line}' yet to download", color=bcolors.OKBLUE)
    if last_time is not None:
        n_copied -= last_time[1]
    msg(
        extra_msg, "downloaded {}/{}, {:.1f}%".format(
            n_copied, n_to_copy, 100 * float(n_copied) / float(n_to_copy)),
        f" -- copied {n_copied} files more, in total copied {last_time[1] + n_copied} files"
        if last_time is not None else "",
        f"{len(not_sane)} are not OK" if len(not_sane) > 0 else "")

    return n_to_copy, n_copied
Example n. 25
def copylist(fname="",
             jobs=InputArgument(1, "Number of parallel jobs to use",
                                ["--njobs", "-j"], int)):
    """Takes a text file and downloads the files from grid"""
    if jobs is None:
        jobs = 1
    verbose_msg("Copying files from list", fname, "with", jobs, "jobs")
    fname = path.normpath(fname)
    if not path.isfile(fname):
        warning_msg("Input file not provided! Aborting")
        return
    sofar = copied(fname, "So far")
    f = open(fname, "r")
    Group = []
    for line in f:
        if "%" in line:
            msg("Character % encountered! Aborting")
            break
        if "#" in line:
            msg("Character # encountered! Skipping")
            continue
        line = "./" + line
        if jobs == 1:
            copyfile(line)
        else:
            Group.append(line)
    if jobs > 1:
        msg("Copying list in parallel with", jobs, "jobs")
        run_in_parallel(processes=jobs,
                        job_runner=copyfile,
                        job_arguments=Group,
                        job_message="Downloading files",
                        linearize_single_core=True)
    copied(fname, extra_msg="In recent run", last_time=sofar)
Example n. 26
def in_(destdir, instream):
    input = json.load(instream)

    username = input['source']['username']
    password = input['source']['password']
    version = input.get('version')
    uid = version.get('uid', "") if version is not None else ""

    common.msg("logging into gmail as '{0}'".format(username))
    g = Gmail()

    # login, fail if unable to authenticate
    try:
        g.login(username, password)
    except:
        common.msg("unable to log in")
        exit(1)

    # fetch this particular email
    common.msg("fetching email with uid '{0}'".format(uid))
    msg = g.fetch_multiple_messages({uid: Message(g.inbox(), uid)})[uid]

    # if we haven't found the required email message, then exit
    if msg is None or msg.message is None:
        common.msg("unable to find email with uid '{0}'".format(uid))
        exit(1)

    # put it on a file system
    common.msg("writing email '{0}' to {1}".format(msg.subject, destdir))
    with safe_open(os.path.join(destdir, "email"), 'w') as f:
        f.write(json.dumps(toJSON(msg)))

    # log out and swallow the error
    try:
        g.logout()
    except:
        pass

    metadata = [{'name': 'uid', "value": msg.uid}, {'name': 'subject', "value": msg.subject}]
    return {'version': {'uid': msg.uid}, 'metadata': metadata}
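For reference, a hedged sketch of the stdin payload this in script expects; only username, password and version.uid are read above, and all values here are hypothetical:

example_input = {
    "source": {
        "username": "someone@gmail.com",  # hypothetical account
        "password": "<app-password>",     # hypothetical credential
    },
    "version": {"uid": "12345"},          # IMAP UID of the message to fetch
}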
Example n. 27
def out(basedir, instream):
    input = json.load(instream)

    # create consul_instance
    consul_instance = consulate.Consul(host=input['source']['host'],
                                       port=input['source'].get('port', 443),
                                       scheme=input['source'].get('scheme', 'https'),
                                       token=input['source']['token'])

    # see which key we need to monitor
    key = input['source']['key']
    if not key or len(key) <= 0:
        common.msg("[out] consul singlekey resource expected a non-empty key name")
        exit(1)

    value = None
    if "value" in input['params']:
        value = input['params']['value']
    elif "value_file" in input['params']:
        with open(os.path.join(basedir, input['params']['value_file']), 'r') as file:
            value = file.read()
    else:
        common.msg("[out] consul singlekey resource expected either 'value' or 'value_file' specified")
        exit(1)

    value = value.strip()

    common.msg("[out] consul singlekey resource setting {0} = {1}".format(key, value))
    consul_instance.kv[key] = value

    return {'version': {'value': value}}
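For reference, a hedged sketch of the stdin payload this out script expects; field names follow the code above and all values are hypothetical:

example_out_payload = {
    "source": {
        "host": "consul.example.com",  # hypothetical Consul host
        "token": "<acl-token>",        # hypothetical ACL token
        "key": "config/feature-flag",  # key to write
    },
    "params": {
        "value": "new-value",          # or use "value_file": "<path relative to basedir>"
    },
}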
Example n. 28
def save_config():
    #return
    config_file = os.path.expanduser('~/.proxx.ini')

    config = configparser.ConfigParser(allow_no_value=True)
    try:
        header="""[defaults]
http_username      = sam         ; HTTP proxy credential (Not needed if setup in CNTLM)
http_password      = sampwd      ; HTTP proxy credential (Not needed if setup in CNTLM)
http_proxy         = localhost   ; HTTP proxy uri
http_port          = 3128        ; HTTP proxy port
https_username     = sam         ; * The same as above, but for HTTPS
https_password     = sampw       ; * If omitted, the http option is used
https_proxy        = localhost   ; *
https_port         = 3128        ; *
no_proxy           = website.com website2.com website3.com    ; usually internal sites
use_proxy          = comments    ; enable or disable this proxy config


"""

        #print proxy_var
        for key in apps:
            config[key] = {}
            remove_section = True
            for sub_key in apps[key]:
                if sub_key in proxy_var:
                    if apps[key][sub_key] is not None:
                        value = "{0}".format(apps[key][sub_key])
                        config[key][sub_key] = value
                        remove_section = False
            if remove_section:
                config.remove_section(key)

        config.remove_section('defaults')
        with open(config_file, 'w') as configfile:
            #configfile.write(header)
            config.write(configfile)
    except Exception as ex:
        common.msg("Saving config",ex,"fail")
def configure():
    if not common.is_cmd_installed("git"):
        common.msg("System", "git is not installed", "warn")
        return False

    common.msg("Perform ", "git config")
    proxy = apps['git']

    print(proxy['use_proxy'])

    if proxy['use_proxy']:
        print("Setting")
        call([
            "/usr/bin/git", "config", "--global", "http.proxy",
            "{0}".format(proxy['http_proxy_target'])
        ])
        call([
            "/usr/bin/git", "config", "--global", "https.proxy",
            "{0}".format(proxy['https_proxy_target'])
        ])
    else:
        print("Unsetting")
        call(["/usr/bin/git", "config", "--global", "--unset", "http.proxy"])
        call(["/usr/bin/git", "config", "--global", "--unset", "https.proxy"])
Example n. 31
def setCodeUI(title="Enter New Code"):
    if (common.getCode()):
        if (not unlockUI("Enter Current Code")):
            return False
    while True:
        code1 = showComboDialog(title)
        if code1 is None: return False
        code2 = showComboDialog("Re-enter Code")
        if code2 is None: return False
        if (code1 == code2):
            if (len(code1) > 0):
                common.setCode(code1)
                common.msg("Code has been set")
                return True
            else:
                common.msg("Code may not be empty")
        else:
            common.msg("Codes did not match")
Example n. 32
def writefiles(FileList="",
               Outfile=InputArgument("listoffiles.txt", "Output file", "-o"),
               append="Append to output file or create a new one"):
    """
    Writes the list of file to the output file given content of the path given in input.
    Can also form the output in the xml format so as to run on grid, this is done if the output filename has the xml extension.
    """
    # Printing name of output list
    msg(f"Output will be into file '{Outfile}'")
    # Check on existing list file of this name
    if path.isfile(Outfile) and not append:
        msg("List file already existing, replace it? (y/[n])")
        if "y" not in input():
            return
    fw = open(Outfile, "a" if append else "w")
    written = 0
    for i in FileList:
        fw.writelines(i.strip() + "\n")
        written += 1
    msg(f"Written {written} files to {Outfile}")
    fw.close()
Example n. 33
def check_root_file(file_name):
    if not file_name.endswith(".root"):
        warning_msg("Testing a non root file:", file_name)
        return True
    if not path.isfile(file_name):
        warning_msg("Testing a non existing file:", file_name)
        return True
    try:
        f = TFile(file_name, "READ")
        if f.TestBit(TFile.kRecovered):
            msg("File", file_name, "was recovered", color=bcolors.WARNING)
            return False
        if not f.IsOpen():
            msg("File", file_name, "is not open", color=bcolors.WARNING)
            return False
    except OSError:
        msg("Issue when checking file", file_name, color=bcolors.WARNING)
        return False
    verbose_msg(file_name, "is ok and has size",
                os.path.getsize(file_name) * 1e-6, "MB")
    return True
Example n. 34
def iterative_search(maximum_found_objects=2000,
                     max_search_iterations=-20,
                     minimum_timestamp=1615197295100,
                     delta_timestamp=1 * 1000):
    """
    delta_timestamp is in milliseconds
    """
    for i in timestamps:
        verbose_msg("Iteratively searching for", i, "with",
                    max_search_iterations, "iterations")
        delta = delta_timestamp
        iterations = 0
        while True:
            iterations += 1
            if max_search_iterations > 0 and iterations > max_search_iterations:
                msg("Max search iterations for", i,
                    f"({iterations} < {max_search_iterations})")
                break
            last_timestamp = timestamps[i][-1]["Valid-From:"]
            if last_timestamp - delta < minimum_timestamp:
                msg("Found old enough", i,
                    f"({last_timestamp} < {minimum_timestamp})")
                break
            listing_status = list_ccdb_object(i,
                                              timestamp=last_timestamp - delta)
            if listing_status == 0:
                verbose_msg(
                    "++ Found an object",
                    (last_timestamp - timestamps[i][-1]["Valid-From:"]) *
                    0.001, "seconds younger with delta", delta, "ms")
                delta = delta_timestamp
            else:
                delta += delta_timestamp
            if maximum_found_objects > 0 and len(
                    timestamps[i]) >= maximum_found_objects:
                msg("Found enough", i, f"({maximum_found_objects})")
                break
    print_timestamps()
Example n. 35
            common.addProtectedPlugin(p['id'])

if (common.getCode()):
    allowed = codeui.unlockUI('Enter your code')
else:
    #prompt to choose code if first time
    allowed = codeui.setCodeUI("Choose a Code")
    #prompt to protect plugins
    if (allowed):
        controlAddonsUI()
    

#present main settings window
while (allowed):
    action = chooseAction()
    if (action == 0):
        codeui.setCodeUI()
    elif (action == 1):
        setTVRatingUI()
    elif (action == 2):
        setMovieRatingUI()
    elif (action == 3):
        controlAddonsUI()
    else:
        if ((not common.getCode()) and xbmcgui.Dialog().yesno("Set Code Now?", "You haven't set a code yet.\nParental controls will not be enabled until you do")):
            codeui.setCodeUI()
        if (not common.getCode()):
            common.msg("Not enabled")
        break

Example n. 36
import time
import traceback
import xbmcaddon
import os
import xbmc

import common
import serviceiter

__addonpath__ = xbmcaddon.Addon().getAddonInfo('path')

common.msg("Started")
lastMessage = time.time()
while (not xbmc.abortRequested):
    try:
        files = os.listdir(__addonpath__ + "/resources/lib/parentalcontrols")
        for file in files:
            if file.endswith(".py") and file != "service.py" and file != "settings.py":
                module = file[:-3]
                #reimport the module
                try:
                    reload(eval(module))
                except NameError:
                    exec("import " + module)
        serviceiter.iterate()
    except:
        traceback.print_exc()
        time.sleep(10)