def check_and_replace_orchestrator_systems():
    """Restore orchestrator-managed systems to their original configuration.

    For every system in the checked-out Sesam node, if its metadata carries
    an ``orchestrator.original_configuration`` entry matching a system in
    the git-cloned repo, the checked-out (orchestrator) version is written
    to the payload directory under the repo file's name.
    """
    old_systems_dir = os.path.join(sesam_checkout_dir, "unpacked", "systems")
    new_systems_dir = os.path.join(git_cloned_dir, "sesam-node", "systems")
    for old_filename in os.listdir(old_systems_dir):
        with open(os.path.join(old_systems_dir, old_filename), 'r') as f:
            old_file = load_json(f.read())
        try:
            # Only orchestrator-managed systems carry this metadata entry.
            original = old_file["metadata"]["orchestrator"][
                "original_configuration"]
        except KeyError:
            # Not orchestrator-managed — skip. (The original wrapped the
            # whole inner loop in this try, silently swallowing unrelated
            # KeyErrors; the narrow try avoids that.)
            continue
        for new_filename in os.listdir(new_systems_dir):
            with open(os.path.join(new_systems_dir, new_filename),
                      'r') as g:
                new_file = load_json(g.read())
            if original == new_file:
                logging.info("The system %s is restored to orchestrator mode"
                             % new_file["_id"])
                with open(os.path.join(payload_dir, "systems", new_filename),
                          'w') as h:
                    h.write(dump_json(old_file))
Example #2
0
    def formdata(self, handler, instance):
        """Create or update a PropertyForm from POSTed JSON, then return it.

        POST creates (id == -1) or updates an existing form; any request
        carrying a valid id gets back the stored form plus its metadata.
        """
        request = handler.request

        # NOTE(review): shadows the builtin ``id``; request.REQUEST is the
        # old merged GET/POST Django API — confirm target Django version.
        id = int(request.REQUEST.get('id', -1))
        if request.method == "POST":
            formdata = load_json(request.POST.get('data', '{}'))
            extra = load_json(request.POST.get('extra', '{}'))
            formname = extra.get('name', 'new form')
            # Spoke type names are stored as one comma-separated string.
            spokes = ",".join(extra.get('spokes', []))

            if id == -1:
                # No id supplied: create a new PropertyForm.
                pf = PropertyForm(conf=instance, name=formname,
                                  form=dump_json(formdata),
                                  types=spokes)
                pf.save()
                id = pf.id
            else:
                # Update the existing form in place.
                pf = PropertyForm.objects.get(pk=id)
                pf.name = formname
                pf.form = dump_json(formdata)
                pf.types = spokes
                pf.save()

        if id != -1:
            pf = PropertyForm.objects.get(pk=id)
            return dict(form=load_json(pf.form),
                        extra=dict(name=pf.name, id=pf.id,
                                   spokes=pf.types.split(",")))
        # Nothing stored yet: return an empty form skeleton.
        return dict(form=[], extra={'name':'', 'spokes':[]})
Example #3
0
def properties_data_handler(handler, request, action):
    """ return relevant form data """
    ## return first form that matches. TODO: combine forms into groups

    ## combine with data

    # import pdb; pdb.set_trace()
    spoke = handler.spoke()
    if request.method == "POST":
        # Persist the posted properties: a {form_id: formdata, ...} mapping.
        data = load_json(request.POST.get('data'))
        # NOTE(review): dict.iteritems() is Python 2 only — confirm runtime.
        for (id, formdata) in data.iteritems():
            form_obj_storage, _ = PropertyFormData.objects.get_or_create(form_id=id, content=spoke.instance)
            form_obj_storage.properties = dump_json(formdata)
            form_obj_storage.save()

    # Collect every form whose 'types' list includes this spoke's name.
    spokename = handler.spoke().name()
    forms = []
    for pf in PropertyForm.objects.all():
        types = pf.types.split(",")
        if spokename in types:
            forms.append(dict(id=pf.id, name=pf.name, form=load_json(pf.form)))

    # Previously stored property data, keyed by form id.
    data = {}
    for pfd in spoke.instance.properties.all():
        data[pfd.form_id] = load_json(pfd.properties)
    return dict(forms=forms, data=data)
Example #4
0
    def get_templates(self, path):
        """Load known template JSON files from *path* onto attributes.

        Each recognized filename is parsed with load_json and stored on
        ``self`` under an attribute of the same name (minus ``.json``).
        ``path`` is concatenated directly with the filename, so it must
        end with a path separator.
        """
        # Filename -> attribute table; every handle is now closed promptly
        # (the original leaked one open file object per match).
        template_files = {
            'node-metadata.conf.json': 'node_metadata',
            # Extra pipes
            'pipe_on_extra_from_extra_to_master.json':
                'pipe_on_extra_from_extra_to_master',
            'pipe_on_extra_from_master_to_extra.json':
                'pipe_on_extra_from_master_to_extra',
            # Extra systems
            'system_on_extra_from_extra_to_master.json':
                'system_on_extra_from_extra_to_master',
            'system_on_extra_from_master_to_extra.json':
                'system_on_extra_from_master_to_extra',
            # Master pipes
            'pipe_on_master_from_extra_to_master.json':
                'pipe_on_master_from_extra_to_master',
            'pipe_on_master_from_master_to_extra.json':
                'pipe_on_master_from_master_to_extra',
            # Master systems
            'system_on_master_from_extra_to_master.json':
                'system_on_master_from_extra_to_master',
            'system_on_master_from_master_to_extra.json':
                'system_on_master_from_master_to_extra',
        }
        for f in listdir(path):
            attr = template_files.get(f)
            if attr is not None:
                with open(f'{path}{f}', 'r') as fh:
                    setattr(self, attr, load_json(fh.read()))
Example #5
0
def test_code():
    """Run submitted R code against tests inside a Docker sandbox.

    Writes the POSTed code and tests to disk, runs the tango-demo
    container, reads the JSON reports it produced, and renders feedback.
    """
    # NOTE(review): files are written under /tango but the container mounts
    # /tmp/tango — confirm these resolve to the same location.
    with open("/tango/src/demo.R", "w") as r_code:
        r_code.write(request.form["code"])
    with open("/tango/testcases/demo.R", "w") as r_test:
        r_test.write(request.form["tests"])
    sp_run([
        "docker", "run", "--rm", "--mount",
        "type=bind,src=/tmp/tango/src,dst=/home/tango/src,readonly=true",
        "--mount",
        "type=bind,src=/tmp/tango/testcases,dst=/home/tango/testcases,readonly=true",
        "--mount", "type=bind,src=/tmp/tango/out,dst=/home/tango/out",
        "awoolley10/tango-demo"
    ])
    with open("/tango/out/validation.json") as v_json:
        validation_result = from_validation(load_json(v_json)["runners"][0])
    # Quality/evaluation feedback only applies when all 3 validations pass.
    if len(validation_result["successes"]) == 3:
        with open("/tango/out/quality.json") as q_json:
            q_res = load_json(q_json)["runners"][0]
        q_e = from_quality(q_res["errors"])  # Quality errors
        q_score = round(100 * q_res["score"])

        with open("/tango/out/evaluation.json") as e_json:
            e_res = load_json(e_json)["runners"][0]
        # The evaluation report may omit any of these sections.
        try:
            e_s = e_res["successes"]
        except KeyError:
            e_s = []
        try:
            e_f = e_res["failures"]
        except KeyError:
            e_f = []
        try:
            e_e = e_res["errors"]
        except KeyError:
            e_e = []
    else:
        # Validation failed: suppress quality/evaluation sections.
        q_e = None
        q_score = None
        e_s = None
        e_f = None
        e_e = None

    return render_template("feedback.j2",
                           v_s=validation_result["successes"],
                           v_f=validation_result["failures"],
                           v_e=validation_result["errors"],
                           q_score=q_score,
                           q_e=q_e,
                           e_s=e_s,
                           e_f=e_f,
                           e_e=e_e)
def load_sesam_files_as_json(dir):
    """Collect a Sesam node configuration from *dir* as parsed JSON.

    Picks up the top-level ``node-metadata.conf.json`` plus every file in
    the ``pipes`` and ``systems`` subdirectories.

    Args:
        dir: Directory containing an unpacked Sesam node configuration.

    Returns:
        list: Parsed JSON objects, one per configuration file.
    """
    node_config = []
    for name in os.listdir(dir):
        path = os.path.join(dir, name)
        if os.path.isfile(path) and fnmatch.fnmatch(
                name, 'node-metadata.conf.json'):
            # was: open(path).read() — leaked the file handle
            with open(path) as fh:
                node_config.append(load_json(fh.read()))
        elif os.path.isdir(path):
            if fnmatch.fnmatch(name, 'pipes') or fnmatch.fnmatch(
                    name, 'systems'):
                for p_s in os.listdir(path):
                    local_path = os.path.join(path, p_s)
                    with open(local_path) as fh:
                        node_config.append(load_json(fh.read()))
    return node_config
Example #7
0
    def __iter__(self):
        """Iterate over lines looking for resources."""
        # Hoist instance attributes into locals for the loops below.
        resource_class = self._resource_class
        resource_args = self._resource_args
        session = self._session
        response = self._response

        for chunk in self._read(response):
            event_data = ""
            for line in chunk.splitlines():
                # Ignore empty lines
                if not line.strip():
                    continue

                # Split "field<sep>value"; only the first separator counts.
                # NOTE(review): data[1] raises IndexError when a non-empty
                # line has no separator — confirm the stream guarantees one.
                data = line.split(self._FIELD_SEPARATOR, 1)
                field = data[0]
                data = data[1]

                # Accumulate payloads of 'data' fields within this chunk.
                if field == 'data':
                    event_data += data

            if not event_data:
                # Don't report on events with no data
                continue

            # The JSON payload wraps the resource under a 'data' key.
            event_data = load_json(event_data).get('data')
            yield resource_class(event_data, session, **resource_args)
Example #8
0
    def runTest(self):
        """Optimize self.fn, then compare original vs optimized behavior.

        Runs the optimizer on the program, builds a TM from each version,
        executes every '*:run' directive through both, prints output
        mismatches, and records timing regressions.
        """
        outfn = self.outfn
        with open(self.fn, 'rb') as f:
            compiler = Popen([COMPILER_EXE, outfn],
                             stdin=f,
                             stdout=PIPE,
                             stderr=STDOUT)
            output = compiler.communicate()[0]
            if compiler.returncode == -11:  # SIGSEGV
                self.fail('Optimizer segfaulted')
            elif compiler.returncode != 0:
                self.fail('Optimizer terminated with signal %d' %
                          (compiler.returncode))

            # Build TM instances from the original and optimized sources.
            f.seek(0)
            origtm = TM(f.read().decode('latin-1'))
            with open(outfn, 'rb') as outf:
                optitm = TM(outf.read().decode('latin-1'))
            f.seek(0)
            for line in f:
                line = line.decode('latin-1')
                if not line.startswith('*:run'):
                    continue
                # '*:run input, data' — parse the directive as a JSON pair.
                inp, dm = load_json('[' + line[5:].strip() + ']')
                origout, origtime = origtm.run(inp, dm)
                optiout, optitime = optitm.run(inp, dm)
                if origout != optiout:
                    print('FAIL: with input ' + str(inp) + ' and data ' +
                          str(dm) + ', original program outputs ' +
                          str(origout) + ' but optimized version outputs ' +
                          str(optiout))
                # 'slower'/'notfaster' are module-level sets defined elsewhere.
                if optitime > origtime:
                    slower.add((self.fn, tuple(inp), tuple(dm)))
                elif optitime == origtime:
                    notfaster.add((self.fn, tuple(inp), tuple(dm)))
Example #9
0
def get_error_message(error):
    """Map a Twitter error string to a formatted error message.

    Falls through errno checks, an HTTP-status heuristic, and finally the
    JSON error payload embedded in the message text.
    """
    if "[errno 32] broken pipe" in error:
        return format_error_message(32)
    if "[errno 111] connection refused" in error or " operation timed out" in error or "reset by peer" in error:
        return format_error_message(111)
    res = re_twitter_error.search(error)
    code = int(res.group(1)) if res else 0
    # Any 5xx (except 500) without a JSON error body becomes a generic 503.
    if code != 500 and str(code).startswith('5') and '"code":' not in error:
        return format_error_message(503)
    message = ""
    try:
        # res may be None here (regex miss) — the resulting AttributeError
        # is swallowed by the bare except below, like any parse failure.
        jsonerr = load_json(res.group(2))["errors"]
        if isinstance(jsonerr, list):
            jsonerr = jsonerr[0]
        # NOTE(review): 'unicode' is Python 2 only — confirm runtime.
        if isinstance(jsonerr, unicode):
            jsonerr = {"message": jsonerr}
        # Capitalize the first letter of the payload's message, if any.
        message = jsonerr["message"][0].upper() + jsonerr["message"][1:] if jsonerr["message"] else ""
        if "code" in jsonerr and jsonerr["code"] in [183,187]:
            code = jsonerr["code"]
        elif code == 403 and "statuses/retweet" in error:
            code = 187
    except:
        if config.DEBUG:
            loggerr("%s: %s" % (code, error))
    # NOTE(review): precedence here is (code == 404 and "direct_messages/new"
    # in error) or "friendships" in error — confirm the grouping is intended.
    if code == 404 and "direct_messages/new" in error or "friendships" in error:
        code = 403
        message = "No twitter account found with this name"
    return format_error_message(code, message)
    def runTest(self):
        """Compile self.fn and check the result against '#:' directives.

        '#:compile' asserts the compile succeeded; '#:error' asserts it
        failed and (optionally) that the listed words appear in the output.
        """
        # Use context managers so neither file handle leaks (the original
        # left both open), and normalize the tab-indented lines at the end
        # of the original, which raise TabError under Python 3.
        with open(self.fn, 'rb') as src:
            compiler = Popen(['python', COMPILER_EXE],
                             stdin=src, stdout=PIPE, stderr=STDOUT)
            output = compiler.communicate()[0].decode('latin-1').strip()
        if compiler.returncode == -11:  # SIGSEGV
            self.fail('Compiler segfaulted')
        elif compiler.returncode < 0:
            self.fail('Compiler terminated with signal %d'%(compiler.returncode))

        seenDirective = False
        with open(self.fn, 'r') as directives:
            for line in directives:
                if not line.startswith('#:'):
                    continue
                seenDirective = True
                if line.startswith('#:compile'):
                    if compiler.returncode:
                        self.fail('Compile failed: ' + repr(output))
                elif line.startswith('#:error'):
                    if not compiler.returncode:
                        self.fail('Compile incorrectly succeeded')
                    # '#:error a, b, ...' — parse the word list as JSON.
                    words = load_json('['+line[7:].strip()+']')
                    if not CHECK_ERROR_CONTENTS:
                        # Only check the numeric error code, if present.
                        if len(words) > 0 and type(words[0]) == int:
                            words = words[:1]
                        else:
                            words = []
                    for word in words:
                        # assertIn exists only on unittest 2.7+/3.x.
                        if sys.version_info[0] + .1 * sys.version_info[1] >= 2.7:
                            self.assertIn(str(word), output)
                        else:
                            self.assertTrue(str(word) in output)
                else:
                    self.fail('Unrecognized test directive ' + repr(line))
        if not seenDirective:
            self.fail('No test directive')
def geojson_to_polygons(json, original_project, desired_projection):
    """Returns generator of GEOSGeometry polygons.

    Args:
        json: GeoJSON string (a Feature or FeatureCollection).
        original_project: SRID of the input geometries.
        desired_projection: SRID to transform the geometries into.

    Raises:
        ValueError: On an empty or unparsable JSON string.
        Exception: When the top-level type is neither Feature nor
            FeatureCollection.
    """
    def to_geos(json_dict_polygon):
        """Return GEOSGeometry polygon from polygon dictionary"""
        json_dict_polygon = json_dict_polygon['geometry']
        geo = GEOSGeometry(dump_json(json_dict_polygon))
        geo.set_srid(original_project)
        geo.transform(desired_projection)
        return geo

    if json == '':
        raise ValueError('Json string is empty')

    try:
        json_dict = load_json(json)
    except ValueError:
        raise ValueError('Incorrect json format')

    json_type = json_dict.get('type', 'fail')

    if json_type == 'Feature':
        # A single Feature must be wrapped in a list: iterating the dict
        # itself (as the original did) yields its string keys, and
        # to_geos('type') then fails.
        json_dict_polygons = [json_dict]
    elif json_type == 'FeatureCollection':
        json_dict_polygons = json_dict.get('features')
    else:
        raise Exception('Incorrect json format')

    return (to_geos(poly) for poly in json_dict_polygons)
Example #12
0
def load_extension(extension_name):
    """Load a sockethttp extension by name and return its ``ext`` object.

    Reads the extension's ext.json manifest, opens the declared extra
    files into the execution globals, then executes ext.py (with lines
    marked '# DELETELINE' blanked) and returns the resulting ``ext``.
    """
    ext_dir = join(dirname(__file__), "extensions", extension_name)
    with open(join(ext_dir, "ext.json"), "r", encoding="utf-8") as json_file:
        extension_info = load_json(json_file)
    globals_env = {
        "run_in_sockethttp": True,
        "sockethttp_version": __version__,
        "request": request,
        "get": get,
        "post": post
    }
    # NOTE(review): every iteration opens the file described by
    # extfiles[0]/extfiles[1] regardless of ``i`` — looks suspicious, but
    # preserved here; confirm against the ext.json schema before changing.
    for i in extension_info["extfiles"]:
        globals_env[i] = open(
            join(ext_dir, extension_info['extfiles'][0][0]),
            *extension_info['extfiles'][0][1:],
            **extension_info['extfiles'][1])
    locals_env = {}
    with open(join(ext_dir, "ext.py"), "r", encoding="utf-8") as script:
        source = "\n".join([(i if "# DELETELINE" not in i else "")
                            for i in script.readlines()])
    # exec() only accepts globals/locals positionally before Python 3.13;
    # the original keyword form raised TypeError at runtime.
    exec(compile(source, "<sockethttp extension>", "exec"),
         globals_env, locals_env)
    return locals_env['ext'] or globals_env['ext']
Example #13
0
def puzzle():
    """Serve the player's current puzzle page, or the tamper page.

    A missing 'data' cookie sends the player to the start/tamper page; a
    cookie that fails verification is counted as a tamper attempt and
    cleared before re-rendering.
    """
    # Check user has started
    if not request.cookies.get("data"):
        return render_template("tamperer" + DEV + ".html")

    # Get cookie data
    cookie = get_data_from_cookie()

    # Validate data
    if not verify_data(cookie):
        # Update tamper statistics
        STATISTICS["Tamper Attempts"] += 1

        # Remove data cookie
        resp = make_response(render_template("tamperer" + DEV + ".html"))
        resp.set_cookie("data", "", expires=0)
        return resp

    # Select & return current puzzle's html
    player = UserData.query.filter_by(id=cookie[0]).first()
    data = Puzzles.query.filter_by(id=player.current).first()
    # player.pages is a JSON list of puzzle ids; show the 1-based position.
    return render_template(
        "puzzle" + DEV + ".html",
        title=data.title,
        prompt=data.prompt,
        number=load_json(player.pages).index(player.current) + 1)
Example #14
0
    def finisher(player, hs=False):
        """Send a Mailgun notification that *player* finished the challenge.

        Args:
            player: Sequence whose element 1 is a JSON list of puzzle names.
            hs: True when this finisher also set a new high score.
        """
        # Get puzzles
        puzzles = ""
        for i, p in enumerate(load_json(player[1])):
            puzzles += "\t\t" + str(i + 1) + ": " + p + "\n"

        # Describe the most recent entry in STATISTICS["Finishers"].
        body = "New Finisher on " + format_time("%m-%d-%Y %H:%M:%S") + \
            ":\n\tName: " + STATISTICS["Finishers"][len(STATISTICS["Finishers"]) - 1]["name"] + ",\n" + \
            "\tEmail: " + STATISTICS["Finishers"][len(STATISTICS["Finishers"]) - 1]["email"] + ",\n" + \
            "\tTime: " + str(STATISTICS["Finishers"][len(STATISTICS["Finishers"]) - 1]["time"]) + " seconds\n" + \
            "\tAssigned Puzzles: " + puzzles

        if hs:
            body += "New Highscore! Contact them & give them their reward."

        # Mailgun REST call: basic auth ('api', key) plus a form payload.
        a = ("api", MG_APIKEY)
        d = {
            "from": FROM,
            "to": TO,
            "subject": "Krantz's Challenge: New Finisher",
            "text": body
        }
        return post("https://api.mailgun.net/v3/" + DOMAIN + "/messages",
                    auth=a,
                    data=d)
    def _split_generators(self, dl_manager):
        """Build the TRAIN split from the articles multistream dump.

        Reads the pre-generated status.json, collects every .xml job
        file's URL, downloads them, and shards the split by total size.
        """
        xml_urls = []
        total_bytes = 0

        # Re-use the generated status.json
        with open(STATUS_FILE) as fh:
            dump_info = load_json(fh)

        multistream_dump_info = dump_info["jobs"]["articlesmultistreamdump"]

        # Only use a dump that finished successfully.
        assert multistream_dump_info["status"] == "done"

        for fname, info in multistream_dump_info["files"].items():
            if ".xml" not in fname:
                continue

            total_bytes += info["size"]
            xml_urls.append(DUMP_URL + fname)

        downloaded_files = dl_manager.download_and_extract({"xml": xml_urls})

        # Max 128MB
        # NOTE(review): despite the name, this is a shard *count*:
        # ceil(total_bytes / 128MB).
        max_bytes = int(ceil(total_bytes / (128 * 2**20)))

        return [
            tfds.core.SplitGenerator(name=tfds.Split.TRAIN,
                                     num_shards=max_bytes,
                                     gen_kwargs={
                                         "filepaths": downloaded_files["xml"],
                                         "language": LANGUAGE
                                     })
        ]
Example #16
0
 def put(self, request, *args, **kwargs):
     """Update quantities of items already in the basket.

     Unlike POST (called from the product page), PUT is called from the
     basket page and only updates the item quantities.
     """
     raw_data = request.data.get('items')
     if raw_data:
         try:
             data_dict = load_json(raw_data)
         # NOTE(review): bare except hides all errors, not just bad JSON.
         except:
             return JsonResponse({
                 "Status": False,
                 "Error": "Wrong input format"
             })
         else:
             for item in data_dict:
                 order_item = OrderInfo.objects.filter(id=item['id'])
                 if not order_item:
                     # Error (Russian): "No such item in the basket."
                     return JsonResponse({
                         "Status":
                         False,
                         "Error":
                         "Такого товара нет в корзине."
                     })
                 order_item.update(quantity=item['quantity'])
             return JsonResponse({"Status": True})
     # Error (Russian): "Not all arguments were supplied."
     return JsonResponse({
         "Status": False,
         "Error": "Не все аргументы указаны."
     })
Example #17
0
def get_train_data(data_config):
    """Assemble the combined SRE/SWBD/Mixer6 training set.

    Args:
        data_config: Path to a JSON file with ROOT, LOCATION and
            SPEAKER_KEY entries describing where each corpus lives.

    Returns:
        Deduplicated training data, sorted by index.
    """
    with open(data_config, 'r') as f:
        sre_data = load_json(f.read())
    data_root = sre_data['ROOT']
    data_loc = sre_data['LOCATION']
    speaker_key = sre_data['SPEAKER_KEY']

    # One loader per corpus; results are stacked column-wise below.
    sre04 = make_old_sre_data(data_root, data_loc['SRE04'], 2004, speaker_key)
    sre05_train = make_old_sre_data(data_root, data_loc['SRE05_TRAIN'], 2005, speaker_key)
    sre05_test = make_old_sre_data(data_root, data_loc['SRE05_TEST'], 2005, speaker_key)
    sre06 = make_old_sre_data(data_root, data_loc['SRE06'], 2006, speaker_key)
    sre08 = make_sre08_data(data_root, data_loc['SRE08_TRAIN'], data_loc['SRE08_TEST'])
    sre10 = make_sre10_data(data_root, data_loc['SRE10'])
    sre16 = make_sre16_data(data_root, data_loc['SRE16_EVAL'])
    swbd_c1 = make_swbd_cellular(data_root, data_loc['SWBD_C1'], 1)
    swbd_c2 = make_swbd_cellular(data_root, data_loc['SWBD_C2'], 2)
    swbd_p1 = make_swbd_phase(data_root, data_loc['SWBD_P1'], 1)
    swbd_p2 = make_swbd_phase(data_root, data_loc['SWBD_P2'], 2)
    swbd_p3 = make_swbd_phase(data_root, data_loc['SWBD_P3'], 3)
    mx6_calls = make_mixer6_calls(data_root, data_loc['MX6'])
    mx6_mic = make_mixer6_mic(data_root, data_loc['MX6'])
    # Stack all corpora and transpose to rows-per-utterance.
    train_data = np.hstack([sre04, sre05_train, sre05_test, sre06, sre08, sre10, sre16, swbd_c1, swbd_c2, swbd_p1,
                            swbd_p2, swbd_p3, mx6_calls, mx6_mic]).T
    print('Removing Duplicates...')
    train_data, n_dup = remove_duplicates(train_data)
    print('Removed {} duplicates.'.format(n_dup))
    print('Sorting train data by index...')
    return sort_by_index(train_data)
Example #18
0
def read_capabilities(filename):
    """Read an addon-capabilities JSON file and build per-addon info.

    Args:
        filename: Path to a JSON document with 'addons' and 'disclaimers'.

    Returns:
        dict: addon name -> {'capabilities': [...], 'terms': disclaimers},
        skipping capabilities whose status is 'NA'.
    """
    # Context manager guarantees the handle is closed even on parse errors
    # (the original leaked it on any exception before close()).
    with open(filename, 'r') as data_file:
        data = load_json(data_file)

    addons = data['addons']
    disclaimers = data['disclaimers']

    ret = {}

    # .items() replaces Python-2-only .iteritems().
    for addon_name, info in addons.items():
        infos = []
        for cap in CAPABILITY_SET:
            status = info[cap].get('status') or ''
            text = info[cap].get('text') or ''
            if status == 'NA':
                continue
            infos.append({
                'function': cap,
                'status': status,
                'detail': text,
                'class': CLASS_MAP[status],
            })
        ret[addon_name] = {
            'capabilities': infos,
            'terms': disclaimers,
        }

    return ret
Example #19
0
    def withdraw(self, cointype, address, volume, mark='AUTO', memo=None):
        """Submit a withdrawal request to the exchange API.

        Args:
            cointype: Coin symbol to withdraw.
            address: Destination address.
            volume: Quantity to withdraw.
            mark: Withdrawal mark (default 'AUTO').
            memo: Optional memo/tag, sent as 'extendParam'.

        Returns:
            The 'data' part of the API response.

        Raises:
            BithumbGlobalError: When the response code is not '0'.
        """
        parms = {
            'coinType': cointype,
            'address': address,
            'quantity': volume,
            'mark': mark
        }
        action = 'withdraw'
        if memo:
            parms['extendParam'] = memo
        # Millisecond timestamp doubles as the message number.
        ts = str(int(DateTime.now().timestamp() * 1000))
        data = {
            'apiKey': self.__secret.api_key,
            'bizCode': action,
            'msgNo': ts,
            'timestamp': ts,
        }
        data.update(parms)

        # Sign the complete payload (API key + business fields).
        data['signature'] = self.__secret.sign(data)

        response = self.session.post(url=URL_WITHDRAW, json=data, timeout=15)
        response = load_json(response.text)

        if response['code'] != '0':
            raise BithumbGlobalError(response['code'], response['msg'])
        return response['data']
Example #20
0
    def variable_verification(self):
        """Verify every configured variable exists in the vars files.

        Returns:
            bool: True when verification is disabled or succeeds, False
            when any variable is missing.

        Exits the process (code -4) when verification is enabled but no
        files to verify from were configured.
        """
        if self.verify_vars is not True:
            return True  # verification disabled
        if len(self.verify_vars_from_files) == 0:
            self.LOGGER.critical(
                'Verify vars is true but files to verify from is not specified!'
            )
            exit(-4)
        # Merge all vars files; later files override earlier keys.
        all_vars = {}
        for f in self.verify_vars_from_files:
            # was: load_json(open(f).read()) — leaked the file handle
            with open(f) as fh:
                all_vars.update(load_json(fh.read()))
        missing_vars = [var for var in self.config_vars
                        if var not in all_vars]
        if len(missing_vars) != 0:
            self.LOGGER.critical(
                f'Variables verification failed! Missing vars: "{missing_vars}"'
            )
            return False
        self.LOGGER.info(f'Variables verification succeeded :)')

        return True
Example #21
0
def recursive_set_env_var(triple_tuple_env_var):
    """Load env vars described by (name, type, children) triples onto config.

    Values are coerced by their declared type (bool/list/dict) and stored
    on the module-level 'config' object; names missing from the
    environment (and not in OPTIONAL_ENV_VARS) are appended to the
    module-level 'missing_vars' list.
    """
    for var, t, child_required_vars in triple_tuple_env_var:
        curvar = getenv(var, None)
        if curvar is None:
            if var not in OPTIONAL_ENV_VARS:
                missing_vars.append(var)
        else:
            if t == bool:
                curvar = curvar.lower() == 'true'
                setattr(config, var, curvar)
                # A true flag may require further child env vars.
                if curvar and child_required_vars is not None:
                    recursive_set_env_var(child_required_vars)
            elif t == list:
                # Lists are encoded as ';'-separated strings.
                setattr(config, var, curvar.split(sep=';'))
            elif t == dict:
                # NOTE(review): backticks are stripped before JSON parsing —
                # presumably shell-quoting artifacts; confirm.
                jsoned_curvar = load_json(curvar.replace('`', ''))
                if child_required_vars is not None:
                    # Verify (and bool-coerce) the required dict keys.
                    for k in child_required_vars:
                        if k not in jsoned_curvar:
                            if k not in OPTIONAL_ENV_VARS:
                                missing_vars.append(f'{var}->{k}')
                        else:
                            curtype = child_required_vars[k]
                            if curtype == bool:
                                jsoned_curvar[k] = jsoned_curvar[k].lower(
                                ) == 'true'
                setattr(config, var, jsoned_curvar)
            else:
                # Plain string value — stored as-is.
                setattr(config, var, curvar)
Example #22
0
 def get(self,
         resource,
         key='no_key',
         environment='production',
         parser='yaml'):
     """Query Puppet information using REST API and client SSL cert"""
     # URL layout: endpoint/environment/resource/key
     url = '/'.join((self.endpoint, environment, resource, key))
     request = urllib2.Request(url)
     request.add_header('Accept', parser)
     try:
         try:
             response = self.opener.open(request)
         except urllib2.HTTPError as e:
             raise APIError((str(e) + ": " + url))
         # Parse according to the requested representation.
         if parser == 'yaml':
             value = load_yaml(response)
         elif parser == 'pson':
             value = load_json(response)
         else:
             # Unknown parser: return the raw body.
             value = response.read()
     finally:
         try:
             response.close()
         except NameError:
             # HTTPError would be raised, don't preempt it
             pass
     return value
Example #23
0
def edit_docente(docForm):
    """Apply edits from a docente (teacher) form to the stored record.

    Missing form fields fall back to the previous values carried in the
    form's 'olddoc' JSON blob. A changed code deletes and recreates the
    record; otherwise the record is updated in place.

    Returns:
        dict: Modal payload confirming the modification.
    """
    form = DocenteForm(docForm)
    form.is_valid()
    form_values = form.getCleanedData()
    old_values = load_json(docForm['olddoc'])
    old_code = old_values['code']
    # For each field: prefer the cleaned form value, fall back to the old.
    try:
        code = form_values['code']
    except KeyError:
        code = None
    try:
        name = form_values['name'] if form_values[
            'name'] is not None else old_values['name']
    except KeyError:
        name = old_values['name']
    try:
        email = form_values['email'] if form_values[
            'email'] is not None else old_values['email']
    except KeyError:
        email = old_values['email']
    old_docente = DocenteData.getDocente(old_code)
    if old_code != code and code is not None:
        # Identifier changed: delete and recreate the record.
        old_docente.delete()
        DocenteData.createDocente(codigo=code, nombre=name, correo=email)
    else:
        old_docente.nombre = name
        old_docente.correo = email
        old_docente.save()
    # Success modal shown to the user (Spanish UI strings).
    msg = {
        'title': 'Información modificada',
        'content': f'La información del docente ha sido modificada con éxito'
    }
    content = {'showModal': True, 'msg': msg, 'view': 0}
    return content
Example #24
0
    def runTest(self):
        """Optimize self.fn and compare original vs optimized runs.

        Executes every '*:run' directive through both versions, printing
        output mismatches and recording timing regressions in the
        module-level 'slower'/'notfaster' sets.
        """
        outfn = self.outfn
        with open(self.fn, 'rb') as f:
            compiler = Popen([COMPILER_EXE, outfn], stdin = f, stdout = PIPE, stderr = STDOUT)
            output = compiler.communicate()[0]
            if compiler.returncode == -11: # SIGSEGV
                self.fail('Optimizer segfaulted')
            elif compiler.returncode != 0:
                self.fail('Optimizer terminated with signal %d'%(compiler.returncode))

            # Build TM instances from the original and optimized sources.
            f.seek(0)
            origtm = TM(f.read().decode('latin-1'))
            with open(outfn, 'rb') as outf:
                optitm = TM(outf.read().decode('latin-1'))
            f.seek(0)
            for line in f:
                line = line.decode('latin-1')
                if not line.startswith('*:run'):
                    continue
                # '*:run input, data' — parse the directive as a JSON pair.
                inp, dm = load_json('['+line[5:].strip()+']')
                origout, origtime = origtm.run(inp, dm)
                optiout, optitime = optitm.run(inp, dm)
                if origout != optiout:
                    print('FAIL: with input ' + str(inp) + ' and data ' + str(dm) + ', original program outputs ' + str(origout) + ' but optimized version outputs ' + str(optiout))
                if optitime > origtime:
                    slower.add((self.fn, tuple(inp), tuple(dm)))
                elif optitime == origtime:
                    notfaster.add((self.fn, tuple(inp), tuple(dm)))
def verify_node(node):
    """Check that env vars and secrets referenced by *node* exist.

    Scans the JSON-serialized node configuration for $ENV(...) and
    $SECRET(...) references. Missing entries are logged as errors, not
    raised.

    Args:
        node: JSON-serializable node configuration.

    Returns:
        tuple: (variables, secrets) — each a dict, or None when the
        corresponding upload_* flag is disabled.
    """
    node_string = dump_json(node)
    variables = None
    secrets = None
    if upload_variables:
        variables_in_conf = regex_findall(r'\$ENV\((\S*?)\)',
                                          node_string)  # Find env vars
        # was: load_json(open(...).read()) — leaked the file handle
        with open(git_cloned_dir + sync_root + 'node/' + var_file_path) as fh:
            variables: dict = load_json(fh.read())
        for var in variables_in_conf:  # Verify they exist in git repo
            if var not in variables:
                logging.error(
                    f'Missing env var {var} in variables file {var_file_path}')
    if upload_secrets:
        secrets_in_conf = regex_findall(r'\$SECRET\((\S*?)\)',
                                        node_string)  # Find secrets
        vault = Vaulter(vault_url, vault_git_token,
                        vault_mounting_point)  # Create keyvault object
        secrets: dict = vault.get_secrets(
            secrets_in_conf)  # Get the secrets from keyvault
        if vault.verify() is False:  # Verify all secrets exist.
            logging.error(
                f'These secrets do not exist in the vault {vault.get_missing_secrets()}'
            )

    return variables, secrets
Example #26
0
    def __iter__(self):
        """Yield resources parsed from the streamed response.

        Each chunk is scanned line by line; the payloads of 'data' fields
        are concatenated, JSON-decoded, and wrapped in the configured
        resource class. Chunks with no data payload are skipped.
        """
        make_resource = self._resource_class
        extra_args = self._resource_args
        sess = self._session

        for chunk in self._read(self._response):
            payload = ""
            for raw_line in chunk.splitlines():
                if not raw_line.strip():
                    continue  # blank lines carry no information

                # "field<sep>value" — only the first separator counts.
                parts = raw_line.split(self._FIELD_SEPARATOR, 1)
                field, value = parts[0], parts[1]

                if field == 'data':
                    payload += value

            if not payload:
                continue  # nothing to report for this chunk

            # The JSON payload wraps the resource under a 'data' key.
            yield make_resource(load_json(payload).get('data'),
                                sess, **extra_args)
Example #27
0
def owoify(text: str) -> str:
    """ Converts your text to OwO """
    faces = [';;w;;', '^w^', '>w<', 'UwU', '(・`ω´・)', '(´・ω・`)']
    # Apply the regex replacements shipped with the module's assets.
    with open(f"{module_dir}/assets/replacements.json") as handle:
        patterns = load_json(handle)
    for pattern in patterns:
        text = sub(pattern, patterns[pattern], text, flags=IGNORECASE)
    # Stutter the first word: "Hello" -> "H-h-hello".
    tokens = text.split()
    lead = tokens[0][0]
    stutter = f"{lead}-{lead.lower()}-{lead.lower()}"
    tokens[0] = stutter + tokens[0][1:] if len(tokens[0]) > 1 else stutter
    text = " ".join(tokens)
    # Swap every L and R for W, preserving case.
    for src, dst in (('L', 'W'), ('l', 'w'), ('R', 'W'), ('r', 'w')):
        text = text.replace(src, dst)
    # Decorate the last '!', '?' and '.' in the text.
    text = '! {}'.format(choice(faces)).join(text.rsplit('!', 1))
    text = '? OwO'.join(text.rsplit('?', 1))
    text = '. {}'.format(choice(faces)).join(text.rsplit('.', 1))
    text = f"{text} desu"
    # n+vowel -> ny+vowel (and the upper-case variants).
    for vowel in ['a', 'o', 'u', 'A', 'O', 'U']:
        lower_pair = 'n{}'.format(vowel)
        if lower_pair in text:
            text = text.replace(lower_pair, 'ny{}'.format(vowel))
        upper_pair = 'N{}'.format(vowel)
        if upper_pair in text:
            text = text.replace(
                upper_pair,
                'N{}{}'.format('Y' if vowel.isupper() else 'y', vowel))
    return text
Example #28
0
def make_sre16_trials_file(sre_config, trials_file):
    """Build the SRE16 eval trials file from the NIST key TSVs.

    :param sre_config: path to a JSON config with ROOT and LOCATION keys
    :param trials_file: output path; one "enroll test target_type" line per trial

    Fixes: the regex ``'[\\s]+'`` used a non-raw string (deprecated escape);
    the identical header-skipping parse loop appeared three times.
    """
    with open(sre_config, 'r') as f:
        sre_data = load_json(f.read())
    data_root = sre_data['ROOT']
    data_loc = sre_data['LOCATION']['SRE16_EVAL']
    sre_loc = join_path(data_root, data_loc)

    segment_key = join_path(sre_loc, 'docs/sre16_eval_segment_key.tsv')
    language_key = join_path(sre_loc, 'metadata/calls.tsv')
    trial_key = join_path(sre_loc, 'docs/sre16_eval_trial_key.tsv')

    whitespace = re.compile(r'\s+')  # raw string; hoisted out of the loops

    def _rows(path):
        # Yield whitespace-split tokens for every line after the header.
        with open(path, 'r') as f:
            for line in f.readlines()[1:]:
                yield whitespace.split(line.strip())

    # NOTE(review): these two mappings are built but never used below —
    # kept for parity with the original; likely leftover code.
    utt_to_call = {tokens[0]: tokens[1] for tokens in _rows(segment_key)}
    call_to_language = {tokens[0]: tokens[1] for tokens in _rows(language_key)}

    trials_list = []
    for tokens in _rows(trial_key):
        speaker_id = tokens[0]
        file_name = tokens[1]
        target_type = tokens[3]
        trials_list.append('sre16_eval_enroll_{} sre16_eval_test_{} {}'.format(speaker_id, file_name, target_type))

    with open(trials_file, 'w') as f:
        for trial in trials_list:
            f.write('{}\n'.format(trial))
Example #29
0
    def user_data(self, handler, instance):
        """Admin endpoint: on POST, apply the posted add/modify/delete user
        operations; always return the current users and available roles.

        NOTE(review): uses dict.iteritems(), so this snippet is Python 2 only.
        """
        if handler.request.method == "POST":
            data = load_json(handler.request.POST.get('data', "{}"))

            for user in data.get('existing', []):
                state = user.get('state', '')

                if state == 'deleted':
                    try:
                        User.objects.get(pk=user['id']).delete()
                    except (ValueError, User.DoesNotExist):
                        # Unknown or malformed id: nothing to delete.
                        pass
                elif state in ("added", "modified"):
                    try:
                        # Modify path: update an existing user in place.
                        u = User.objects.get(pk=user['id'])
                        u.username = user.get('username').strip()
                        u.first_name = user.get('firstname', '')
                        u.last_name = user.get('lastname', '')
                        u.email = user.get('email', '')
                        ## active?
                    except (ValueError, User.DoesNotExist):
                        # Add path: no such user yet, create one.
                        u = User(username=user.get('username').strip(),
                                 first_name=user.get('firstname', ''),
                                 last_name=user.get('lastname', ''),
                                 email=user.get('email', ''))
                        u.save()

                    # Only reset the password when one was supplied.
                    password = user.get('password', '').strip()
                    if password:
                        u.set_password(password)

                    # Replace the user's roles wholesale with the posted set.
                    u.roles.all().delete()

                    for role, isset in user.get('roles', {}).iteritems():
                        if isset:
                            WheelRole(role=Role(role), user=u).save()

                    u.save()

                ## roles


        # Response payload: full user list plus the available roles.
        users = User.objects.all()
        data = {}
        data['existing'] = [dict(id=u.id,
                              username=u.username,
                              firstname=u.first_name,
                              lastname=u.last_name,
                              email=u.email,
                              active=u.is_active,
                              superuser=u.is_superuser,
                              roles=dict((role.role.id, True)
                                         for role in u.roles.all())
                              ) for u in users]
        data['roles'] = [dict(id=role.id,
                              name=role.name,
                              description=role.description)
                         for role in Role.all()]

        return data
Example #30
0
def submit(recaptcha_response_field, private_key, remoteip):
    """Verify a reCAPTCHA answer with Google's siteverify endpoint.

    Python 2 code (``unicode``, ``urllib``, ``urllib2``).

    :param recaptcha_response_field: value the end user submitted
    :param private_key: the site's reCAPTCHA secret key
    :param remoteip: IP address of the end user
    :returns: RecaptchaResponse with is_valid set accordingly
    """

    # An empty/missing answer can never validate; skip the network call.
    if not (recaptcha_response_field and len(recaptcha_response_field)):
        return RecaptchaResponse(is_valid=False,
                                 error_code='incorrect-captcha-sol')

    def encode_if_necessary(s):
        # urllib.urlencode needs byte strings under Python 2.
        if isinstance(s, unicode):
            return s.encode('utf-8')
        return s

    params = urllib.urlencode({
        'secret':
        encode_if_necessary(private_key),
        'remoteip':
        encode_if_necessary(remoteip),
        'response':
        encode_if_necessary(recaptcha_response_field),
    })

    request = urllib2.Request(
        url="https://%s/recaptcha/api/siteverify" % VERIFY_SERVER,
        data=params,
        headers={
            "Content-type": "application/x-www-form-urlencoded",
            "User-agent": "reCAPTCHA Python"
        })

    # NOTE(review): load_json is handed the raw response object rather than
    # its .read() text — confirm load_json accepts file-like objects here.
    json_values = load_json(urllib2.urlopen(request))
    if json_values["success"]:
        return RecaptchaResponse(is_valid=True)
    else:
        return RecaptchaResponse(is_valid=False,
                                 error_code=json.dumps(json_values))
Example #31
0
    def post(self, request, *args, **kwargs):
        """Add the JSON-encoded 'items' payload to the user's basket order.

        Returns a JsonResponse describing success or the specific failure.

        Bug fix: the error responses for malformed JSON and for invalid
        serializer data were constructed but never returned, so the caller
        fell through to an unrelated (or no) response.
        """
        if not request.user.is_authenticated:
            return JsonResponse({'Status': False, 'Error': 'Log in required'}, status=403)

        items_sting = request.data.get('items')
        if items_sting:
            try:
                items_dict = load_json(items_sting)
            except ValueError:
                # Bug fix: this response was previously built and discarded.
                return JsonResponse({'Status': False, 'Errors': 'Неверный формат запроса'})
            basket, _ = Order.objects.get_or_create(user_id=request.user.id, state='basket')
            objects_created = 0
            for order_item in items_dict:
                order_item.update({'order': basket.id})
                serializer = OrderItemSerializer(data=order_item)
                if serializer.is_valid():
                    try:
                        serializer.save()
                    except IntegrityError as error:
                        return JsonResponse({'Status': False, 'Errors': str(error)})
                    objects_created += 1
                else:
                    # Bug fix: report the validation errors instead of
                    # silently dropping them and continuing.
                    return JsonResponse({'Status': False, 'Errors': serializer.errors})

            return JsonResponse({'Status': True, 'Создано объектов': objects_created})
        return JsonResponse({'Status': False, 'Errors': 'Не указаны все необходимые аргументы'})
Example #32
0
def data(request):
    """Return the cached dataframe identified by ?uuid=..., after applying
    per-column limit filters and optional multi-column ordering.

    Response shape: {'index': [...], 'data': {column: [values]}, 'order': [...]}.
    """
    redis = get_redis_connection('cirrus')

    data_id = request.GET.get('uuid', '')
    df = _get_data(data_id)

    # Column metadata (name/type/limits per column) is stored beside the data.
    key = KEY_DIGEST_PREFIX + data_id
    header = load_json(redis.hget(key, 'header'))

    # Optional "name|lower|upper" parameter updates one column's stored limits.
    new_limits = request.GET.get('new_limits', None)
    if new_limits:
        name, limits_lower, limits_upper = new_limits.split('|')
    else:
        name = None
    for row in header:
        if row['name'] == name:
            # Persist the new limits back to redis for subsequent requests.
            row['limits'] = [limits_lower, limits_upper]
            redis.hset(key, 'header', dump_json(header))

        # NOTE(review): when limits[0] is set the upper bound is ignored
        # (elif), and `lim` would be unbound for a non-float/int column
        # type — confirm stored headers can never hit those cases.
        if row['limits'][0] != '':
            if row['type'].startswith('float'):
                lim = float(row['limits'][0])
            elif row['type'].startswith('int'):
                lim = int(row['limits'][0])
            df = df[df[row['name']] >= lim]
        elif row['limits'][1] != '':
            if row['type'].startswith('float'):
                lim = float(row['limits'][1])
            elif row['type'].startswith('int'):
                lim = int(row['limits'][1])
            df = df[df[row['name']] <= lim]

    print (df)
    # Ordering: comma-separated column names; a "-" prefix means descending.
    orders = request.GET.get('order', '')
    orders = [row for row in orders.split(',') if row]
    if orders:
        sort_func = request.GET.get('sort', 'quicksort')
        ascending = [
            row[0] != '-'
            for row in orders
        ]
        by = [
            row[1:] if row[0] == '-' else row
            for row in orders
        ]
        df.sort_values(by=by, ascending=ascending, inplace=True, kind=sort_func)

    # JSON cannot represent NaN/inf; replace non-finite floats with None.
    columns = {}
    for row in df.columns:
        if str(df[row].dtype).startswith('float'):
            columns[row] = [(i if isfinite(i) else None) for i in df[row]]
        else:
            columns[row] = [i for i in df[row]]

    return {
        'index': [row for row in df.index],
        'data': columns,
        'order': orders
    }
Example #33
0
 def get_oauth2_token(self):
     """Request an app-only OAuth2 bearer token from Twitter.

     Raises Exception when the response is not a bearer-token payload.
     """
     raw = self.conn.oauth2.token(grant_type="client_credentials")
     payload = load_json(raw)
     is_bearer = (payload.get("token_type") == "bearer"
                  and "access_token" in payload)
     if not is_bearer:
         raise Exception("Wrong token type given by twitter, weird : %s" %
                         raw)
     return payload["access_token"]
Example #34
0
 def __call__(self, value):
     """Coerce a JSON str/bytes payload into Python data; pass through anything else."""
     if type(value) not in (str, bytes):
         return value
     try:
         return load_json(value)
     except Exception:
         raise ValueError('Incorrectly formatted JSON provided')
def load_config():
    """Load config.json from the working directory into the module-global
    ``config``; any failure is reported on stdout and leaves it untouched."""
    global config
    try:
        with open("config.json", "r") as cfg:
            raw = cfg.read()
        config = load_json(raw)
    except Exception as err:
        print("Could not open config. Error: " + str(err))
Example #36
0
def search_produto(request):
    """Look up Produto rows matching the 'produto' name in a JSON body.

    :param request: Django request; body is a JSON object with a 'produto' key
    :returns: JsonResponse with the serialized queryset, or
        {'produto': False} when no body was supplied

    Bug fix: a request without a body previously fell off the end and
    returned None (an invalid Django view response); the prepared
    {'produto': False} payload was dead code.
    """
    data = {'produto': False}
    if request.body:
        rget = load_json(request.body)
        produto = models.Produto.objects.filter(
            nomeproduto=rget.get('produto'))
        data = serialize('json', produto)
        return JsonResponse(data, safe=False)
    return JsonResponse(data)
Example #37
0
File: types.py Project: void4/hug
 def __call__(self, value):
     """Parse *value* as JSON when it is text or bytes; other types are
     returned unchanged."""
     if type(value) in (str, bytes):
         try:
             parsed = load_json(value)
         except Exception:
             raise ValueError('Incorrectly formatted JSON provided')
         return parsed
     return value
Example #38
0
def json(value):
    '''Parse a JSON str/bytes payload; non-string values pass through unchanged.'''
    if type(value) not in (str, bytes):
        return value
    try:
        return load_json(value)
    except Exception:
        raise ValueError('Incorrectly formatted JSON provided')
Example #39
0
def json(value):
    '''Accept a JSON formatted data structure: decode str/bytes input,
    hand every other type back untouched.'''
    needs_decoding = type(value) in (str, bytes)
    if needs_decoding:
        try:
            return load_json(value)
        except Exception:
            raise ValueError('Incorrectly formatted JSON provided')
    return value
Example #40
0
def check_file(filename):
    """Parse fixture inXX.vtt and assert it matches the expected outXX.json."""
    # Fixture convention: "inXX.vtt" pairs with result file "outXX.json".
    expected_name = "out%s.json" % (filename[2:-4])
    with open(join(DIR, filename)) as vtt_file:
        actual = parse(vtt_file, True)
    with open(join(DIR, expected_name)) as json_file:
        expected = load_json(json_file)
    assert actual == expected, dumps(actual, indent=4)
Example #41
0
	def __init__(self, fname):
		"""Build the comment tree for a saved reddit thread JSON dump.

		:param fname: path to the reddit "thread + comments" JSON file
		(a two-element listing: [topic, comments]).

		Bug fix: the original read via a bare open() and leaked the file
		handle; a with-block closes it deterministically.
		"""
		with open(fname) as fh:
			data = load_json(fh.read())

		# First listing holds the submission itself.
		topic = Comment(data[0]['data']['children'][0])

		# Second listing holds top-level comments; skip non-comment entries
		# (e.g. "more" stubs) that have no 'body'.
		comments = [Comment(c) for c in data[1]['data']['children'] if 'body' in c['data']]

		topic.children = comments

		self.comment_tree = topic
		self.comment_list = flatten_comment_tree(self.comment_tree)
def get_pypi_hash(baseurl):
    """Return the md5 digest that the PyPI index reports for the current
    distribution's version.

    :param baseurl: base URL of the PyPI-compatible index

    Fixes: ``.next()`` is Python-2-only (the ``next()`` builtin works on
    both 2.6+ and 3); the HTTP response was never closed.
    """
    logger = logging.getLogger(__name__)
    dist = next(find_distributions('.'))
    logger.debug('dist found: {0}'.format(dist))
    target_url = urljoin(baseurl, 'pypi/{0}/json'.format(dist.project_name))
    logger.debug('opening url: {0}'.format(target_url))
    response = urlopen(target_url)
    try:
        pypi_json = load_json(response.read())
    finally:
        response.close()  # close the HTTP response deterministically
    md5_digest = pypi_json['releases'][dist.version][0]['md5_digest']
    logger.debug('pypi md5: {0}'.format(md5_digest))
    return md5_digest
Example #43
0
    def diagnostics(self):
        """Return disk partition information for this device.

        :return: Dictionary parsed from ``sfdisk --json`` output
        :rtype: dict
        """
        result = run(['sfdisk', '--json', self.path])
        # TBD:
        # - check status
        # - log stderr
        return load_json(result.stdout)
    def handle(self, *args, **options):
        """Load every event from the JSON file named by --files_path,
        printing a progress dot for each tenth record."""
        source_path = options['files_path']

        with open(source_path) as source:
            records = load_json(source)

        for index, record in enumerate(records):
            self.load_event(record)
            if index % 10 == 0:
                sys.stdout.write('.')
                sys.stdout.flush()
        sys.stdout.write('\nDone\n')
Example #45
0
def build_features(feature_type, file_in):
    '''
    Build a feature object

    :param feature_type: type of the feature to be built (NOTE: currently
        unused -- a Sidewalk is always constructed)
    :param file_in: the input data base
    :return: the feature object containing the input data base
    '''
    # Bug fix: the bare open() leaked the file handle.
    with open(file_in) as fp:
        features_json = load_json(fp)
    return Sidewalk(features_json)
Example #46
0
def content_json(response):
    """Decode a response body as JSON; an empty body yields an empty dict.

    :type response: HttpResponse
    :rtype: dict
    :raises ValueError: when the body is non-empty but not valid JSON
    """
    body = response.content.decode("utf-8")
    if body:
        try:
            return load_json(body)
        except ValueError:
            raise ValueError('invalid json content in response')
    return {}
Example #47
0
    def get_user_data(self, cookies):
        """Fetch the logged-in Facebook user's profile via the Graph API.

        Reads the fbs_<API_KEY> cookie set by the Facebook JS SDK, pulls the
        OAuth access token out of it, and returns username/email fields.
        Python 2 code (``smart_unicode``, string-interpolated URL).
        """
        API_KEY = str(settings.FB_API_KEY)
        fbs_cookie = cookies['fbs_%s' % API_KEY]
        # The cookie value is a query-string-encoded bundle of session values.
        parsed_fbs = parse_qs(smart_unicode(fbs_cookie))

        # Communicate with the access token to the Facebook oauth interface.
        json = load_json(urlopen('https://graph.facebook.com/me?access_token=%s' % parsed_fbs['access_token'][0]))

        # NOTE(review): '******' looks like a redacted format string (it is
        # %-applied to two values and would raise TypeError as written) --
        # confirm the real template against the upstream source.
        # Return the user data.
        return {
            'username': '******' % (smart_unicode(json['first_name']), smart_unicode(json['last_name'])),
            'email': smart_unicode(json['email']),
        }
Example #48
0
    def from_http_request(cls, http_request):
        """Build a JsonRequest by decoding the body of *http_request*.

        :type http_request: HttpRequest
        :rtype: JsonRequest
        :raises ValueError: if the request body is not valid JSON

        """
        try:
            # Decode inside the try: UnicodeDecodeError is a ValueError too.
            parsed = load_json(http_request.body.decode("utf-8"))
        except ValueError:
            raise ValueError('incorrect json request')
        return cls(http_request, parsed)
Example #49
0
 def login(self, username, password):
     """Authenticate against the API and store the returned access token
     on the test client."""
     credentials = {
         'client_id': CLIENT_ID,
         'client_secret': CLIENT_SECRET,
         'grant_type': 'password',
         'username': username,
         'password': password
     }
     url = quote('/api/users/{}/login/'.format(username))
     response = self.client.post(url, credentials)
     self.assertEqual(response.status_code, status.HTTP_200_OK)
     body = load_json(response.content.decode())
     self.set_token(body['access_token'])
Example #50
0
def get_user_data(cookies):
    """Fetch name fields for the cookie-identified Facebook user via the
    legacy REST API (Users.getInfo); return the first (only) result dict.
    """
    request_data = {
        'method': 'Users.getInfo',
        'api_key': settings.FB_API_KEY, 
        'call_id': time(), 
        'v': '1.0', 
        'uids': cookies[settings.FB_API_KEY + '_user'], 
        'fields': 'name,first_name,last_name',
        'format': 'json',
    }
    
    # Requests must be signed over the parameter set before sending.
    request_data['sig'] = generate_sig(request_data)
    fb_response = urlopen(REST_SERVER, urlencode(request_data))
    #print(fb_response)
    # NOTE(review): load_json receives the response object, not its .read()
    # text -- confirm it accepts file-like objects. Users.getInfo answers
    # with a JSON list; a single uid yields one element.
    return load_json(fb_response)[0]
Example #51
0
    async def __anext__(self):
        """Iterate over SSE lines looking for the next resource.

        Returns a resource built from the first 'data' field found; raises
        StopAsyncIteration when the stream is exhausted.

        Bug fix: the original fell off the end of the method when the stream
        finished without a 'data' line, returning None -- which makes
        ``async for`` yield None forever instead of terminating.
        """
        resource_class = self._resource_class
        resource_args = self._resource_args
        session = self._session
        response = self._response

        async for line in response.content:
            line = line.decode('utf-8')
            # NOTE(review): aiohttp's StreamReader.at_eof is a method; if that
            # is the type here, the uncalled attribute is always truthy and
            # any empty line ends iteration -- confirm intent.
            if len(line) == 0 and response.at_eof:
                raise StopAsyncIteration

            if len(line.strip()) > 0:
                field, data = line.split(self._FIELD_SEPARATOR, 1)
                if field.strip() == 'data':
                    json = load_json(data).get('data')
                    return resource_class(json, session, **resource_args)

        # Stream ended without producing a resource: terminate iteration.
        raise StopAsyncIteration
Example #52
0
def go( input_file_name,
        sampling_number,
        eigenvalue_zero_threshold,
        interesting_signature_parameter,
        periodicity_selection_parameter,
        caution ):
    """Drive the matrix-sampling pipeline: parse the symbolic matrix from the
    JSON input, build the sampler and sample-index iterator, then hand off to
    process_data.

    :param input_file_name: JSON file with "indeterminates" and "matrix" keys
    :param sampling_number: number of sample points per indeterminate
    :param eigenvalue_zero_threshold: forwarded to process_data
    :param interesting_signature_parameter: forwarded to process_data
    :param periodicity_selection_parameter: forwarded to the index iterator maker
    :param caution: forwarded to process_data as a keyword flag
    """

    with open(input_file_name) as f:
        data = load_json(f)

    input_parsing_start_time = process_time()

    # Symbolic variables appearing in the matrix entries.
    indeterminates = symbols(data["indeterminates"])

    # Each matrix entry is stored as a string expression; parse with sympy.
    e_mat = [[parse_expr(s) for s in row] for row in data["matrix"]]

    # TODO: use `logging` module instead of printing to `stderr`:
    #
    #     https://docs.python.org/3/library/logging.html
    #
    print( "Input parsing took {:.3g}s."
           .format(process_time() - input_parsing_start_time),
           file = stderr )

    initialization_start_time = process_time()

    matrix_sampler = make_matrix_sampler( e_mat,
                                          indeterminates,
                                          sampling_number )

    sample_index_iterator_maker = make_sample_index_iterator_maker(
        len(indeterminates),
        sampling_number,
        periodicity_selection_parameter
    )

    # TODO: use `logging` module instead of printing to `stderr`
    print( "Initialization took {:.3g}s."
           .format(process_time() - initialization_start_time),
           file = stderr )

    process_data( matrix_sampler,
                  sample_index_iterator_maker,
                  eigenvalue_zero_threshold,
                  interesting_signature_parameter,
                  caution = caution )
Example #53
0
def build_features(feature_type, file_in):
    '''
    Build a feature object

    :param feature_type: type of the feature to be built: 'sidewalks',
        'curbramps' or 'crossings'
    :param file_in: the input data base
    :return: the feature object, or None for an unrecognized feature_type

    Fixes: the bare open() leaked the file handle; the mutually exclusive
    branches re-tested after a match (now elif).
    '''
    with open(file_in) as fp:
        features_json = load_json(fp)

    if feature_type == 'sidewalks':
        return Sidewalk(features_json)
    elif feature_type == 'curbramps':
        return CurbRamp(features_json)
    elif feature_type == 'crossings':
        return Crossing(features_json)
    return None
Example #54
0
    def get_user_data(self, key):
        """Look up a Facebook user via the legacy REST API (Users.getInfo)
        and map the response to local username/email fields.

        :param key: Facebook uid to query
        """
        request_data = {
            'method': 'Users.getInfo',
            'api_key': settings.FB_API_KEY,
            'call_id': time(),
            'v': '1.0',
            'uids': key,
            'fields': 'name,first_name,last_name,email',
            'format': 'json',
        }

        # Requests must carry a signature computed over the parameters.
        request_data['sig'] = self.generate_signature(request_data)
        # Users.getInfo answers with a JSON list; take the first (only) entry.
        # NOTE(review): load_json receives the response object, not its
        # .read() text -- confirm it accepts file-like objects.
        fb_response = load_json(urlopen(REST_SERVER, urlencode(request_data)))[0]

        return {
            'username': fb_response['first_name'] + ' ' + fb_response['last_name'],
            'email': fb_response['email']
        }
def get_userIds(hostname, token):
    '''Get the list of profiles to send the notification to.

:param str hostname: The name of the host to use.
:param str token: The token to use for authentication.
:raises NotOk: When the page does not return an HTTP 200 status code.
:returns: A list of strings
:rtype: list'''
    fields = {'token': token, 'get': '', }
    status, reason, data = post_multipart(hostname, PROFILE_URI,
                                          fields)
    if status != HTTP_OK:
        raise NotOk('{reason} ({status} <{host}>)'.format(
            reason=reason, status=status, host=hostname))

    return load_json(data)
def send_status(hostname, userId, token):
    '''Send a profile-status notification for a particular person

:param str hostname: The name of the host to use.
:param str userId: The identifier for the person.
:param str token: The token to use for authentication.
:raises NotOk: When the page does not return an HTTP 200 status code.'''
    fields = {
        'profileId': userId,
        'token': token,
        'send': 'Send'}
    status, reason, data = post_multipart(hostname, SEND_STATUS_URI,
                                          fields)
    if status != HTTP_OK:
        raise NotOk('{reason} ({status} <{host}>)'.format(
            reason=reason, status=status, host=hostname))

    return load_json(data)
def get_digest_groups(hostname, token):
    '''Get the list of groups to send the digest to.

:param str hostname: The name of the host to use.
:param str token: The token to use for authentication.
:raises NotOk: When the page does not return an HTTP 200 status code.
:returns: A list of 2-tuples ``(siteId, groupId)``, sorted alphabetically
:rtype: tuple'''
    fields = {'token': token, 'get': '', }
    status, reason, data = post_multipart(hostname, DIGEST_GROUPS_URI,
                                          fields)
    if status != HTTP_OK:
        raise NotOk('{reason} ({status} <{host}>)'.format(
            reason=reason, status=status, host=hostname))

    groups = load_json(data)
    groups.sort(key=itemgetter(0, 1))  # Nicer when sorted by site & group
    return groups
Example #58
0
def do_fetch_nodemap(fqdn):
    """
    Fetches an existing nodemap from a remote esgf node

    fqdn
      Fully-qualified domain name of the server.  Rest of url assumed for Node Manager service
    """
    # Python 2 code (print statements below). Returns True when a node map
    # was fetched and written, False otherwise.
    arr = []

    try:
        # NOTE(review): the file handle is never closed, and the bare
        # except swallows all errors (a missing supernodes file silently
        # yields an empty list).
        f = open ('/esg/config/esgf_supernodes_list.json')
        arr = load_json(f.read())
    except:
        pass

    data = None

    # Try each known supernode (other than ourselves) until one answers.
    for host in arr:


        data_str = ""

        if host != fqdn:

            url = "https://" + host + "/esgf-nm/api?action=sync_node_map_file"

            try:
                # verify=False: TLS certificate validation is skipped for
                # intra-federation hosts.
                resp = requests.get(url, verify=False)
                data_str = resp.text
                data = json.loads(data_str)

            except Exception as e:
                print "An Error occurred:", str(e)


            # Sanity check: a real node map payload is more than 10 chars.
            if not data is None and len(data_str) > 10:
                write_json_file(MAP_FN, str(data_str)) 
                return True


    print "Could not retrieve the node map from another site.  Will generate a fresh one.  Advised that you attempt to retrieve the map in order to have a complete view of member nodes in the federation." 
    return False
Example #59
0
    def get_user_data(self, access_token):
        """Fetch the Facebook profile for *access_token* via the Graph API
        and map it to local user fields, blanking oversized values."""
        query = urlencode(dict(access_token=access_token))
        profile = load_json(urlopen("https://graph.facebook.com/me?" + query))

        # Django's username column holds at most 30 characters; longer
        # names are replaced with an empty string.
        name = smart_unicode(profile.get("name", ""))
        if len(name) > 30:
            name = ''

        # Likewise the legacy 75-character email column: oversized values
        # would break the Django model, so blank them out.
        email = smart_unicode(profile.get('email', ''))
        if len(email) > 75:
            email = ''

        return {
            'id': profile['id'],
            'username': name,
            'email': email,
        }