Ejemplo n.º 1
0
 def get_jsn_description(self):
     """Serialize this user's visible state (username, cart, purchase history) to JSON."""
     description = {
         'username': self.username,
         'cart': self.cart.get_jsn_description(),
         'purchase_history': self.get_jsn_purchase_history()
     }
     return jsons.dumps(description)
Ejemplo n.º 2
0
    def save(self, universe):
        """Write a universe to disk."""
        # Serialize without derived properties, then persist at the path
        # derived from the universe's id.
        serialized = jsons.dumps(universe, strip_properties=True)
        target_path = self._get_path(universe.id_)
        with open(target_path, "w") as out:
            out.write(serialized)
Ejemplo n.º 3
0
    def submitLocation(self, data):
        """Record that the tracker reported in and POST its GPS fix to the collector."""
        # Tag the "last seen" Prometheus gauge with this device id plus the
        # shared label set, stamping the current wall-clock time.
        lbl = {'device_id': self.device_id, **promlabels}
        trackertimegauge.labels(**lbl).set(int(time.time()))

        update = {'device_id': self.device_id}

        # Only attach coordinates when a GPS section is present in the report.
        # NOTE(review): the shape of `data.data.gps` comes from the packet
        # parser defined elsewhere — confirm the field names there.
        if data.data.gps:
            # FIXME: something something with the data.data.gps.cs.latitude, data.data.gps.cs.longitude flags
            update['lat'] = data.data.gps.latitude
            update['lng'] = data.data.gps.longitude

        print(jsons.dumps(update))
        resp = requests.post(ENDPOINT,
                             headers=headers,
                             data=jsons.dumps(update))
        print(resp)
        print(resp.text)
Ejemplo n.º 4
0
def redis_sslyze_scan_domains_to_json(domains_json: str) -> str:
    """Run an sslyze scan for the targets encoded in *domains_json*.

    The answer (a dict with ``results_attached`` and the list of per-domain
    result dicts) is persisted via the sensor collector before being returned
    as a JSON string.
    """
    logger.debug(f"config.SensorCollector.SEND_RESULTS_OVER_HTTP: {SensorCollector.SEND_RESULTS_OVER_HTTP}\n"
         f"os.environ.get('SENSOR_COLLECTOR_SEND_RESULTS_OVER_HTTP') {os.environ.get('SENSOR_COLLECTOR_SEND_RESULTS_OVER_HTTP')}")
    # Decode the incoming JSON into target objects, then scan them.
    twe = object_models.load_json_to_targets_with_extra(domains_json)
    list_of_results_as_json = sslyze_scanner.scan_domains_to_arr_of_dicts(twe)
    answer = {'results_attached': True, 'results': list_of_results_as_json}
    # Persist server-side in addition to returning the payload.
    app.actions.sensor_collector.sslyze_save_scan_results(answer)
    return jsons.dumps(answer)
Ejemplo n.º 5
0
 def get_stores_description(self):
     """Return a JSON string mapping each store name to its store object."""
     # NOTE(review): an earlier comment claimed the values were
     # [store_details] lists, but the value stored is the store object
     # itself — confirm which shape consumers expect.
     stores_description = {
         store.name: store
         for store in self.stores_manager.stores.values()
     }
     return jsons.dumps({
         'ans': True,
         'stores_description': stores_description
     })
Ejemplo n.º 6
0
def test_marshal_unmarshal() -> None:
    """Round-trip a Foo through jsons and verify deep equality.

    The class is resolved from its name via globals() so jsons.loads()
    is exercised with a dynamically looked-up target class.
    """
    foo = Foo("12")
    name = type(foo).__name__
    clazz = globals()[name]
    js = jsons.dumps(foo)
    again = jsons.loads(js, cls=clazz)
    assert DeepDiff(foo, again, truncate_datetime="second") == {}
    # Removed the tautological `assert 4 == 4` — it could never fail and
    # only padded the assertion count.
Ejemplo n.º 7
0
 def dataset_reorder(self, dataset: str, resource_id: str):
     """POST a new resource ordering for *dataset* placing *resource_id* first.

     Returns True on success, False when the POST raises HTTPError.
     (The original fell through and returned None on success, so the
     False failure value was indistinguishable from a successful call.)
     """
     try:
         self.client.post(self.dataset_reorder_url,
                          headers=self.headers,
                          data=jsons.dumps(
                              dict(id=dataset, order=[resource_id])))
     except HTTPError:
         logger.error("error occurred getting resources to sort")
         return False
     return True
Ejemplo n.º 8
0
 def asJson(self,result):
     '''convert the given result to JSON '''
     # Flatten the events of every title into a single list.
     all_events = [event for title in result for event in title.events]
     payload = {"count": len(all_events), "events": all_events}
     # Silence jsons serialization warnings before dumping.
     jsons.suppress_warnings()
     return jsons.dumps(payload, indent=4, sort_keys=True)
Ejemplo n.º 9
0
 def get_basket_description(self, permitted_user: str, product_tup_list):
     """Return a JSON string mapping product name -> amount and pricing details.

     Each tuple is (product, amount, price_after_discount, original_price).
     """
     basket_dict = {
         product.name: {
             "amount": amount,
             "price_after_disc": discounted,
             "original_price": original
         }
         for product, amount, discounted, original in product_tup_list
     }
     return jsons.dumps(basket_dict)
Ejemplo n.º 10
0
    def SendOutput(self, commands):
        """ Send final robot positions to output """
        # Build the payload with a single join instead of repeated string
        # concatenation in a loop (which is quadratic in the worst case).
        output = "".join(jsons.dumps(command) + os.linesep
                         for command in commands)
        # Strip the "Bearing." enum prefix and lower-case the whole payload.
        output = output.replace("Bearing.", "").lower()
        print(output)
Ejemplo n.º 11
0
    def test_poll(self):
        """
        Tests correct message polls
        """
        # Stub the sources listing endpoint to return exactly one source.
        httpretty.register_uri(httpretty.GET,
                               self.sources_url,
                               body=jsons.dumps([REST_SOURCE]),
                               match_querystring=False)

        # Stub the timeseries endpoint of the first sensor with fixture data.
        httpretty.register_uri(
            httpretty.GET,
            f"{self.sources_url}/{SENSORS[0].tdmq_id}/timeseries",
            body=jsons.dumps(REST_TIME_SERIES),
            match_querystring=False)

        consumer = TDMQConsumer(self.client)
        records = consumer.poll(SENSORS[0].type)
        # One source with one time series payload -> one record.
        self.assertEqual(len(records), 1)
Ejemplo n.º 12
0
    def login_request(self, username: str, password: str):
        """Authenticate against the device and store the session token.

        Builds a login_device request, encrypts it with the TP-Link cipher,
        sends it wrapped in a securePassthrough request, then decrypts the
        inner response and saves its token on self.
        """
        # Log only the first few characters of the username. (BUG FIX: the
        # original sliced `username[5:]`, which logged everything *after*
        # the prefix — i.e. most of the username — instead of masking it;
        # the digest line below already uses the [:5] convention.)
        logger.debug(f"Will login using username '{username[:5]}...'")
        digest_username = self.encryption.sha_digest_username(username)
        logger.debug(f"Username digest: ...{digest_username[:5]}")

        # The encoded password is needed three times (params + scrubbing it
        # out of two debug lines) — compute it once.
        encoded_password = helpers.mime_encoder(password.encode("UTF-8"))

        login_device_params = LoginDeviceParams()
        login_device_params.set_password(encoded_password)
        login_device_params.set_username(
            helpers.mime_encoder(digest_username.encode("UTF-8")))

        l_ldp = jsons.dumps(login_device_params).replace(
            encoded_password, "PASSWORD_REMOVED")
        logger.debug(f"Login device params: {l_ldp}")

        login_device_method = LoginDeviceMethod(login_device_params)
        l_ldm = jsons.dumps(login_device_method).replace(
            encoded_password, "PASSWORD_REMOVED")
        logger.debug(f"Login device method: {l_ldm}")

        ldm_encrypted = self.tp_link_cipher.encrypt(
            jsons.dumps(login_device_method))
        logger.debug(f"Login device method encrypted: {ldm_encrypted}")

        secure_passthrough_method = SecurePassthroughMethod(ldm_encrypted)
        logger.debug(
            f"Secure passthrough method: {jsons.dumps(secure_passthrough_method)}"
        )
        # Round-trip through jsons to obtain a plain dict for the HTTP layer.
        request_body = jsons.loads(jsons.dumps(secure_passthrough_method))
        logger.debug(f"Request body: {request_body}")

        response = Http.make_post_cookie(self.url, request_body,
                                         {'TP_SESSIONID': self.cookie_token})
        resp_dict: dict = response.json()
        logger.debug(f"Device responded with: {resp_dict}")

        self.__validate_response(resp_dict)

        # The real payload is encrypted inside result.response.
        decrypted_inner_response = jsons.loads(
            self.tp_link_cipher.decrypt(resp_dict['result']['response']))

        logger.debug(f"Device inner response: {decrypted_inner_response}")

        self.token = decrypted_inner_response['result']['token']
Ejemplo n.º 13
0
    def test_get_sources_count(self):
        """
        Tests source count
        """
        client = Client(self.url)
        # Stub the sources endpoint with a single-source payload.
        httpretty.register_uri(httpretty.GET, client.sources_url,
                               body=jsons.dumps([REST_SOURCE]), match_querystring=False)

        # One source in the stubbed response -> count of 1.
        res = client.sources_count()
        self.assertEqual(res, 1)
Ejemplo n.º 14
0
    def test_no_time_series(self):
        """
        Tests that, in case the server returns no time series, the consumer returns an empty list
        """
        # The sources endpoint reports one source...
        httpretty.register_uri(httpretty.GET,
                               self.sources_url,
                               body=jsons.dumps([REST_SOURCE]),
                               match_querystring=False)

        # ...but its timeseries endpoint returns an empty time axis.
        httpretty.register_uri(
            httpretty.GET,
            f"{self.sources_url}/{SENSORS[0].tdmq_id}/timeseries",
            body=jsons.dumps({"coords": {
                "time": []
            }}))

        consumer = TDMQConsumer(self.client)
        records = consumer.poll(SENSORS[0].type)
        self.assertEqual(len(records), 0)
Ejemplo n.º 15
0
    def saveAs(self):
        """Prompt for a destination file and write the current vertices to it as JSON."""
        name = QFileDialog.getSaveFileName(self, 'Save File')[0]
        # getSaveFileName returns an empty string when the user cancels the
        # dialog; the original then crashed on open(''). Bail out instead.
        if not name:
            return

        # Use the context manager directly so the handle is closed even if
        # serialization or the write raises.
        with open(name, 'w') as f:
            data = {"version": 1, "verts": self.verts}
            res = jsons.dumps(data)
            f.write(res)
            print("Saved data to {}!".format(name))
Ejemplo n.º 16
0
    def handleHeartbeat(self, data):
        """Acknowledge a tracker heartbeat and forward its battery voltage upstream."""
        # Each outgoing packet carries a monotonically increasing serial.
        self.serial += 1
        # Build the binary acknowledgement: 0x78 0x78 start marker,
        # protocol id 0x23, empty data section.
        # NOTE(review): the meaning of length = 1 + 2 + 2 comes from the
        # `packet` construct defined elsewhere — confirm there.
        resp = self.packet.build(
            dict(start=b"\x78\x78",
                 fields=dict(value=dict(length=1 + 2 + 2,
                                        protocol=0x23,
                                        data=bytes(),
                                        serial=self.serial))))
        self.write(resp)

        # Report the battery voltage to the HTTP collector endpoint.
        update = {
            'device_id': self.device_id,
            'battery_voltage': data.data.voltage
        }
        print(jsons.dumps(update))
        resp = requests.post(ENDPOINT,
                             headers=headers,
                             data=jsons.dumps(update))
        print(resp)
Ejemplo n.º 17
0
def run(dataset: str, preprocessing_params: str, bpe_base_repr: Optional[str],
        bpe_n_merges: Optional[int], splitting_file: Optional[str]):
    """Preprocess every parsed file of *dataset* and write the results to disk.

    Reads parsed files from the dataset's parsed directory, applies the
    preprocessing configuration decoded from *preprocessing_params*, writes
    the preprocessed files (plus a preprocessing_types.json marker) under a
    representation-specific directory, and processes files in parallel with
    a process pool while logging progress and an ETA.
    """
    # BUG FIX: the original used the global `args.dataset` here, silently
    # ignoring the `dataset` parameter passed by the caller.
    path_to_dataset = os.path.join(DEFAULT_PARSED_DATASETS_DIR, dataset)
    full_src_dir = os.path.join(path_to_dataset, PARSED_DIR)

    if not os.path.exists(full_src_dir):
        logger.error(f"Dir does not exist: {full_src_dir}")
        exit(3)
    logger.info(f"Reading parsed files from: {os.path.abspath(full_src_dir)}")

    # Decode the encoded config string into a PrepConfig object (rebinds the
    # parameter name, as in the original).
    preprocessing_params = PrepConfig.from_encoded_string(preprocessing_params)
    init_splitting_config(dataset, preprocessing_params, bpe_base_repr,
                          bpe_n_merges, splitting_file)

    # Renamed from `repr` to stop shadowing the builtin.
    repr_name = str(preprocessing_params)

    full_dest_dir = os.path.join(path_to_dataset, REPR_EXTENSION, repr_name)
    full_metadata_dir = os.path.join(path_to_dataset, METADATA_DIR, repr_name)
    logger.info(
        f"Writing preprocessed files to {os.path.abspath(full_dest_dir)}")
    os.makedirs(full_dest_dir, exist_ok=True)
    os.makedirs(full_metadata_dir, exist_ok=True)

    # Record which preprocessing configuration produced this representation.
    with open(os.path.join(full_dest_dir, 'preprocessing_types.json'),
              "w") as f:
        json_str = jsons.dumps(preprocessing_params)
        f.write(json_str)

    # Collect (src, dest, config) work items, mirroring the source tree.
    params = []
    for root, dirs, files in os.walk(full_src_dir):
        for file in files:
            if file.endswith(f".{PARSED_FILE_EXTENSION}"):

                full_dest_dir_with_sub_dir = os.path.join(
                    full_dest_dir, os.path.relpath(root, full_src_dir))
                os.makedirs(full_dest_dir_with_sub_dir, exist_ok=True)
                params.append((os.path.join(root, file),
                               os.path.join(full_dest_dir_with_sub_dir,
                                            file), preprocessing_params))
    files_total = len(params)
    current_file = 0
    start_time = time.time()
    with Pool() as pool:
        it = pool.imap_unordered(preprocess_and_write, params)
        for _ in it:
            current_file += 1
            logger.info(f"Processed {current_file} out of {files_total}")
            time_elapsed = time.time() - start_time
            logger.info(
                f"Time elapsed: {time_elapsed:.2f} s, estimated time until completion: "
                f"{time_elapsed / current_file * files_total - time_elapsed:.2f} s"
            )
Ejemplo n.º 18
0
    async def post(self):
        """Dispatch an incoming checker task to the matching checker coroutine.

        Parses a CheckerTaskMessage from the request body, awaits the
        requested action (putflag/getflag/putnoise/getnoise/havoc) and
        writes a CheckerResultMessage: OK on success, DOWN for
        OfflineException, MUMBLE for BrokenServiceException, and
        INTERNAL_ERROR for any other exception.
        """
        checker = self.settings['checker']
        scoped_logger = self.settings['logger']
        try:
            collection: MotorCollection = self.settings['mongo']['checker_storage']
            checker_task = jsons.loads(self.request.body, CheckerTaskMessage)

            # create LoggerAdapter so every log line carries the task context
            extra = { 'checker_task': checker_task, 'checker': checker }
            scoped_logger = logging.LoggerAdapter(scoped_logger, extra=extra)
            scoped_logger.info("Received task (id={}, teamid={}, method={}, index={})".format(checker_task.runId, checker_task.teamId, checker_task.method, checker_task.flagIndex))

            # call the checker method matching the task's rpc method string
            if checker_task.method == CheckerTaskType.CHECKER_TASK_TYPE_PUTFLAG.value:
                await checker.putflag(scoped_logger, checker_task, collection)
            elif checker_task.method == CheckerTaskType.CHECKER_TASK_TYPE_GETFLAG.value:
                await checker.getflag(scoped_logger, checker_task, collection)
            elif checker_task.method == CheckerTaskType.CHECKER_TASK_TYPE_PUTNOISE.value:
                await checker.putnoise(scoped_logger, checker_task, collection)
            elif checker_task.method == CheckerTaskType.CHECKER_TASK_TYPE_GETNOISE.value:
                await checker.getnoise(scoped_logger, checker_task, collection)
            elif checker_task.method == CheckerTaskType.CHECKER_TASK_TYPE_HAVOC.value:
                await checker.havoc(scoped_logger, checker_task, collection)
            else:
                raise Exception("Unknown rpc method {}".format(checker_task.method))
            scoped_logger.info("Task finished OK (id={}, teamid={}, method={}, index={})".format(checker_task.runId, checker_task.teamId, checker_task.method, checker_task.flagIndex))
            self.write(jsons.dumps(CheckerResultMessage(CheckerTaskResult.CHECKER_TASK_RESULT_OK.value)))
        except OfflineException as ex:
            # OfflineException maps to the DOWN result.
            stacktrace = ''.join(traceback.format_exception(None, ex, ex.__traceback__))
            scoped_logger.warn("Task finished DOWN: {}".format(stacktrace))
            self.write( jsons.dumps(CheckerResultMessage(CheckerTaskResult.CHECKER_TASK_RESULT_DOWN.value)))
            return
        except BrokenServiceException as ex:
            # BrokenServiceException maps to the MUMBLE result.
            stacktrace = ''.join(traceback.format_exception(None, ex, ex.__traceback__))
            scoped_logger.warn("Task finished MUMBLE: {}".format(stacktrace))
            self.write(jsons.dumps(CheckerResultMessage(CheckerTaskResult.CHECKER_TASK_RESULT_MUMBLE.value)))
            return
        except Exception as ex:
            # Anything else is an unexpected failure in the checker itself.
            stacktrace = ''.join(traceback.format_exception(None, ex, ex.__traceback__))
            scoped_logger.error("Task finished INTERNAL_ERROR: {}".format(stacktrace))
            self.write(jsons.dumps(CheckerResultMessage(CheckerTaskResult.CHECKER_TASK_RESULT_INTERNAL_ERROR.value)))
            return
Ejemplo n.º 19
0
def test_demotywatory_parse(files, snapshot, monkeypatch):
    """Parse the demotywatory fixture page and match it against the snapshot."""
    def fake_download(url):
        # Serve fixture files keyed by the last URL segment; any other URL
        # is treated as a download failure.
        key = "demot-{}.html".format(utils.get_last_part_url(url))
        if key not in files:
            raise Exception()
        return files[key]

    monkeypatch.setattr("parsers.demoty.download", fake_download)

    parsed = demoty.parse(files["demotywatory.html"])
    snapshot.assert_match(jsons.dumps(parsed))
Ejemplo n.º 20
0
def count_ballot():
    """HTTP endpoint: build a Ballot from the request JSON and count it.

    Responds 202 when the ballot was counted, 409 otherwise.
    """
    req_data = request.get_json()
    ballot_number = req_data['ballot_number']
    chosen_candidate_id = req_data['chosen_candidate_id']
    voter_comments = req_data['voter_comments']
    voter_national_id = req_data['voter_national_id']

    ballot = Ballot(ballot_number, chosen_candidate_id, voter_comments)
    result = balloting.count_ballot(ballot, voter_national_id)
    # NOTE(review): jsons.dumps() on a single value yields a JSON-encoded
    # (quoted) string inside this dict — confirm clients expect the extra
    # quoting rather than the raw value.
    return {"status": jsons.dumps(result.value)}, \
        status.HTTP_202_ACCEPTED if result == BallotStatus.BALLOT_COUNTED else status.HTTP_409_CONFLICT
Ejemplo n.º 21
0
    def test_create_sources(self):
        """
        Tests correct client answer
        """
        # The stubbed server echoes back the ids of the created sources.
        expected_response = [s.id_ for s in SENSORS]

        client = Client(self.url)
        httpretty.register_uri(httpretty.POST, client.sources_url, body=jsons.dumps(expected_response))

        res = client.create_sources(SENSORS)
        self.assertEqual(res, expected_response)
Ejemplo n.º 22
0
    def test_create_entity_types(self):
        """
        Tests correct client answer
        """
        # The stubbed server echoes back the names of the created types.
        expected_response = [s.name for s in SENSORS_TYPE]

        client = Client(self.url)
        httpretty.register_uri(httpretty.POST, client.entity_types_url, body=jsons.dumps(expected_response))

        res = client.create_entity_types(SENSORS_TYPE)
        self.assertEqual(res, expected_response)
Ejemplo n.º 23
0
    def annotate(self, dataset):
        """Annotate a dataset, reusing a cached JSON result when still fresh.

        Returns the annotated dict ({"measurements": ..., "statistics": ...})
        or None when reading/annotating fails.
        """
        filename_json = os.path.join(
            self.path_annotated,
            os.path.splitext(dataset["filename_relative"])[0] + '.json')

        dataset_annotated = None
        do_annotate = True
        # Reuse the cached JSON only when it is at least as new as the input.
        if self.skip_if_json_exists and os.path.isfile(filename_json):
            t1 = pathlib.Path(dataset["filename"]).stat().st_mtime
            t2 = pathlib.Path(filename_json).stat().st_mtime

            if t1 <= t2:
                logging.debug("using cached result: " + filename_json)
                with open(filename_json, 'r') as infile:
                    dataset_annotated = jsons.loads(infile.read())
                do_annotate = False
            else:
                logging.debug("cached result is outdated")

        if do_annotate:
            filename_log = os.path.join(
                self.path_annotated,
                os.path.splitext(dataset["filename_relative"])[0] + '.log')

            os.makedirs(os.path.dirname(filename_log), exist_ok=True)
            with open(filename_log, "w") as log:
                try:
                    measurements, statistics = self.importer.read(
                        dataset["filename"],
                        user_id=dataset["user_id"],
                        dataset_id=dataset["filename_relative"],
                        log=log)
                    measurements = self.annotator.annotate(measurements)

                    measurements = self.measurement_filter.filter(measurements,
                                                                  log=log)

                    dataset_annotated = {
                        "measurements": measurements,
                        "statistics": statistics
                    }
                    # write out the cache file for the next run
                    os.makedirs(os.path.dirname(filename_json), exist_ok=True)
                    with open(filename_json, 'w') as outfile:
                        outfile.write(jsons.dumps(dataset_annotated))

                # The original duplicated two byte-identical handlers for
                # ValueError and IOError; one merged handler suffices.
                except (ValueError, IOError) as e:
                    print("FAILED: " + str(e))
                    dataset_annotated = None

        return dataset_annotated
Ejemplo n.º 24
0
 def publish(self, event_name: str, msg: Any) -> Any:
     """Serialize *msg* and publish it to the fanout exchange mapped to *event_name*."""
     exchange_name = self.event_map.get_exchange_name(event_name)
     queue_name = self.event_map.get_queue_name(event_name)
     payload = jsons.dumps(msg)
     channel = self.connection.channel()
     # Declare the durable fanout exchange idempotently before publishing.
     channel.exchange_declare(exchange=exchange_name,
                              exchange_type='fanout',
                              durable=True)
     log_info('PUBLISH EVENT', exchange=exchange_name,
              routing_key=queue_name, body=payload)
     channel.basic_publish(exchange=exchange_name,
                           routing_key=queue_name,
                           body=payload.encode())
Ejemplo n.º 25
0
 def write_message(self, message):
     """Serialize *message* and write it, newline-terminated, to KataGo's stdin."""
     if self.output:
         print(f'  KataGo::write_message() called...')
     if not self._ready:
         raise Exception('KataGo is not ready!  Learn some damn patience.')
     # Null fields are stripped before sending; KataGo expects one
     # JSON message per line.
     command = jsons.dumps(message, strip_nulls=True) + os.linesep
     encoded = command.encode('utf-8')
     self._process.stdin.write(encoded)
     self._process.stdin.flush()
     if self.output:
         print(f'  Passed message to KataGo: {encoded}')
Ejemplo n.º 26
0
    def test_dumps_with_class_method(self):
        """jsons.dumps must serialize instance state only, ignoring classmethods."""
        class A:
            def __init__(self):
                self.name = 'A'

            @classmethod
            def my_method(cls):
                pass

        sdumped = jsons.dumps(A())
        s = json.dumps({'name': 'A'})
        # Compare parsed JSON instead of eval()-ing the strings — same
        # comparison without executing arbitrary string content.
        self.assertEqual(json.loads(s), json.loads(sdumped))
Ejemplo n.º 27
0
def api_get_user_profile():
    """Return the authenticated user's profile (username, API key, email) as JSON with HTTP 200."""
    # Raises when the JWT is missing/invalid, so code below runs authenticated.
    user_id = authentication_utils.get_user_id_from_jwt_or_exception()

    res: db_models.User = db_models.db.session \
        .query(db_models.User) \
        .get(user_id)

    return jsons.dumps({
        "username": res.username,
        "main_api_key": res.main_api_key,
        "email": res.email
    }), 200
Ejemplo n.º 28
0
 def to_json(self, strip_privates: bool = True) -> str:
     """
     Serialize to json
     :param strip_privates: strip private variables
     :return: the json representation of this object
     """
     try:
         # Null fields are always stripped; private ones only on request.
         serialized = dumps(self, strip_privates=strip_privates, strip_nulls=True)
         return cast(str, serialized)
     except JsonsError:
         # Fall back to the sentinel error string on serialization failure.
         return JSON_PARSE_ERROR
Ejemplo n.º 29
0
def scrap(url):
    """Scrape a Polish/English vocabulary table from *url* and write it to a JSON file."""
    # BUG FIX: the original had a duplicated assignment
    # (`bsObj = bsObj = soup.BeautifulSoup(...)`) and never closed the
    # HTTP response; the context manager handles both.
    with urlopen(url) as response:
        bsObj = soup.BeautifulSoup(response.read(), 'lxml')
    model = Model()
    model.language1 = "polski"
    model.language2 = "angielski"
    file_name = create_file_name(bsObj.head.title.getText())
    with io.open(file_name, 'w+', encoding='utf-8') as openedFile:
        for tableRow in bsObj.find_all('tr'):
            word = get_word_and_save_photo(tableRow, file_name)
            model.words.append(word)
        openedFile.write(jsons.dumps(model))
Ejemplo n.º 30
0
 def get_jsn_description(self):
     """Serialize the user's full state (cart, purchases, managed stores, notifications) to JSON."""
     description = {
         'username': self.username,
         'cart': self.cart.get_jsn_description(),
         'purchase_history': self.get_jsn_purchase_history(),
         'managed_stores': self.get_json_managed_stores(),
         'notifications': self.get_json_notifications()
     }
     return jsons.dumps(description)