def test_do_get_alarm_definitions(self):
        res = mock.Mock()
        req = mock.Mock()

        req_result = mock.Mock()

        req_result.json.return_value = json.loads(response_str)
        req_result.status_code = 200

        with mock.patch.object(requests, 'get', return_value=req_result):
            self.dispatcher_get.do_get_alarm_definitions(
                req, res, id="72df5ccb-ec6a-4bb4-a15c-939467ccdde0")

        # test that the response code is 200
        self.assertEqual(res.status, getattr(falcon, 'HTTP_200'))
        obj = json.loads(res.body)
        self.assertEqual(obj[0]['id'],
                         "72df5ccb-ec6a-4bb4-a15c-939467ccdde0")
        self.assertEqual(obj[0]['name'], "CPU usage test")
        self.assertEqual(obj[0]['alarm_actions'],
                         "c60ec47e-5038-4bf1-9f95-4046c6e9a719")
        self.assertEqual(obj[0]['undetermined_actions'],
                         "c60ec47e-5038-4bf1-9t95-4046c6e9a759")
        self.assertEqual(obj[0]['ok_actions'],
                         "c60ec47e-5038-4bf1-9f95-4046cte9a759")
        self.assertEqual(obj[0]['match_by'], "hostname")
        self.assertEqual(obj[0]['severity'], "LOW")
        self.assertEqual(obj[0]['expression'],
                         "max(cpu.usage{os=linux},600)>15")
        self.assertEqual(obj[0]['description'], "Max CPU 15")
        self.assertEqual(len(obj), 1)
Example #2
    def setUp(self):
        self.CONF = self.useFixture(fixture_config.Config()).conf
        self.CONF.set_override('doc_type', 'fake', group='alarms')
        self.CONF.set_override('uri', 'fake_es_uri', group='es_conn')
        super(TestAlarmDispatcher, self).setUp()

        self.dispatcher_get = (
            alarms.AlarmDispatcher({}))

        self.dispatcher_get_by_id = (
            alarms.AlarmDispatcher({}))

        self.dispatcher_put = (
            alarms.AlarmDispatcher({}))

        self.dispatcher_delete = (
            alarms.AlarmDispatcher({}))

        dir_path = os.path.dirname(os.path.realpath(__file__))
        alarms_data_json = open(os.path.join(dir_path,
                                             'test_alarms_data')
                                ).read().replace('\n', '')
        self.data = json.loads(alarms_data_json)
        get_alarms_data = open(os.path.join(dir_path,
                                            'test_get_alarms_data')
                               ).read().replace('\n', '')
        self.get_alarms_data = json.loads(get_alarms_data)
Example #3
    def testEncodeUnicodeBMP(self):
        s = u'\U0001f42e\U0001f42e\U0001F42D\U0001F42D' # 🐮🐮🐭🐭
        encoded = ujson.dumps(s)
        encoded_json = json.dumps(s)
		
        if len(s) == 4:
            self.assertEqual(len(encoded), len(s) * 12 + 2)
        else:
            self.assertEqual(len(encoded), len(s) * 6 + 2) 
          
        self.assertEqual(encoded, encoded_json)
        decoded = ujson.loads(encoded)
        self.assertEqual(s, decoded)

        # ujson outputs a UTF-8 encoded str object
        if PY3:
            encoded = ujson.dumps(s, ensure_ascii=False)
        else:
            encoded = ujson.dumps(s, ensure_ascii=False).decode("utf-8")

        # json outputs a unicode object
        encoded_json = json.dumps(s, ensure_ascii=False)
        self.assertEqual(len(encoded), len(s) + 2) # original length + quotes
        self.assertEqual(encoded, encoded_json)
        decoded = ujson.loads(encoded)
        self.assertEqual(s, decoded)
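A side note on the 12-versus-6 arithmetic above (an illustrative sketch using the standard json module, which escapes non-BMP characters the same way): with ensure_ascii, each character outside the Basic Multilingual Plane becomes a twelve-character surrogate-pair escape, while on narrow builds len(s) already counts the surrogates, so each code unit escapes to six characters.

import json

# One cow emoji: on a wide build len(u'\U0001f42e') == 1, and the default
# ensure_ascii escaping turns it into a surrogate-pair escape of 12 characters,
# plus the two surrounding quotes.
escaped = json.dumps(u'\U0001f42e')
assert escaped == '"\\ud83d\\udc2e"'
assert len(escaped) == 1 * 12 + 2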
def main(j, args, params, tags, tasklet):
    doc = args.doc
    nid = args.getTag('nid')
    nidstr = str(nid)
    rediscl = j.clients.redis.getByInstance('system')

    out = list()

    out.append('||Port||Status||Memory Used||')

    rstatus = rediscl.hget('healthcheck:monitoring', 'results')
    errors = rediscl.hget('healthcheck:monitoring', 'errors')
    rstatus = ujson.loads(rstatus) if rstatus else dict()
    errors = ujson.loads(errors) if errors else dict()

    for data in [rstatus, errors]:
        if nidstr in data:
            if 'redis' in data.get(nidstr, dict()):
                rnstatus = data[nidstr].get('redis', dict())
                for stat in rnstatus:
                    if 'state' not in stat:
                        continue
                    state = j.core.grid.healthchecker.getWikiStatus(stat.get('state', 'UNKNOWN'))
                    usage = "%s / %s" % (stat.get('memory_usage', ''), stat.get('memory_max', ''))
                    out.append('|%s|%s|%s|' % (stat.get('port', -1), state, usage))

    out = '\n'.join(out)

    params.result = (out, doc)
    return params
Example #5
 def test_decodeArrayFaultyUnicode(self):
     try:
         ujson.loads('[18446744073709551616]')
     except ValueError:
         pass
     else:
         assert False, "expected ValueError"
Example #6
 def test_decodeArrayWithBigInt(self):
     try:
         ujson.loads('[18446098363113800555]')
     except ValueError:
         pass
     else:
         assert False, "expected ValueError"
Example #7
 def test_ReadBadObjectSyntax(self):
     try:
         ujson.loads('{"age", 44}')
     except ValueError:
         pass
     else:
         assert False, "expected ValueError"
Example #8
 def file_reader(self, options_filename=None, json_string=None):
     """
     Read arbitrary input file(s) or standard Python str. When passing file_reader() a 
     JSON string, assign it to the json_string arg. Yields a tuple of (line number, record).
     """
     line_number = 0
     if json_string is not None: 
         hook = self.string_hook 
         options_filename = json_string 
     else:
         hook = fileinput.hook_compressed
     for r in fileinput.FileInput(options_filename, openhook=hook):  
         line_number += 1
         try:
             recs = [json.loads(r.strip())]
         except ValueError:
             try:
                 # maybe a missing line feed?
                 recs = [json.loads(x) for x in r.strip().replace("}{", "}GNIP_SPLIT{")
                     .split("GNIP_SPLIT")]
             except ValueError:
                 sys.stderr.write("Invalid JSON record (%d) %s, skipping\n"
                     %(line_number, r.strip()))
                 continue
         for record in recs:
             if len(record) == 0:
                 continue
             # hack: let the old source modules still have a self.cnt for error msgs
             self.cnt = line_number
             yield line_number, record
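The missing-line-feed recovery in file_reader() is the interesting part; here is a minimal standalone sketch of that trick (independent of the surrounding reader class and its string_hook):

import json

# Two JSON objects that lost their separating newline are split on "}{"
# and re-parsed one by one, mirroring the fallback branch above.
raw = '{"id": 1}{"id": 2}'
try:
    recs = [json.loads(raw)]
except ValueError:
    recs = [json.loads(x)
            for x in raw.replace("}{", "}GNIP_SPLIT{").split("GNIP_SPLIT")]
assert [r["id"] for r in recs] == [1, 2]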
Example #9
def post(request):
    """Sets a key to a value on the currently logged in users preferences

    :param key: Key to set
    :type key: str
    :param val: Value to set
    :type val: primitive
    :returns: json
    """
    data = request.POST or json.loads(request.body)['body']
    key = data.get('key', None)
    val = data.get('val', None)
    res = Result()
    if key is not None and val is not None:
        obj, created = UserPref.objects.get_or_create(user=request.user)
        if created:
            obj.data = json.dumps(DefaultPrefs.copy())
            obj.save()
        try:
            val = json.loads(val)
        except (TypeError, ValueError):
            pass
        obj.setKey(key, val)
        obj.save()
        res.append(obj.json())

    return JsonResponse(res.asDict())
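A hedged client-side sketch of how this view might be called (the URL is hypothetical and authentication is ignored); the handler accepts either form data or a JSON body wrapped in a "body" key:

import json
import requests

# Form-encoded variant: populates request.POST in the view above.
requests.post('https://example.com/userpref/', data={'key': 'theme', 'val': 'dark'})

# JSON variant: request.POST is empty, so the view falls back to
# json.loads(request.body)['body'].
requests.post('https://example.com/userpref/',
              data=json.dumps({'body': {'key': 'theme', 'val': 'dark'}}))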
 def handle(self, *args, **options):
     what = get_what_client(lambda: None, True)
     with open('what_manager2_torrents.jsonl', 'rb') as torrents_input:
         for line in torrents_input:
             data = ujson.loads(line)
             info = ujson.loads(data['what_torrent']['info'])
             what_torrent_id = info['torrent']['id']
             what_group_id = info['group']['id']
             try:
                 TorrentGroupMapping.objects.get(what_group_id=what_group_id)
                 continue
             except TorrentGroupMapping.DoesNotExist:
                 pass
             try:
                 migration_status = WhatTorrentMigrationStatus.objects.get(
                     what_torrent_id=what_torrent_id)
             except WhatTorrentMigrationStatus.DoesNotExist:
                 continue
             if migration_status.status != WhatTorrentMigrationStatus.STATUS_COMPLETE:
                 continue
             pth_torrent_id = migration_status.pth_torrent_id
             if not pth_torrent_id:
                 continue
             try:
                 pth_torrent = what.request('torrent', id=pth_torrent_id)['response']
             except BadIdException:
                 continue
             pth_group_id = pth_torrent['group']['id']
             print('Saving {} mapping to {}'.format(what_group_id, pth_group_id))
             TorrentGroupMapping.objects.create(
                 what_group_id=what_group_id,
                 pth_group_id=pth_group_id,
             )
Example #11
    def test_set_active(self):
        # type: () -> None
        self.login("*****@*****.**")
        client = 'website'

        self.client_post("/json/users/me/presence", {'status': 'idle'})
        result = self.client_post("/json/get_active_statuses", {})

        self.assert_json_success(result)
        json = ujson.loads(result.content)
        self.assertEqual(json['presences']["*****@*****.**"][client]['status'], 'idle')

        email = "*****@*****.**"
        self.login("*****@*****.**")
        self.client_post("/json/users/me/presence", {'status': 'idle'})
        result = self.client_post("/json/get_active_statuses", {})
        self.assert_json_success(result)
        json = ujson.loads(result.content)
        self.assertEqual(json['presences'][email][client]['status'], 'idle')
        self.assertEqual(json['presences']['*****@*****.**'][client]['status'], 'idle')

        self.client_post("/json/users/me/presence", {'status': 'active'})
        result = self.client_post("/json/get_active_statuses", {})
        self.assert_json_success(result)
        json = ujson.loads(result.content)
        self.assertEqual(json['presences'][email][client]['status'], 'active')
        self.assertEqual(json['presences']['*****@*****.**'][client]['status'], 'idle')
def test_ia_search_itemlist(capsys):
    with responses.RequestsMock(assert_all_requests_are_fired=True) as rsps:
        url1 = ('{0}//archive.org/services/search/beta/scrape.php'
                '?q=collection%3Aattentionkmartshoppers'
                '&REQUIRE_AUTH=true&size=10000'.format(protocol))
        url2 = ('{0}//archive.org/services/search/beta/scrape.php?'
                'cursor=W3siaWRlbnRpZmllciI6IjE5NjEtTC0wNTkxNCJ9XQ%3D%3D'
                '&REQUIRE_AUTH=true&q=collection%3Aattentionkmartshoppers'
                '&size=10000'.format(protocol))
        rsps.add(responses.POST, url1,
                 body=TEST_SCRAPE_RESPONSE,
                 status=200,
                 match_querystring=True)
        _j = json.loads(TEST_SCRAPE_RESPONSE)
        del _j['cursor']
        _r = json.dumps(_j)
        rsps.add(responses.POST, url2,
                 body=_r,
                 status=200,
                 match_querystring=True)

        sys.argv = ['ia', 'search', 'collection:attentionkmartshoppers', '--itemlist']
        try:
            ia.main()
        except SystemExit as exc:
            assert not exc.code

    out, err = capsys.readouterr()
    j = json.loads(TEST_SEARCH_RESPONSE)
    assert len(out.split()) == 200
Example #13
 def loads(*args, **kwargs):
     try:
         if json.__name__ == "ujson":
             return json.loads(*args, **kwargs)
         return json.loads(strict=False, *args, **kwargs)
     except ValueError:
         raise ResultParseError("The JSON result could not be parsed")
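A hedged usage sketch of the wrapper above; loads and ResultParseError are assumed to be importable from the same module:

try:
    measurement = loads('{"af": 4, "rtt": 12.3}')
except ResultParseError:
    # Malformed payloads surface as one predictable error, regardless of
    # whether ujson or the stdlib json backs the wrapper.
    measurement = None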
def main(JSONinput):

    query = json.loads(JSONinput)
    # Set up the query.
    p = SQLAPIcall(query)

    # run the query.
    resp = p.execute()

    if query['method'] == 'data' and 'format' in query and query['format'] == 'json':
        try:
            resp = json.loads(resp)
        except (TypeError, ValueError):
            resp = dict(status="error", code=500,
                        message="Internal error: server did not return json")

        # Print appropriate HTML headers
        if 'status' in resp and resp['status'] == 'error':
            code = resp['code'] if 'code' in resp else 500
            headers(query['method'], errorcode=code)
        else:
            headers(query['method'])
        print(json.dumps(resp))
    else:
        headers(query['method'])
        print(resp)

    return True
Example #15
def from_json(text):
   # Skip lines containing nothing but whitespace and lines that contain just
   # a hexadecimal number. The latter arises when the tweet stream is using
   # "Transfer-Encoding: chunked" and the chunk delimeters made their way into
   # the output file (i.e., before issue #92 was fixed).
   if (re.search(r'^[0-9a-f\s]*$', text)):
      raise Nothing_To_Parse_Error()
   try:
      j = json.loads(text.replace(r'\\"', r'\"'))  # Isaac: was having problems parsing PostgreSQL JSONs
   except ValueError:
      j = json.loads(text)  # raises ValueError on parse failure
   if ('delete' in j):
      return Deletion_Notice.from_json(j)
   elif ('limit' in j):
      return Limit_Notice.from_json(j)
   elif ('scrub_geo' in j):
      return Scrub_Geo_Notice.from_json(j)
   elif ('status_withheld' in j):
      return Status_Withheld.from_json(j)
   elif ('text' in j):
      return Tweet.from_json(j)
   elif ('warning' in j):
      return Warning.from_json(j)
   else:
      raise Unknown_Object_Error()
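A hedged usage sketch for from_json(): classify raw stream lines from a capture file (the filename is hypothetical; Tweet, Deletion_Notice, and Nothing_To_Parse_Error come from the same module as the function above):

tweets = deletions = 0
with open('stream_capture.json') as fp:
    for line in fp:
        try:
            obj = from_json(line)
        except Nothing_To_Parse_Error:
            continue  # blank line or chunk delimiter
        if isinstance(obj, Tweet):
            tweets += 1
        elif isinstance(obj, Deletion_Notice):
            deletions += 1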
Example #16
    def __load_settings_from_file(self):
        """
        Loads settings info from the settings json file

        :returns: True if the settings info is valid
        :rtype: boolean
        """
        filename = self.get_base_path() + 'settings.json'

        if not exists(filename):
            raise OneLogin_Saml2_Error(
                'Settings file not found: %s',
                OneLogin_Saml2_Error.SETTINGS_FILE_NOT_FOUND,
                filename
            )

        # In the PHP toolkit this is a PHP file that gets included directly,
        # rather than a JSON file.
        with open(filename, 'r') as json_data:
            settings = json.loads(json_data.read())

        advanced_filename = self.get_base_path() + 'advanced_settings.json'
        if exists(advanced_filename):
            with open(advanced_filename, 'r') as json_data:
                settings.update(json.loads(json_data.read()))  # Merge settings

        return self.__load_settings_from_dict(settings)
Example #17
        def notify(result):
            stream = result.get("events")
            if stream:
                max_position = stream["position"]
                for row in stream["rows"]:
                    position = row[0]
                    internal = json.loads(row[1])
                    event_json = json.loads(row[2])
                    event = FrozenEvent(event_json, internal_metadata_dict=internal)
                    extra_users = ()
                    if event.type == EventTypes.Member:
                        extra_users = (event.state_key,)
                    notifier.on_new_room_event(
                        event, position, max_position, extra_users
                    )

            notify_from_stream(
                result, "push_rules", "push_rules_key", user="******"
            )
            notify_from_stream(
                result, "user_account_data", "account_data_key", user="******"
            )
            notify_from_stream(
                result, "room_account_data", "account_data_key", user="******"
            )
            notify_from_stream(
                result, "tag_account_data", "account_data_key", user="******"
            )
            notify_from_stream(
                result, "receipts", "receipt_key", room="room_id"
            )
            notify_from_stream(
                result, "typing", "typing_key", room="room_id"
            )
Example #18
    def create_service_request(self, path, method=_DEFAULT_SERVICE_REQUEST_METHOD, arguments={}, msg={}, headers={}):
        """ path - string, used to route to proper handler
            method - used to map to the proper method of the handler
            arguments - dict, used within the method call if needed
            These are not used anymore, but I feel they belong. 

            If not to only hold the original request
                headers - dict, contains the accepted method to call on handler
                msg - dict, the body of the message to process
        """
        if not isinstance(headers, dict):
            headers = json.loads(headers)
        if not isinstance(msg, dict):
            msg = json.loads(msg)

        data = {
            # Not sure if this is the socket_id, but it is used to return the message to the originator
            "origin_sender_id": self.message.sender,
            # This is the connection id used by the originator and is needed for Mongrel2
            "origin_conn_id": self.message.conn_id,
            # This is the socket address for the reply to the client
            "origin_out_addr": self.application.msg_conn.out_addr,
            # used to route the request
            "path": path,
            "method": method,
            "arguments": arguments,
            # a dict, right now only METHOD is required and must be one of: ['get', 'post', 'put', 'delete','options', 'connect', 'response', 'request']
            "headers": headers,
            # a dict, this can be whatever you need it to be to get the job done.
            "body": msg,
        }
        return ServiceRequest(**data)
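A hedged usage sketch, as it might look inside another method of the same handler class (the path and payload are made up for illustration):

        # Wrap an outgoing call so the original Mongrel2 connection details
        # travel with the message and the reply can find its way back.
        service_request = self.create_service_request(
            '/image/resize',
            method='post',
            msg={'url': 'http://example.com/cat.png', 'width': 128},
            headers={'METHOD': 'post'},
        )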
Example #19
def get_triplet_data(raw_json):
    triplets = []
    for entry in raw_json['data']:
        if 'custom_triplet' in entry['trialdata']['trial_type']:
            triplet = {}
            triplet['stimuli'] = ujson.loads(entry['trialdata']['stimulus'])
            triplet['stimuli'][1] = triplet['stimuli'][1][24:-4]
            triplet['stimuli'][0] = triplet['stimuli'][0][24:-4]
            triplet['stimuli'][2] = triplet['stimuli'][2][24:-4]
            triplet['pressed'] = ujson.loads(entry['trialdata']['pressed'])
            triplet['response'] = ujson.loads(entry['trialdata']['response'])
            triplet['type'] = entry['trialdata']['type']
            if (triplet['stimuli'][0] == triplet['stimuli'][1] or triplet['stimuli'][0] == triplet['stimuli'][2] or triplet['stimuli'][1] == triplet['stimuli'][2]):
                triplet['type'] = '3'
                if (triplet['response'][0]==triplet['response'][1]):
                    triplet['pressed'] = ['true','true']
                else:
                    triplet['pressed'] = ['false','false']
            if len(triplet['response']) > 2 and triplet['type'] != '3':
                triplet['response'] = list(set(triplet['response']))
            if len(triplet['response']) > 2 and triplet['type'] == '3':
                triplet['response'] = triplet['response'][:2]
            triplet['response'][0] = triplet['response'][0][24:-4]
            triplet['response'][1] = triplet['response'][1][24:-4]
            triplet['react_time'] = entry['trialdata']['rt']
            triplet['trial_index_global'] = entry['trialdata']['trial_index_global']
            triplet['set'] = entry['trialdata']['set']
            triplets.append(triplet)
    return triplets
Example #20
def etr_circ():

    try:
        data = loads(request.args.get('q', '{}'))
    except (TypeError, ValueError, OverflowError):
        return jsonify_status_code(400, message='Unable to decode data')

    if not data:
        query = World_Circonscriptions.query.filter(
                    World_Circonscriptions.cir_num != None).all()
    else:
        query = World_Circonscriptions.query.filter(World_Circonscriptions.cir_num.in_(data['cirid'])).all()
    
    geojs = {"crs" : None, "type" : "FeatureCollection", "features" : list()}
    
    for circo in query:
        geomjs = db.session.scalar(circo.geom.geojson)
        geompy = loads(geomjs)
        geojs['features'].append(
            {'geometry': geompy, 
            'type':'Feature',
            'id': circo.gid, 
            'properties': {'name' : circo.name, 'cir_num' : circo.cir_num}
        })
    
    return Response(dumps(geojs), mimetype='application/json')
Example #21
 def test_tornado_auth(self) -> Generator[str, 'TornadoTestCase', None]:
     user_profile = self.example_user('hamlet')
     cookies = self._get_cookies(user_profile)
     cookie_header = self.get_cookie_header(cookies)
     ws = yield self.ws_connect('/sockjs/366/v8nw22qe/websocket', cookie_header=cookie_header)
     yield ws.read_message()
     queue_events_data = self._get_queue_events_data(user_profile.email)
     request_id = ':'.join((queue_events_data['response']['queue_id'], '0'))
     response = yield self._websocket_auth(ws, queue_events_data, cookies)
     self.assertEqual(response[0][0], 'a')
     self.assertEqual(
         ujson.loads(response[0][1:]),
         [
             {
                 "type": "ack",
                 "req_id": request_id
             }
         ])
     self.assertEqual(response[1][0], 'a')
     self.assertEqual(
         ujson.loads(response[1][1:]),
         [
             {"req_id": request_id,
              "response": {
                  "result": "success",
                  "status_inquiries": {},
                  "msg": ""
              },
              "type": "response"}
         ])
     yield self.close(ws)
Example #22
def filterObjects(request, obj_id):
    """
    Filters Gallery for the requested ImageVideo objects.  Returns a Result object with 
    serialized objects
    """
    print(obj_id)
    obj = Gallery.objects.get(pk=obj_id)

    if request.user.is_anonymous() and obj.security != Gallery.PUBLIC:
        res = Result()
        res.isError = True
        res.message = 'This gallery is not public'

        return JsonResponse(res)

    
    tags = json.loads(request.GET.get('filters', '[[]]'))
    rng = request.GET.get('rng', None)
    more = json.loads(request.GET.get('more', 'false'))
    models = request.GET.get('models', 'image,video')
    if models == '':
        models = 'image,video'

    tags = filter(None, tags)

    models = [ContentType.objects.get(app_label='frog', model=x) for x in models.split(',')]

    return _filter(request, obj, tags=tags, rng=rng, models=models, more=more)
Example #23
    def test_get_samples(self):
        res = mock.Mock()
        req = mock.Mock()

        def _side_effect(arg):
            if arg == 'name':
                return 'tongli'
            elif arg == 'dimensions':
                return 'key1:100, key2:200'
        req.get_param.side_effect = _side_effect

        req_result = mock.Mock()

        req_result.json.return_value = json.loads(self.response_str)
        req_result.status_code = 200

        with mock.patch.object(requests, 'post', return_value=req_result):
            self.dispatcher.get_samples(req, res)

        # test that the response code is 200
        self.assertEqual(res.status, getattr(falcon, 'HTTP_200'))
        obj = json.loads(res.body)
        self.assertEqual(obj[0]['meter'], 'BABMGD')
        self.assertEqual(obj[0]['id'], 'AVOziWmP6-pxt0dRmr7j')
        self.assertEqual(obj[0]['type'], 'metrics')
        self.assertEqual(obj[0]['user_id'],
                         'efd87807-12d2-4b38-9c70-5f5c2ac427ff')
        self.assertEqual(obj[0]['project_id'],
                         '35b17138-b364-4e6a-a131-8f3099c5be68')
        self.assertEqual(obj[0]['timestamp'], 1461337094000)
        self.assertEqual(obj[0]['volume'], 4)
        self.assertEqual(len(obj), 1)
    def init_thread_proc(self):
        try:
            if os.path.isfile(DUMP_PATH + '/' + DUMP_FILE):
                fpdump = open(DUMP_PATH + '/' + DUMP_FILE)
                totaldict = pickle.load(fpdump)
                fpdump.close()
                self._spellcitydict = totaldict['spellcitydict']
                self._citydict = totaldict['citydict']
            else:
                self.get_city_service()
        except:
            pass

        self._selfparmeter = sys.argv
        
        post_service_data('192.168.0.24', '8030', 'register', sys.argv)
        rawretdata = get_active_service('192.168.0.24', '8030', 'getinstance')
        retdata = json.loads(rawretdata)
        self._totalservicelist = retdata['data']
        if len(retdata['data']) != 1:
            tempnode = retdata['data'][0]
            rawtotaldict = get_active_service(tempnode['host'], tempnode['port'], 'othersget')
            totaldict = json.loads(rawtotaldict)['data']
            self._citydict = totaldict['citydict']
            self._spellcitydict = totaldict['spellcitydict']

        self.set_auto_update(1)

        return {'result' : '0'}
Example #25
        def get_updated_account_data_for_user_txn(txn):
            sql = (
                "SELECT account_data_type, content FROM account_data"
                " WHERE user_id = ? AND stream_id > ?"
            )

            txn.execute(sql, (user_id, stream_id))

            global_account_data = {
                row[0]: json.loads(row[1]) for row in txn.fetchall()
            }

            sql = (
                "SELECT room_id, account_data_type, content FROM room_account_data"
                " WHERE user_id = ? AND stream_id > ?"
            )

            txn.execute(sql, (user_id, stream_id))

            account_data_by_room = {}
            for row in txn.fetchall():
                room_account_data = account_data_by_room.setdefault(row[0], {})
                room_account_data[row[1]] = json.loads(row[2])

            return (global_account_data, account_data_by_room)
Example #26
def list_of_all_data_in_bioactivities(request):
    """Lists all requested data for filtering bioactivities"""
    exclude_questionable = json.loads(request.GET.get('exclude_questionable'))
    pubchem = json.loads(request.GET.get('pubchem'))
    target_types = json.loads(request.GET.get('target_types'))
    organisms = json.loads(request.GET.get('organisms'))

    desired_target_types = [
        x.get('name') for x in target_types if x.get('is_selected') is True
    ]

    desired_organisms = [
        x.get('name') for x in organisms if x.get('is_selected') is True
    ]

    return JSONResponse(
        generate_list_of_all_data_in_bioactivities(
            exclude_questionable,
            pubchem,
            desired_organisms,
            desired_target_types
        )
    )
def main(j, args, params, tags, tasklet):
    doc = args.doc
    nid = args.getTag('nid')
    nidstr = str(nid)
    rediscl = j.clients.redis.getGeventRedisClient('127.0.0.1', 9999)

    out = list()

    disks = rediscl.hget('healthcheck:monitoring', 'results')
    errors = rediscl.hget('healthcheck:monitoring', 'errors')
    disks = ujson.loads(disks) if disks else dict()
    errors = ujson.loads(errors) if errors else dict()

    out.append('||Disk||Free Space||Status||')
    for type, data in (('error', errors), ('disk', disks)):
        if nidstr in data:
            if 'disks' in data.get(nidstr, dict()):
                ddata = data[nidstr].get('disks', list())
                for diskstat in ddata:
                    if type == 'error':
                        diskstat = diskstat.values()[0]
                    if 'state' not in diskstat:
                        continue
                    state = j.core.grid.healthchecker.getWikiStatus(diskstat.get('state', 'UNKNOWN'))
                    out.append('|%s|%s|%s|' % (diskstat.get('path', ''), diskstat.get('message', ''), state))
                out.append('\n')

    out = '\n'.join(out)

    params.result = (out, doc)
    return params
Example #28
    def loadFromFilesystem(self):
        if os.path.isfile(PROFILE_DIR+'UserProfile/UserProfile.json'):
            # We already have a JSON file. Load the details from the file at the start.
            with open(PROFILE_DIR+'UserProfile/UserProfile.json', 'rb') as f:
                self.__settingsAndProfile = ujson.loads(f.read())

                # Check for old version.
                if 'selectedTopics' in self.__settingsAndProfile:
                    # This is a 1.1.2 JSON file; it needs to be migrated.
                    migrationResult = self.__migrateFrom112to120(self.__settingsAndProfile)
                    with open(PROFILE_DIR+'UserProfile/UserProfile.json', 'wb') as f:
                        f.write(ujson.encode(migrationResult))
                    # The file handle above is write-only and already closed here,
                    # so keep the migrated data directly instead of re-reading it.
                    self.__settingsAndProfile = migrationResult
                else:
                    # The main
                    self.__updateBootStatus()
        else:
            # We don't have a JSON file. This means it's not created yet. Create it.
            with open(PROFILE_DIR+'UserProfile/UserProfile.json', 'wb') as f:
                # Now, time to set some defaults.
                newProfileFile = self.__produceProfileWithDefaults()
                newProfileFile['machineDetails']['listeningPort'] = self.__getRandomOpenPort()
                f.write(ujson.encode(newProfileFile))
            # This is the first load ever.
            with open(PROFILE_DIR+'UserProfile/UserProfile.json', 'rb') as f:
                self.__settingsAndProfile = ujson.loads(f.read())
Example #29
        def thread_main():
            delta = None
            rpc_count = 0
            while True:
                want = self.want_axis_video
                req = {
                    '__type': 'AxisCameraDaemonReq',
                    'traceSpec': {
                        '__type': 'VideoTraceSpec',
                        'traceDir': path.join(remote_traces_dir, self.trace_name),
                        'traceName': self.trace_name,
                        'timeseqName': timeseq_name,
                    },
                    'cameraConfig': {
                        '__type': 'AxisCameraConfig',
                        'ipaddr': ipaddr,
                        'url': '/axis-cgi/mjpg/video.cgi?compression=30&rotation=0&resolution=' + resolution,
                        'authHeader': auth_header,
                    },
                    'txTime': time.time(),
                    'recordFor': 10.0 if want else 0.0,
                }
                rpc_id = 'rpc%d' % rpc_count
                rpc_count += 1
                tx = ['record', rpc_id, ujson.dumps(req, escape_forward_slashes=False)]
                if 1: logger.info('camera %s < %s', daemon_endpoint, tx)
                self.sock.send_multipart(tx, flags=0, copy=True, track=False)

                rx = self.sock.recv_multipart(flags=0, copy=True, track=False)
                rx_time = time.time()
                if 1: logger.info('%s > %s %s', daemon_endpoint, rx[1], rx[2])
                rpc_id2 = rx[0]
                rpc_err = ujson.loads(rx[1])
                rpc_result = ujson.loads(rx[2])
                if rpc_err:
                    logger.info('%s > error %s', daemon_endpoint, rpc_err)
                    break

                min_delta = rpc_result['txTime'] - rx_time
                max_delta = rpc_result['txTime'] - rpc_result['reqTxTime']
                if delta is None:
                    delta = (min_delta + max_delta) * 0.5
                delta = max(min_delta, min(max_delta, delta))
                if 0: logger.info("timing %0.6f %+0.6f %+0.6f min_delta=%+0.6f max_delta=%+0.6f delta=%+0.6f",
                    rpc_result['reqTxTime'], rpc_result['txTime'], rx_time, min_delta, max_delta, delta)

                rep_times = rpc_result['times']
                rep_samples = rpc_result['samples']
                for ts, sample in zip(rep_times, rep_samples):
                    self.add(ts - delta, timeseq_name, sample)
                rep_chunks = rpc_result['chunks']
                for chunk_ts in rep_chunks:
                    chunk_fn = 'chunk_%s_%.0f.video' % (timeseq_name, chunk_ts)
                    chunk_path = path.join(self.trace_dir, chunk_fn)
                    src_file = path.join(local_link_prefix, self.trace_name, chunk_fn)
                    logger.info('Create symlink(%s, %s)  llp=%s rtd=%s', src_file, chunk_path, local_link_prefix, remote_traces_dir)
                    os.symlink(src_file, chunk_path)

                if not want: break
                time.sleep(0.1)
Example #30
def load_results(dir, raw_episodes=False):
    fnames = get_monitor_files(dir)
    if not fnames:
        raise LoadMonitorResultsError("no monitor files of the form *%s found in %s" % (Monitor.EXT, dir))
    episodes = []
    headers = []
    for fname in fnames:
        with open(fname, 'rt') as fh:
            lines = fh.readlines()
        header = json.loads(lines[0])
        headers.append(header)
        for line in lines[1:]:
            episode = json.loads(line)
            episode['abstime'] = header['t_start'] + episode['t']
            del episode['t']
            episodes.append(episode)
    header0 = headers[0]
    for header in headers[1:]:
        assert header['env_id'] == header0['env_id'], "mixing data from two envs"
    episodes = sorted(episodes, key=lambda e: e['abstime'])
    if raw_episodes:
        return episodes
    else:
        return {
            'env_info': {'env_id': header0['env_id'], 'gym_version': header0['gym_version']},
            'episode_end_times': [e['abstime'] for e in episodes],
            'episode_lengths': [e['l'] for e in episodes],
            'episode_rewards': [e['r'] for e in episodes],
            'initial_reset_time': min([min(header['t_start'] for header in headers)])
        }
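A hedged usage sketch for load_results(); the directory is hypothetical and must contain monitor files (matching Monitor.EXT) written by the Monitor wrapper:

results = load_results('/tmp/cartpole-run')
mean_reward = sum(results['episode_rewards']) / len(results['episode_rewards'])
print('episodes:', len(results['episode_lengths']), 'mean reward:', mean_reward)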
Example #31
def import_uploads_local(import_dir: Path, processing_avatars: bool=False,
                         processing_emojis: bool=False) -> None:
    records_filename = os.path.join(import_dir, "records.json")
    with open(records_filename) as records_file:
        records = ujson.loads(records_file.read())

    re_map_foreign_keys_internal(records, 'records', 'realm_id', related_table="realm",
                                 id_field=True)
    if not processing_emojis:
        re_map_foreign_keys_internal(records, 'records', 'user_profile_id',
                                     related_table="user_profile", id_field=True)
    for record in records:
        if processing_avatars:
            # For avatars, we need to rehash the user ID with the
            # new server's avatar salt
            avatar_path = user_avatar_path_from_ids(record['user_profile_id'], record['realm_id'])
            file_path = os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars", avatar_path)
            if record['s3_path'].endswith('.original'):
                file_path += '.original'
            else:
                file_path += '.png'
        elif processing_emojis:
            # For emojis we follow the function 'upload_emoji_image'
            emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(
                realm_id=record['realm_id'],
                emoji_file_name=record['file_name'])
            file_path = os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars", emoji_path)
        else:
            # Should be kept in sync with its equivalent in zerver/lib/uploads in the
            # function 'upload_message_image'
            s3_file_name = "/".join([
                str(record['realm_id']),
                random_name(18),
                sanitize_name(os.path.basename(record['path']))
            ])
            file_path = os.path.join(settings.LOCAL_UPLOADS_DIR, "files", s3_file_name)
            path_maps['attachment_path'][record['path']] = s3_file_name

        orig_file_path = os.path.join(import_dir, record['path'])
        os.makedirs(os.path.dirname(file_path), exist_ok=True)
        shutil.copy(orig_file_path, file_path)

    if processing_avatars:
        # Ensure that we have medium-size avatar images for every
        # avatar.  TODO: This implementation is hacky, both in that it
        # does get_user_profile_by_id for each user, and in that it
        # might be better to require the export to just have these.
        upload_backend = LocalUploadBackend()
        for record in records:
            if record['s3_path'].endswith('.original'):
                user_profile = get_user_profile_by_id(record['user_profile_id'])
                avatar_path = user_avatar_path_from_ids(user_profile.id, record['realm_id'])
                medium_file_path = os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars",
                                                avatar_path) + '-medium.png'
                if os.path.exists(medium_file_path):
                    # We remove the image here primarily to deal with
                    # issues when running the import script multiple
                    # times in development (where one might reuse the
                    # same realm ID from a previous iteration).
                    os.remove(medium_file_path)
                upload_backend.ensure_medium_avatar_image(user_profile=user_profile)
Example #32
 def read(self, uid, collection=COLLECTIONS.resource):
     try:
         return json.loads(self._r.get(self._make_key(collection, uid)))
     except TypeError:
         return None
Example #33
 def get(self, key):
     value = self.r.hget(self.collection, key)
     if value:
         return json.loads(value)
     return None
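A possible write-side counterpart to get() above (a sketch, not part of the original class): serialize with json before storing so the value round-trips through hget.

 def set(self, key, value):
     # Store JSON text in the hash field so get() can deserialize it later.
     self.r.hset(self.collection, key, json.dumps(value))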
Example #34
    def from_influx(cls, row):
        """ Factory to make Listen objects from an influx row
        """
        def convert_comma_seperated_string_to_list(string):
            if not string:
                return []
            return [val for val in string.split(',')]

        t = convert_to_unix_timestamp(row['time'])

        data = {
            'release_msid':
            row.get('release_msid'),
            'release_mbid':
            row.get('release_mbid'),
            'recording_mbid':
            row.get('recording_mbid'),
            'release_group_mbid':
            row.get('release_group_mbid'),
            'artist_mbids':
            convert_comma_seperated_string_to_list(row.get('artist_mbids',
                                                           '')),
            'tags':
            convert_comma_seperated_string_to_list(row.get('tags', '')),
            'work_mbids':
            convert_comma_seperated_string_to_list(row.get('work_mbids', '')),
            'isrc':
            row.get('isrc'),
            'spotify_id':
            row.get('spotify_id'),
            'tracknumber':
            row.get('tracknumber'),
            'track_mbid':
            row.get('track_mbid'),
        }

        # The influx row can contain many fields that are user-generated.
        # We only need to add those fields which have some value in them to additional_info.
        # Also, we need to make sure that we don't add fields like time, user_name etc. into
        # the additional_info.
        for key, value in row.items():
            if key not in data and key not in Listen.TOP_LEVEL_KEYS + Listen.PRIVATE_KEYS and value is not None:
                try:
                    value = ujson.loads(value)
                    data[key] = value
                    continue
                except (ValueError, TypeError):
                    pass

                # there are some lists in the database that were converted to string
                # via str(list) so they can't be loaded via json.
                # Example: "['Blank & Jones']"
                # However, yaml parses them safely and correctly
                try:
                    value = yaml.safe_load(value)
                    data[key] = value
                    continue
                except ValueError:
                    pass

                data[key] = value

        return cls(timestamp=t,
                   user_name=row.get('user_name'),
                   artist_msid=row.get('artist_msid'),
                   recording_msid=row.get('recording_msid'),
                   release_msid=row.get('release_msid'),
                   inserted_timestamp=row.get('inserted_timestamp'),
                   data={
                       'additional_info': data,
                       'artist_name': row.get('artist_name'),
                       'track_name': row.get('track_name'),
                   })
    def test_profile_load_simple(self):
        data = [
            {
                "entity_id": "Q123",
                "mentions": [["dog", 10.0], ["dogg", 7.0], ["animal", 4.0]],
                "title": "Dog",
                "types": {"hyena": ["animal"], "wiki": ["dog"]},
                "relations": [
                    {"relation": "sibling", "object": "Q345"},
                    {"relation": "sibling", "object": "Q567"},
                ],
            },
            {
                "entity_id": "Q345",
                "mentions": [["cat", 10.0], ["catt", 7.0], ["animal", 3.0]],
                "title": "Cat",
                "types": {"hyena": ["animal"], "wiki": ["cat"]},
                "relations": [{"relation": "sibling", "object": "Q123"}],
            },
            # Missing type system
            {
                "entity_id": "Q567",
                "mentions": [["catt", 6.5], ["animal", 3.3]],
                "title": "Catt",
                "types": {"hyena": ["animal", "animall"]},
                "relations": [{"relation": "sibling", "object": "Q123"}],
            },
            # No KG/Types
            {
                "entity_id": "Q789",
                "mentions": [["animal", 12.2]],
                "title": "Dogg",
            },
        ]
        self.write_data(self.profile_file, data)
        gold_qid2title = {"Q123": "Dog", "Q345": "Cat", "Q567": "Catt", "Q789": "Dogg"}
        gold_alias2qids = {
            "dog": [["Q123", 10.0]],
            "dogg": [["Q123", 7.0]],
            "cat": [["Q345", 10.0]],
            "catt": [["Q345", 7.0], ["Q567", 6.5]],
            "animal": [["Q789", 12.2], ["Q123", 4.0], ["Q567", 3.3], ["Q345", 3.0]],
        }
        gold_type_systems = {
            "hyena": {
                "Q123": ["animal"],
                "Q345": ["animal"],
                "Q567": ["animal", "animall"],
                "Q789": [],
            },
            "wiki": {"Q123": ["dog"], "Q345": ["cat"], "Q567": [], "Q789": []},
        }
        gold_qid2relations = {
            "Q123": {"sibling": ["Q345", "Q567"]},
            "Q345": {"sibling": ["Q123"]},
            "Q567": {"sibling": ["Q123"]},
            "Q789": {},
        }
        (
            qid2title,
            alias2qids,
            type_systems,
            qid2relations,
        ) = EntityProfile._read_profile_file(self.profile_file)

        self.assertDictEqual(gold_qid2title, qid2title)
        self.assertDictEqual(gold_alias2qids, alias2qids)
        self.assertDictEqual(gold_type_systems, type_systems)
        self.assertDictEqual(gold_qid2relations, qid2relations)

        # Test loading/saving from jsonl
        ep = EntityProfile.load_from_jsonl(self.profile_file, edit_mode=True)
        ep.save_to_jsonl(self.profile_file)
        read_in_data = [ujson.loads(li) for li in open(self.profile_file)]

        assert len(read_in_data) == len(data)

        for qid_obj in data:
            found_other_obj = None
            for possible_match in read_in_data:
                if qid_obj["entity_id"] == possible_match["entity_id"]:
                    found_other_obj = possible_match
                    break
            assert found_other_obj is not None
            self.assertDictEqual(qid_obj, found_other_obj)
Example #36
 def loads(data_string):
     return ujson.loads(data_string, precise_float=True)
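A sketch of what precise_float changes (behaviour observed with older ujson releases; newer versions may round identically either way): the flag makes ujson fall back to exact strtod-style parsing so the result agrees with Python's own float().

import ujson

text = '0.94999999999999996'
# The fast default parser may round long decimal fractions slightly differently;
# precise_float=True is expected to agree with float(text).
assert ujson.loads(text, precise_float=True) == float(text)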
Example #37
def import_uploads_s3(bucket_name: str, import_dir: Path, processing_avatars: bool=False,
                      processing_emojis: bool=False) -> None:
    upload_backend = S3UploadBackend()
    conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
    bucket = conn.get_bucket(bucket_name, validate=True)

    records_filename = os.path.join(import_dir, "records.json")
    with open(records_filename) as records_file:
        records = ujson.loads(records_file.read())

    re_map_foreign_keys_internal(records, 'records', 'realm_id', related_table="realm",
                                 id_field=True)
    timestamp = datetime_to_timestamp(timezone_now())
    if not processing_emojis:
        re_map_foreign_keys_internal(records, 'records', 'user_profile_id',
                                     related_table="user_profile", id_field=True)
    for record in records:
        key = Key(bucket)

        if processing_avatars:
            # For avatars, we need to rehash the user's email with the
            # new server's avatar salt
            avatar_path = user_avatar_path_from_ids(record['user_profile_id'], record['realm_id'])
            key.key = avatar_path
            if record['s3_path'].endswith('.original'):
                key.key += '.original'
        elif processing_emojis:
            # For emojis we follow the function 'upload_emoji_image'
            emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(
                realm_id=record['realm_id'],
                emoji_file_name=record['file_name'])
            key.key = emoji_path
            record['last_modified'] = timestamp
        else:
            # Should be kept in sync with its equivalent in zerver/lib/uploads in the
            # function 'upload_message_image'
            s3_file_name = "/".join([
                str(record['realm_id']),
                random_name(18),
                sanitize_name(os.path.basename(record['path']))
            ])
            key.key = s3_file_name
            path_maps['attachment_path'][record['s3_path']] = s3_file_name

        # Exported custom emoji from tools like Slack don't have
        # the data for what user uploaded them in `user_profile_id`.
        if not processing_emojis:
            user_profile_id = int(record['user_profile_id'])
            # Support email gateway bot and other cross-realm messages
            if user_profile_id in id_maps["user_profile"]:
                logging.info("Uploaded by ID mapped user: %s!" % (user_profile_id,))
                user_profile_id = id_maps["user_profile"][user_profile_id]
            user_profile = get_user_profile_by_id(user_profile_id)
            key.set_metadata("user_profile_id", str(user_profile.id))

        key.set_metadata("orig_last_modified", record['last_modified'])
        key.set_metadata("realm_id", str(record['realm_id']))

        # Zulip exports will always have a content-type, but third-party exports might not.
        content_type = record.get("content_type", guess_type(record['s3_path'])[0])
        headers = {'Content-Type': content_type}

        key.set_contents_from_filename(os.path.join(import_dir, record['path']), headers=headers)

    if processing_avatars:
        # Ensure that we have medium-size avatar images for every
        # avatar.  TODO: This implementation is hacky, both in that it
        # does get_user_profile_by_id for each user, and in that it
        # might be better to require the export to just have these.
        upload_backend = S3UploadBackend()
        for record in records:
            if record['s3_path'].endswith('.original'):
                user_profile = get_user_profile_by_id(record['user_profile_id'])
                upload_backend.ensure_medium_avatar_image(user_profile=user_profile)
Example #38
    def main(self):
        self.set_header('server', 'GFW')
        terminal_id = self.get_argument('terminal_id', '')
        terminal_id_A = terminal_id + 'A'
        terminal_id_P = terminal_id + 'P'

        redis_pipe = RedisPool().redis_pipe
        # physical_all_info, current, week, month
        redis_result = redis_pipe.get(terminal_id_A).zrange(
            terminal_id,
            -1, -1, withscores=True).zrange(terminal_id_P,
                                            -7,
                                            -1,
                                            withscores=True).zrange(
                                                terminal_id_P,
                                                -30,
                                                -1,
                                                withscores=True).execute()
        logging.info('redis_result: {}'.format(redis_result))

        physical_info_ori = ujson.loads(redis_result[0])
        physical_info = dict()
        for key, value in physical_info_ori.items():
            physical_info[translate_dict[key]] = value
        logging.info('physical_info: {}'.format(physical_info))

        redis_result_current = redis_result[1][0]
        current_info_ori = ujson.loads(redis_result_current[0])
        current_info = dict()
        for key, value in current_info_ori.items():
            current_info[translate_dict[key]] = value
        current_date = str(int(redis_result_current[1]))[:8]
        logging.info('current_date: {}, current_info: {}'.format(
            current_date, current_info))

        redis_result_weekly = redis_result[2]
        weekly_date = [str(int(i[1]))[:8] for i in redis_result_weekly]
        weekly_list = [ujson.loads(i[0]) for i in redis_result_weekly]
        #weekly_info = dict(date=weekly_date,)
        weekly_info = dict()
        for j in weekly_list[0].keys():
            result = []
            for i in weekly_list:
                result.append(str(i[j]).replace('%', ''))
            weekly_info[translate_dict[j]] = result
        logging.info('weekly_info: {}'.format(weekly_info))

        redis_result_monthly = redis_result[3]
        monthly_date = [str(int(i[1]))[:8] for i in redis_result_monthly]
        monthly_list = [ujson.loads(i[0]) for i in redis_result_monthly]
        #monthly_info = dict(date=monthly_date,)
        monthly_info = dict()
        for j in monthly_list[0].keys():
            result = []
            for i in monthly_list:
                result.append(str(i[j]).replace('%', ''))
            monthly_info[translate_dict[j]] = result
        logging.info('monthly_info: {}'.format(monthly_info))

        result_dict = dict(
            physical_info=physical_info,
            current_info=current_info,
            weekly_info=weekly_info,
            weekly_date=weekly_date,
            monthly_info=monthly_info,
            monthly_date=monthly_date,
        )
        logging.info('result_dict: {}'.format(result_dict))
        '''
		result_json = ujson.dumps(result_dict)
		#self.write(result_json)
		'''
        self.render('index.html', **result_dict)
Example #39
    def on_message(self, msg_raw: str) -> None:
        log_data = dict(extra='[transport=%s' %
                        (self.session.transport_name, ))
        record_request_start_data(log_data)
        msg = ujson.loads(msg_raw)

        if self.did_close:
            logger.info(
                "Received message on already closed socket! transport=%s user=%s client_id=%s"
                % (self.session.transport_name, self.session.user_profile.email
                   if self.session.user_profile is not None else 'unknown',
                   self.client_id))

        self.session.send_message({'req_id': msg['req_id'], 'type': 'ack'})

        if msg['type'] == 'auth':
            log_data['extra'] += ']'
            try:
                self.authenticate_client(msg)
                # TODO: Fill in the correct client
                write_log_line(log_data,
                               path='/socket/auth',
                               method='SOCKET',
                               remote_ip=self.session.conn_info.ip,
                               email=self.session.user_profile.email,
                               client_name='?')
            except JsonableError as e:
                response = e.to_json()
                self.session.send_message({
                    'req_id': msg['req_id'],
                    'type': 'response',
                    'response': response
                })
                write_log_line(log_data,
                               path='/socket/auth',
                               method='SOCKET',
                               remote_ip=self.session.conn_info.ip,
                               email='unknown',
                               client_name='?',
                               status_code=403,
                               error_content=ujson.dumps(response))
            return
        else:
            if not self.authenticated:
                response = {'result': 'error', 'msg': "Not yet authenticated"}
                self.session.send_message({
                    'req_id': msg['req_id'],
                    'type': 'response',
                    'response': response
                })
                write_log_line(log_data,
                               path='/socket/service_request',
                               method='SOCKET',
                               remote_ip=self.session.conn_info.ip,
                               email='unknown',
                               client_name='?',
                               status_code=403,
                               error_content=ujson.dumps(response))
                return

        redis_key = req_redis_key(msg['req_id'])
        with redis_client.pipeline() as pipeline:
            pipeline.hmset(redis_key, {'status': 'received'})
            pipeline.expire(redis_key, 60 * 60 * 24)
            pipeline.execute()

        record_request_stop_data(log_data)
        queue_json_publish(
            "message_sender",
            dict(request=msg['request'],
                 req_id=msg['req_id'],
                 server_meta=dict(user_id=self.session.user_profile.id,
                                  client_id=self.client_id,
                                  return_queue="tornado_return",
                                  log_data=log_data,
                                  request_environ=dict(
                                      REMOTE_ADDR=self.session.conn_info.ip))),
            lambda x: None,
            call_consume_in_tests=True)
Example #40
# FIXME: create string-based tokens
import ujson as json

import redis

from redis.exceptions import ResponseError
from rediscluster import RedisCluster

import re

import config
rc_list = json.loads(config.config(section='rediscluster')['rediscluster'])

rediscluster_client = RedisCluster(startup_nodes=rc_list,
                                   decode_responses=True)
redis_client = redis.Redis(host=config.config()['host'],
                           port=config.config()['port'],
                           charset="utf-8",
                           decode_responses=True)

import ahocorasick
import joblib
import itertools
from spacy.lang.en.stop_words import STOP_WORDS
from string import punctuation
from common.utils import *


def loadAutomata():
    from urllib.request import urlopen
    import ahocorasick
Example #41
def recv_ujson(socket, flags=0):
    """Load object from ujson serialization"""
    z = socket.recv(flags)
    m = zlib.decompress(z)
    return ujson.loads(m)
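A matching send-side sketch (assuming the same zlib and ujson imports used by recv_ujson above):

def send_ujson(socket, obj, flags=0):
    """Serialize object with ujson, compress with zlib, and send it"""
    m = ujson.dumps(obj)
    z = zlib.compress(m.encode('utf-8'))
    return socket.send(z, flags)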
Example #42
 def Loads(self, string_obj):
     return ujson.loads(string_obj)
Example #43
 def readJsonFile(p):
     if not os.path.exists(p):
         return Status.FAILURE
     with open(p, 'r', encoding='utf-8') as f:
         return json.loads(f.read())
Example #44
    def authenticate_client(self, msg: Dict[str, Any]) -> None:
        if self.authenticated:
            self.session.send_message({
                'req_id': msg['req_id'],
                'type': 'response',
                'response': {
                    'result': 'error',
                    'msg': 'Already authenticated'
                }
            })
            return

        user_profile = get_user_profile(self.browser_session_id)
        if user_profile is None:
            raise JsonableError(_('Unknown or missing session'))
        self.session.user_profile = user_profile

        if 'csrf_token' not in msg['request']:
            # Debugging code to help with understanding #6961
            logging.error("Invalid websockets auth request: %s" %
                          (msg['request'], ))
            raise JsonableError(_('CSRF token entry missing from request'))
        if not _compare_salted_tokens(msg['request']['csrf_token'],
                                      self.csrf_token):
            raise JsonableError(_('CSRF token does not match that in cookie'))

        if 'queue_id' not in msg['request']:
            raise JsonableError(_("Missing 'queue_id' argument"))

        queue_id = msg['request']['queue_id']
        client = get_client_descriptor(queue_id)
        if client is None:
            raise BadEventQueueIdError(queue_id)

        if user_profile.id != client.user_profile_id:
            raise JsonableError(
                _("You are not the owner of the queue with id '%s'") %
                (queue_id, ))

        self.authenticated = True
        register_connection(queue_id, self)

        response = {
            'req_id': msg['req_id'],
            'type': 'response',
            'response': {
                'result': 'success',
                'msg': ''
            }
        }

        status_inquiries = msg['request'].get('status_inquiries')
        if status_inquiries is not None:
            results = {}  # type: Dict[str, Dict[str, str]]
            for inquiry in status_inquiries:
                status = redis_client.hgetall(
                    req_redis_key(inquiry))  # type: Dict[bytes, bytes]
                if len(status) == 0:
                    result = {'status': 'not_received'}
                elif b'response' not in status:
                    result = {'status': status[b'status'].decode('utf-8')}
                else:
                    result = {
                        'status': status[b'status'].decode('utf-8'),
                        'response': ujson.loads(status[b'response'])
                    }
                results[str(inquiry)] = result
            response['response']['status_inquiries'] = results

        self.session.send_message(response)
        ioloop = tornado.ioloop.IOLoop.instance()
        ioloop.remove_timeout(self.timeout_handle)
Example #45
    async def get(self, campaign_id, page_num, *args, **kwargs):

        api = BaseApi(self.current_user.access_token)

        param = {
            'pageNo': page_num,
            'pageSize': 20,
            'needDetail': 'false',
            'needFreight': 'false'
        }

        # Fetch one page of the seller's product list
        try:
            rp = await api.send_request(
                api_url='1/com.alibaba.product/alibaba.product.list.get',
                param=param,
                timestamp=False)
        except HTTPClientError as e:
            await self.write_log(str(self.current_user.access_token),
                                 str(param),
                                 str(e.response.body.decode('utf8')),
                                 '获取商家商品列表失败',  # "failed to fetch the seller's product list"
                                 filename='get_offer')
            self.set_status(404)
            return await self.finish(e.response.body.decode('utf8'))

        else:
            # Parse the response data
            rp = loads(rp)

            # If the result list is empty, there is nothing more to do
            if not rp['result']['pageResult']['resultList']:
                return await self.finish({'result': [], 'msg': '用户供应商品列表为空'})  # msg: "the user's product list is empty"

            # Collect every product id from this page
            list_1 = [
                result['productID']
                for result in rp['result']['pageResult']['resultList']
            ]  # list of product ids

        # =======================================================================================================

        # list of product ids collected from the campaign's ad groups
        list_2 = []

        # Fetch all ad groups under the given campaign
        param = {
            'campaignId': campaign_id,
            'pageNo': 1,
            'pageSize': 200,
        }

        num = 1  # cap on the number of paging requests
        while num <= 5:
            try:
                resp = await api.send_request(
                    api_url=
                    '1/com.alibaba.p4p/alibaba.cnp4p.adgroup.bycampaignids.list',
                    param=param)
            except HTTPClientError as e:
                await self.write_log(str(self.current_user.access_token),
                                     str(param),
                                     str(e.response.body.decode('utf8')),
                                     '获取推广计划下所有单元失败',  # "failed to fetch the campaign's ad groups"
                                     filename='get_adgroup')
                self.set_status(404)
                return await self.finish(e.response.body.decode('utf8'))
            else:
                # Parse the response data
                resp = loads(resp)

                # No results on the first (or a later) request: exit the loop
                if not resp['adgroups']:
                    break

                # Collect every product id from the returned ad groups
                list_2.extend(
                    result['offerId'] for result in resp['adgroups']
                )

                # Total number of ad groups
                total_num = resp['totalRow']

                # With 200 or more ad groups in total, value is >= 1.0;
                # with fewer than 200, value is < 1.0
                value = total_num / 200

                # A single request is enough when there are at most 200 rows
                if value <= 1:
                    break

                param['pageNo'] += 1
                num += 1

        # The campaign has no ad groups at all, so just return every product
        if not list_2:
            return await self.finish({'result': rp, 'repeat_offer': []})

        result = list(set(list_1) & set(list_2))

        # None of the products on this page are in an active ad group
        if not result:
            return await self.finish({'result': rp, 'repeat_offer': []})

        # Some products on this page are already in active ad groups
        await self.finish({'result': rp, 'repeat_offer': result})
Example #46
import ujson

def dummy_func(ctx, data=None):
    # Fall back to a default payload when no request body is supplied
    body = ujson.loads(data) if data else {"name": "World"}
    return "Hello {0}".format(body.get("name"))
Example #47
# QRCode Example
#
# Reads a QR code and then uses ujson to convert the JSON payload into a Python object

import sensor, image, ujson

sensor.reset()
sensor.set_pixformat(sensor.RGB565)
sensor.set_framesize(sensor.QVGA)
sensor.set_vflip(True)
sensor.set_hmirror(True)
sensor.skip_frames(time=2000)
sensor.set_auto_gain(False)  # must turn this off to prevent image washout...

while (True):
    img = sensor.snapshot()
    img.lens_corr(1.8)  # strength of 1.8 is good for the 2.8mm lens.
    for code in img.find_qrcodes():
        img.draw_rectangle(code.rect(), color=(255, 0, 0))
        obj = ujson.loads(str(code.payload()))
        print(obj)
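        # For reference (hypothetical payload): a QR code containing
        # '{"id": 7, "room": "lab"}' would print {'id': 7, 'room': 'lab'} here.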
Example #48
def api_pivotal_webhook_v5(request, user_profile, stream):
    # type: (HttpRequest, UserProfile, Text) -> Tuple[Text, Text]
    payload = ujson.loads(request.body)

    event_type = payload["kind"]

    project_name = payload["project"]["name"]
    project_id = payload["project"]["id"]

    primary_resources = payload["primary_resources"][0]
    story_url = primary_resources["url"]
    story_type = primary_resources["story_type"]
    story_id = primary_resources["id"]
    story_name = primary_resources["name"]

    performed_by = payload.get("performed_by", {}).get("name", "")

    story_info = "[%s](https://www.pivotaltracker.com/s/projects/%s): [%s](%s)" % (
        project_name, project_id, story_name, story_url)

    changes = payload.get("changes", [])

    content = ""
    subject = "#%s: %s" % (story_id, story_name)

    def extract_comment(change):
        # type: (Dict[str, Dict]) -> Optional[Text]
        if change.get("kind") == "comment":
            return change.get("new_values", {}).get("text", None)
        return None

    if event_type == "story_update_activity":
        # Find the changed values and build a message
        content += "%s updated %s:\n" % (performed_by, story_info)
        for change in changes:
            old_values = change.get("original_values", {})
            new_values = change["new_values"]

            if "current_state" in old_values and "current_state" in new_values:
                content += "* state changed from **%s** to **%s**\n" % (
                    old_values["current_state"], new_values["current_state"])
            if "estimate" in old_values and "estimate" in new_values:
                old_estimate = old_values.get("estimate", None)
                if old_estimate is None:
                    estimate = "is now"
                else:
                    estimate = "changed from %s to" % (old_estimate, )
                new_estimate = new_values["estimate"] if new_values[
                    "estimate"] is not None else "0"
                content += "* estimate %s **%s points**\n" % (estimate,
                                                              new_estimate)
            if "story_type" in old_values and "story_type" in new_values:
                content += "* type changed from **%s** to **%s**\n" % (
                    old_values["story_type"], new_values["story_type"])

            comment = extract_comment(change)
            if comment is not None:
                content += "* Comment added:\n~~~quote\n%s\n~~~\n" % (
                    comment, )

    elif event_type == "comment_create_activity":
        for change in changes:
            comment = extract_comment(change)
            if comment is not None:
                content += "%s added a comment to %s:\n~~~quote\n%s\n~~~" % (
                    performed_by, story_info, comment)
    elif event_type == "story_create_activity":
        content += "%s created %s: %s\n" % (performed_by, story_type,
                                            story_info)
        for change in changes:
            new_values = change.get("new_values", {})
            if "current_state" in new_values:
                content += "* State is **%s**\n" % (
                    new_values["current_state"], )
            if "description" in new_values:
                content += "* Description is\n\n> %s" % (
                    new_values["description"], )
    elif event_type == "story_move_activity":
        content = "%s moved %s" % (performed_by, story_info)
        for change in changes:
            old_values = change.get("original_values", {})
            new_values = change["new_values"]
            if "current_state" in old_values and "current_state" in new_values:
                content += " from **%s** to **%s**" % (
                    old_values["current_state"], new_values["current_state"])
    elif event_type in [
            "task_create_activity", "comment_delete_activity",
            "task_delete_activity", "task_update_activity",
            "story_move_from_project_activity", "story_delete_activity",
            "story_move_into_project_activity"
    ]:
        # Known but unsupported Pivotal event types
        pass
    else:
        logging.warning("Unknown Pivotal event type: %s" % (event_type, ))

    return subject, content
Example #49
    def run(self):
        # build monitored prefix tree
        prefix_tree = {"v4": pytricia.PyTricia(32), "v6": pytricia.PyTricia(128)}
        for prefix in self.prefixes:
            ip_version = get_ip_version(prefix)
            prefix_tree[ip_version].insert(prefix, "")

        # start producing
        validator = MformatValidator()
        with Producer(self.connection) as producer:
            for csv_file in glob.glob("{}/*.csv".format(self.input_dir)):
                if not self.shared_memory_manager_dict["data_worker_should_run"]:
                    break

                try:
                    with open(csv_file, "r") as f:
                        csv_reader = csv.reader(f, delimiter="|")
                        for row in csv_reader:
                            if not self.shared_memory_manager_dict[
                                "data_worker_should_run"
                            ]:
                                break

                            try:
                                if len(row) != 9:
                                    continue
                                if row[0].startswith("#"):
                                    continue
                                # example row: 139.91.0.0/16|8522|1403|1403 6461 2603 21320
                                # 5408
                                # 8522|routeviews|route-views2|A|"[{""asn"":1403,""value"":6461}]"|1517446677
                                this_prefix = row[0]
                                if row[6] == "A":
                                    as_path = row[3].split(" ")
                                    communities = json.loads(row[7])
                                else:
                                    as_path = []
                                    communities = []
                                service = "historical|{}|{}".format(row[4], row[5])
                                type_ = row[6]
                                timestamp = float(row[8])
                                peer_asn = int(row[2])
                                ip_version = get_ip_version(this_prefix)
                                if this_prefix in prefix_tree[ip_version]:
                                    msg = {
                                        "type": type_,
                                        "timestamp": timestamp,
                                        "path": as_path,
                                        "service": service,
                                        "communities": communities,
                                        "prefix": this_prefix,
                                        "peer_asn": peer_asn,
                                    }
                                    try:
                                        if validator.validate(msg):
                                            msgs = normalize_msg_path(msg)
                                            for msg in msgs:
                                                key_generator(msg)
                                                log.debug(msg)
                                                producer.publish(
                                                    msg,
                                                    exchange=self.update_exchange,
                                                    routing_key="update",
                                                    serializer="ujson",
                                                )
                                                time.sleep(0.01)
                                        else:
                                            log.warning(
                                                "Invalid format message: {}".format(msg)
                                            )
                                    except BaseException:
                                        log.exception(
                                            "Error when normalizing BGP message: {}".format(
                                                msg
                                            )
                                        )
                            except Exception:
                                log.exception("row")
                except Exception:
                    log.exception("exception")

        # run until instructed to stop
        while True:
            if not self.shared_memory_manager_dict["data_worker_should_run"]:
                break
            time.sleep(1)
Example #50
# config = util.initialize_from_env()
# model = cm.CorefModel(config)
# with tf.Session() as session:
#     model.restore(session)
#     model.evaluate(session, official_stdout=True)
# all_count = dict()
# all_count['NP'] = 0
# for pronoun_type in all_pronouns_by_type:
#     all_count[pronoun_type] = 0
if __name__ == "__main__":
    os.environ["CUDA_VISIBLE_DEVICES"] = "2"
    test_data = list()
    print('Start to process data...')
    with open('test.english.jsonlines', 'r') as f:
        for line in f:
            tmp_example = json.loads(line)
            all_sentence = list()
            for s in tmp_example['sentences']:
                all_sentence += s
            all_clusters = list()
            for c in tmp_example['clusters']:
                tmp_c = list()
                for w in c:
                    tmp_w = list()
                    for token in all_sentence[w[0]:w[1] + 1]:
                        tmp_w.append(token)
                    tmp_c.append((w, tmp_w))
                all_clusters.append(tmp_c)
            tmp_all_NPs = list()
            Pronoun_dict = dict()
            for pronoun_type in interested_pronouns:
Example #51
def get_basket_table_info(req, resp, basket_data, users_id):
    # basket_data dict:
    # - key is json string
    #   {"id_sale": **, "id_shop": **, "id_attr": **, "id_variant": **, "id_price_type": **}
    # - value is quantity

    from common.data_access import data_access
    all_sales = data_access(REMOTE_API_NAME.GET_SALES, req, resp)
    user_country_code, user_province_code = \
            user_country_province(req, resp, users_id)
    is_business_account = user_is_business_account(req, resp, users_id)
    basket = []
    for item, quantity in basket_data.iteritems():
        try:
            item_info = ujson.loads(item)
        except:
            continue
        id_sale = str(item_info['id_sale'])
        id_shop = str(item_info['id_shop'])
        if id_sale not in all_sales:
            continue

        sale_info = all_sales[id_sale]
        _type = sale_info.get('type', {})

        variant = get_valid_attr(
                        sale_info.get('variant'),
                        item_info.get('id_variant'))
        type = get_valid_attr(
                        sale_info.get('type', {}).get('attribute'),
                        item_info.get('id_attr'))

        id_variant = variant.get('@id')
        id_type = type.get('@id')

        external_id = ''
        external_list = sale_info.get('external') or []
        if external_list and not isinstance(external_list, list):
            external_list = [external_list]
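        # Pick the external entry whose @variant/@attribute ids match the
        # selected ones; a missing id on our side matches any entry.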
        for external in external_list:
            if ((id_variant and id_variant == external.get('@variant') or not id_variant) and
                (id_type and id_type == external.get('@attribute') or not id_type)):
                external_id = external.get('#text')
                break

        one = {
            'id_sale': id_sale,
            'item': item,
            'quantity': quantity,
            'variant': variant,
            'type': type,
            'product': get_brief_product(sale_info, req, resp,
                calc_price=False,
                is_business_account=is_business_account),
            'link': get_url_format(FRT_ROUTE_ROLE.PRDT_INFO) % {
                'id_type': _type.get('@id', 0),
                'type_name': get_normalized_name(FRT_ROUTE_ROLE.PRDT_INFO,
                                                 'type_name',
                                                 _type.get('name', '')),
                'id_sale': id_sale,
                'sale_name': get_normalized_name(FRT_ROUTE_ROLE.PRDT_INFO,
                                                 'sale_name',
                                                 sale_info.get('name', '')),
            },
            'external_id': external_id
        }
        price = get_product_default_display_price(sale_info, one['type'])[1]
        if one['variant']:
            price = cal_price_with_premium(one['variant'], price)
        one['price'] = price

        if int(id_shop):
            country_code, province_code = _get_shop_addr(sale_info, id_shop)
        else:
            country_code, province_code = _get_brand_addr(sale_info)
        _cate_id = sale_info.get('category', {}).get('@id', 0)
        tax_info = get_category_tax_info(req, resp,
                country_code, province_code,
                user_country_code, user_province_code,
                _cate_id, is_business_account)
        if calc_before_tax_price(req, resp):
            one['price_with_tax_calc'] = price / (1 + tax_info['rate'] / 100.0)
        else:
            one['price_with_tax_calc'] = price * (1 + tax_info['rate'] / 100.0)
        one['tax'] = one['price_with_tax_calc'] - price
        one['show_final_price'] = tax_info['show_final_price']
        one['out_of_stock'] = _out_of_stock(sale_info,
                                id_variant, id_type, id_shop, quantity)

        basket.append(one)
    return basket
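
# Hypothetical input illustrating the documented key format (keys are JSON
# strings describing the sale, values are quantities); the ids below are made
# up, and req/resp/users_id come from the caller.
example_basket_data = {
    ujson.dumps({"id_sale": 1001, "id_shop": 0, "id_attr": None,
                 "id_variant": 0, "id_price_type": 0}): 2,
}
# basket = get_basket_table_info(req, resp, example_basket_data, users_id)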
Example #52
def fetch_initial_state_data(user_profile,
                             event_types,
                             queue_id,
                             include_subscribers=True):
    # type: (UserProfile, Optional[Iterable[str]], str, bool) -> Dict[str, Any]
    state = {'queue_id': queue_id}  # type: Dict[str, Any]

    if event_types is None:
        want = lambda msg_type: True
    else:
        want = set(event_types).__contains__
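    # `want` is either an accept-everything lambda or the requested set's
    # __contains__ method, so e.g. want('realm') is True only when 'realm'
    # was asked for.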

    if want('alert_words'):
        state['alert_words'] = user_alert_words(user_profile)

    if want('custom_profile_fields'):
        fields = custom_profile_fields_for_realm(user_profile.realm.id)
        state['custom_profile_fields'] = [f.as_dict() for f in fields]

    if want('attachments'):
        state['attachments'] = user_attachments(user_profile)

    if want('hotspots'):
        state['hotspots'] = get_next_hotspots(user_profile)

    if want('message'):
        # The client should use get_messages() to fetch messages
        # starting with the max_message_id.  They will get messages
        # newer than that ID via get_events()
        messages = Message.objects.filter(
            usermessage__user_profile=user_profile).order_by('-id')[:1]
        if messages:
            state['max_message_id'] = messages[0].id
        else:
            state['max_message_id'] = -1

    if want('muted_topics'):
        state['muted_topics'] = ujson.loads(user_profile.muted_topics)

    if want('pointer'):
        state['pointer'] = user_profile.pointer

    if want('presence'):
        state['presences'] = get_status_dict(user_profile)

    if want('realm'):
        for property_name in Realm.property_types:
            state['realm_' + property_name] = getattr(user_profile.realm,
                                                      property_name)

        # Most state is handled via the property_types framework;
        # these manual entries are for those realm settings that don't
        # fit into that framework.
        state[
            'realm_authentication_methods'] = user_profile.realm.authentication_methods_dict(
            )
        state[
            'realm_allow_message_editing'] = user_profile.realm.allow_message_editing
        state[
            'realm_message_content_edit_limit_seconds'] = user_profile.realm.message_content_edit_limit_seconds
        state['realm_icon_url'] = realm_icon_url(user_profile.realm)
        state['realm_icon_source'] = user_profile.realm.icon_source
        state['max_icon_file_size'] = settings.MAX_ICON_FILE_SIZE
        state['realm_bot_domain'] = user_profile.realm.get_bot_domain()
        state['realm_uri'] = user_profile.realm.uri
        state['realm_presence_disabled'] = user_profile.realm.presence_disabled
        state['realm_show_digest_email'] = user_profile.realm.show_digest_email
        state[
            'realm_is_zephyr_mirror_realm'] = user_profile.realm.is_zephyr_mirror_realm
        state['realm_password_auth_enabled'] = password_auth_enabled(
            user_profile.realm)
        if user_profile.realm.notifications_stream and not user_profile.realm.notifications_stream.deactivated:
            notifications_stream = user_profile.realm.notifications_stream
            state['realm_notifications_stream_id'] = notifications_stream.id
        else:
            state['realm_notifications_stream_id'] = -1

    if want('realm_domains'):
        state['realm_domains'] = get_realm_domains(user_profile.realm)

    if want('realm_emoji'):
        state['realm_emoji'] = user_profile.realm.get_emoji()

    if want('realm_filters'):
        state['realm_filters'] = realm_filters_for_realm(user_profile.realm_id)

    if want('realm_user'):
        state['realm_users'] = get_realm_user_dicts(user_profile)
        state['avatar_source'] = user_profile.avatar_source
        state['avatar_url_medium'] = avatar_url(user_profile, medium=True)
        state['avatar_url'] = avatar_url(user_profile)
        state['can_create_streams'] = user_profile.can_create_streams()
        state['cross_realm_bots'] = list(get_cross_realm_dicts())
        state['is_admin'] = user_profile.is_realm_admin
        state['user_id'] = user_profile.id
        state['enter_sends'] = user_profile.enter_sends
        state['email'] = user_profile.email
        state['full_name'] = user_profile.full_name

    if want('realm_bot'):
        state['realm_bots'] = get_owned_bot_dicts(user_profile)

    if want('subscription'):
        subscriptions, unsubscribed, never_subscribed = gather_subscriptions_helper(
            user_profile, include_subscribers=include_subscribers)
        state['subscriptions'] = subscriptions
        state['unsubscribed'] = unsubscribed
        state['never_subscribed'] = never_subscribed

    if want('update_message_flags') and want('message'):
        # Keeping unread_msgs updated requires both message flag updates and
        # message updates. This is due to the fact that new messages will not
        # generate a flag update so we need to use the flags field in the
        # message event.
        state['unread_msgs'] = get_unread_message_ids_per_recipient(
            user_profile)

    if want('stream'):
        state['streams'] = do_get_streams(user_profile)
    if want('default_streams'):
        state['realm_default_streams'] = streams_to_dicts_sorted(
            get_default_streams_for_realm(user_profile.realm))

    if want('update_display_settings'):
        for prop in UserProfile.property_types:
            state[prop] = getattr(user_profile, prop)
        state['emojiset_choices'] = user_profile.emojiset_choices()
        state['autoscroll_forever'] = user_profile.autoscroll_forever

    if want('update_global_notifications'):
        for notification in UserProfile.notification_setting_types:
            state[notification] = getattr(user_profile, notification)
        state[
            'default_desktop_notifications'] = user_profile.default_desktop_notifications

    if want('zulip_version'):
        state['zulip_version'] = ZULIP_VERSION

    return state
Example #53
    def build_message_dict(
            message,
            message_id,
            last_edit_time,
            edit_history,
            content,
            subject,
            pub_date,
            rendered_content,
            rendered_content_version,
            sender_id,
            sender_realm_id,
            sending_client_name,
            recipient_id,
            recipient_type,
            recipient_type_id,
            reactions
    ):
        # type: (Optional[Message], int, Optional[datetime.datetime], Optional[Text], Text, Text, datetime.datetime, Optional[Text], Optional[int], int, int, Text, int, int, int, List[Dict[str, Any]]) -> Dict[str, Any]

        obj = dict(
            id                = message_id,
            sender_id         = sender_id,
            content           = content,
            recipient_type_id = recipient_type_id,
            recipient_type    = recipient_type,
            recipient_id      = recipient_id,
            subject           = subject,
            timestamp         = datetime_to_timestamp(pub_date),
            client            = sending_client_name)

        obj['sender_realm_id'] = sender_realm_id

        obj['raw_display_recipient'] = get_display_recipient_by_id(
            recipient_id,
            recipient_type,
            recipient_type_id
        )

        obj['subject_links'] = bugdown.subject_links(sender_realm_id, subject)

        if last_edit_time is not None:
            obj['last_edit_timestamp'] = datetime_to_timestamp(last_edit_time)
            assert edit_history is not None
            obj['edit_history'] = ujson.loads(edit_history)

        if Message.need_to_render_content(rendered_content, rendered_content_version, bugdown.version):
            if message is None:
                # We really shouldn't be rendering objects in this method, but there is
                # a scenario where we upgrade the version of bugdown and fail to run
                # management commands to re-render historical messages, and then we
                # need to have side effects.  This method is optimized to not need full
                # blown ORM objects, but the bugdown renderer is unfortunately highly
                # coupled to Message, and we also need to persist the new rendered content.
                # If we don't have a message object passed in, we get one here.  The cost
                # of going to the DB here should be overshadowed by the cost of rendering
                # and updating the row.
                # TODO: see #1379 to eliminate bugdown dependencies
                message = Message.objects.select_related().get(id=message_id)

            assert message is not None  # Hint for mypy.
            # It's unfortunate that we need to have side effects on the message
            # in some cases.
            rendered_content = render_markdown(message, content, realm=message.get_realm())
            message.rendered_content = rendered_content
            message.rendered_content_version = bugdown.version
            message.save_rendered_content()

        if rendered_content is not None:
            obj['rendered_content'] = rendered_content
        else:
            obj['rendered_content'] = ('<p>[Zulip note: Sorry, we could not ' +
                                       'understand the formatting of your message]</p>')

        if rendered_content is not None:
            obj['is_me_message'] = Message.is_status_message(content, rendered_content)
        else:
            obj['is_me_message'] = False

        obj['reactions'] = [ReactionDict.build_dict_from_raw_db_row(reaction)
                            for reaction in reactions]
        return obj
Example #54
def get_order_table_info(order_id, order_resp, all_sales=None):
    user_profile = order_resp['user_info']
    user_name = '%s %s %s' % (
                    user_profile.get('title') or '',
                    user_profile.get('first_name') or '',
                    user_profile.get('last_name') or '',
                )
    dest_user_name = order_resp['shipping_dest']['full_name'] or user_name
    dest_addr = ' '.join([
            order_resp['shipping_dest']['address'],
            order_resp['shipping_dest']['postalcode'],
            order_resp['shipping_dest']['city'],
            order_resp['shipping_dest']['province'],
            order_resp['shipping_dest']['country'],
    ])
    order_items = []
    shipments = {}
    order_status = int(order_resp['order_status'])
    order_created = format_epoch_time(order_resp['confirmation_time'])
    for item_id, item_info in order_resp.get('order_items', []):
        id_sale = str(item_info['sale_id'])

        if item_info['id_variant'] == 0:
            product_name = item_info['name']
            variant_name = ''
        else:
            product_name, variant_name = item_info['name'].rsplit('-', 1)
        one = {
            'id_sale': id_sale,
            'quantity': item_info['quantity'],
            'product_name': product_name,
            'variant_name': variant_name,
            'type_name': item_info.get('type_name') or '',
            'price': item_info['price'],
            'picture': item_info['picture'],
            'external_id': item_info['external_id'],
        }

        sale_info = None
        if all_sales and id_sale in all_sales:
            sale_info = all_sales[id_sale]
            _type = sale_info.get('type', {})
            one['link'] = get_url_format(FRT_ROUTE_ROLE.PRDT_INFO) % {
                'id_type': _type.get('@id', 0),
                'type_name': get_normalized_name(FRT_ROUTE_ROLE.PRDT_INFO,
                                                 'type_name',
                                                 _type.get('name', '')),
                'id_sale': id_sale,
                'sale_name': get_normalized_name(FRT_ROUTE_ROLE.PRDT_INFO,
                                                 'sale_name',
                                                 sale_info.get('name', '')),
            }

        one['out_of_stock'] = False  # TODO: get from user server
        order_items.append(one)

        item_invoice_info = {}
        for iv in item_info['invoice_info']:
            iv_item_info = ujson.loads(iv['invoice_item'])
            if iv_item_info:
                taxes = as_list(iv_item_info.get('tax', {}))
                iv['tax'] = sum([float(t['#text']) for t in taxes
                                 if t.get('@to_worldwide') == 'True'
                                    or t.get('@show') == 'True'])
                iv['tax_per_item'] = iv['tax'] / int(iv_item_info['qty'])
                iv['show_final_price'] = len(
                    [t for t in taxes if t.get('@show') == 'True']) > 0
            else:
                iv['tax'] = 0
                iv['tax_per_item'] = 0
                iv['show_final_price'] = False
            item_invoice_info[iv['shipment_id']] = iv

        for _shipment_info in item_info['shipment_info']:
            shipment_id = _shipment_info.get('shipment_id')
            if not shipment_id:
                # something went wrong when the order was created
                continue
            shipping_list = _shipment_info.copy()
            shipping_list.update(item_invoice_info.get(shipment_id))

            shipping_list['item'] = order_items[-1]
            shipping_list['item']['quantity'] = shipping_list['shipping_list_quantity']
            shipping_list['status_name'] = SHIPMENT_STATUS.toReverseDict().get(
                                           int(shipping_list['status']))
            shipping_list['due_within'] = shipping_list['due_within'] or 1
            shipping_list['shipping_within'] = shipping_list['shipping_within'] or 7
            shipping_list['shipping_msg'] = get_shipping_msg(order_status,
                            order_created, shipping_list['shipping_date'],
                            shipping_list['shipping_within'])
            if shipment_id not in shipments:
                shipments[shipment_id] = []
            shipments[shipment_id].append(shipping_list)

    data = {
        'order_id': order_id,
        'order_created': order_created,
        'order_status': order_status,
        'status_name': get_order_status_msg(order_status),
        'user_name': user_name,
        'dest_user_name': dest_user_name,
        'first_name': user_profile.get('first_name') or '',
        'dest_addr': dest_addr,
        'shipments': shipments,
        'order_invoice_url': get_url_format(FRT_ROUTE_ROLE.ORDER_INVOICES)
                             % {'id_order': order_id},
    }
    return data
Example #55
 def json(self):
     import ujson
     return ujson.loads(self.content)
Example #56
def extract_message_dict(message_bytes):
    # type: (binary_type) -> Dict[str, Any]
    return dict_with_str_keys(ujson.loads(zlib.decompress(message_bytes).decode("utf-8")))
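
# A matching inverse is not shown in the original snippet; a hedged sketch
# (names are illustrative) that ujson-serializes and zlib-compresses a message
# dict so extract_message_dict() above can round-trip it:
def pack_message_dict(message_dict):
    # type: (Dict[str, Any]) -> bytes
    return zlib.compress(ujson.dumps(message_dict).encode("utf-8"))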
Example #57
async def get_wallets(request: Request) -> UJSONResponse:
    customer_id = request.path_params["customer_id"]
    wallets = CustomerWallet.filter(customer_id=customer_id)
    response = await OutputCustomerWalletListSchema.from_queryset(wallets)
    response = sort_by_key(loads(response.json()), "modified_at")
    return UJSONResponse({"wallets": response})
Example #58
	def user_ids_by_interest(self):
		# create a dict storing user_ids by interest
		user_ids_by_interest = defaultdict(list)
		for user_id, interest in self.interests:
			user_ids_by_interest[interest].append(user_id)
		return user_ids_by_interest

	def interests_by_user_id(self):
		# create a dict storing interests by user_id
		interests_by_user_id = defaultdict(list)
		for user_id, interest in self.interests:
			interests_by_user_id[user_id].append(interest)
		return interests_by_user_id

	def most_common_interests_with(self, user):
		# find friends with common interests
		return Counter(user_id 
			for interest in self.interests_by_user_id()[user['id']]
			for user_id in self.user_ids_by_interest()[interest] 
			if user_id != user['id'])

	def topic_of_interest(self):
		# show the frequency of interests
		words_and_counts = Counter(word 
			for user_id, interest in self.interests 
			for word in interest.lower().split())
		return words_and_counts

if __name__ == '__main__':
	path = 'data/data.json'
	data = json.loads(open(path).read())
	dsc = DataSciencester(data)
	#dsc.list_friends()
	print(dsc.topic_of_interest())

Example #59
def read_json(url, data=None):
    request = urllib2.Request(url, data, {'Content-Type': 'application/json'})
    response = urllib2.urlopen(request).read()
    return ujson.loads(response)
Example #60
    def to_value(self, value):
        import ujson

        return ujson.loads(value)