Exemplo n.º 1
0
    def test_cannot_add_mandate_events_twice(self):
        """Posting the same mandate event twice must fail with HTTP 500.

        The first POST creates the event; the duplicate POST must raise
        HTTPError.  The original version passed silently when no error was
        raised at all — we now fail explicitly in that case.
        """
        mandate = MandateFactory.create()
        mandate_events_type = MandateEventsTypeFactory.create()
        date = date_to_timestamp(datetime.utcnow().date())

        yield self.anonymous_fetch(
            '/mandate-events/',
            method='POST',
            body=dumps({
                'date': date,
                'mandate_id': mandate.id,
                'mandate_events_type_id': mandate_events_type.id
            })
        )

        try:
            yield self.anonymous_fetch(
                '/mandate-events/',
                method='POST',
                body=dumps({
                    'date': date,
                    'mandate_id': mandate.id,
                    'mandate_events_type_id': mandate_events_type.id
                })
            )
        except HTTPError as e:
            expect(e).not_to_be_null()
            expect(e.code).to_equal(500)
            expect(e.response.reason).to_be_like('Internal Server Error')
        else:
            # Fix: without this, the test passed when the duplicate POST
            # unexpectedly succeeded.
            assert False, 'Expected HTTPError for duplicate mandate event'
Exemplo n.º 2
0
    def test_change_guest_to_admin(self) -> None:
        """A guest must lose guest status before (or while) becoming admin."""
        admin = self.example_user("iago")
        self.login(admin.email)
        guest = self.example_user("polonius")
        self.assertTrue(guest.is_guest)
        self.assertFalse(guest.is_realm_admin)

        # Promoting a guest to admin without revoking guest status must fail.
        params = dict(is_admin=ujson.dumps(True))
        result = self.client_patch('/json/users/{}'.format(guest.id), params)
        self.assert_json_error(result, 'Guests cannot be organization administrators')

        # Revoking guest status and granting admin in one request succeeds.
        guest = self.example_user("polonius")
        self.assertFalse(guest.is_realm_admin)
        params = dict(is_admin=ujson.dumps(True), is_guest=ujson.dumps(False))
        events = []  # type: List[Mapping[str, Any]]
        with tornado_redirected_to_list(events):
            result = self.client_patch('/json/users/{}'.format(guest.id), params)
        self.assert_json_success(result)

        guest = self.example_user("polonius")
        self.assertFalse(guest.is_guest)
        self.assertTrue(guest.is_realm_admin)

        # Two events are emitted: the admin grant, then the guest revoke.
        person = events[0]['event']['person']
        self.assertEqual(person['email'], guest.email)
        self.assertTrue(person['is_admin'])

        person = events[1]['event']['person']
        self.assertEqual(person['email'], guest.email)
        self.assertFalse(person['is_guest'])
Exemplo n.º 3
0
    def term_remote_call(self, method, *args, **kwargs):
        """Forward *method* and *args* to the remote terminal.

        kwargs: content=None, content_type="", content_encoding=""

        Without kwargs the call is sent as a plain JSON text message.  With
        kwargs it is sent as a binary message: a UTF-16 encoded JSON prefix
        (method, args, content metadata) followed by the raw content bytes.
        Exceptions are logged rather than propagated.
        """
        # Fix: this is routine tracing, not an error condition; the original
        # logged every call at ERROR level, polluting the error log.
        logging.info("term_remote_call: %s", method)
        try:
            if not kwargs:
                # Text message
                json_msg = json.dumps([method, args])
                self.term_write(json_msg)
            else:
                # Binary message with UTF-16 JSON prefix
                content = kwargs.get("content")
                assert isinstance(content, bytes), "Content must be of bytes type"

                # "\n\n" terminates the JSON prefix so the receiver can split
                # metadata from the payload.
                json_prefix = (
                    json.dumps(
                        [
                            method,
                            args,
                            {
                                "content_type": kwargs.get("content_type", ""),
                                "content_encoding": kwargs.get("content_encoding", ""),
                                "content_length": len(content),
                            },
                        ]
                    )
                    + "\n\n"
                )
                content_prefix = json_prefix.encode("utf-16")
                self.term_write(content_prefix + content, binary=True)
        except Exception as excp:
            logging.error("term_remote_call: ERROR %s", excp)
Exemplo n.º 4
0
 def test_sending_stream_message_from_electron(self) -> Generator[str, Any, None]:
     """Send a stream message over the SockJS websocket as the Electron
     client and verify the ack/response frame pair."""
     user_profile = self.example_user('hamlet')
     cookies = self._get_cookies(user_profile)
     cookie_header = self.get_cookie_header(cookies)
     queue_events_data = self._get_queue_events_data(user_profile.email)
     ws = yield self.ws_connect('/sockjs/366/v8nw22qe/websocket', cookie_header=cookie_header)
     # Discard the initial server frame before authenticating.
     yield ws.read_message()
     yield self._websocket_auth(ws, queue_events_data, cookies)
     # Request ids are "<queue_id>:<sequence>".
     request_id = ':'.join((queue_events_data['response']['queue_id'], '1'))
     user_message = {
         "req_id": request_id,
         "type": "request",
         "request": {
             "client": "website",
             "type": "stream",
             TOPIC_NAME: "Stream message",
             "stream": "Denmark",
             "private_message_recipient": "",
             "content": "hello",
             "sender_id": user_profile.id,
             "queue_id": queue_events_data['response']['queue_id'],
             "to": ujson.dumps(["Denmark"]),
             "reply_to": self.example_email('hamlet'),
             "local_id": -1,
             "socket_user_agent": "ZulipElectron/1.5.0"
         }
     }
     # The payload is double-encoded: a JSON array of already-JSON-encoded
     # message strings, as the SockJS transport expects.
     user_message_str = ujson.dumps(user_message)
     ws.write_message(ujson.dumps([user_message_str]))
     # Two frames come back: an ack, then the message-sent response.
     ack_resp = yield ws.read_message()
     msg_resp = yield ws.read_message()
     self._check_message_sending(request_id, ack_resp, msg_resp, user_profile, queue_events_data)
     yield self.close(ws)
Exemplo n.º 5
0
    def test_change_regular_member_to_guest(self) -> None:
        """A member can be demoted to guest, but never made guest and admin."""
        admin = self.example_user("iago")
        self.login(admin.email)

        member = self.example_user("hamlet")
        self.assertFalse(member.is_guest)

        # Granting guest and admin together must fail and change nothing.
        params = dict(is_guest=ujson.dumps(True), is_admin=ujson.dumps(True))
        result = self.client_patch('/json/users/{}'.format(member.id), params)
        self.assert_json_error(result, 'Guests cannot be organization administrators')
        self.assertFalse(member.is_guest)
        self.assertFalse(member.is_realm_admin)
        member = self.example_user("hamlet")

        # Demoting to guest alone succeeds and emits an update event.
        params = dict(is_guest=ujson.dumps(True))
        events = []  # type: List[Mapping[str, Any]]
        with tornado_redirected_to_list(events):
            result = self.client_patch('/json/users/{}'.format(member.id), params)
        self.assert_json_success(result)

        member = self.example_user("hamlet")
        self.assertTrue(member.is_guest)
        person = events[0]['event']['person']
        self.assertEqual(person['email'], member.email)
        self.assertTrue(person['is_guest'])
Exemplo n.º 6
0
    def POST(self):
        """Report/refresh the completion state of a master's async task.

        Expects ``master_id``, ``project_id`` and an optional ``task_id``
        form field.  Returns a JSON payload saying whether the task is done;
        404s when the project or master does not exist.
        """
        x = web.input(master_id='', project_id='', task_id='')
        project = models.Project.get(id=x.project_id)
        if not project:
            raise web.notfound()

        master = models.Master.get(id=x.master_id)
        if not master:
            # Fix: the original *returned* the notfound() object instead of
            # raising it, bypassing web.py's HTTP error handling.
            raise web.notfound()

        if x.task_id:
            from celery.result import AsyncResult
            from metapolator.config import celery
            res = AsyncResult(x.task_id, backend=celery.backend)

            if res.ready():
                master.task_completed = True
                web.ctx.orm.commit()
                return ujson.dumps({'done': True})
            else:
                # Still running: record the poll time so stale tasks can be
                # detected later.
                master.task_updated = datetime.datetime.now()
                web.ctx.orm.commit()
                return ujson.dumps({'done': False, 'task_id': x.task_id})

        # No task id supplied: treat the task as complete.
        master.task_completed = True
        web.ctx.orm.commit()
        return ujson.dumps({'done': True})
Exemplo n.º 7
0
    def testEncodeUnicodeBMP(self):
        """ujson and json must agree when encoding supplementary-plane text.

        NOTE(review): despite the name, U+1F42E/U+1F42D lie OUTSIDE the
        Basic Multilingual Plane, so ``s`` is 4 characters on wide Python
        builds but 8 surrogate code units on narrow builds.
        """
        s = u'\U0001f42e\U0001f42e\U0001F42D\U0001F42D' # 🐮🐮🐭🐭
        encoded = ujson.dumps(s)
        encoded_json = json.dumps(s)

        if len(s) == 4:
            # Wide build: each character escapes to a \uXXXX\uXXXX surrogate
            # pair (12 chars), plus the two surrounding quotes.
            self.assertEqual(len(encoded), len(s) * 12 + 2)
        else:
            # Narrow build: each surrogate unit escapes to \uXXXX (6 chars).
            self.assertEqual(len(encoded), len(s) * 6 + 2)

        self.assertEqual(encoded, encoded_json)
        decoded = ujson.loads(encoded)
        self.assertEqual(s, decoded)

        # ujson outputs an UTF-8 encoded str object
        if PY3:
            encoded = ujson.dumps(s, ensure_ascii=False)
        else:
            encoded = ujson.dumps(s, ensure_ascii=False).decode("utf-8")

        # json outputs an unicode object
        encoded_json = json.dumps(s, ensure_ascii=False)
        self.assertEqual(len(encoded), len(s) + 2) # original length + quotes
        self.assertEqual(encoded, encoded_json)
        decoded = ujson.loads(encoded)
        self.assertEqual(s, decoded)
Exemplo n.º 8
0
    def test_replace_payment_source(self, *mocks: Mock) -> None:
        """Replacing the payment card works for valid cards and is fully
        rolled back (card, audit log) for declined ones."""
        user = self.example_user("hamlet")
        self.login(user.email)
        self.upgrade()

        # Replace with a valid card; it becomes the single source on file.
        token = stripe_create_token(card_number='5555555555554444').id
        response = self.client_post("/json/billing/sources/change",
                                    {'stripe_token': ujson.dumps(token)})
        self.assert_json_success(response)
        source_count = 0
        for source in stripe_get_customer(Customer.objects.first().stripe_customer_id).sources:
            self.assertEqual(cast(stripe.Card, source).last4, '4444')
            source_count += 1
        self.assertEqual(source_count, 1)
        log_entry = RealmAuditLog.objects.order_by('-id') \
                                         .values_list('acting_user', 'event_type').first()
        self.assertEqual(log_entry, (user.id, RealmAuditLog.STRIPE_CARD_CHANGED))
        RealmAuditLog.objects.filter(acting_user=user).delete()

        # A declined card fails, is logged, and leaves the previous card
        # (and no new audit entry) in place.
        token = stripe_create_token(card_number='4000000000009987').id
        with patch("corporate.lib.stripe.billing_logger.error") as mock_logger:
            response = self.client_post("/json/billing/sources/change",
                                        {'stripe_token': ujson.dumps(token)})
        mock_logger.assert_called()
        self.assertEqual(ujson.loads(response.content)['error_description'], 'card error')
        self.assert_json_error_contains(response, 'Your card was declined')
        source_count = 0
        for source in stripe_get_customer(Customer.objects.first().stripe_customer_id).sources:
            self.assertEqual(cast(stripe.Card, source).last4, '4444')
            source_count += 1
        self.assertEqual(source_count, 1)
        self.assertFalse(RealmAuditLog.objects.filter(event_type=RealmAuditLog.STRIPE_CARD_CHANGED).exists())
Exemplo n.º 9
0
 def _render_hits(item):
     """Render one ES hit as JSON, prefixing a comma for all but the first."""
     source = item['metrics']['hits']['hits'][0]['_source']
     first = flag['is_first']
     if first:
         flag['is_first'] = False
     return json.dumps(source) if first else ',' + json.dumps(source)
Exemplo n.º 10
0
    def test_delete_field_value(self) -> None:
        """Deleting custom profile field values: unknown ids error, and
        deletion is idempotent."""
        admin = self.example_user("iago")
        self.login(admin.email)
        realm = get_realm("zulip")

        # Unknown field id -> error.
        invalid_field_id = 1234
        result = self.client_delete("/json/users/me/profile_data", {
            'data': ujson.dumps([invalid_field_id])
        })
        self.assert_json_error(result,
                               u'Field id %d not found.' % (invalid_field_id))

        # Give the admin a value for the "Mentor" field.
        field = CustomProfileField.objects.get(name="Mentor", realm=realm)
        data = [{'id': field.id,
                 'value': [self.example_user("aaron").id]}]  # type: List[Dict[str, Union[int, str, List[int]]]]
        do_update_user_custom_profile_data(admin, data)

        stored = CustomProfileFieldValue.objects.get(user_profile=admin, field=field)
        converter = field.FIELD_CONVERTERS[field.field_type]
        self.assertEqual([self.example_user("aaron").id], converter(stored.value))

        # Deleting the value succeeds...
        result = self.client_delete("/json/users/me/profile_data", {
            'data': ujson.dumps([field.id])
        })
        self.assert_json_success(result)

        # ...and deleting it again is a no-op, not an error.
        result = self.client_delete("/json/users/me/profile_data", {
            'data': ujson.dumps([field.id])
        })
        self.assert_json_success(result)
Exemplo n.º 11
0
    def index_reviews(self, reviewed_pages, reviews_count, batch_size):
        """Bulk-index page reviews into Elasticsearch in fixed-size batches."""
        action = {'index': {'_type': 'review'}}

        for start in range(0, reviews_count, batch_size):
            lines = []

            for page in reviewed_pages[start:start + batch_size]:
                doc = self.gen_doc(page.last_review)
                action['index']['_id'] = doc['page_id']
                lines.append(dumps(action))
                lines.append(dumps(doc))

            # The ES bulk API requires a newline after the final line too.
            payload = '\n'.join(lines) + '\n'

            self.syncES.send_request(
                method='POST',
                path_components=[self.index, '_bulk'],
                body=payload,
                encode_body=False
            )

        logging.info('Done!')
Exemplo n.º 12
0
 def make_request(self):
     """Perform the HTTP request, retrying up to ``self.max_retries`` times.

     Returns the handled response on success.  Each failure is recorded in
     ``error`` (written to stderr when ``self.debug`` is set); after the
     retry budget is exhausted a final give-up message is written and the
     coroutine returns None.
     """
     retries = 0
     # Fix: initialise ``error`` up front so the give-up branch below cannot
     # raise UnboundLocalError when max_retries is zero (loop never runs).
     error = dict(
         url=self.url,
         params=self.request_kwargs.get('params'),
         message='Request failed, retrying.',
         retries_left=self.max_retries,
     )
     while retries < self.max_retries:
         try:
             resp = yield from aiohttp.request(self.method, self.url,
                                               **self.request_kwargs)
             return (yield from self._handle_response(resp))
         except Exception as exc:
             retries += 1
             error = dict(
                 url=self.url,
                 params=self.request_kwargs.get('params'),
                 message='Request failed, retrying.',
                 retries_left=self.max_retries-retries,
             )
             if self.debug:
                 error['callback'] = repr(self.callback)
                 error['exception'] = repr(exc)
                 error['traceback'] = traceback.format_exc()
                 sys.stderr.write('{}\n'.format(json.dumps(error)))
         # Back off briefly between attempts.
         yield from asyncio.sleep(1)
     else:
         error['message'] = 'Maximum retries exceeded for url, giving up.'
         sys.stderr.write('{}\n'.format(json.dumps(error)))
         return
Exemplo n.º 13
0
 def common_subscribe_to_streams(self, email, streams, extra_post_data=None, invite_only=False):
     # type: (Text, Iterable[Text], Optional[Dict[str, Any]], bool) -> HttpResponse
     """Subscribe *email* to *streams* via the API and return the response.

     Fix: ``extra_post_data`` previously defaulted to a shared mutable dict
     (the classic mutable-default pitfall); a None sentinel is equivalent
     for all existing callers.
     """
     post_data = {'subscriptions': ujson.dumps([{"name": stream} for stream in streams]),
                  'invite_only': ujson.dumps(invite_only)}
     post_data.update(extra_post_data or {})
     result = self.client_post("/api/v1/users/me/subscriptions", post_data, **self.api_auth(email))
     return result
Exemplo n.º 14
0
 def construct(bucket):
     """Partition headers by their DIRECTION tag and JSON-encode each one.

     Every header dict must carry a 'DIRECTION' key (KeyError otherwise);
     the key is removed before encoding.  Headers with an unrecognised
     direction are skipped and keep their DIRECTION key, exactly like the
     original if/elif chain.  Fix: four duplicated branches collapsed into
     a single dispatch table.
     """
     grouped = {
         'POSITIVE': [],
         'NEUTRAL': [],
         'NEGATIVE': [],
         'TOPIC': [],
     }
     for header in bucket:
         target = grouped.get(header['DIRECTION'])
         if target is None:
             continue  # unknown direction: skip (matches original behavior)
         del header['DIRECTION']
         target.append(ujson.dumps(header))
     return {
             'PositiveHeaders': grouped['POSITIVE'],
             'NeutralHeaders': grouped['NEUTRAL'],
             'NegativeHeaders': grouped['NEGATIVE'],
             'TopicHeaders': grouped['TOPIC']
     }
Exemplo n.º 15
0
 def __call__(self, parsed_json):
     """Yield the values of the configured keys extracted from *parsed_json*.

     Relies on a module-level ``args`` namespace (``distribution``,
     ``ignore_missing``, ``add_unparsed``, ``ignore_surplus``) — presumably
     the parsed command-line options; confirm against the enclosing script.
     Python 2 code (``iteritems``).
     """
     flattened = self._flatten(parsed_json)
     # Normalise each requested key-prefix group into a distribution:
     # divide each value by the group sum (guarding against a zero sum).
     for distribution_set in args.distribution or ():
         flattened_subset = tuple( (k,v) for k,v in flattened.iteritems() if k.startswith(distribution_set) and k != distribution_set )
         sum_subset = sum( v for (_,v) in flattened_subset )
         flattened.update( (k,float(v)/(sum_subset or 1)) for (k,v) in flattened_subset )
     found_keys = set()
     at_least_one_found = False
     for (k,_) in self._keys:
         # The ``dict`` type object appears to act as a sentinel meaning
         # "this key resolved to a container" — confirm in _flatten.
         result = flattened.pop(k, self._missing_marker)
         if (result not in (dict, self._missing_marker)):
             yield result
             at_least_one_found = True
             # Record the key and all of its '/'-ancestor paths as found.
             k = k.split('/')
             while k:
                 found_keys.add('/'.join(k))
                 k = k[0:-1]
         elif result is dict:
             raise ValueError('%s is a container in %s' % (k, json.dumps(parsed_json))) # pylint: disable=E1101
         elif args.ignore_missing or k in self._defaults:
             # Missing but tolerated: emit the default (or empty string).
             yield self._defaults.get(k, "")
             k = k.split('/')
             while k:
                 found_keys.add('/'.join(k))
                 k = k[0:-1]
         else:
             raise ValueError('Cannot extract %s from %s' % (k, json.dumps(parsed_json))) # pylint: disable=E1101
     if args.add_unparsed:
         # Emit the whole document only when nothing was extracted at all.
         yield '' if at_least_one_found else json.dumps(parsed_json)
     if (not args.ignore_surplus):
         # Any leftover truthy, non-container values not covered by a found
         # key are reported as an error.
         flattened = ', '.join( '%s: %s' % (k, str(v)) for k,v in flattened.iteritems() if v and (v is not dict) and k not in found_keys )
         if flattened:
             raise ValueError('Found extra values ' + flattened)
Exemplo n.º 16
0
def return2client(handler, payload):
    """Home brew solution to dump the result back to client's open socket.
    No need to worry about package size or socket behavior as
    tornado handles this for us
    Parameters
    -----------
    handler: tornado.web.RequestHandler
        Request handler for the collection of operation(post/get)
    payload: dict, list
        Information to be sent to the client
    """
    # TODO: Solve precision issue with json precision
    if isinstance(payload, pymongo.cursor.Cursor):
        # Fix: this branch was over-indented by one level in the original.
        docs = []
        for doc in payload:
            # _id is a non-JSON-serializable ObjectId; drop it defensively
            # (fix: del raised KeyError when a document had no _id).
            doc.pop('_id', None)
            docs.append(doc)
        handler.write(ujson.dumps(docs))
    elif isinstance(payload, dict):
        payload.pop('_id', None)
        # NOTE(review): list(payload) serialises only the dict KEYS, not the
        # values — looks suspicious, but preserved as-is since clients may
        # depend on it.
        handler.write(ujson.dumps(list(payload)))
    else:
        # Otherwise assume an iterator of documents; stream a JSON array.
        handler.write('[')
        doc = next(payload)
        while True:
            try:
                doc.pop('_id', None)
                handler.write(ujson.dumps(doc))
                doc = next(payload)
                handler.write(',')
            except StopIteration:
                break
        handler.write(']')
    handler.finish()
Exemplo n.º 17
0
    def test_change_signup_notifications_stream(self) -> None:
        """Admins can disable or change the signup notifications stream;
        invalid ids are rejected."""
        # We need an admin user.
        email = '*****@*****.**'
        self.login(email)

        # -1 disables the signup notifications stream entirely.
        disabled_stream_id = -1
        params = dict(signup_notifications_stream_id = ujson.dumps(disabled_stream_id))
        result = self.client_patch('/json/realm', params)
        self.assert_json_success(result)
        realm = get_realm('zulip')
        self.assertEqual(realm.signup_notifications_stream, None)

        # A valid stream id is accepted and stored.
        new_stream_id = 4
        params = dict(signup_notifications_stream_id = ujson.dumps(new_stream_id))
        result = self.client_patch('/json/realm', params)
        self.assert_json_success(result)
        realm = get_realm('zulip')
        self.assertEqual(realm.signup_notifications_stream.id, new_stream_id)

        # An unknown stream id is rejected and leaves the setting untouched.
        bad_stream_id = 1234
        params = dict(signup_notifications_stream_id = ujson.dumps(bad_stream_id))
        result = self.client_patch('/json/realm', params)
        self.assert_json_error(result, 'Invalid stream id')
        realm = get_realm('zulip')
        self.assertNotEqual(realm.signup_notifications_stream.id, bad_stream_id)
Exemplo n.º 18
0
def collection_update(request, *args, **kwargs):
    """
    Given a PATCH request, update a collection.

    Validates the payload with CollectionForm (400 with the form errors on
    failure), applies any of title/permission/comment that are present, and
    returns a JSON success payload with the collection's id and URL.

    :param request: the incoming PATCH request (``request.data`` is used)
    :param args: unused
    :param kwargs: must contain ``pk``, the collection's primary key
    :return: HttpResponse with a JSON body
    """
    patch_data = request.data
    # Extract form data and validate
    form = CollectionForm(patch_data)
    if not form.is_valid():
        data = json.dumps({"errors": form.errors})
        return HttpResponse(content=data, content_type="application/json", status=status.HTTP_400_BAD_REQUEST)
    # Update the collection; only fields present in the patch are touched.
    collection = Collection.objects.get(id=int(kwargs['pk']))
    if "title" in patch_data:
        collection.title = patch_data["title"]
    if "permission" in patch_data:
        collection.public = patch_data["permission"] == "Public"
    if "comment" in patch_data:
        collection.comment = patch_data["comment"]
    collection.save()
    # Prepare a response
    data = json.dumps({'success': True, 'id': collection.id, 'url': "/collection/{0}".format(collection.id)})
    # Fix: the success response previously used the bogus MIME type "json";
    # use application/json to match the error branch above.
    return HttpResponse(data, content_type="application/json")
Exemplo n.º 19
0
    def test_update_flags(self) -> None:
        """Adding then removing the 'read' flag updates message flags."""
        self.login(self.example_email("hamlet"))

        response = self.client_post("/json/messages/flags",
                                    {"messages": ujson.dumps(self.unread_msg_ids),
                                     "op": "add",
                                     "flag": "read"})
        self.assert_json_success(response)

        # Both unread messages should now carry the 'read' flag.
        marked = 0
        for message in self.get_messages():
            if message['id'] in self.unread_msg_ids:
                self.assertEqual(message['flags'], ['read'])
                marked += 1
        self.assertEqual(marked, 2)

        response = self.client_post("/json/messages/flags",
                                    {"messages": ujson.dumps([self.unread_msg_ids[1]]),
                                     "op": "remove", "flag": "read"})
        self.assert_json_success(response)

        # Only the second message should have lost the flag.
        for message in self.get_messages():
            if message['id'] == self.unread_msg_ids[0]:
                self.assertEqual(message['flags'], ['read'])
            elif message['id'] == self.unread_msg_ids[1]:
                self.assertEqual(message['flags'], [])
Exemplo n.º 20
0
 def process_log_103(self):
     """Handle a type-103 game log event.

     Deletes the per-game Redis key and atomically reads-and-zeroes the
     player's ``bet``/``half`` flags; for each flag whose previous value
     was 1, publishes a corresponding status event to RabbitMQ.
     Python 2 code (``long``).
     """
     time_val = self.json_object['time']
     # Composite game id: uid + desk id + hand number concatenated.
     game_id = long(str(self.uid) + str(self.json_object['params']['desk']['id']) + str(self.json_object['params']['desk']['hand']))
     # One pipelined round-trip: delete the game key, then GETSET both
     # flags to 0 so we see their previous values.
     low_balance_pipe = self.r5.pipeline()
     low_balance_pipe.delete(game_id)
     low_balance_pipe.getset('bet_{0}'.format(self.uid), 0)
     low_balance_pipe.getset('half_{0}'.format(self.uid), 0)
     rem_bets = low_balance_pipe.execute()
     # rem_bets[1]/rem_bets[2] are the pre-reset bet/half flag values.
     if rem_bets[1]:
         if int(rem_bets[1]) == 1:
             self.support_dict['event_type'] = 6
             self.support_dict['uid'] = self.uid
             self.support_dict['timestamp'] = time_val
             self.support_dict['balance'] = self.json_object['accounts']['1']['balance']
             self.support_dict['status'] = 0
             supp_json = ujson.dumps(self.support_dict)
             # send status to rabbitmq exchange
             self.rabbit_publish('log_6_0', supp_json)
     if rem_bets[2]:
         if int(rem_bets[2]) == 1:
             self.support_dict['event_type'] = 4
             self.support_dict['uid'] = self.uid
             self.support_dict['timestamp'] = time_val
             self.support_dict['status'] = 0
             supp_json = ujson.dumps(self.support_dict)
             # send status to rabbitmq exchange
             self.rabbit_publish('log_4_0', supp_json)
Exemplo n.º 21
0
    def test_create_realm_domain(self):
        # type: () -> None
        """Realm domains can be created; empty names and duplicates fail."""
        self.login(self.example_email("iago"))

        # An empty domain is rejected.
        payload = {'domain': ujson.dumps(''),
                   'allow_subdomains': ujson.dumps(True)}
        result = self.client_post("/json/realm/domains", info=payload)
        self.assert_json_error(result, 'Invalid domain: Domain can\'t be empty.')

        # A valid domain is created with the requested subdomain policy.
        payload['domain'] = ujson.dumps('acme.com')
        result = self.client_post("/json/realm/domains", info=payload)
        self.assert_json_success(result)
        realm = get_realm('zulip')
        self.assertTrue(RealmDomain.objects.filter(realm=realm, domain='acme.com',
                                                   allow_subdomains=True).exists())

        # The same domain cannot be added twice to one organization...
        result = self.client_post("/json/realm/domains", info=payload)
        self.assert_json_error(result, 'The domain acme.com is already a part of your organization.')

        # ...but a different realm may register it independently.
        mit_user_profile = self.mit_user("sipbtest")
        self.login(mit_user_profile.email)

        do_change_is_admin(mit_user_profile, True)

        result = self.client_post("/json/realm/domains", info=payload,
                                  HTTP_HOST=mit_user_profile.realm.host)
        self.assert_json_success(result)
Exemplo n.º 22
0
def main(JSONinput):

    query = json.loads(JSONinput)
    # Set up the query.
    p = SQLAPIcall(query)

    # run the query.
    resp = p.execute()

    if query['method'] == 'data' and 'format' in query and query['format'] == 'json':
        try:
            resp = json.loads(resp)
        except:
            resp = dict(status="error", code=500,
                        message="Internal error: server did not return json")

        # Print appropriate HTML headers
        if 'status' in resp and resp['status'] == 'error':
            code = resp['code'] if 'code' in resp else 500
            headers(query['method'], errorcode=code)
        else:
            headers(query['method'])
        print json.dumps(resp)
    else:
        headers(query['method'])
        print resp

    return True
Exemplo n.º 23
0
    def handle_message(self, s, e):
        """Dispatch one multipart ZMQ message to its ``handle_<mtype>`` method.

        Decodes the JSON payload (replying with a failure status if it is
        malformed), then invokes ``handle_<mtype>`` and replies with either
        {"status": "success", "data": ...} or {"status": "failed"}.
        Python 2 code (``basestring``).
        """
        self.logger.debug('message received')
        m = s.recv_multipart()

        id, mtype, token, data = m

        if isinstance(data, basestring):
            try:
                data = json.loads(data)
            except ValueError as e:
                self.logger.error(e)
                self.router.send_multipart(["", json.dumps({"status": "failed" })])
                # Fix: the original fell through and dispatched the handler
                # with the still-encoded (invalid) payload.
                return

        # Fix: getattr() without a default raises AttributeError for unknown
        # message types, which made the else branch below unreachable.
        handler = getattr(self, "handle_" + mtype, None)
        if handler:
            self.logger.debug("mtype: {0}".format(mtype))

            self.logger.debug('running handler: {}'.format(mtype))

            try:
                rv = handler(token, data)
                rv = {"status": "success", "data": rv}
            except Exception as e:
                self.logger.error(e)
                rv = {"status": "failed"}

            rv = json.dumps(rv)
            self.router.send_multipart([id, rv])
        else:
            self.logger.error('message type {0} unknown'.format(mtype))
            self.router.send_multipart([id, '0'])
Exemplo n.º 24
0
    def test_REQ_converter(self):
        """REQ(converter=...) parses the raw value and surfaces its errors."""

        def parse_number_list(raw):
            parsed = ujson.loads(raw)
            if not isinstance(parsed, list):
                raise ValueError('not a list')
            if 13 in parsed:
                raise JsonableError('13 is an unlucky number!')
            return parsed

        @has_request_variables
        def get_total(request, numbers=REQ(converter=parse_number_list)):
            return sum(numbers)

        class Request(object):
            REQUEST = {} # type: Dict[str, str]

        request = Request()

        # Missing variable.
        with self.assertRaises(RequestVariableMissingError):
            get_total(request)

        # ValueError from the converter becomes a conversion error.
        request.REQUEST['numbers'] = 'bad_value'
        with self.assertRaises(RequestVariableConversionError) as ctx:
            get_total(request)
        self.assertEqual(str(ctx.exception), "Bad value for 'numbers': bad_value")

        # JsonableError from the converter propagates unchanged.
        request.REQUEST['numbers'] = ujson.dumps([2, 3, 5, 8, 13, 21])
        with self.assertRaises(JsonableError) as ctx:
            get_total(request)
        self.assertEqual(str(ctx.exception), "13 is an unlucky number!")

        # Valid input is converted and summed.
        request.REQUEST['numbers'] = ujson.dumps([1, 2, 3, 4, 5, 6])
        self.assertEqual(get_total(request), 21)
Exemplo n.º 25
0
    def test_REQ_validator(self):
        """REQ(validator=...) JSON-decodes the value, then validates it."""

        @has_request_variables
        def get_total(request, numbers=REQ(validator=check_list(check_int))):
            return sum(numbers)

        class Request(object):
            REQUEST = {} # type: Dict[str, str]

        request = Request()

        # Missing variable.
        with self.assertRaises(RequestVariableMissingError):
            get_total(request)

        # Not valid JSON at all.
        request.REQUEST['numbers'] = 'bad_value'
        with self.assertRaises(JsonableError) as ctx:
            get_total(request)
        self.assertEqual(str(ctx.exception), 'argument "numbers" is not valid json.')

        # Valid JSON that fails the per-element validator.
        request.REQUEST['numbers'] = ujson.dumps([1, 2, "what?", 4, 5, 6])
        with self.assertRaises(JsonableError) as ctx:
            get_total(request)
        self.assertEqual(str(ctx.exception), 'numbers[2] is not an integer')

        # Fully valid input.
        request.REQUEST['numbers'] = ujson.dumps([1, 2, 3, 4, 5, 6])
        self.assertEqual(get_total(request), 21)
Exemplo n.º 26
0
    def send(self, service_req):
        """Send will wait for a response with a listener and is async.

        Serialises *service_req* into the connection's frame format: a
        space-separated header of sender/connection identifiers, timestamp
        and routing fields, followed by t()-wrapped JSON blobs for the
        arguments, headers and body.  Returns the request, now tagged with
        a fresh conn_id.
        """
        # Fresh connection id so the response can be matched to this request.
        service_req.conn_id = uuid4().hex

        # NOTE(review): the exact spacing of these format strings appears to
        # be part of the wire protocol -- do not reformat.
        header = "%s %s %s %s %s %s %s %s %s" % (self.sender_id,
            t(service_req.conn_id),
            t(service_req.request_timestamp),
            t(self.passphrase),
            t(service_req.origin_sender_id),
            t(service_req.origin_conn_id),
            t(service_req.origin_out_addr),
            t(service_req.path),
            t(service_req.method),
        )
        arguments = to_bytes(json.dumps(service_req.arguments))
        headers = to_bytes(json.dumps(service_req.headers))
        body = to_bytes(json.dumps(service_req.body))

        # The leading space is intentional (see note above).
        msg = ' %s %s%s%s' % (header, t(arguments),t(headers), t(body))
        logging.debug(
            "ServiceClientConnection send (%s:%s): %s" % (self.sender_id, service_req.conn_id, msg)
        )
        self.out_sock.send(msg)

        return service_req
Exemplo n.º 27
0
    def insert_graph_receipt_txn(self, txn, room_id, receipt_type,
                                 user_id, event_ids, data):
        """Upsert a graph receipt row inside database transaction *txn*.

        Replaces any existing (room_id, receipt_type, user_id) row in the
        ``receipts_graph`` table with the given event ids and data, and
        schedules the affected caches to be invalidated after the
        transaction commits.
        """
        # Cache invalidations are deferred via call_after so they only fire
        # once the transaction actually commits.
        txn.call_after(
            self.get_receipts_for_room.invalidate, (room_id, receipt_type)
        )
        txn.call_after(
            self._invalidate_get_users_with_receipts_in_room,
            room_id, receipt_type, user_id,
        )
        txn.call_after(
            self.get_receipts_for_user.invalidate, (user_id, receipt_type)
        )
        # FIXME: This shouldn't invalidate the whole cache
        txn.call_after(self.get_linearized_receipts_for_room.invalidate_many, (room_id,))

        # Delete-then-insert implements the upsert; both run inside *txn*.
        self._simple_delete_txn(
            txn,
            table="receipts_graph",
            keyvalues={
                "room_id": room_id,
                "receipt_type": receipt_type,
                "user_id": user_id,
            }
        )
        self._simple_insert_txn(
            txn,
            table="receipts_graph",
            values={
                "room_id": room_id,
                "receipt_type": receipt_type,
                "user_id": user_id,
                "event_ids": json.dumps(event_ids),
                "data": json.dumps(data),
            }
        )
Exemplo n.º 28
0
    def make_response(self, request, response):
        """Convert a handler result to a web response.

        Resolves coroutine chains, then maps the value by type:
        StreamResponse passes through; str/bytes become text/html;
        list/dict/multidicts become application/json; None becomes an
        empty page; anything else is stringified as text/html.
        """
        # Resolve any chain of coroutines to a concrete value first.
        while iscoroutine(response):
            response = yield from response

        if isinstance(response, StreamResponse):
            return response

        if isinstance(response, str):
            return Response(text=response, content_type='text/html', charset=self.app.cfg.ENCODING)

        if isinstance(response, (list, dict)):
            return Response(text=json.dumps(response), content_type='application/json')

        if isinstance(response, (MultiDict, MultiDictProxy)):
            return Response(text=json.dumps(dict(response)), content_type='application/json')

        if isinstance(response, bytes):
            return Response(
                body=response, content_type='text/html', charset=self.app.cfg.ENCODING)

        if response is None:
            response = ''

        return Response(text=str(response), content_type='text/html')
Exemplo n.º 29
0
    def UploadPhoto(self, token, albumid):
        """Upload a local test image to *albumid* and return its photo id.

        Implements the server's two-phase commit protocol: a first
        ``commit`` call with the file's SHA-1; if the server does not yet
        hold the blob (result_code 14004), upload the content and commit
        again.

        :param token: auth token passed through to the API
        :param albumid: target album id
        :returns: the photo id reported by the server
        """
        filename = './2.png'
        # Read as *binary* and close the handle deterministically: both the
        # SHA-1 and the upload need raw bytes (text mode broke hashing on
        # Python 3 and mangled bytes on Windows), and the original leaked
        # the file handle.
        with open(filename, 'rb') as fh:
            content = fh.read()

        sha1 = hashlib.sha1(content).hexdigest()

        # First commit: ask the server whether it already has this blob.
        data = {'token' : token,
                'latitude':40.0425140000, 'longitude': 116.3293040000,
                'sha1' : sha1, 'albumid' : albumid,
                'tag' : ['测试中文标签', 'test english tag', '第三个测试标签']}
        res = self.api('commit', ujson.dumps(data))

        if res['result_code'] == 10000:
            # Fast path: the server already holds the content.
            assert res['photoid'] is not None
            photoid = res['photoid']
        else:
            # 14004 == content unknown; upload the blob, then commit for real.
            assert res['result_code'] == 14004, res
            res = ossmisc.uploadFile(sha1, content)
            assert res == 10000

            data = {'token' : token, 'latitude':40.0425140000, 'longitude': 116.3293040000, 'sha1' : sha1, 'albumid' : albumid}
            res = self.api('commit', ujson.dumps(data))
            assert res['result_code'] == 10000
            photoid = res['photoid']

        return photoid
Exemplo n.º 30
0
    def do_test_change_user_display_setting(self, setting_name: str) -> None:
        """Round-trip one display setting through /json/settings/display.

        Verifies that a known-good value is persisted and that an
        obviously invalid value is rejected and never written to the db.
        """
        test_changes = dict(
            default_language = 'de',
            emojiset = 'apple',
            timezone = 'US/Mountain',
        )  # type: Dict[str, Any]

        self.login(self.example_email('hamlet'))
        test_value = test_changes.get(setting_name)
        # Every setting in UserProfile.property_types needs a sample value here.
        if test_value is None:
            raise AssertionError('No test created for %s' % (setting_name))
        invalid_value = 'invalid_' + setting_name

        # Valid value: accepted and persisted.
        result = self.client_patch("/json/settings/display",
                                   {setting_name: ujson.dumps(test_value)})
        self.assert_json_success(result)
        self.assertEqual(getattr(self.example_user('hamlet'), setting_name),
                         test_value)

        # Invalid value: rejected and not saved.  The json error for multi
        # word setting names (ex: default_language) displays as 'Invalid
        # language', hence the split on '_'.
        result = self.client_patch("/json/settings/display",
                                   {setting_name: ujson.dumps(invalid_value)})
        self.assert_json_error(result, "Invalid %s '%s'" % (setting_name.split('_')[-1],
                                                            invalid_value))
        self.assertNotEqual(getattr(self.example_user('hamlet'), setting_name),
                            invalid_value)
Exemplo n.º 31
0
        "title": author,
        "author": author,
        "config": {
            "generator": "lofter"
        },
        "entries": []
    }

    posts = soup.select(".m-postlst .m-post")
    if len(posts) > 0:
        i = 1
        for post in posts:
            link = post.select("a")[0]["href"]
            sys.stderr.write("(" + str(i) + "/" + str(len(posts)) +
                             ") Downloading " + link + " ... ")
            sys.stderr.flush()

            newdata = download(link)
            newsoup = bs4.BeautifulSoup(newdata, 'lxml')

            sys.stderr.write("done\n")
            sys.stderr.flush()

            myjson["entries"].append(processdata(author, newsoup, link))

            i += 1
    else:
        myjson["entries"].append(processdata(author, soup, url))

    print(ujson.dumps(myjson))
Exemplo n.º 32
0
 def test_user_cannot_promote_to_admin(self) -> None:
     """A non-admin may not grant admin rights, even to themselves."""
     self.login(self.example_email("hamlet"))
     hamlet_id = self.example_user('hamlet').id
     payload = dict(is_admin=ujson.dumps(True))
     result = self.client_patch('/json/users/{}'.format(hamlet_id), payload)
     self.assert_json_error(result, 'Insufficient permission')
Exemplo n.º 33
0
 def respond_json(self, resp, data):
     """Write *data* onto a falcon response as pretty-printed JSON (200 OK)."""
     resp.status = falcon.HTTP_200
     resp.content_type = 'application/json'
     resp.body = json.dumps(data, indent=2)
Exemplo n.º 34
0
import html
from urllib.parse import parse_qs, quote, urlencode

import falcon
from jsonschema import ValidationError

# Prefer ujson for speed; fall back to the stdlib json module.  UJSON
# records which implementation is actually in use.
try:
    import ujson as json
    UJSON = True
except ImportError:  # pragma: no cover
    UJSON = False
    import json

# Probe once at import time whether the active json implementation
# accepts the 'indent' keyword.
INDENT = True
try:
    json.dumps({}, indent=True)  # ujson 1.33 doesn't support 'indent'
except TypeError:  # pragma: no cover
    INDENT = False


def _parse_qs(req):
    args = parse_qs(req.query_string)
    for arg, values in args.items():
        if len(values) > 1:
            raise HTTPBadRequest(
                "Query option '%s' was specified more than once, but it must be specified at most once."
                % arg)

    for key in ('$top', '$skip'):
        if key in args:
            value = args[key][0]
Exemplo n.º 35
0
def generate_doc_from_each_end_point(
        app: web.Application,
        *,
        api_base_url: str = "/",
        description: str = "Swagger API definition",
        api_version: str = "1.0.0",
        title: str = "Swagger API",
        contact: str = "",
        security_definitions: dict = None):
    """Assemble a Swagger spec covering every route registered on *app*.

    Renders the base ``swagger.yaml`` Jinja2 template with the supplied
    metadata, then merges in per-endpoint documentation taken either from
    an external YAML file (``handler.swagger_file``) or from the handler's
    own docstring, and returns the whole spec serialized as a JSON string.
    """
    # Clean description: drop leading newlines, then re-indent each line
    # so it nests correctly inside the YAML template.
    _start_desc = 0
    for i, word in enumerate(description):
        if word != '\n':
            _start_desc = i
            break
    cleaned_description = "    ".join(description[_start_desc:].splitlines())

    def nesteddict2yaml(d, indent=10, result=""):
        # Jinja2 filter: render a (possibly nested) dict as indented YAML.
        for key, value in d.items():
            result += " " * indent + str(key) + ':'
            if isinstance(value, dict):
                result = nesteddict2yaml(value, indent + 2, result + "\n")
            else:
                result += " " + str(value) + "\n"
        return result

    # Load base Swagger template
    jinja2_env = Environment(loader=BaseLoader())
    jinja2_env.filters['nesteddict2yaml'] = nesteddict2yaml

    with open(join(SWAGGER_TEMPLATE, "swagger.yaml"), "r") as f:
        swagger_base = (jinja2_env.from_string(f.read()).render(
            description=cleaned_description,
            version=api_version,
            title=title,
            contact=contact,
            base_path=api_base_url,
            security_definitions=security_definitions))

    # The Swagger OBJ
    swagger = yaml.full_load(swagger_base)
    swagger["paths"] = defaultdict(dict)

    for route in app.router.routes():

        end_point_doc = None

        # If route has a external link to doc, we use it, not function doc
        if getattr(route.handler, "swagger_file", False):
            try:
                with open(route.handler.swagger_file, "r") as f:
                    end_point_doc = {
                        route.method.lower(): yaml.full_load(f.read())
                    }
            except yaml.YAMLError:
                # Broken YAML: surface the problem in the generated doc
                # instead of failing the whole spec.
                end_point_doc = {
                    route.method.lower(): {
                        "description": "⚠ Swagger document could not be "
                        "loaded from file ⚠",
                        "tags": ["Invalid Swagger"]
                    }
                }
            except FileNotFoundError:
                end_point_doc = {
                    route.method.lower(): {
                        "description":
                        "⚠ Swagger file not "
                        "found ({}) ⚠".format(route.handler.swagger_file),
                        "tags": ["Invalid Swagger"]
                    }
                }

        # Check if end-point has Swagger doc
        else:
            end_point_doc = _build_doc_from_func_doc(route)

        # there is doc available?
        if end_point_doc:
            # NOTE(review): route._resource is a private aiohttp attribute;
            # get_info() exposes "path" for static routes, "formatter" for
            # dynamic ones -- confirm against the pinned aiohttp version.
            url_info = route._resource.get_info()
            if url_info.get("path", None):
                url = url_info.get("path")
            else:
                url = url_info.get("formatter")

            swagger["paths"][url].update(end_point_doc)

    return json.dumps(swagger)
Exemplo n.º 36
0
def load_doc_from_yaml_file(doc_path: str):
    """Read the YAML document at *doc_path* and return it as a JSON string.

    :param doc_path: path to a YAML file on disk
    :raises FileNotFoundError: if *doc_path* does not exist
    :raises yaml.YAMLError: if the file is not valid YAML
    """
    # Use a context manager so the file handle is closed promptly
    # (the original left it open until garbage collection).
    with open(doc_path, "r") as f:
        loaded_yaml = yaml.full_load(f.read())
    return json.dumps(loaded_yaml)
Exemplo n.º 37
0
 def _write_current_job(self, job):
     """Persist *job* as JSON to CURRENT_JOB.data in the local working dir.

     Overwrites any previous file.  Serializes before opening so a
     serialization error never truncates an existing file, and uses a
     context manager so the handle is closed even if the write fails
     (the original leaked it on error).
     """
     fn = self.local_working_dir + '/CURRENT_JOB.data'
     payload = ujson.dumps(job)
     with open(fn, 'w') as f:
         f.write(payload)
Exemplo n.º 38
0
# Take an exclusive, non-blocking lock on the status file so at most one
# instance of this config-change handler runs at a time.
try:
    fm = 'r+' if os.path.isfile('/conf/event_config_changed.json') else 'w+'
    status_fhandle = open('/conf/event_config_changed.json', fm)
    fcntl.flock(status_fhandle, fcntl.LOCK_EX | fcntl.LOCK_NB)
except IOError:
    # already running, exit status 99, it should be safe to skip an event when config changes happen too frequently
    sys.exit(99)

# Load the last processed timestamp; start from 0 on a fresh/corrupt file.
status_fhandle.seek(0)
try:
    metadata = ujson.loads(status_fhandle.read())
    # ujson treats decimals as floats, round these numbers to avoid re-triggering the previous handled event
    metadata['last_proccessed_stamp'] = round(
        decimal.Decimal(metadata['last_proccessed_stamp']), 4)
except ValueError:
    metadata = {'last_proccessed_stamp': 0}

# Fire the syshook for every config backup newer than the last handled one,
# in timestamp order (backup filenames embed the timestamp).
for filename in sorted(glob.glob('/conf/backup/config-*.xml')):
    ts = filename.split('-')[-1].split('.xml')[0].replace('_', '')
    if ts.count('.') <= 1 and ts.replace('.', '').isdigit():
        # only process valid config backups containing a timestamp
        ts_num = decimal.Decimal(ts)
        if ts_num > metadata['last_proccessed_stamp']:
            subprocess.run(["/usr/local/etc/rc.syshook", "config", filename])
            metadata['last_proccessed_stamp'] = ts_num

# write metadata and exit
status_fhandle.seek(0)
status_fhandle.truncate()
status_fhandle.write(ujson.dumps(metadata))
Exemplo n.º 39
0
def test_reject_bytes_false():
    """With reject_bytes=False, bytes values are encoded as plain strings."""
    assert ujson.dumps({"a": b"b"}, reject_bytes=False) == '{"a":"b"}'
Exemplo n.º 40
0
 def write(self, data):
     """Serialize *data* and replace the handle's entire contents with it."""
     serialized = json.dumps(data, **self.kwargs)
     # Rewind, overwrite, then drop any stale tail left over from a
     # longer previous payload.
     self._handle.seek(0)
     self._handle.write(serialized)
     self._handle.flush()
     self._handle.truncate()
Exemplo n.º 41
0
def test_reject_bytes_default():
    """By default ujson refuses to encode bytes values."""
    with pytest.raises(TypeError):
        ujson.dumps({"a": b"b"})
Exemplo n.º 42
0
def test_encode_none_key():
    """A None dict key is serialized as the JSON string key "null"."""
    assert ujson.dumps({None: None}) == '{"null":null}'
Exemplo n.º 43
0
def test_encode_raises_allow_nan(test_input, expected_exception):
    """allow_nan=False must reject non-finite floats with the given error."""
    with pytest.raises(expected_exception):
        ujson.dumps(test_input, allow_nan=False)
Exemplo n.º 44
0
def test_reject_bytes_true():
    """reject_bytes=True must refuse bytes values with a TypeError."""
    with pytest.raises(TypeError):
        ujson.dumps({"a": b"b"}, reject_bytes=True)
Exemplo n.º 45
0
def test_dumps(test_input, expected):
    """Serialized output must match the expected string exactly."""
    actual = ujson.dumps(test_input)
    assert actual == expected
Exemplo n.º 46
0
def test_encode_decode_long_decimal():
    """A high-precision negative float must survive an encode/decode cycle."""
    payload = {"a": -528656961.4399388}
    ujson.decode(ujson.dumps(payload))
Exemplo n.º 47
0
def test_sort_keys():
    """sort_keys=True must emit object keys in ascending order."""
    unordered = {"a": 1, "c": 1, "b": 1, "e": 1, "f": 1, "d": 1}
    encoded = ujson.dumps(unordered, sort_keys=True)
    assert encoded == '{"a":1,"b":1,"c":1,"d":1,"e":1,"f":1}'
Exemplo n.º 48
0
def test_dumps_raises(test_input, expected_exception, expected_message):
    """Encoding failures must raise the expected type with the exact message."""
    with pytest.raises(expected_exception) as excinfo:
        ujson.dumps(test_input)
    assert str(excinfo.value) == expected_message
Exemplo n.º 49
0
    def craft_t0_processor_config(
            cls,
            channel: ChannelId,
            config: FirstPassConfig | dict[str, Any],
            t2_compute: list[T2Compute],
            supplier: str | dict[str, Any],
            shaper: str | dict[str, Any],
            combiner: str | dict[str, Any],
            filter_dict: None | dict[str, Any] = None,
            muxer: None | str | dict[str, Any] = None,
            compiler_opts: None | dict[str, Any] = None) -> dict[str, Any]:
        """Build the AlertConsumer processor config section for one channel.

        A reference to a FirstPassConfig dict is required because config
        information might be needed during the template transforming
        process.  Legacy channel templates (such as
        ZTFLegacyChannelTemplate) allow users to reference any kind of t2
        units under the root config section 't2_compute'; the
        AlertConsumer however requires different configuration paths for
        "state T2s", "point T2s" and "stock T2s", so units are sorted
        here by their respective abstract base classes using the ampel
        configuration.

        :param channel: channel id the directive is crafted for
        :param config: first-pass ampel config (or a plain dict view)
        :param t2_compute: T2 units to schedule
        :param supplier: unit shortcut (class name or config dict)
        :param shaper: unit shortcut (class name or config dict)
        :param combiner: unit shortcut used for t1_combine
        :param filter_dict: optional T0 filter config
        :param muxer: optional muxer unit shortcut
        :param compiler_opts: passed through verbatim
        :returns: dict suitable for the AlertConsumer 'directives' entry
        """

        # Partition t2_compute by abstract base class.
        state_t2s = filter_units(t2_compute, [
            "AbsStateT2Unit",
            "AbsCustomStateT2Unit",
            "AbsTiedStateT2Unit",
            "AbsTiedCustomStateT2Unit",
        ], config)

        stock_t2s = filter_units(t2_compute, "AbsStockT2Unit", config)
        point_t2s = filter_units(t2_compute, "AbsPointT2Unit", config)
        check_tied_units(t2_compute, config)

        ingest: dict[str, Any] = {}

        # See IngestDirective docstring
        if stock_t2s:
            ingest['stock_t2'] = stock_t2s

        # This template does not support 'free' point t2s (based on input dps list)
        # but anchors potentially available point t2s under 'combine' (based on dps list returned by combine)
        if muxer:
            # json round-trip deep-copies the resolved muxer config before
            # it is mutated below.
            ingest['mux'] = ujson.loads(ujson.dumps(resolve_shortcut(muxer)))
            if state_t2s:
                ingest['mux']['combine'] = [
                    resolve_shortcut(combiner) | {
                        'state_t2': state_t2s
                    }
                ]
            if point_t2s:
                ingest['mux']['insert'] = {"point_t2": point_t2s}
        else:
            if state_t2s:
                ingest['combine'] = [
                    resolve_shortcut(combiner) | {
                        'state_t2': state_t2s
                    }
                ]
            if point_t2s:
                # Attach point t2s to the combine block created for state
                # t2s if present, otherwise create one.
                if 'combine' in ingest:
                    ingest['combine'][0]['point_t2'] = point_t2s
                else:
                    ingest['combine'] = [
                        resolve_shortcut(combiner) | {
                            'point_t2': point_t2s
                        }
                    ]

        return {
         "supplier": resolve_shortcut(supplier),
         "shaper": resolve_shortcut(shaper),
         "compiler_opts": compiler_opts,
         "directives": [
          {"channel": channel, "filter": filter_dict, "ingest": ingest} if filter_dict \
          else {"channel": channel, "ingest": ingest}
         ]
        }
Exemplo n.º 50
0
def test_write_escaped_string():
    """encode_html_chars=True must escape <, > and & as \\uXXXX sequences."""
    expected = "\"\\u003cimg src='\\u0026amp;'\\/\\u003e\""
    assert ujson.dumps("<img src='&amp;'/>", encode_html_chars=True) == expected
    def process_message(self, message):
        """Parse one measurement message and queue Cassandra statements.

        Derives a stable metric id (SHA-1 over region, tenant, name and
        sorted dimensions), then -- under the instance lock -- batches
        either a lightweight update (metric id already cached) or a full
        insert plus dimension bookkeeping queries.  Returns the
        constructed Metric record either way.
        """
        (dimensions, metric_name, region, tenant_id, time_stamp, value,
         value_meta) = parse_measurement_message(message)

        with self._lock:
            dim_names = []
            dim_list = []
            # Sorted so the same dimension set always hashes identically.
            # NOTE(review): iterkeys() is Python-2-only dict API.
            for name in sorted(dimensions.iterkeys()):
                dim_list.append('%s\t%s' % (name, dimensions[name]))
                dim_names.append(name)

            # NUL-separated canonical form; its SHA-1 is the metric id.
            hash_string = '%s\0%s\0%s\0%s' % (region, tenant_id, metric_name,
                                              '\0'.join(dim_list))
            metric_id = hashlib.sha1(hash_string.encode('utf8')).hexdigest()

            metric = Metric(id=metric_id,
                            region=region,
                            tenant_id=tenant_id,
                            name=metric_name,
                            dimension_list=dim_list,
                            dimension_names=dim_names,
                            time_stamp=time_stamp,
                            value=value,
                            value_meta=json.dumps(value_meta,
                                                  ensure_ascii=False))

            id_bytes = bytearray.fromhex(metric.id)
            # Known metric: only the measurement and metric rows need
            # refreshing; skip the dimension bookkeeping entirely.
            if self._metric_id_cache.get(metric.id, None):
                measurement_bound_stmt = self._measurement_update_stmt.bind(
                    (self._retention, metric.value, metric.value_meta,
                     id_bytes, metric.time_stamp))
                self._metric_batch.add_measurement_query(
                    measurement_bound_stmt)

                metric_update_bound_stmt = self._metric_update_stmt.bind(
                    (self._retention, metric.time_stamp, metric.region,
                     metric.tenant_id, metric.name, metric.dimension_list,
                     metric.dimension_names))
                self._metric_batch.add_metric_query(metric_update_bound_stmt)

                return metric

            self._metric_id_cache[metric.id] = metric.id

            metric_insert_bound_stmt = self._metric_insert_stmt.bind(
                (self._retention, id_bytes, metric.time_stamp,
                 metric.time_stamp, metric.region, metric.tenant_id,
                 metric.name, metric.dimension_list, metric.dimension_names))
            self._metric_batch.add_metric_query(metric_insert_bound_stmt)

            # First sighting: register each dimension and each
            # metric<->dimension association, deduplicated via caches.
            for dim in metric.dimension_list:
                (name, value) = dim.split('\t')
                dim_key = self._get_dimnesion_key(metric.region,
                                                  metric.tenant_id, name,
                                                  value)
                if not self._dimension_cache.get(dim_key, None):
                    dimension_bound_stmt = self._dimension_stmt.bind(
                        (metric.region, metric.tenant_id, name, value))
                    self._metric_batch.add_dimension_query(
                        dimension_bound_stmt)
                    self._dimension_cache[dim_key] = dim_key

                metric_dim_key = self._get_metric_dimnesion_key(
                    metric.region, metric.tenant_id, metric.name, name, value)
                if not self._metric_dimension_cache.get(metric_dim_key, None):
                    dimension_metric_bound_stmt = self._dimension_metric_stmt.bind(
                        (metric.region, metric.tenant_id, name, value,
                         metric.name))
                    self._metric_batch.add_dimension_metric_query(
                        dimension_metric_bound_stmt)

                    metric_dimension_bound_stmt = self._metric_dimension_stmt.bind(
                        (metric.region, metric.tenant_id, metric.name, name,
                         value))
                    self._metric_batch.add_metric_dimension_query(
                        metric_dimension_bound_stmt)

                    self._metric_dimension_cache[
                        metric_dim_key] = metric_dim_key

            measurement_insert_bound_stmt = self._measurement_insert_stmt.bind(
                (self._retention, metric.value, metric.value_meta,
                 metric.region, metric.tenant_id, metric.name,
                 metric.dimension_list, id_bytes, metric.time_stamp))
            self._metric_batch.add_measurement_query(
                measurement_insert_bound_stmt)

            return metric
Exemplo n.º 52
0
def test_encoding_invalid_unicode_character():
    """A lone surrogate cannot be encoded and must raise UnicodeEncodeError."""
    lone_surrogate = "\udc7f"
    with pytest.raises(UnicodeEncodeError):
        ujson.dumps(lone_surrogate)
    def _sendAllPackets(self,
                        listener,
                        jpTransferStartTimeBorder=10,
                        jpTransferEndTimeBorder=15,
                        hasTransferPermissionsList=["dev_hello"]):
        # get all packets types without LinkAddress type
        #isNeedPacketsRegeneration = True
        while True:
            jsonPacketsDataList = CM_Service.packetsGeneration(20)[0]
            print("LOG: Error. jsonPacketsDataList is empty!")
            if len(jsonPacketsDataList) > 0:
                break
        print("-----------------------------")
        print("Packets for sendidng:")
        print(jsonPacketsDataList)
        print("-----------------------------")
        print("log: Sending packets ")
        transferInSec = rnd.randint(jpTransferStartTimeBorder,
                                    jpTransferEndTimeBorder)
        # send all packets
        packetsCount = 3
        helloPackNo = 0
        dataPackNo = 0
        # list for dev_id and ctrl_id sync
        id_sync = list()
        ctrl_id_type = dict()
        dataPacket = dict()
        switch_state_max_cnt = 0
        for jsonPacket in jsonPacketsDataList:
            # check up the transfer permission list
            if self.isPacketHasTransferPermission(jsonPacket["type"],
                                                  hasTransferPermissionsList):
                #self.write_message(ujson.dumps(jsonPacket))
                #pingPack = listener.ping(data='')
                #print("ping = ", pingPack)
                # if not ws.ws_connection or not ws.ws_connection.stream.socket:
                #self.ping(data='hey')
                #if listener.ws_connection:

                if listener in self.clients:
                    #if jsonPacket["type"] == "dev_hello":
                    #dataPacket = dict()
                    if helloPackNo < 1:
                        print("Cur Hello Pack:")
                        print(jsonPacket)
                        from faker import Faker
                        fake = Faker()
                        new_dev_id = fake.mac_address()
                        jsonPacket["dev_id"] = new_dev_id
                        '''
                            id_sync.append(jsonPacket["dev_id"])
                            id_sync.append(
                                (jsonPacket["controls"][0].get("type"), jsonPacket["controls"][0].get("ctrl_id")))
                            id_sync.append(
                                (jsonPacket["controls"][1].get("type"), jsonPacket["controls"][1].get("ctrl_id")))
                            id_sync.append(
                                (jsonPacket["controls"][2].get("type"), jsonPacket["controls"][2].get("ctrl_id")))
                            id_sync.append(
                                (jsonPacket["controls"][3].get("type"), jsonPacket["controls"][3].get("ctrl_id")))
                            id_sync.append(
                                (jsonPacket["controls"][4].get("type"), jsonPacket["controls"][4].get("ctrl_id")))
                            '''
                        ctrl_id_type.update({"dev_id": jsonPacket["dev_id"]})

                        for i in range(0, len(jsonPacket["controls"])):
                            print(
                                "DEBUG: control #", i, " name: ",
                                jsonPacket["controls"][i].get("type").get(
                                    "name"), " state: ",
                                jsonPacket["changes_packet"]["controls"]
                                [i].get("state"))
                            if jsonPacket["controls"][i].get("type").get(
                                    "name") == "switch_state":
                                switch_state_max_cnt = len(
                                    jsonPacket["controls"][i].get("type").get(
                                        "optional").get("names"))
                            ctrl_id_type.update({
                                jsonPacket["controls"][i].get("type").get("name"):
                                jsonPacket["controls"][i].get("ctrl_id")
                            })
                        print("DEBUG: max count of switch_state:",
                              switch_state_max_cnt)
                        dataPacket = self.hello_pack_resetId(
                            jsonPacket, jsonPacket["type"], ctrl_id_type,
                            switch_state_max_cnt)
                        listener.write_message(ujson.dumps(jsonPacket))

                        #pauseInSec = rnd.randint(20, 30)
                        #time.sleep(pauseInSec)
                        #self.reset_states_in_data_packet(dataPacket, ctrl_id_type, switch_state_max_cnt)
                        #listener.write_message(ujson.dumps(dataPacket))
                        #print("Sent Data Packet")
                        #print(dataPacket)
                        print("DEBUG: struct for sync:")
                        print(ctrl_id_type)
                        helloPackNo += 1
                    else:
                        #helloPackNo = 0
                        if dataPackNo < 3:
                            pauseInSec = rnd.randint(20, 30)
                            time.sleep(pauseInSec)
                            self.reset_states_in_data_packet(
                                dataPacket, ctrl_id_type, switch_state_max_cnt)
                            listener.write_message(ujson.dumps(dataPacket))
                            print("Sent Data Packet:")
                            print(dataPacket)
                        else:
                            dataPackNo = 0
                            ctrl_id_type.clear()
                            dataPacket.clear()
                        break

                        #listener.write_message(ujson.dumps(dataPacket))
                        '''
                            helloPackNo = 0
                            #listener.write_message(ujson.dumps({"msg": "Please, reconnecting for new 3 packets"}))
                            sync_data_packs = self.dataPacksModification("dev_changes", ctrl_id_type)
                            # send data packs
                            for dataPacket in sync_data_packs:
                                print("id list for sync:")
                                print(id_sync)
                                print("Data_Packet")
                                print(dataPacket)
                                if dataPackNo < 3:
                                    listener.write_message(ujson.dumps(dataPacket))
                                    dataPackNo += 1
                                    print("Sent Data_Packet")
                                    print(dataPacket)
                                    #pauseInSec = rnd.randint(20, 30)
                                    pauseInSec = rnd.randint(20, 30)
                                    time.sleep(pauseInSec)
                                else:
                                    dataPackNo = 0
                                    ctrl_id_type.clear()
                                    break
                            '''
                        break

                #else:
                    '''
                        sync_data_packs = self.dataPacksModification(jsonPacket["type"], id_sync)
                        # send data packs
                        for jsonPacket in sync_data_packs:
                            if dataPackNo < 3:
                                listener.write_message(ujson.dumps(jsonPacket))
                                dataPackNo += 1
                                pauseInSec = rnd.randint(20, 30)
                                time.sleep(pauseInSec)
                            else:
                                id_sync.clear()
                                break
                        '''
                #self.data_received()
                else:
                    break
                    #self.clients.remove(listener)
                #pauseInSec = rnd.randint(5, 10)
                pauseInSec = rnd.randint(20, 30)
                time.sleep(pauseInSec)
                print("Sent hello-packet:")
                print(jsonPacket)
                transferInSec -= 1
                '''
Exemplo n.º 54
0
def main():
    """Evaluate an nnsum model with the original Perl ROUGE script.

    Loads a trained model, generates a summary per input document,
    scores them against reference summaries via rouge_papier, prints the
    average row and optionally dumps per-document scores to --results.
    """
    parser = argparse.ArgumentParser(
        "Evaluate nnsum models using original Perl ROUGE script.")
    parser.add_argument("--batch-size", default=32, type=int)
    parser.add_argument("--gpu", default=-1, type=int)
    parser.add_argument("--sentence-limit", default=None, type=int)
    parser.add_argument("--summary-length", type=int, default=100)
    parser.add_argument("--remove-stopwords",
                        action="store_true",
                        default=False)
    parser.add_argument("--inputs", type=pathlib.Path, required=True)
    parser.add_argument("--refs", type=pathlib.Path, required=True)
    parser.add_argument("--model", type=pathlib.Path, required=True)
    parser.add_argument("--results",
                        type=pathlib.Path,
                        required=False,
                        default=None)

    args = parser.parse_args()

    # Load on CPU first; move to GPU only if one was requested.
    print("Loading model...", end="", flush=True)
    model = torch.load(args.model, map_location=lambda storage, loc: storage)
    if args.gpu > -1:
        model.cuda(args.gpu)
    vocab = model.embeddings.vocab
    print(" OK!")

    data = nnsum.data.SingleDocumentDataset(vocab,
                                            args.inputs,
                                            references_dir=args.refs,
                                            sentence_limit=args.sentence_limit)
    loader = data.dataloader(batch_size=args.batch_size)

    ids = []
    path_data = []
    model.eval()
    # Summaries are written to temp files; ROUGE scores file paths, not text.
    with rouge_papier.util.TempFileManager() as manager:
        with torch.no_grad():
            for step, batch in enumerate(loader, 1):
                batch = batch.to(args.gpu)
                print("generating summaries {} / {} ...".format(
                    step, len(loader)),
                      end="\r" if step < len(loader) else "\n",
                      flush=True)
                texts = model.predict(batch, max_length=args.summary_length)

                for text, ref_paths in zip(texts, batch.reference_paths):
                    summary = "\n".join(text)
                    summary_path = manager.create_temp_file(summary)
                    path_data.append(
                        [summary_path, [str(x) for x in ref_paths]])
                ids.extend(batch.id)

        config_text = rouge_papier.util.make_simple_config_text(path_data)
        config_path = manager.create_temp_file(config_text)
        df = rouge_papier.compute_rouge(config_path,
                                        max_ngram=2,
                                        lcs=True,
                                        remove_stopwords=args.remove_stopwords,
                                        length=args.summary_length)
        # Last row is the corpus average; keep it last, sort the rest by id.
        df.index = ids + ["average"]
        df = pd.concat([df[:-1].sort_index(), df[-1:]], axis=0)
        print(df[-1:])

        if args.results:
            records = df[:-1].to_dict("records")

            # NOTE(review): "idividual" looks like a typo for "individual";
            # fixing it would change the results-file schema for consumers.
            results = {
                "idividual": {id: record
                              for id, record in zip(ids, records)},
                "average": df[-1:].to_dict("records")[0]
            }
            args.results.parent.mkdir(parents=True, exist_ok=True)
            with args.results.open("w") as fp:
                fp.write(json.dumps(results))
Exemplo n.º 55
0
def json_filter(s):
    """Template filter: return *s* serialized as a JSON string."""
    serialized = json.dumps(s)
    return serialized
 def packSerialize(self, packet):
     """Serialize an outgoing packet to its JSON wire representation."""
     encoded = ujson.dumps(packet)
     return encoded
Exemplo n.º 57
0
 async def get_ping_data(self):
     """Build the JSON payload for a "ping" event stamped with the
     current request id."""
     reqid = await self.now_timestamp
     payload = {"event": "ping", "reqid": reqid}
     return ujson.dumps(payload)
Exemplo n.º 58
0
def send_update_shipment(id_shipment,
                         id_shop,
                         id_brand,
                         shipping_fee=None,
                         handling_fee=None,
                         status=None,
                         tracking_num=None,
                         content=None,
                         shipping_date=None,
                         tracking_name=None,
                         shipping_carrier=None):
    """Push a 'modify' shipment update to the remote users service.

    Only the optional fields that are provided (and truthy) are included
    in the payload; when none of them are given, nothing is sent.

    Returns:
        The remote service response, or None when there was nothing to send.

    Raises:
        UsersServerError: if building/encrypting the payload or the remote
            call fails (the underlying error is logged with context).
    """
    # Serialize list content up-front so the truthiness test below still
    # sees a non-empty value and the payload carries a JSON string.
    if isinstance(content, list):
        content = ujson.dumps(content)

    try:
        data = {}
        # NOTE(review): truthiness checks drop falsy-but-meaningful values
        # (e.g. a fee of 0 or an empty string status) — confirm intended.
        if shipping_fee:
            data['shipping_fee'] = shipping_fee
        if handling_fee:
            data['handling_fee'] = handling_fee
        if status:
            data['status'] = status
        if tracking_num:
            data['tracking'] = tracking_num
        if content:
            data['content'] = content
        if shipping_date:
            data['shipping_date'] = shipping_date
        if tracking_name:
            data['tracking_name'] = tracking_name
        if shipping_carrier:
            data['shipping_carrier'] = shipping_carrier

        # No field to update: skip the remote round-trip entirely.
        if not data:
            return

        data['shipment'] = id_shipment
        data['shop'] = id_shop
        data['brand'] = id_brand
        data['action'] = 'modify'
        data = ujson.dumps(data)
        data = gen_encrypt_json_context(
            data, settings.SERVER_APIKEY_URI_MAP[SERVICES.USR],
            settings.PRIVATE_KEY_PATH)
        rst = get_from_remote(settings.ORDER_SHIPMENT,
                              settings.SERVER_APIKEY_URI_MAP[SERVICES.USR],
                              settings.PRIVATE_KEY_PATH,
                              data=data,
                              headers={'Content-Type': 'application/json'})
        return rst

    # BUGFIX: 'except Exception, e' is Python-2-only syntax (a SyntaxError
    # on Python 3); 'as e' is valid on both Python 2.6+ and Python 3.
    except Exception as e:
        logging.error(
            'Failed to update shipment %s,'
            'error: %s', {
                'id_shipment': id_shipment,
                'shipping_fee': shipping_fee,
                'handling_fee': handling_fee,
                'status': status,
                'tracking_num': tracking_num,
                'content': content
            },
            e,
            exc_info=True)
        raise UsersServerError
Exemplo n.º 59
0
def json_dumps(data, **kwargs):
    """Serialize *data* to JSON via ujson.

    ``ensure_ascii`` defaults to False (non-ASCII text is emitted verbatim)
    but can be overridden by the caller.

    BUGFIX: the original accepted **kwargs but silently discarded them
    (ujson.dumps was called with ensure_ascii only); they are now
    forwarded, preserving the old default behavior for callers that
    pass no keyword arguments.
    """
    kwargs.setdefault('ensure_ascii', False)
    return ujson.dumps(data, **kwargs)
Exemplo n.º 60
0
def write_json(raw, path):
    """Serialize *raw* as JSON and write it to *path*, newline-terminated."""
    serialized = json.dumps(raw)
    with open(path, 'w') as out:
        out.write(serialized)
        out.write("\n")