def test_do_restore_message(self):
    # _do_restore_message must rpush the serialized payload onto every
    # queue that _lookup(exchange, routing_key) resolves to.
    client = Mock(name='client')
    pl1 = {'body': 'BODY'}
    spl1 = dumps(pl1)
    lookup = self.channel._lookup = Mock(name='_lookup')
    lookup.return_value = ['george', 'elaine']
    self.channel._do_restore_message(
        pl1, 'ex', 'rkey', client,
    )
    client.rpush.assert_has_calls([
        call('george', spl1),
        call('elaine', spl1),
    ])
    # A payload that already carries headers gets redelivered=True merged
    # in before being pushed.
    pl2 = {'body': 'BODY2', 'headers': {'x-funny': 1}}
    headers_after = dict(pl2['headers'], redelivered=True)
    spl2 = dumps(dict(pl2, headers=headers_after))
    self.channel._do_restore_message(
        pl2, 'ex', 'rkey', client,
    )
    client.rpush.assert_has_calls([
        call('george', spl2),
        call('elaine', spl2),
    ])
    # Failures while restoring are logged through
    # kombu.transport.redis.crit instead of propagating.
    client.rpush.side_effect = KeyError()
    with patch('kombu.transport.redis.crit') as crit:
        self.channel._do_restore_message(
            pl2, 'ex', 'rkey', client,
        )
        self.assertTrue(crit.called)
def _unpack_fields(cls, schedule, args=None, kwargs=None,
                   relative=None, options=None, **entry):
    # Fold a schedule definition into the model-entry dict: the schedule
    # is converted to its model field/value pair, call args/kwargs are
    # JSON-serialized, and option fields are expanded via _unpack_options.
    # NOTE(review): ``relative`` is accepted only so that it is stripped
    # out of **entry; it is otherwise unused here — confirm intentional.
    model_schedule, model_field = cls.to_model_schedule(schedule)
    entry.update(
        {model_field: model_schedule},
        args=dumps(args or []),
        kwargs=dumps(kwargs or {}),
        **cls._unpack_options(**options or {})
    )
    return entry
def test_put_priority(self):
    # _put should route on the delivery_info priority: 3 is used as-is,
    # an out-of-range 313 is clamped to 9, and a missing priority
    # defaults to 0.
    client = self.channel.client = Mock(name="client")
    msg1 = {"properties": {"delivery_info": {"priority": 3}}}
    self.channel._put("george", msg1)
    client.lpush.assert_called_with(
        self.channel._q_for_pri("george", 3), dumps(msg1))
    msg2 = {"properties": {"delivery_info": {"priority": 313}}}
    self.channel._put("george", msg2)
    client.lpush.assert_called_with(
        self.channel._q_for_pri("george", 9), dumps(msg2))
    msg3 = {"properties": {"delivery_info": {}}}
    self.channel._put("george", msg3)
    client.lpush.assert_called_with(
        self.channel._q_for_pri("george", 0), dumps(msg3))
def do_call_method(self, args, timeout=None, destination=None,
                   json=False, **kwargs):
    """Invoke a remote-control method on worker nodes and return replies.

    ``args[0]`` names the method; remaining args are compiled into the
    method's arguments.  Raises ``self.Error`` / ``self.UsageError`` for
    unknown methods or when no node replies in time.
    """
    method = args[0]
    if method == 'help':
        raise self.Error("Did you mean '{0.name} --help'?".format(self))
    try:
        meta = self.choices[method]
    except KeyError:
        raise self.UsageError(
            'Unknown {0.name} method {1}'.format(self, method))
    self._ensure_fanout_supported()
    # Fall back to the method's declared default timeout when none given.
    timeout = timeout or meta.default_timeout
    if destination and isinstance(destination, string_t):
        # Comma-separated node names -> list of stripped names.
        destination = [dest.strip() for dest in destination.split(',')]
    replies = self.call(
        method,
        arguments=self.compile_arguments(meta, method, args[1:]),
        timeout=timeout,
        destination=destination,
        # In JSON mode replies are dumped at the end instead of echoed
        # one-by-one through the callback.
        callback=None if json else self.say_remote_command_reply,
    )
    if not replies:
        raise self.Error('No nodes replied within time constraint.',
                         status=EX_UNAVAILABLE)
    if json:
        self.out(dumps(replies))
    return replies
def test_put_fanout(self):
    """_put_fanout must publish the serialized body on the exchange channel."""
    self.channel._in_poll = False
    fake_client = Mock()
    self.channel.client = fake_client
    payload = {"hello": "world"}
    self.channel._put_fanout("exchange", payload, "")
    fake_client.publish.assert_called_with("exchange", dumps(payload))
def test_receive(self):
    """A pubsub message on channel 'a' is decoded and mapped to queue 'b'."""
    sub = Mock()
    self.channel.subclient = sub
    self.channel._fanout_to_queue["a"] = "b"
    raw = dumps({"hello": "world"})
    sub.parse_response.return_value = ["message", "a", raw]
    payload, queue = self.channel._receive()
    self.assertDictEqual(payload, {"hello": "world"})
    self.assertEqual(queue, "b")
def test_put_fanout(self):
    """Fanout publish goes straight to client.publish with a dumped body."""
    self.channel._in_poll = False
    publisher = Mock()
    self.channel.client = publisher
    body = {'hello': 'world'}
    expected = dumps(body)
    self.channel._put_fanout('exchange', body, '')
    publisher.publish.assert_called_with('exchange', expected)
def message_to_python(self, message, *args, **kwargs):
    """Record the call and wrap *message* in a JSON-encoded Message stub."""
    self._called('message_to_python')
    body = json.dumps(message)
    tag = next(self.deliveries)
    return Message(
        self,
        body=body,
        delivery_tag=tag,
        throw_decode_error=self.throw_decode_error,
        content_type='application/json',
        content_encoding='utf-8',
    )
def _put_fanout(self, exchange, message, routing_key, **kwargs):
    """Deliver fanout message."""
    topic = self._get_publish_topic(exchange, routing_key)
    with self.conn_or_acquire() as client:
        client.publish(topic, dumps(message))
def do_call_method(self, args, **kwargs):
    """Invoke a broadcast/remote-control method named by ``args[0]``.

    Rejects 'help', unknown methods, and SQL broker transports; replies
    are echoed per-node unless ``json`` output was requested.
    """
    method = args[0]
    if method == 'help':
        raise self.Error("Did you mean '{0.name} --help'?".format(self))
    if method not in self.choices:
        raise self.UsageError(
            'Unknown {0.name} method {1}'.format(self, method))
    # SQL transports cannot do fanout, so broadcast is unsupported there.
    if self.app.connection_for_write().transport.driver_type == 'sql':
        raise self.Error('Broadcast not supported by SQL broker transport')
    output_json = kwargs.get('json')
    destination = kwargs.get('destination')
    # choices[method][0] holds the method's default timeout.
    timeout = kwargs.get('timeout') or self.choices[method][0]
    if destination and isinstance(destination, string_t):
        destination = [dest.strip() for dest in destination.split(',')]
    # A subclass may override the handler per-method; default to call().
    handler = getattr(self, method, self.call)
    callback = None if output_json else self.say_remote_command_reply
    replies = handler(method, *args[1:], timeout=timeout,
                      destination=destination,
                      callback=callback)
    if not replies:
        raise self.Error('No nodes replied within time constraint.',
                         status=EX_UNAVAILABLE)
    if output_json:
        self.out(json.dumps(replies))
    return replies
def test_is_JSON_serializable(self):
    # A subtask must survive a JSON round-trip and still compare equal.
    s = self.MockTask.subtask(
        (2, ), {'cache': True}, {'routing_key': 'CPU-bound'},
    )
    s.args = list(s.args)  # tuples are not preserved
                           # but this doesn't matter.
    self.assertEqual(s, self.subtask(json.loads(json.dumps(s))))
def _put(self, queue, payload, **kwargs):
    # Persist *payload* as a new message row attached to *queue*.
    obj = self._get_or_create(queue)
    message = self.message_cls(dumps(payload), obj)
    self.session.add(message)
    try:
        self.session.commit()
    except OperationalError:
        self.session.rollback()
        # NOTE(review): the failed commit is swallowed after rollback,
        # so the message is silently dropped — confirm this best-effort
        # behavior is intentional.
def test_receive(self):
    """Receiving a fanout pubsub frame yields (decoded payload, queue)."""
    subclient_mock = Mock()
    self.channel.subclient = subclient_mock
    self.channel._fanout_to_queue['a'] = 'b'
    frame = ['message', 'a', dumps({'hello': 'world'})]
    subclient_mock.parse_response.return_value = frame
    payload, queue = self.channel._receive()
    self.assertDictEqual(payload, {'hello': 'world'})
    self.assertEqual(queue, 'b')
def _put(self, queue, message, **kwargs):
    # Send *message* to the beanstalk tube named *queue*, honouring the
    # message's priority and optional time-to-run (ttr).
    extra = {}
    # NOTE(review): raises KeyError if delivery_info lacks 'priority' —
    # confirm upstream always populates it.
    priority = message['properties']['delivery_info']['priority']
    ttr = message['properties'].get('ttr')
    if ttr is not None:
        extra['ttr'] = ttr
    self.client.use(queue)
    self.client.put(dumps(message), priority=priority, **extra)
def _put(self, queue, message, **kwargs):
    """Send *message* to the beanstalk tube *queue* with priority/ttr."""
    priority = self._get_message_priority(message)
    put_kwargs = {}
    ttr = message['properties'].get('ttr')
    if ttr is not None:
        put_kwargs['ttr'] = ttr
    self.client.use(queue)
    self.client.put(dumps(message), priority=priority, **put_kwargs)
def append(self, message, delivery_tag):
    # Track *message* as unacknowledged: its timestamp goes into the
    # sorted-set index and (raw payload, exchange, routing key) into the
    # unacked hash, both in one pipeline round-trip.
    delivery = message.delivery_info
    EX, RK = delivery['exchange'], delivery['routing_key']
    with self.pipe_or_acquire() as pipe:
        pipe.zadd(self.unacked_index_key, delivery_tag, time()) \
            .hset(self.unacked_key, delivery_tag,
                  dumps([message._raw, EX, RK])) \
            .execute()
    super(QoS, self).append(message, delivery_tag)
def test_do_restore_message(self):
    # Restoring pushes the serialized payload to each queue from _lookup.
    client = Mock(name="client")
    pl1 = {"body": "BODY"}
    spl1 = dumps(pl1)
    lookup = self.channel._lookup = Mock(name="_lookup")
    lookup.return_value = ["george", "elaine"]
    self.channel._do_restore_message(pl1, "ex", "rkey", client)
    client.rpush.assert_has_calls([call("george", spl1),
                                   call("elaine", spl1)])
    # Existing headers gain redelivered=True before the push.
    pl2 = {"body": "BODY2", "headers": {"x-funny": 1}}
    headers_after = dict(pl2["headers"], redelivered=True)
    spl2 = dumps(dict(pl2, headers=headers_after))
    self.channel._do_restore_message(pl2, "ex", "rkey", client)
    client.rpush.assert_has_calls([call("george", spl2),
                                   call("elaine", spl2)])
    # Push failures are logged via crit() rather than raised.
    client.rpush.side_effect = KeyError()
    with patch("kombu.transport.redis.crit") as crit:
        self.channel._do_restore_message(pl2, "ex", "rkey", client)
        self.assertTrue(crit.called)
def test_put_priority(self):
    # This transport reads priority from properties directly: 3 is kept,
    # 313 is clamped to the maximum 9, absent priority defaults to 0.
    client = self.channel.client = Mock(name='client')
    msg1 = {'properties': {'priority': 3}}
    self.channel._put('george', msg1)
    client.lpush.assert_called_with(
        self.channel._q_for_pri('george', 3), dumps(msg1),
    )
    msg2 = {'properties': {'priority': 313}}
    self.channel._put('george', msg2)
    client.lpush.assert_called_with(
        self.channel._q_for_pri('george', 9), dumps(msg2),
    )
    msg3 = {'properties': {}}
    self.channel._put('george', msg3)
    client.lpush.assert_called_with(
        self.channel._q_for_pri('george', 0), dumps(msg3),
    )
def _put(self, queue, message, **kwargs):
    """Insert *message* into the MongoDB-backed *queue*."""
    record = {
        'payload': dumps(message),
        'queue': queue,
        'priority': self._get_message_priority(message, reverse=True),
    }
    if self.ttl:
        # Only compute an expiry when TTL support is enabled.
        record['expire_at'] = self._get_expire(queue, 'x-message-ttl')
    self.messages.insert(record)
def _put(self, queue, payload, **_):
    """Put `message` onto `queue`.

    This simply writes a key to the K/V store of Consul
    """
    timestamp_ms = int(round(monotonic() * 1000))
    key = '{0}/msg/{1}_{2}'.format(
        self._key_prefix(queue), timestamp_ms, uuid.uuid4(),
    )
    stored = self.client.kv.put(key=key, value=dumps(payload), cas=0)
    if not stored:
        raise ChannelError('Cannot add key {0!r} to consul'.format(key))
def test_run(self, send_task):
    # The call command forwards task options to send_task.
    a = call(app=self.app, stderr=WhateverIO(), stdout=WhateverIO())
    a.run(self.add.name)
    send_task.assert_called()
    # args/kwargs are given as JSON strings and decoded before sending.
    a.run(self.add.name,
          args=dumps([4, 4]), kwargs=dumps({'x': 2, 'y': 2}))
    self.assertEqual(send_task.call_args[1]['args'], [4, 4])
    self.assertEqual(send_task.call_args[1]['kwargs'], {'x': 2, 'y': 2})
    a.run(self.add.name, expires=10, countdown=10)
    self.assertEqual(send_task.call_args[1]['expires'], 10)
    self.assertEqual(send_task.call_args[1]['countdown'], 10)
    # ISO-8601 expiry strings are parsed back into datetime objects.
    now = datetime.now()
    iso = now.isoformat()
    a.run(self.add.name, expires=iso)
    self.assertEqual(send_task.call_args[1]['expires'], now)
    # Unparseable expiry strings must raise ValueError.
    with self.assertRaises(ValueError):
        a.run(self.add.name, expires='foobaribazibar')
def _do_restore_message(self, payload, exchange, routing_key,
                        client=None, leftmost=False):
    # Requeue *payload* onto every queue bound to (exchange, routing_key);
    # leftmost=True pushes to the head of the list instead of the tail.
    with self.conn_or_acquire(client) as client:
        try:
            try:
                # Mark the message so consumers can tell it was requeued;
                # payloads without headers are pushed unmodified.
                payload['headers']['redelivered'] = True
            except KeyError:
                pass
            for queue in self._lookup(exchange, routing_key):
                (client.lpush if leftmost else client.rpush)(
                    queue, dumps(payload),
                )
        except Exception:
            # Best effort: log and continue rather than fail the caller.
            crit('Could not restore message: %r', payload, exc_info=True)
def _put(self, queue, payload, **kwargs):
    """Put `message` onto `queue`.

    Serializes *payload* and writes it to a uniquely named, exclusively
    locked file in the outgoing data folder.

    Raises:
        ChannelError: if the file cannot be created or written.
    """
    filename = "%s_%s.%s.msg" % (int(round(monotonic() * 1000)),
                                 uuid.uuid4(), queue)
    filename = os.path.join(self.data_folder_out, filename)
    # Bug fix: if open() itself failed, ``f`` was never bound, and the
    # finally block raised UnboundLocalError, masking the ChannelError.
    f = None
    try:
        f = open(filename, "wb")
        lock(f, LOCK_EX)
        f.write(str_to_bytes(dumps(payload)))
    except (IOError, OSError):
        raise ChannelError("Cannot add file {0!r} to directory".format(filename))
    finally:
        if f is not None:
            unlock(f)
            f.close()
def post(self, request, *args, **kwargs):
    """Change a user's password after verifying the current one."""
    pk = kwargs.get('pk')
    password = request.data.get('password')
    new_password = request.data.get('new_password')
    user = get_object_or_404(User, pk=pk)
    # Guard clauses: wrong current password, then missing new password.
    if not user.check_password(password):
        raise Exception('原密码错误!')
    if not new_password:
        raise Exception('新密码不能为空!')
    user.set_password(new_password)
    user.save()
    response = HttpResponse()
    response.write(json.dumps({'result': 'success'}))
    return response
def _unpack_options(cls, queue=None, exchange=None, routing_key=None, priority=None, headers=None, expire_seconds=None, **kwargs): return { "queue": queue, "exchange": exchange, "routing_key": routing_key, "priority": priority, "headers": dumps(headers or {}), "expire_seconds": expire_seconds, }
def _put(self, queue, message, **kwargs):
    """Insert *message* into the MongoDB-backed *queue*.

    The stored document carries the serialized payload, the queue name,
    a (reversed) priority, and the earliest applicable expiry: the
    queue-level TTL (when enabled) or the message's own expiration,
    whichever comes first.
    """
    data = {
        'payload': dumps(message),
        'queue': queue,
        'priority': self._get_message_priority(message, reverse=True)
    }
    if self.ttl:
        data['expire_at'] = self._get_queue_expire(queue, 'x-message-ttl')
    msg_expire = self._get_message_expire(message)
    # Bug fix: when self.ttl is falsy, 'expire_at' was never set and
    # data['expire_at'] raised KeyError for messages carrying their own
    # expiration; use .get() so the message expiry still applies.
    if msg_expire is not None and (
        data.get('expire_at') is None or msg_expire < data['expire_at']
    ):
        data['expire_at'] = msg_expire
    self.messages.insert_one(data)
def _unpack_options(cls, queue=None, exchange=None, routing_key=None, priority=None, headers=None, expire_seconds=None, **kwargs): return { 'queue': queue, 'exchange': exchange, 'routing_key': routing_key, 'priority': priority, 'headers': dumps(headers or {}), 'expire_seconds': expire_seconds, }
def test_encode_datetime():
    # Round-trip datetime/date/time values through dumps/loads and check
    # the serialized string forms.
    now = datetime.utcnow()
    now_utc = now.replace(tzinfo=pytz.utc)
    # Date-only value: timetuple()[:3] strips the time-of-day.
    stripped = datetime(*now.timetuple()[:3])
    serialized = loads(dumps({
        'datetime': now,
        'tz': now_utc,
        'date': now.date(),
        'time': now.time()},
    ))
    assert serialized == {
        'datetime': now.isoformat(),
        # Aware UTC datetimes serialize with a trailing 'Z' in place of
        # the '+00:00' offset.
        'tz': '{0}Z'.format(now_utc.isoformat().split('+', 1)[0]),
        'time': now.time().isoformat(),
        'date': stripped.isoformat(),
    }
def test_receive(self):
    # _receive_one should decode the pubsub frame and hand the message to
    # connection._deliver for the queue mapped from fanout channel 'a'.
    s = self.channel.subclient = Mock()
    self.channel._fanout_to_queue['a'] = 'b'
    self.channel.connection._deliver = Mock(name='_deliver')
    message = {
        'body': 'hello',
        'properties': {
            'delivery_tag': 1,
            'delivery_info': {'exchange': 'E', 'routing_key': 'R'},
        },
    }
    s.parse_response.return_value = ['message', 'a', dumps(message)]
    self.channel._receive_one(self.channel.subclient)
    self.channel.connection._deliver.assert_called_once_with(
        message, 'b',
    )
def append(self, message, delivery_tag):
    # Track *message* as unacknowledged: timestamp in the sorted-set
    # index, (raw payload, exchange, routing key) in the unacked hash,
    # atomically via a pipeline.
    delivery = message.delivery_info
    EX, RK = delivery['exchange'], delivery['routing_key']
    # TODO: Remove this once we rely solely on redis-py 3.0.0+
    if redis.VERSION[0] >= 3:
        # Redis-py changed the format of zadd args in v3.0.0
        zadd_args = [{delivery_tag: time()}]
    else:
        zadd_args = [time(), delivery_tag]
    with self.pipe_or_acquire() as pipe:
        pipe.zadd(self.unacked_index_key, *zadd_args) \
            .hset(self.unacked_key, delivery_tag,
                  dumps([message._raw, EX, RK])) \
            .execute()
    super().append(message, delivery_tag)
def _put(self, queue, payload, **_):
    """Put `message` onto `queue`.

    This simply writes a key to the Etcd store

    Arguments:
        queue (str): The name of the queue.
        payload (dict): Message data which will be dumped to etcd.
    """
    with self._queue_lock(queue):
        key = self._key_prefix(queue)
        stored = self.client.write(key=key, value=dumps(payload),
                                   append=True)
        if not stored:
            raise ChannelError('Cannot add key {0!r} to etcd'.format(key))
def _put(self, queue, payload, **kwargs):
    """Put `message` onto `queue`.

    Serializes *payload* into a uniquely named, exclusively locked file
    in the outgoing data folder.

    Raises:
        ChannelError: if the file cannot be created or written.
    """
    filename = '%s_%s.%s.msg' % (int(round(monotonic() * 1000)),
                                 uuid.uuid4(), queue)
    filename = os.path.join(self.data_folder_out, filename)
    # Bug fix: when open() itself failed, ``f`` was unbound and the
    # finally block raised UnboundLocalError, hiding the ChannelError.
    f = None
    try:
        f = open(filename, 'wb')
        lock(f, LOCK_EX)
        f.write(str_to_bytes(dumps(payload)))
    except (IOError, OSError):
        raise ChannelError(
            'Cannot add file {0!r} to directory'.format(filename))
    finally:
        if f is not None:
            unlock(f)
            f.close()
def callback(self, message):
    # Enrich one raw event message, republish every resulting event, and
    # ack; on any failure, pause 1s and nack so the message is redelivered.
    event_dict = json.loads(message.data)
    try:
        for event in self.enrich_event(event_dict):
            new_message = json.dumps(
                event.serialize(machine_metadata=False)).encode("utf-8")
            self.publisher_client.publish(self.enriched_events_topic,
                                          new_message)
            self.inc_counter("produced_events", event.event_type)
    except Exception as exception:
        # The delay throttles tight redelivery loops on persistent errors.
        logger.exception("Requeuing message with 1s delay: %s", exception)
        time.sleep(1)
        message.nack()
    else:
        message.ack()
        self.inc_counter("enriched_events", event_dict['_zentral']['type'])
def run(self):
    # Batch events from in_queue and flush them when the batch is full,
    # too old, or the thread is asked to stop.
    entries = {}
    min_event_ts = None  # timestamp of the oldest buffered event
    while True:
        logger.debug("%s event(s) to send", len(entries))
        try:
            routing_key, event_d, event_ts = self.in_queue.get(block=True,
                                                               timeout=1)
        except queue.Empty:
            logger.debug("no new event to send")
            if entries:
                if self.stop_event.is_set():
                    # Flush what we have before shutting down.
                    logger.debug(
                        "send current event(s) before gracefull exit")
                    self.send_entries(entries)
                    entries = {}
                    min_event_ts = None
                else:
                    # Age-based flush so events never wait indefinitely.
                    # NOTE(review): wall-clock time.time() is compared to
                    # event timestamps here — confirm both use the same
                    # clock source.
                    if time.time(
                    ) > min_event_ts + self.max_event_age_seconds:
                        logger.debug(
                            "send %s event(s) because max event age reached",
                            len(entries))
                        self.send_entries(entries)
                        entries = {}
                        min_event_ts = None
            if self.stop_event.is_set():
                logger.debug("send thread gracefull exit")
                break
        else:
            logger.debug("new event to send %s %s", routing_key, event_ts)
            entry_id = str(uuid.uuid4())
            entry = {"Id": entry_id, "MessageBody": json.dumps(event_d)}
            if routing_key:
                entry["MessageAttributes"] = {
                    "zentral.routing_key": {
                        "DataType": "String",
                        "StringValue": routing_key
                    }
                }
            entries[entry_id] = entry
            min_event_ts = min(min_event_ts or event_ts, event_ts)
            # Size-based flush: send as soon as the batch is full.
            if len(entries) == self.max_number_of_messages:
                self.send_entries(entries)
                entries = {}
                min_event_ts = None
def callback(self, message):
    # Route one raw message through the preprocessor selected by its
    # routing key, publish the resulting events, then ack unconditionally.
    routing_key = message.attributes.get("routing_key")
    if not routing_key:
        self.log_error("Message w/o routing key")
    else:
        preprocessor = self.preprocessors.get(routing_key)
        if not preprocessor:
            self.log_error("No preprocessor for routing key %s", routing_key)
        else:
            for event in preprocessor.process_raw_event(message.data):
                new_message = json.dumps(
                    event.serialize(machine_metadata=False)).encode("utf-8")
                self.publisher_client.publish(self.events_topic, new_message)
                if self.prometheus_setup_done:
                    self.produced_events_counter.labels(event.event_type).inc()
    # Always ack — even unprocessable messages are not redelivered.
    message.ack()
    if self.prometheus_setup_done:
        self.preprocessed_events_counter.labels(routing_key or "UNKNOWN").inc()
def status(ctx, timeout, destination, json, **kwargs):
    """Show list of workers that are online."""
    # In JSON mode replies are printed once at the end; otherwise each
    # node's reply is echoed through the callback as it arrives.
    callback = None if json else partial(_say_remote_command_reply, ctx)
    replies = ctx.obj.app.control.inspect(timeout=timeout,
                                          destination=destination,
                                          callback=callback).ping()
    if not replies:
        ctx.obj.echo('No nodes replied within time constraint')
        return EX_UNAVAILABLE
    if json:
        ctx.obj.echo(dumps(replies))
    nodecount = len(replies)
    if not kwargs.get('quiet', False):
        ctx.obj.echo('\n{0} {1} online.'.format(
            nodecount, text.pluralize(nodecount, 'node')))
def _put(self, queue, payload, **kwargs):
    """Put `message` onto `queue`.

    Writes the serialized payload to an exclusively locked, uniquely
    named file inside a per-queue folder (created on demand).

    Raises:
        ChannelError: if the folder or file cannot be created/written.
    """
    queue_folder = os.path.join(self.data_folder_out, queue)
    filename = '{}_{}.msg'.format(int(round(monotonic() * 1000)),
                                  uuid.uuid4())
    filename = os.path.join(queue_folder, filename)
    f = None  # define file descriptor so the finally block is safe
    try:
        os.makedirs(queue_folder, exist_ok=True)
        f = open(filename, 'wb')
        lock(f, LOCK_EX)
        f.write(str_to_bytes(dumps(payload)))
    except OSError:
        # Bug fix: the previous message was a placeholder f-string with
        # no interpolation ('Cannot create (unknown)'); report the file.
        raise ChannelError(f'Cannot add file {filename!r} to directory')
    finally:
        if f:
            unlock(f)
            f.close()
def _put(self, queue, message, **kwargs):
    """Put message onto queue."""
    q_url = self._new_queue(queue)
    kwargs = {'QueueUrl': q_url,
              'MessageBody': AsyncMessage().encode(dumps(message))}
    if queue.endswith('.fifo'):
        # FIFO queues require a MessageGroupId; the deduplication id
        # defaults to a fresh uuid so retries are not silently dropped.
        if 'MessageGroupId' in message['properties']:
            kwargs['MessageGroupId'] = \
                message['properties']['MessageGroupId']
        else:
            kwargs['MessageGroupId'] = 'default'
        if 'MessageDeduplicationId' in message['properties']:
            kwargs['MessageDeduplicationId'] = \
                message['properties']['MessageDeduplicationId']
        else:
            kwargs['MessageDeduplicationId'] = str(uuid.uuid4())
    self.sqs.send_message(**kwargs)
def run(self):
    # Batch (receipt_handle, entry) pairs from in_queue and flush them
    # when the batch is full, too old, or the thread is stopping.
    logger.info("[%s] start on queue %s", self.name, self.queue_url)
    self.entries = {}
    self.min_event_ts = None  # monotonic timestamp of oldest buffered event
    while True:
        logger.debug("[%s] %s event(s) to send", self.name,
                     len(self.entries))
        try:
            receipt_handle, routing_key, event_d, event_ts = self.in_queue.get(
                block=True, timeout=1)
        except queue.Empty:
            logger.debug("[%s] no new event to send", self.name)
            if self.entries:
                if self.stop_event.is_set():
                    # Flush pending events before shutting down.
                    logger.debug(
                        "[%s] send current event(s) before graceful exit",
                        self.name)
                    self.send_entries()
                else:
                    # Age-based flush so events never wait indefinitely.
                    if time.monotonic(
                    ) > self.min_event_ts + self.max_event_age_seconds:
                        logger.debug(
                            "[%s] send %s event(s) because max event age reached",
                            self.name, len(self.entries))
                        self.send_entries()
            if self.stop_event.is_set():
                logger.info("[%s] graceful exit", self.name)
                break
        else:
            logger.debug("[%s] new event to send %s %s", self.name,
                         routing_key, event_ts)
            entry_id = str(uuid.uuid4())
            entry = {"Id": entry_id, "MessageBody": json.dumps(event_d)}
            if routing_key:
                entry["MessageAttributes"] = {
                    "zentral.routing_key": {
                        "DataType": "String",
                        "StringValue": routing_key
                    }
                }
            self.entries[entry_id] = (receipt_handle, entry)
            self.min_event_ts = min(self.min_event_ts or event_ts, event_ts)
            # Size-based flush: send as soon as the batch is full.
            if len(self.entries) == self.max_number_of_messages:
                self.send_entries()
def _do_restore_message(self, payload, exchange, routing_key, pipe,
                        leftmost=False):
    """Requeue *payload* on every queue bound to (exchange, routing_key).

    With ``leftmost=True`` the message is pushed to the head of each
    list instead of the tail.  Any failure is logged, never raised.
    """
    try:
        try:
            # Flag the payload as redelivered when it carries headers.
            payload['headers']['redelivered'] = True
        except KeyError:
            pass
        push = pipe.lpush if leftmost else pipe.rpush
        serialized = dumps(payload)
        for queue in self._lookup(exchange, routing_key):
            push(queue, serialized)
    except Exception:
        crit('Could not restore message: %r', payload, exc_info=True)
def _do_restore_message(self, payload, exchange, routing_key,
                        client=None, leftmost=False):
    # Requeue *payload* onto every queue bound to (exchange, routing_key).
    # This variant stores queues as sorted sets; the '-inf' score makes
    # the restored message sort before everything else.
    # NOTE(review): ``leftmost`` is accepted but has no effect in this
    # zadd-based implementation — confirm that is intended.
    with self.conn_or_acquire(client) as client:
        try:
            try:
                payload['headers']['redelivered'] = True
            except KeyError:
                pass
            for queue in self._lookup(exchange, routing_key):
                # Add with priority 0 so it jumps ahead in the queue
                client.zadd(
                    queue,
                    {self._add_time_prefix(dumps(payload)): '-inf'})
        except Exception:
            crit('Could not restore message: %r', payload, exc_info=True)
def get_QuesProScore(id):
    # For each on-chain record matching this QuestionProductionScore,
    # fetch the stored work detail from the blockchain gateway by hash.
    # url = "http://www.genyuanlian.org/students/getWorkDetail"
    url = "http://10.103.247.54:8080/students/getWorkDetail"
    headers = {
        'content-type': 'application/json',
    }
    quesProScore = QuestionProductionScore.objects.get(pk=id)
    all_ethereumQuesProScore = EthereumQuesProScore.objects.filter(
        question=quesProScore.question,
        production=quesProScore.production,
        rater=quesProScore.rater,
    )
    for ethereumQuesProScore in all_ethereumQuesProScore:
        body = {"hash": ethereumQuesProScore.hash}
        response = requests.post(url, data=json.dumps(body), headers=headers)
        print(response.status_code)
        # The gateway returns JSON text; decode and display it.
        response = json.loads(response.text)
        print(response)
def _do_restore_message(self, payload, exchange, routing_key,
                        client=None, leftmost=False):
    # Requeue *payload* on each queue bound to (exchange, routing_key);
    # leftmost=True pushes to the head of the list instead of the tail.
    with self.conn_or_acquire(client) as client:
        try:
            try:
                # Mark the message as redelivered when headers exist.
                payload["headers"]["redelivered"] = True
            except KeyError:
                pass
            for queue in self._lookup(exchange, routing_key):
                (client.lpush if leftmost else client.rpush)(
                    queue, dumps(payload),
                )
        except Exception:
            # Best effort: log instead of propagating to the caller.
            crit("Could not restore message: %r", payload, exc_info=True)
def run(self):
    # Publish events from in_queue to the SNS topic one at a time,
    # forwarding the receipt handle downstream only after a successful
    # publish; exit when asked to stop and the queue is drained.
    logger.info("[%s] start on topic %s", self.name, self.topic_arn)
    while True:
        try:
            receipt_handle, routing_key, event_d, event_ts = self.in_queue.get(
                block=True, timeout=1)
        except queue.Empty:
            logger.debug("[%s] no new event to publish", self.name)
            if self.stop_event.is_set():
                logger.info("[%s] graceful exit", self.name)
                break
        else:
            logger.debug("[%s] new event to publish %s %s", self.name,
                         routing_key, event_ts)
            message = json.dumps(event_d)
            message_attributes = {}
            if routing_key:
                message_attributes["zentral.routing_key"] = {
                    "DataType": "String",
                    "StringValue": routing_key,
                }
            else:
                # Without a routing key, tag with the event type instead
                # (when the payload carries one).
                try:
                    message_attributes["zentral.type"] = {
                        "DataType": "String",
                        "StringValue": event_d['_zentral']['type']
                    }
                except KeyError:
                    pass
            try:
                response = self.client.publish(
                    TopicArn=self.topic_arn,
                    Message=message,
                    MessageAttributes=message_attributes)
            except Exception:
                # NOTE(review): the receipt handle is NOT forwarded on
                # failure, so the source message is never deleted — it
                # will be redelivered. Confirm this is the intent.
                logger.exception("[%s] could not publish event", self.name)
            else:
                logger.debug("[%s] event with MessageID %s published",
                             self.name, response["MessageId"])
                self.out_queue.put(receipt_handle)
                logger.debug("[%s] receipt handle %s: put to out queue",
                             self.name, receipt_handle[-7:])
def index_test(request, ):
    """Classify an alarm text passed via GET and persist the result.

    POST requests simply echo the decoded JSON body back.
    """
    global alarm_content
    # Bug fix: Django normalizes request.method to upper case, so the
    # original comparison against 'Post' could never match.
    if request.method == 'POST':
        received_json_data = json.loads(request.body)
        return HttpResponse(received_json_data)
    else:
        print("function test start:")
        result_head = {}
        result_data = {}
        print(str(request))
        if 'alarm_content' in str(request):
            alarm_content = str(request.GET['alarm_content'])
            result_head['result'] = '200'
            result_head['detail'] = 'successful'
            result_data['content'] = alarm_content
            result_data['type'] = predictionCase(alarm_content)
            address = get_address(alarm_content)
            result_data['type'] += ';' + address
            # save alarm and result
            alarm_obj = Alarm(content=alarm_content, alarm_id=uuid.uuid1(),
                              first_type=' ', second_type=' ',
                              third_type=result_data['type'],
                              fourth_type=' ')
            alarm_obj.save()
        else:
            detail = "parameter error, alarm_content can't be null!"
            result_head['result'] = '400'
            result_head['detail'] = detail
        result = {}
        result['head'] = result_head
        result['data'] = result_data
        get_address_by_crawer()
        response = json.dumps(result)
        return HttpResponse(response)
def create(self, request, *args, **kwargs):
    """Create a WorkExperience record and upload supporting files to S3.

    Returns a JSON-encoded status string: '201 Created' on success,
    otherwise the error text.
    """
    status = '201 Created'
    data = request.data
    try:
        user = data.get('user')
        user_obj = CustomUser.objects.get(id=user)
        s3_client = boto3.client(
            's3',
            aws_access_key_id=settings.AWS_AccessKeyId,
            aws_secret_access_key=settings.AWS_SecretKey)
        bucket = 'crowning'
        work_experience_year = data.get('work_experience_year')
        work_experience_month = data.get('work_experience_month')
        currently_employment = data.get('currently_employment')
        company_name = data.get('company_name')
        position = data.get('position')
        joining_date = data.get('joining_date')
        ctc = data.get('ctc')
        company_address = data.get('company_address')
        employee_code = data.get('employee_code')
        reporting_manager = data.get('reporting_manager')
        relationship_with_reporting_manager = data.get(
            'relationship_with_reporting_manager')
        previous_work_experience = data.get('previous_work_experience')
        WorkExperience_obj_creation = WorkExperience.objects.create(
            user=user_obj,
            work_experience_year=work_experience_year,
            work_experience_month=work_experience_month,
            currently_employment=currently_employment,
            company_name=company_name,
            position=position,
            joining_date=joining_date,
            ctc=ctc,
            company_address=company_address,
            employee_code=employee_code,
            reporting_manager=reporting_manager,
            relationship_with_reporting_manager=relationship_with_reporting_manager,
            previous_Work_experience=previous_work_experience)
        for f in request.FILES.getlist('files'):
            key = save_file(f)
            file_path = company_name + '_' + key
            s3_client.upload_file(
                key, bucket, 'root/workExperience/' + company_name + '_' + key)
            obj = Media.objects.create(
                user=user_obj,
                document_type='workExperience_document',
                file_url='https://crowning.s3.amazonaws.com/root/workExperience/' + file_path)
            # Remove the temporary local copy once uploaded.
            os.remove(key)
    except Exception as e:
        # Bug fix: the exception object itself was assigned to ``status``
        # and json.dumps(Exception) raises TypeError; report its text.
        status = str(e)
    return Response(json.dumps(status))
def inspect(ctx, action, timeout, destination, json, **kwargs):
    """Inspect the worker at runtime.

    Availability: RabbitMQ (AMQP) and Redis transports.
    """
    # In JSON mode the replies are dumped once at the end; otherwise each
    # node's reply (including its body) is echoed as it arrives.
    callback = None if json else partial(
        _say_remote_command_reply, ctx, show_reply=True)
    replies = ctx.obj.app.control.inspect(timeout=timeout,
                                          destination=destination,
                                          callback=callback)._request(action)
    if not replies:
        ctx.obj.echo('No nodes replied within time constraint')
        return EX_UNAVAILABLE
    if json:
        ctx.obj.echo(dumps(replies))
    nodecount = len(replies)
    if not ctx.obj.quiet:
        ctx.obj.echo('\n{0} {1} online.'.format(
            nodecount, text.pluralize(nodecount, 'node')))
def _publish_artp_event(self, artc_meta_id, artifact):
    """Create an ArtifactPublished event and send it to the RabbitMQ exchange.

    :param artc_meta_id: the id of the ArtifactCreated event
    :param artifact: the results dictionary returned from Artifactory by
        the AQL query.
    """
    location = '{}/{}/{}/{}'.format(CFG.artifactory.url, artifact['repo'],
                                    artifact['path'], artifact['name'])
    artp_event = eiffel.create_artifact_published_event(
        artc_meta_id, [eiffel.Location(location)])
    # None values are stripped before serialization.
    artp_event_json = json.dumps(utils.remove_none_from_dict(artp_event))
    self.rmq_connection.publish_message(artp_event_json)
    LOGGER_ARTIFACTS.info(artifact)
    LOGGER_PUBLISHED.info(artp_event_json)
def control(ctx, action, timeout, destination, json):
    """Workers remote control.

    Availability: RabbitMQ (AMQP), Redis, and MongoDB transports.
    """
    callback = None if json else partial(
        _say_remote_command_reply, ctx, show_reply=True)
    # Extra CLI args are compiled into the broadcast arguments.
    args = ctx.args
    arguments = _compile_arguments(action, args)
    replies = ctx.obj.app.control.broadcast(action, timeout=timeout,
                                            destination=destination,
                                            callback=callback,
                                            reply=True,
                                            arguments=arguments)
    if not replies:
        ctx.obj.echo('No nodes replied within time constraint')
        return EX_UNAVAILABLE
    if json:
        ctx.obj.echo(dumps(replies))
def upload_QuesProScore(id):
    # Upload one QuestionProductionScore to the blockchain gateway and,
    # on success, record the returned transaction hash locally, then
    # trigger an async fetch to verify the stored detail.
    quesProScore = QuestionProductionScore.objects.get(pk=id)
    address = "0x72102c0ac12cecefc0ad798f778c65541600f1d6"
    # NOTE(review): masked credential hard-coded in source — confirm it
    # is injected from configuration in the real deployment.
    password = "******"
    # url = 'http://www.genyuanlian.org/students/uploadWorks'
    url = 'http://10.103.247.54:8080/students/uploadWorks'
    headers = {
        'content-type': 'application/json',
    }
    body = {
        "address": address,
        "password": password,
        "detail": {
            "question_id": quesProScore.question.id,
            "production_id": quesProScore.production.id,
            "rater_username": quesProScore.rater.username,
            "score": quesProScore.score,
            "small_score": quesProScore.small_score,
            "comment": quesProScore.comment,
            # NOTE(review): if score_time is a datetime, json.dumps below
            # raises TypeError — confirm it is JSON-serializable here.
            "score_time": quesProScore.score_time,
            "is_adviser": quesProScore.is_adviser,
        }
    }
    response = requests.post(url, data=json.dumps(body), headers=headers)
    print(response.status_code)
    response = json.loads(response.text)
    print(response)
    if response['result']:
        ethereumQuesProScore = EthereumQuesProScore()
        ethereumQuesProScore.question = quesProScore.question
        ethereumQuesProScore.production = quesProScore.production
        ethereumQuesProScore.rater = quesProScore.rater
        ethereumQuesProScore.score_time = quesProScore.score_time
        ethereumQuesProScore.hash = response['hash']
        ethereumQuesProScore.save()
        get_QuesProScore.delay(id)
    else:
        print("error", response)
def create(self, request, *args, **kwargs):
    """Create an AadharVerification record and upload documents to S3.

    Returns a JSON-encoded status string: '201 Created' on success,
    otherwise the error text.
    """
    data = request.data
    status = '201 Created'
    try:
        s3_client = boto3.client(
            's3',
            aws_access_key_id=settings.AWS_AccessKeyId,
            aws_secret_access_key=settings.AWS_SecretKey)
        bucket = 'crowning'
        user = data.get('user')
        user_obj = CustomUser.objects.get(id=user)
        aadhar_number = data.get('aadhar_number')
        isVerified = data.get('isVerified')
        AadharVerification_obj_creation = AadharVerification.objects.create(
            user=user_obj, aadhar_number=aadhar_number,
            isVerified=isVerified)
        for f in request.FILES.getlist('files'):
            key = save_file(f)
            file_path = aadhar_number + '_' + key
            s3_client.upload_file(
                key, bucket, 'root/aadhar_document/' + aadhar_number + '_' + key)
            obj = Media.objects.create(
                user=user_obj,
                document_type='aadhar_document',
                file_url='https://crowning.s3.amazonaws.com/root/aadhar_document/' + file_path)
            # Remove the temporary local copy once uploaded.
            os.remove(key)
    except Exception as e:
        # Bug fix: the exception object itself was assigned to ``status``
        # and json.dumps(Exception) raises TypeError; report its text.
        status = str(e)
    return Response(json.dumps(status))
def handle_attack(attacker_id: int, flag_str: str, round: int) -> float:
    """Check flag, lock team for update, call rating recalculation,
    then publish redis message about stolen flag

    :param attacker_id: id of the attacking team
    :param flag_str: flag to be checked
    :param round: round of the attack

    :raises FlagSubmitException: when flag check was failed

    :return: attacker rating change
    """
    flag = flags.try_add_stolen_flag_by_str(flag_str=flag_str,
                                            attacker=attacker_id,
                                            round=round)
    with storage.db_cursor() as (conn, curs):
        # The stored procedure returns (attacker_delta, victim_delta).
        curs.callproc("recalculate_rating",
                      (attacker_id, flag.team_id, flag.task_id, flag.id))
        attacker_delta, victim_delta = curs.fetchone()
        conn.commit()
    flag_data = {
        'attacker_id': attacker_id,
        'victim_id': flag.team_id,
        'task_id': flag.task_id,
        'attacker_delta': attacker_delta,
        'victim_delta': victim_delta,
    }
    # Notify listeners on the game-events namespace about the steal.
    storage.get_wro_sio_manager().emit(
        event='flag_stolen',
        data={'data': json.dumps(flag_data)},
        namespace='/game_events',
    )
    return attacker_delta
def _put(self, queue, message, **kwargs):
    """Put message onto queue."""
    q_url = self._new_queue(queue)
    kwargs = {'QueueUrl': q_url,
              'MessageBody': AsyncMessage().encode(dumps(message))}
    if queue.endswith('.fifo'):
        # FIFO queues require a MessageGroupId; the deduplication id
        # defaults to a fresh uuid so retries are not silently dropped.
        if 'MessageGroupId' in message['properties']:
            kwargs['MessageGroupId'] = \
                message['properties']['MessageGroupId']
        else:
            kwargs['MessageGroupId'] = 'default'
        if 'MessageDeduplicationId' in message['properties']:
            kwargs['MessageDeduplicationId'] = \
                message['properties']['MessageDeduplicationId']
        else:
            kwargs['MessageDeduplicationId'] = str(uuid.uuid4())
    if message.get('redelivered'):
        # Requeue path: instead of sending a duplicate, make the original
        # message (identified by its receipt handle) visible again now.
        self.sqs.change_message_visibility(
            QueueUrl=q_url,
            ReceiptHandle=message['properties']['delivery_tag'],
            VisibilityTimeout=0
        )
    else:
        self.sqs.send_message(**kwargs)
def to_json_for_participants(self) -> str:
    """Serialize the participant-facing dict representation to JSON."""
    payload = self.to_dict_for_participants()
    return kjson.dumps(payload)  # type: ignore
def _put(self, queue, message, **kwargs):
    """Put message onto queue."""
    serialized = dumps(message)
    self._new_queue(queue).push(serialized)
def _put_fanout(self, exchange, message, routing_key, **kwargs):
    """Deliver fanout message."""
    document = {'payload': dumps(message), 'queue': exchange}
    self.get_broadcast().insert(document)