def test_all_dates_filtering(self): self._create_events(use_time=True) # automatically sets first day as first day of any events with freeze_time("2020-01-04T15:01:01Z"): action_response = self.client.get( "/api/action/trends/?date_from=all").json() event_response = self.client.get( "/api/action/trends/", data={ "date_from": "all", "events": jdumps([{ "id": "sign up" }, { "id": "no events" }]), }, ).json() self.assertEqual(action_response[0]["labels"][0], "Tue. 24 December") self.assertEqual(action_response[0]["data"][0], 1.0) self.assertTrue( self._compare_entity_response(action_response, event_response)) # test empty response with freeze_time("2020-01-04"): empty = self.client.get( "/api/action/trends/?date_from=all&events=%s" % jdumps([{ "id": "blabla" }, { "id": "sign up" }])).json() self.assertEqual(empty[0]["data"][0], 0)
def serverstat(request, server):
    """Return JSON stats for one ZooKeeper server.

    The response carries the server's version line, "name:value" properties
    and the connected clients; on failure it carries an ``error`` key.
    """
    server_stat = {'server': server, 'properties': [], 'clients': []}
    servers_dict = dict(zkconfig.servers)
    if server not in servers_dict:
        server_stat['error'] = 'No this server'
        return HttpResponse(jdumps(server_stat))
    stat = zkutils.stat((server, servers_dict[server]))
    if stat == False:
        # BUG FIX: the original used '==' (a no-op comparison) instead of
        # '=', so the error was never recorded in the response.
        server_stat['error'] = 'failed'
    else:
        # raw strings so Python does not eat the regex escapes
        version_regex = re.compile(r'(Zookeeper version.*)')
        client_regex = re.compile(
            r"\s+\/(?P<ip>[\d\.]+):(?P<port>\d+)\[\d+\]\(queued=(?P<queued>\d+),recved=(?P<recved>\d+),sent=(?P<sent>\d+)\)")
        for line in stat.split('\n'):
            if line.startswith('Clients') or line == "":
                continue
            if version_regex.match(line):
                server_stat['version'] = line
            elif client_regex.match(line):
                mo = client_regex.match(line)
                client_dict = mo.groupdict()
                # resolve the client IP to a hostname for display
                client_dict['host'] = zkutils.ip2host(client_dict['ip'])
                server_stat['clients'].append(client_dict)
            else:
                # remaining lines are "name:value" properties
                n, v = line.split(':', 1)
                server_stat['properties'].append({"name": n, "value": v})
    return HttpResponse(jdumps(server_stat))
def test_dau_filtering(self):
    """DAU math counts distinct users per day, for actions and events alike."""
    sign_up_action, person = self._create_events()
    with freeze_time("2020-01-02"):
        # a second distinct user signing up -> DAU of 2 on this day
        Person.objects.create(team=self.team, distinct_ids=["someone_else"])
        Event.objects.create(team=self.team, event="sign up",
                             distinct_id="someone_else")
    with freeze_time("2020-01-04"):
        action_response = self.client.get(
            "/api/action/trends/",
            data={
                "actions": jdumps([{
                    "id": sign_up_action.id,
                    "math": "dau"
                }]),
            },
        ).json()
        event_response = self.client.get(
            "/api/action/trends/",
            data={
                "events": jdumps([{
                    "id": "sign up",
                    "math": "dau"
                }]),
            },
        ).json()
        self.assertEqual(action_response[0]["data"][4], 1)
        self.assertEqual(action_response[0]["data"][5], 2)
        self.assertTrue(
            self._compare_entity_response(action_response, event_response))
def send_to_zabbix(metrics, zabbix_host="", zabbix_port=10051):
    """Send set of metrics to Zabbix server.

    Builds a Zabbix sender packet ("ZBXD\\1" magic + little-endian 64-bit
    length + JSON body), ships it, and returns True only when the server
    answers "success" with zero failed items.
    """

    def _recv_all(sock, count):
        # Read exactly `count` bytes; the peer may deliver them in pieces.
        buf = []
        while len(buf) < count:
            chunk = sock.recv(count - len(buf))
            if not chunk:
                # BUG FIX: on a short read the original returned the raw
                # list, which crashed the startswith()/unpack checks below.
                # Return the joined (possibly truncated) string instead.
                break
            buf.extend(chunk)
        return "".join(buf)

    metrics_data = []
    for m in metrics:
        metrics_data.append(ZBX_MESSAGE_TEMPLATE % (jdumps(m[0]), jdumps(m[1]),
                                                    jdumps(m[2]),
                                                    int(time.time())))
    json_data = ZBX_REQUEST_TEMPLATE % (",\n".join(metrics_data))
    data_len = struct.pack("<Q", len(json_data))
    packet = "ZBXD\1" + data_len + json_data

    zabbix = socket.socket()
    zabbix.connect((zabbix_host, zabbix_port))
    zabbix.sendall(packet)
    # response header: 5 magic bytes + 8-byte little-endian body length
    resp_hdr = _recv_all(zabbix, 13)
    if not resp_hdr.startswith("ZBXD\1") or len(resp_hdr) != 13:
        return False
    resp_body_len = struct.unpack("<Q", resp_hdr[5:])[0]
    # ROBUSTNESS: read the whole body; a single recv() may return less
    resp_body = _recv_all(zabbix, resp_body_len)
    zabbix.close()

    resp = jloads(resp_body)
    # NOTE(review): assumes "failed" is a top-level integer field in the
    # reply -- confirm against the Zabbix sender protocol version in use.
    if (resp.get("response") != "success") \
            or (resp.get("failed") != 0):
        return False
    return True
def test_property_filtering(self):
    """Filtering by an event property restricts counts to matching events."""
    self._create_events()
    with freeze_time('2020-01-04'):
        action_response = self.client.get(
            '/api/action/trends/',
            data={
                'properties': jdumps({'$some_property': 'value'}),
            },
        ).json()
        event_response = self.client.get(
            '/api/action/trends/',
            data={
                'events': jdumps([{
                    'id': "sign up"
                }, {
                    'id': "no events"
                }]),
                'properties': jdumps({'$some_property': 'value'}),
            },
        ).json()
        self.assertEqual(action_response[0]['labels'][4], 'Wed. 1 January')
        self.assertEqual(action_response[0]['data'][4], 1.0)
        self.assertEqual(action_response[0]['labels'][5], 'Thu. 2 January')
        self.assertEqual(action_response[0]['data'][5], 0)
        # the second entity ("no events") never matches the property filter
        self.assertEqual(action_response[1]['count'], 0)
        self.assertTrue(
            self._compare_entity_response(action_response, event_response))
def test_dau_with_breakdown_filtering(self):
    """DAU trends broken down by a property yield one bucket per value."""
    sign_up_action, person = self._create_events()
    with freeze_time('2020-01-02'):
        # extra event with a different property value -> its own bucket
        Event.objects.create(
            team=self.team,
            event='sign up',
            distinct_id='blabla',
            properties={"some_property": "other_value"},
        )
    with freeze_time('2020-01-04'):
        action_response = self.client.get(
            '/api/action/trends/',
            data={
                'breakdown': 'some_property',
                'actions': jdumps([{'id': sign_up_action.id, 'math': 'dau'}]),
            },
        ).json()
        event_response = self.client.get(
            '/api/action/trends/',
            data={
                'breakdown': 'some_property',
                'events': jdumps([{'id': "sign up", 'math': 'dau'}]),
            },
        ).json()
        self.assertEqual(action_response[0]['breakdown'][0]['name'], 'other_value')
        self.assertEqual(action_response[0]['breakdown'][0]['count'], 1)
        self.assertEqual(action_response[0]['breakdown'][1]['name'], 'value')
        self.assertEqual(action_response[0]['breakdown'][1]['count'], 1)
        # events without the property land in the "undefined" bucket
        self.assertEqual(action_response[0]['breakdown'][2]['name'], 'undefined')
        self.assertEqual(action_response[0]['breakdown'][2]['count'], 1)
        self.assertTrue(self._compare_entity_response(action_response, event_response))
def test_dau_filtering(self):
    """DAU math counts distinct users per day, for actions and events alike."""
    sign_up_action, person = self._create_events()
    with freeze_time('2020-01-02'):
        # a second distinct user signing up -> DAU of 2 on this day
        Person.objects.create(team=self.team, distinct_ids=['someone_else'])
        Event.objects.create(team=self.team, event='sign up',
                             distinct_id='someone_else')
    with freeze_time('2020-01-04'):
        action_response = self.client.get(
            '/api/action/trends/',
            data={
                'actions': jdumps([{
                    'id': sign_up_action.id,
                    'math': 'dau'
                }]),
            },
        ).json()
        event_response = self.client.get(
            '/api/action/trends/',
            data={
                'events': jdumps([{
                    'id': "sign up",
                    'math': 'dau'
                }]),
            },
        ).json()
        self.assertEqual(action_response[0]['data'][4], 1)
        self.assertEqual(action_response[0]['data'][5], 2)
        self.assertTrue(
            self._compare_entity_response(action_response, event_response))
def test_all_dates_filtering(self): self._create_events(use_time=True) # automatically sets first day as first day of any events with freeze_time('2020-01-04T15:01:01Z'): action_response = self.client.get( '/api/action/trends/?date_from=all').json() event_response = self.client.get( '/api/action/trends/', data={ 'date_from': 'all', 'events': jdumps([{ 'id': "sign up" }, { 'id': "no events" }]), }, ).json() self.assertEqual(action_response[0]['labels'][0], 'Tue. 24 December') self.assertEqual(action_response[0]['data'][0], 1.0) self.assertTrue( self._compare_entity_response(action_response, event_response)) # test empty response with freeze_time('2020-01-04'): empty = self.client.get( '/api/action/trends/?date_from=all&events=%s' % jdumps([{ 'id': 'blabla' }, { 'id': 'sign up' }])).json() self.assertEqual(empty[0]['data'][0], 0)
def serverstat(request, server):
    """Return JSON stats for one ZooKeeper server.

    The response carries the server's version line, "name:value" properties
    and the connected clients; on failure it carries an ``error`` key.
    """
    server_stat = {'server': server, 'properties': [], 'clients': []}
    servers_dict = dict(zkconfig.servers)
    if server not in servers_dict:
        server_stat['error'] = 'No this server'
        return HttpResponse(jdumps(server_stat))
    stat = zkutils.stat((server, servers_dict[server]))
    if stat == False:
        # BUG FIX: the original used '==' (a no-op comparison) instead of
        # '=', so the error was never recorded in the response.
        server_stat['error'] = 'failed'
    else:
        # raw strings so Python does not eat the regex escapes
        version_regex = re.compile(r'(Zookeeper version.*)')
        client_regex = re.compile(
            r"\s+\/(?P<ip>[\d\.]+):(?P<port>\d+)\[\d+\]\(queued=(?P<queued>\d+),recved=(?P<recved>\d+),sent=(?P<sent>\d+)\)"
        )
        for line in stat.split('\n'):
            if line.startswith('Clients') or line == "":
                continue
            if version_regex.match(line):
                server_stat['version'] = line
            elif client_regex.match(line):
                mo = client_regex.match(line)
                client_dict = mo.groupdict()
                # resolve the client IP to a hostname for display
                client_dict['host'] = zkutils.ip2host(client_dict['ip'])
                server_stat['clients'].append(client_dict)
            else:
                # remaining lines are "name:value" properties
                n, v = line.split(':', 1)
                server_stat['properties'].append({"name": n, "value": v})
    return HttpResponse(jdumps(server_stat))
def test_dau_with_breakdown_filtering(self):
    """DAU + breakdown passed as query-string parameters."""
    sign_up_action, _ = self._create_events()
    with freeze_time('2020-01-02'):
        Event.objects.create(team=self.team,
                             event='sign up',
                             distinct_id='blabla',
                             properties={"$some_property": "other_value"})
    with freeze_time('2020-01-04'):
        action_response = self.client.get(
            '/api/action/trends/?breakdown=$some_property&actions=%s' %
            jdumps([{
                'id': sign_up_action.id,
                'math': 'dau'
            }])).json()
        event_response = self.client.get(
            '/api/action/trends/?breakdown=$some_property&events=%s' %
            jdumps([{
                'id': "sign up",
                'math': 'dau'
            }])).json()
        self.assertEqual(event_response[0]['label'], 'sign up - other_value')
        self.assertEqual(event_response[1]['label'], 'sign up - value')
        # events lacking the property are grouped under "Other"
        self.assertEqual(event_response[2]['label'], 'sign up - Other')
        self.assertEqual(sum(event_response[0]['data']), 1)
        self.assertEqual(event_response[0]['data'][5], 1)
        self.assertEqual(sum(event_response[2]['data']), 1)
        self.assertEqual(event_response[2]['data'][4], 1)  # property not defined
        self.assertTrue(
            self._compare_entity_response(action_response, event_response))
def containers_index():
    """
    List all containers

    curl -s -X GET -H 'Accept: application/json' http://localhost:8080/containers
    curl -s -X GET -H 'Accept: application/json' http://localhost:8080/containers?state=running
    """
    # The Go template emits TSV so fields containing spaces (status, command)
    # can be split safely on tabs instead of whitespace.
    # check if state parameter was supplied with value running
    if request.args.get('state') == 'running':
        # show only running containers
        output = docker('ps --format \'{{.ID}}\t{{.Image}}\t{{.Command}}\t{{.CreatedAt}}\t{{.Status}}\t{{.Ports}}\t{{.Names}}\'')
        resp = jdumps(docker_ps_to_array(output))
    else:
        # show all containers, not just running.
        # BUG FIX: with an argv list there is no shell to strip quotes, so
        # the wrapping '…' around the template ended up inside the output
        # fields; the template must be passed unquoted here.
        cmd = ['docker', 'ps', '-a', '--format',
               '{{.ID}}\t{{.Image}}\t{{.Command}}\t{{.CreatedAt}}\t{{.Status}}\t{{.Ports}}\t{{.Names}}']
        process = Popen(cmd, stdout=PIPE, stderr=PIPE, universal_newlines=True)
        stdout, stderr = process.communicate()
        output = stderr + stdout
        resp = jdumps(docker_ps_to_array(output))
    return Response(response=resp, mimetype="application/json")
def test_loads(
    mock_hvac_client_read,
    mock_loads,
    mock_open_handle,
    mock_chmod,
    localhost_client,
    gen_input_config,
    gen_processed_config,
    gen_vault_response_kv1,
    file_path,
    file_path_normalized,
    file_contents,
    secret_path,
):
    """ Basic test of the loads function with file writing """
    # canned fixtures: JSON parsing and the Vault read are mocked out
    mock_loads.return_value = gen_input_config()
    mock_hvac_client_read.return_value = gen_vault_response_kv1()
    input_config_json = jdumps(gen_input_config())
    assert localhost_client.loads(
        input_config_json, process_secret_files=True) == gen_processed_config()
    # verify each collaborator was driven exactly once with the expected args
    mock_loads.assert_called_once_with(jdumps(gen_input_config()))
    mock_hvac_client_read.assert_called_once_with(secret_path)
    mock_open_handle.assert_called_once_with(file_path_normalized, "w")
    mock_open_handle().write.assert_called_once_with(file_contents)
    # secret files must end up owner-read-only
    mock_chmod.assert_called_once_with(file_path_normalized, S_IRUSR)
def test_property_filtering(self):
    """Filtering by an event property restricts counts to matching events."""
    self._create_events()
    with freeze_time("2020-01-04"):
        action_response = self.client.get(
            "/api/action/trends/",
            data={
                "properties": jdumps({"$some_property": "value"}),
            },
        ).json()
        event_response = self.client.get(
            "/api/action/trends/",
            data={
                "events": jdumps([{
                    "id": "sign up"
                }, {
                    "id": "no events"
                }]),
                "properties": jdumps({"$some_property": "value"}),
            },
        ).json()
        self.assertEqual(action_response[0]["labels"][4], "Wed. 1 January")
        self.assertEqual(action_response[0]["data"][4], 1.0)
        self.assertEqual(action_response[0]["labels"][5], "Thu. 2 January")
        self.assertEqual(action_response[0]["data"][5], 0)
        # the second entity ("no events") never matches the property filter
        self.assertEqual(action_response[1]["count"], 0)
        self.assertTrue(
            self._compare_entity_response(action_response, event_response))
def write_file(self, file_path):
    ''' Write data to the Journal. '''
    # Only save when the game widget (vmw) exists; otherwise defer (Python 2).
    if hasattr(self, 'vmw'):
        self.metadata['play_level'] = self.vmw.level
        self.metadata['low_score_beginner'] = int(self.vmw.low_score[0])
        self.metadata['low_score_intermediate'] = int(
            self.vmw.low_score[1])
        self.metadata['low_score_expert'] = int(self.vmw.low_score[2])
        # scores are serialized as JSON into a single metadata field
        self.metadata['all_scores'] = jdumps(self.vmw.all_scores)
        print jdumps(self.vmw.all_scores)
        self.metadata['robot_time'] = self.vmw.robot_time
        self.metadata['numberO'] = self.vmw.numberO
        self.metadata['numberC'] = self.vmw.numberC
        self.metadata['cardtype'] = self.vmw.card_type
        self.metadata['matches'] = self.vmw.matches
        self.metadata['robot_matches'] = self.vmw.robot_matches
        self.metadata['total_time'] = int(self.vmw.total_time)
        self.metadata['deck_index'] = self.vmw.deck.index
        # the 3x3 word lists are flattened into individual metadata keys
        self.metadata['mouse'] = self.vmw.word_lists[0][0]
        self.metadata['cat'] = self.vmw.word_lists[0][1]
        self.metadata['dog'] = self.vmw.word_lists[0][2]
        self.metadata['cheese'] = self.vmw.word_lists[1][0]
        self.metadata['apple'] = self.vmw.word_lists[1][1]
        self.metadata['bread'] = self.vmw.word_lists[1][2]
        self.metadata['moon'] = self.vmw.word_lists[2][0]
        self.metadata['sun'] = self.vmw.word_lists[2][1]
        self.metadata['earth'] = self.vmw.word_lists[2][2]
        self.metadata['editing_word_list'] = self.vmw.editing_word_list
        self.metadata['mime_type'] = 'application/x-visualmatch'
        f = file(file_path, 'w')
        f.write(self._dump())
        f.close()
    else:
        _logger.debug('Deferring saving to %s' % file_path)
def delete(request, path='/'): try: ret = zk.delete(zh, path) print >> sys.stderr, path if ret == zk.OK: return HttpResponse(jdumps({'status':'ok'})) except IOError, errmsg: return HttpResponse(jdumps({'error':str(errmsg)}))
def delete(request, path='/'): try: ret = zk.delete(zh, path) print >> sys.stderr, path if ret == zk.OK: return HttpResponse(jdumps({'status': 'ok'})) except IOError, errmsg: return HttpResponse(jdumps({'error': str(errmsg)}))
def printJson(queue, showPending, showRunning):
    # Print a JSON summary of the queue's running/pending jobs (Python 2);
    # each section is included only when requested AND non-empty.
    res = {}
    running = queue.running()
    if len(running) > 0 and showRunning:
        res['running'] = map(runningJson, running)
    pending = queue.pending()
    if len(pending) > 0 and showPending:
        res['pending'] = map(pendingJson, pending)
    print jdumps(res)
def test_trends_for_non_existing_action(self):
    """Unknown actions yield no series; unknown events yield all-zero data."""
    with freeze_time('2020-01-04'):
        missing_action = self.client.get(
            '/api/action/trends/',
            {'actions': jdumps([{'id': 4000000}])},
        ).json()
        self.assertEqual(len(missing_action), 0)
    with freeze_time('2020-01-04'):
        unknown_event = self.client.get(
            '/api/action/trends/',
            {'events': jdumps([{'id': "DNE"}])},
        ).json()
        self.assertEqual(unknown_event[0]['data'], [0] * 8)
def test_breakdown_filtering(self): self._create_events() # test breakdown filtering with freeze_time('2020-01-04T13:01:01Z'): action_response = self.client.get( '/api/action/trends/?date_from=-14d&breakdown=$some_property' ).json() event_response = self.client.get( '/api/action/trends/?date_from=-14d&properties={}&actions=[]&display=ActionsTable&interval=day&breakdown=$some_property&events=%s' % jdumps([{ 'id': "sign up", "name": "sign up", "type": "events", "order": 0 }, { 'id': "no events" }])).json() self.assertEqual(event_response[0]['label'], 'sign up - Other') self.assertEqual(event_response[1]['label'], 'sign up - other_value') self.assertEqual(event_response[2]['label'], 'sign up - value') self.assertEqual(event_response[3]['label'], 'no events - Other') self.assertEqual(sum(event_response[0]['data']), 2) self.assertEqual(event_response[0]['data'][4 + 7], 2) self.assertEqual(event_response[0]['breakdown_value'], 'None') self.assertEqual(sum(event_response[1]['data']), 1) self.assertEqual(event_response[1]['data'][5 + 7], 1) self.assertEqual(event_response[1]['breakdown_value'], 'other_value') self.assertTrue( self._compare_entity_response(action_response, event_response)) # check numerical breakdown with freeze_time('2020-01-04T13:01:01Z'): action_response = self.client.get( '/api/action/trends/?date_from=-14d&breakdown=$some_numerical_prop' ).json() event_response = self.client.get( '/api/action/trends/?date_from=-14d&properties={}&actions=[]&display=ActionsTable&interval=day&breakdown=$some_numerical_prop&events=%s' % jdumps([{ 'id': "sign up", "name": "sign up", "type": "events", "order": 0 }, { 'id': "no events" }])).json() self.assertEqual(event_response[0]['label'], 'sign up - Other') self.assertEqual(event_response[0]['count'], 4.0) self.assertEqual(event_response[1]['label'], 'sign up - 80.0') self.assertEqual(event_response[1]['count'], 1.0) self.assertTrue( self._compare_entity_response(action_response, event_response))
def __call__(self, request, path='/'):
    # Request gate (Python 2): reject unauthenticated users and any POST
    # value that is not pure ASCII. Returns a JSON error response on
    # failure; falls through (None) when the request is acceptable.
    if not request.user.is_authenticated():
        return HttpResponse(jdumps({"error": "You need to login first."}))
    # Check posting data encoding.
    for k,v in request.POST.items():
        try:
            v.encode('ascii')
        except UnicodeEncodeError, errmsg:
            return HttpResponse(jdumps({"error": 'Invalid encoding.<br />' \
                'Only ASCII code can be used in roshan.'}))
def __call__(self, request, path='/'):
    # Request gate (Python 2): reject unauthenticated users and any POST
    # value that is not pure ASCII. Returns a JSON error response on
    # failure; falls through (None) when the request is acceptable.
    if not request.user.is_authenticated():
        return HttpResponse(jdumps({"error": "You need to login first."}))
    # Check posting data encoding.
    for k, v in request.POST.items():
        try:
            v.encode('ascii')
        except UnicodeEncodeError, errmsg:
            return HttpResponse(jdumps({"error": 'Invalid encoding.<br />' \
                'Only ASCII code can be used in roshan.'}))
def updateDictRecursive(kwargs0, kwargs1):
    """Recursively merge ``kwargs1`` into ``kwargs0`` in place; return ``kwargs0``.

    Non-dict values from ``kwargs1`` overwrite those in ``kwargs0``; nested
    plain dicts are merged key by key.

    Raises:
        RuntimeError: when a key maps to a dict in ``kwargs0`` but to a
            non-dict value in ``kwargs1`` (the two cannot be merged).
    """
    for key in kwargs1:
        incoming = kwargs1[key]
        if key not in kwargs0:
            kwargs0[key] = incoming
        elif type(kwargs0[key]) is dict:
            if type(incoming) is not dict:
                raise RuntimeError('can not merge key(%s) of %s with %s' % \
                    (key, jdumps(kwargs0), jdumps(kwargs1)))
            updateDictRecursive(kwargs0[key], incoming)
        else:
            kwargs0[key] = incoming
    return kwargs0
def edit_profile_view(request):
    """Handle a profile-edit POST; respond with form errors plus a status flag as JSON."""
    if request.method == 'POST' and request.user.is_authenticated and 'uid' in request.POST:
        profileForm = ProfileForm(request.POST, request.FILES,
                                  uid=request.POST['uid'])
        # start from the form's own error report so the client sees field errors
        response = jloads(profileForm.errors.as_json())
        if profileForm.is_valid():
            profileForm.save()
            response['status'] = 1
            return HttpResponse(jdumps(response), content_type='application/json')
        response['status'] = 0
        return HttpResponse(jdumps(response), content_type='application/json')
    # BUG FIX: the original did `raise HttpResponse()` -- HttpResponse is not
    # an exception class, so that line raised a TypeError (HTTP 500). Return
    # a Bad Request response for non-POST/unauthenticated/missing-uid cases.
    return HttpResponse(status=400)
def pretty_print_list(file_name=None, data_format="JSON"):
    # print YAML or JSON representations of list data (Python 2 prints).
    # Loads the file in the requested format, prints the formatted dump,
    # then a native pretty-print of the same data.
    assert(file_name is not None), "Provide a file name"
    assert((data_format == "JSON" or data_format == "YAML")), ("Format must be 'JSON'"
                                                               " or 'YAML'")
    try:
        formatted_list = []
        with open(file_name, "r") as f:
            if data_format == "JSON":
                some_list = jload(f)
                formatted_list = jdumps(some_list)
            elif data_format == "YAML":
                # narrow width/indent force one item per line
                formatted_list = ydump((yload(f)), default_flow_style=False,
                                       explicit_start=True, width=1, indent=2)
    except IOError as e:
        print "Could not read file: %s" % e
    except Exception as e:
        print "Unexpected exception: %s" % e
    print "======================"
    print "list from file: %s in %s data_format:" % (file_name, data_format)
    print "======================"
    print formatted_list
    print "======================"
    print "list from file: %s in pretty_print native python" % file_name
    print "======================"
    pp(some_list, width=1)
def add(request, path='/'):
    # Create an empty ZooKeeper node at `path` with a default ACL
    # (Python 2). Responds with JSON status/error.
    stat = zkutils._get_stat(zh, path)
    if stat:
        return HttpResponse(jdumps({'error': 'Node already exists'}))
    # grant full permission (bitmask 31) to the control machines,
    # localhost, and this host's own address
    control_masters = list(zkconfig.control_machines)
    control_masters.append("127.0.0.1")
    control_masters.append(socket.gethostbyname(socket.gethostname()))
    default_acl = zkutils.AclSet(["ip:%s:31" %(id) for id in control_masters])
    try:
        ret = zk.create(zh, path, "", default_acl.to_dict())
        # zk.create returns the created path on success
        if ret == path:
            return HttpResponse(jdumps({'status':'ok'}))
        else:
            return HttpResponse(jdumps({'error':str(ret)}))
    except IOError, errmsg:
        return HttpResponse(jdumps({'error': str(errmsg)}))
def run(self) -> None:
    """Benchmark every eligible column pair and print the timings as JSON."""
    self.init()
    # load the dataset with dask, choosing the reader by file extension
    if self.dpath.suffix == ".parquet":
        df = dd.read_parquet(self.dpath)
    elif self.dpath.suffix == ".csv":
        df = dd.read_csv(self.dpath)
    df_cat = df.select_dtypes(object)
    cols = []
    for x in df.columns:
        if x in df_cat.columns:
            # keep object (string-like) columns only when low-cardinality
            nuniq = df[x].nunique().compute()
            if nuniq <= 100:
                cols.append(x)
        else:
            cols.append(x)
    times = []
    col_pairs = []
    # time self.bench over every unordered pair of eligible columns
    for x, y in itertools.combinations(cols, 2):
        then = time()
        self.bench(x, y)
        times.append(time() - then)
        col_pairs.append((x, y))
    result = {
        "name": self.__class__.__name__,
        "times": times,
        "column_pairs": col_pairs,
        "all_columns": cols,
    }
    print(jdumps(result))
def test_trends_compare(self):
    """compare=true returns paired 'current' and 'previous' period series."""
    self._create_events()
    with freeze_time('2020-01-04T13:00:01Z'):
        action_response = self.client.get(
            '/api/action/trends/?date_from=-7d&compare=true').json()
        event_response = self.client.get(
            '/api/action/trends/',
            data={
                'date_from': '-7d',
                'events': jdumps([{
                    'id': "sign up"
                }, {
                    'id': "no events"
                }]),
                'compare': 'true'
            },
        ).json()
        self.assertEqual(action_response[0]['label'], 'sign up - current')
        # compare mode relabels the x axis as day offsets
        self.assertEqual(action_response[0]['labels'][4], 'day 4')
        self.assertEqual(action_response[0]['data'][4], 3.0)
        self.assertEqual(action_response[0]['labels'][5], 'day 5')
        self.assertEqual(action_response[0]['data'][5], 1.0)
        self.assertEqual(action_response[1]['label'], 'sign up - previous')
        self.assertEqual(action_response[1]['labels'][4], 'day 4')
        self.assertEqual(action_response[1]['data'][4], 1.0)
        self.assertEqual(action_response[1]['labels'][5], 'day 5')
        self.assertEqual(action_response[1]['data'][5], 0.0)
        self.assertTrue(
            self._compare_entity_response(action_response, event_response))
def __call__(self):
    # Generator driving a client/server file sync (Python 2): fetch the
    # server's sorted relative-file list, walk the local tree, and
    # merge-join the two sorted streams -- signing server entries and
    # deleting local-only files -- yielding progress reports as it goes.
    self.signprepare()
    yield self.report.setTitle('srv.rflist')
    # the server returns its file list gzip-compressed
    bodyfobj = StringIO(self.urlpost('rflist', jdumps(self.rpathes)))
    self.srvrflist = []
    for ln in GzipFile('srvrflist', 'rb', fileobj=bodyfobj).readlines():
        self.srvrflist.append(ln.strip())
    self.srvrflist = [normpath(path) for path in self.srvrflist]
    self.report.setTitle('traversal')
    self.maxstep = len(self.srvrflist)
    wdobj = walkdirs(self.rootdir, self.rpathes, self.excrpathes)
    fpath = wdobj.next()
    while fpath is not None:
        rfpath = relpath(fpath, self.rootdir)
        # consume server entries that sort before the current local file
        while self.srvrflist and rfpath > self.srvrflist[0]:
            if self.doSign(self.srvrflist.pop(0)):
                yield self.report
        if not self.srvrflist:
            # server list exhausted: local file has no server counterpart
            self.doDel(rfpath)
        elif rfpath < self.srvrflist[0]:
            # local-only file
            self.doDel(rfpath)
        elif rfpath == self.srvrflist[0]:
            # present on both sides
            if self.doSign(self.srvrflist.pop(0)):
                yield self.report
        fpath = wdobj.next()
    # sign any remaining server-only entries
    while self.srvrflist:
        if self.doSign(self.srvrflist.pop(0)):
            yield self.report
    if self.signbatch():
        yield self.report
    self.report.setTitle('result')
    yield self.report.setStep(0, 0)
    yield None
def run(self) -> None:
    """Run the benchmark once and print {"name", "elapsed"} as JSON."""
    self.init()
    start = time()
    self.bench()
    report = {
        "name": self.__class__.__name__,
        "elapsed": time() - start,
    }
    print(jdumps(report))
def announce_to_slack(user, status='connected', message=None, sender=None):
    """
    Provides the voice of the master of ceremonies (MC). Allows the message of
    a Slack sender to be echoed to all others in the channel. The MC also
    announces the arrival and departure of an anonymous poster, so that Slack
    users do not try to respond to someone who has left the chat webpage.

    :param user: the anonymous user who is visiting the chat webpage
    :param status: the type of message that the MC announces. Must be either
        'connected' (default), 'disconnected', or 'echo'. status='echo' also
        requires the message and sender
    :param message: the message that the Slack user sent to the anonymous
        poster. The message is echoed back to the Slack team
    :param sender: the Slack sender who is responding to the anonymous poster.
        The sender is echoed back to the Slack team
    :return: None
    """
    secret = import_secrets()
    if status == 'connected':
        message = user + ' has joined the chat'
    elif status == 'disconnected':
        message = user + ' has disconnected'
    elif status == 'echo' and message and sender:
        message = sender + ' replied to ' + user + ': ' + message
    payload = {"text": message, "username": "******"}
    # fire-and-forget POST to the Slack incoming-webhook URL from secrets
    r = urlfetch.fetch(url=secret['URL'],
                       payload=jdumps(payload),
                       method=urlfetch.POST,
                       headers=HEADERS)
def test_trends_per_day_cumulative(self):
    """Cumulative display accumulates daily counts across the series."""
    self._create_events()
    with freeze_time('2020-01-04T13:00:01Z'):
        # pin the query count to catch accidental N+1 regressions
        with self.assertNumQueries(14):
            action_response = self.client.get(
                '/api/action/trends/?date_from=-7d&display=ActionsLineGraphCumulative'
            ).json()
        event_response = self.client.get(
            '/api/action/trends/',
            data={
                'date_from': '-7d',
                'events': jdumps([{
                    'id': "sign up"
                }, {
                    'id': "no events"
                }]),
                'display': 'ActionsLineGraphCumulative'
            },
        ).json()
        self.assertEqual(action_response[0]['label'], 'sign up')
        self.assertEqual(action_response[0]['labels'][4], 'Wed. 1 January')
        self.assertEqual(action_response[0]['data'][4], 3.0)
        # 3 on day 4 plus 1 new event -> cumulative 4 on day 5
        self.assertEqual(action_response[0]['labels'][5], 'Thu. 2 January')
        self.assertEqual(action_response[0]['data'][5], 4.0)
        self.assertEqual(event_response[0]['label'], 'sign up')
        self.assertTrue(
            self._compare_entity_response(action_response, event_response))
def run(self) -> None:
    """Time self.bench over every column pair and print a JSON report."""
    self.init()
    # choose the dask reader by file extension
    if self.dpath.suffix == ".parquet":
        frame = dd.read_parquet(self.dpath)
    elif self.dpath.suffix == ".csv":
        frame = dd.read_csv(self.dpath)
    all_columns = [column for column in frame.columns]
    durations = []
    pairs = []
    # benchmark every unordered pair of columns
    for left, right in itertools.combinations(all_columns, 2):
        started = time()
        self.bench(left, right)
        durations.append(time() - started)
        pairs.append((left, right))
    print(jdumps({
        "name": self.__class__.__name__,
        "times": durations,
        "column_pairs": pairs,
        "all_columns": all_columns,
    }))
def detail(request):
    """Return song details as JSON, fetching lyrics live from Genius with a
    fallback to the copy stored in the database."""
    doc_id = int(request.GET.get('id'))
    a = Song.objects.get(pk=doc_id)
    song = {}
    song['title'] = a.title
    song['image_url'] = a.song_art_image_thumbnail_url
    song['artist'] = a.primary_artist_name
    song['album'] = a.album_name
    song['youtube_url'] = a.youtube_url
    try:
        api_info = requests.get(f"https://api.genius.com/songs/{a.api_id}",
                                headers=headers, timeout=5)
        if api_info.status_code != 200:
            # IMPROVED: original used `1 / 0` (ZeroDivisionError) as a goto
            # into the fallback; raise explicitly instead.
            raise ValueError('genius song API request failed')
        lyrics_url = api_info.json()['response']['song']['url']
        lyrics_info = requests.get(lyrics_url, headers=headers, timeout=5)
        if lyrics_info.status_code != 200:
            raise ValueError('genius lyrics page request failed')
        html = BeautifulSoup(lyrics_info.text, "html.parser")
        # Genius has used both the legacy "lyrics" div and "Lyrics__Root*"
        div = html.find("div", class_=re.compile("^lyrics$|Lyrics__Root"))
        song['lyrics'] = re.sub("\n+", "\n", div.get_text("\n"))
        print(song['lyrics'])
    except Exception:
        # IMPROVED: narrowed from a bare `except:`; any failure (network,
        # parse, missing div) falls back to the stored lyrics.
        song['lyrics'] = a.lyrics
    resp = {'err': 'false', 'detail': 'Get success', 'ret': song}
    return HttpResponse(jdumps(resp), content_type="application/json")
def output_item(data, color, width=0):
    """Emit one status-bar block via printf: text, color, separator width."""
    block = {
        "full_text": data,
        "color": color,
        "separator_block_width": width,
    }
    printf(jdumps(block))
def __call__(self):
    # Generator driving a client/server file sync (Python 2): fetch the
    # server's sorted relative-file list, walk the local tree, and
    # merge-join the two sorted streams -- signing server entries and
    # deleting local-only files -- yielding progress reports as it goes.
    self.signprepare()
    yield self.report.setTitle('srv.rflist')
    # the server returns its file list gzip-compressed
    bodyfobj = StringIO(self.urlpost('rflist', jdumps(self.rpathes)))
    self.srvrflist = []
    for ln in GzipFile('srvrflist', 'rb', fileobj = bodyfobj).readlines():
        self.srvrflist.append(ln.strip())
    self.srvrflist = [ normpath(path) for path in self.srvrflist ]
    self.report.setTitle('traversal')
    self.maxstep = len(self.srvrflist)
    wdobj = walkdirs(self.rootdir, self.rpathes, self.excrpathes)
    fpath = wdobj.next()
    while fpath is not None:
        rfpath = relpath(fpath, self.rootdir)
        # consume server entries that sort before the current local file
        while self.srvrflist and rfpath > self.srvrflist[0]:
            if self.doSign(self.srvrflist.pop(0)):
                yield self.report
        if not self.srvrflist:
            # server list exhausted: local file has no server counterpart
            self.doDel(rfpath)
        elif rfpath < self.srvrflist[0]:
            # local-only file
            self.doDel(rfpath)
        elif rfpath == self.srvrflist[0]:
            # present on both sides
            if self.doSign(self.srvrflist.pop(0)):
                yield self.report
        fpath = wdobj.next()
    # sign any remaining server-only entries
    while self.srvrflist:
        if self.doSign(self.srvrflist.pop(0)):
            yield self.report
    if self.signbatch():
        yield self.report
    self.report.setTitle('result')
    yield self.report.setStep(0, 0)
    yield None
def send_into_slack(channel, message, color='#439FE0'):
    '''Send a notification into a Slack channel.

    Posts a colored attachment to the configured incoming-webhook URL and
    raises on a non-200 response.
    '''
    params = {
        'channel': channel,
        'attachments': [
            {
                'color': color,
                'text': message,
                'mrkdwn_in': [
                    'text',
                ],
            },
        ],
    }
    logging.debug(f'sending message "{message}" into channel {channel}')
    # BUG FIX: JSON was encoded as 'gbk', which mis-encodes (or crashes on)
    # most non-ASCII text; the Slack API expects UTF-8.
    request = Request(slack_webhook_url, data=jdumps(params).encode('utf-8'))
    response = urlopen(request)
    code = response.getcode()
    if code == 200:
        logging.debug('slack api said: %s', response.read())
    else:
        # BUG FIX: the original built the Exception but never raised it,
        # silently swallowing API failures.
        raise Exception(f'slack api returns code {code}: {response.read()}')
def get_data(request, host_id, plugin):
    """Get JSON data from a plugin."""
    H = get_object_or_404(Host.objects.filter(id=host_id))
    # 404 rather than 403 so unauthorized users cannot probe for host ids
    if not has_perm(request.user, Host, host_id):
        raise Http404
    # default to the Daily resolution unless the query string overrides it
    data = {'plugin':plugin, 'res':request.GET.get('res','Daily')}
    r = H.get_extended_data(**data)
    return HttpResponse(jdumps(r), content_type="application/json")
def test_py_json():
    """ Tests: test_py_json frompickle """
    print('::: TEST: test_py_json()')
    edict_with_all = _get_orig__edict_with_all()
    # round-trip the structure through json: dumps then loads
    new_reobj_all__jdumps = jdumps(edict_with_all)
    new_reobj_all = jloads(new_reobj_all__jdumps)
    # note is not equal because tuples are changed to list in json
    # ok_(edict_with_all == new_reobj_all, msg=None)
    # after the round trip every custom mapping/sequence type must have
    # degraded to a plain dict/list while preserving the data
    ok_(isinstance(new_reobj_all, dict) and not
        isinstance(new_reobj_all, Edict), msg=None)
    ok_(edict_with_all['edict1'] == new_reobj_all['edict1'], msg=None)
    ok_(isinstance(new_reobj_all['edict1'], dict) and not
        isinstance(new_reobj_all['edict1'], Edict), msg=None)
    ok_(edict_with_all['rdict1'] == new_reobj_all['rdict1'], msg=None)
    ok_(isinstance(new_reobj_all['rdict1'], dict) and not
        isinstance(new_reobj_all['rdict1'], Rdict), msg=None)
    ok_(edict_with_all['edictf1'] == new_reobj_all['edictf1'], msg=None)
    ok_(isinstance(new_reobj_all['edictf1'], dict) and not
        isinstance(new_reobj_all['edictf1'], RdictF), msg=None)
    ok_(edict_with_all['edictio1'] == new_reobj_all['edictio1'], msg=None)
    ok_(isinstance(new_reobj_all['edictio1'], dict) and not
        isinstance(new_reobj_all['edictio1'], RdictIO), msg=None)
    ok_(edict_with_all['edictfo1'] == new_reobj_all['edictfo1'], msg=None)
    ok_(isinstance(new_reobj_all['edictfo1'], dict) and not
        isinstance(new_reobj_all['edictfo1'], RdictFO), msg=None)
    ok_(edict_with_all['edictfo2_1'] == new_reobj_all['edictfo2_1'], msg=None)
    ok_(isinstance(new_reobj_all['edictfo2_1'], dict) and not
        isinstance(new_reobj_all['edictfo2_1'], RdictFO2), msg=None)
    ok_(edict_with_all['elist1'] == new_reobj_all['elist1'], msg=None)
    ok_(isinstance(new_reobj_all['elist1'], list) and not
        isinstance(new_reobj_all['elist1'], Elist), msg=None)
    ok_(edict_with_all['rlist1'] == new_reobj_all['rlist1'], msg=None)
    ok_(isinstance(new_reobj_all['rlist1'], list) and not
        isinstance(new_reobj_all['rlist1'], Rlist), msg=None)
    ok_(edict_with_all['rlistf1'] == new_reobj_all['rlistf1'], msg=None)
    ok_(isinstance(new_reobj_all['rlistf1'], list) and not
        isinstance(new_reobj_all['rlistf1'], RlistF), msg=None)
    # note is not equal because tuples are changed to list in json
    # ok_(edict_with_all['etuple1'] == new_reobj_all['etuple1'], msg=None)
    ok_(isinstance(new_reobj_all['etuple1'], list) and not
        isinstance(new_reobj_all['etuple1'], Etuple), msg=None)
    # note is not equal because tuples are changed to list in json
    # ok_(edict_with_all['lmatrix1'] == new_reobj_all['lmatrix1'], msg=None)
    ok_(isinstance(new_reobj_all['lmatrix1'], list) and not
        isinstance(new_reobj_all['lmatrix1'], Lmatrix), msg=None)
    # note is not equal because tuples are changed to list in json
    # ok_(edict_with_all['lmatrixf1'] == new_reobj_all['lmatrixf1'], msg=None)
    ok_(isinstance(new_reobj_all['lmatrixf1'], list) and not
        isinstance(new_reobj_all['lmatrixf1'], LmatrixF), msg=None)
    # some data checks
    ok_(edict_with_all['edictfo1']['edictfo_inner2'] ==
        new_reobj_all['edictfo1']['edictfo_inner2'] and
        new_reobj_all['edictfo1']['edictfo_inner2'] == 'edictfo_inner2 value',
        msg=None)
    ok_(edict_with_all['rlist1'][1] == new_reobj_all['rlist1'][1] and
        new_reobj_all['rlist1'][1] == 'rlist_inner value2', msg=None)
    ok_(edict_with_all['lmatrixf1'][1][2] == new_reobj_all['lmatrixf1'][1][2]
        and new_reobj_all['lmatrixf1'][1][2] == 125, msg=None)
def children(request, path='/'):
    """Return the browsable children of a ZooKeeper node as a JSON tree list."""
    data = zkutils._get_children(zh, path)
    if data[0] == False:
        # lookup failed; surface the error message to the client
        return HttpResponse(jdumps({"error":str(data[1])}))
    nodes = []
    for child in data[1]:
        child_path = os.path.join(path, child)
        # never expose the internal /zookeeper subtree
        if child_path == '/zookeeper':
            continue
        # skip nodes the requesting user may not see
        if check_path_perm(request, child_path) == False:
            continue
        entry = {"text":child, "id": child_path}
        stat = zkutils._get_stat(zh, child_path)
        # a node is expandable (non-leaf) only when it has children
        entry['leaf'] = not (stat != False and stat['numChildren'] > 0)
        nodes.append(entry)
    return HttpResponse(jdumps(nodes))
def host(request, host_id=None):
    """Get list of plugin's categories, rendered as a JSON response."""
    target = get_object_or_404(Host.objects.filter(id=host_id))
    if not has_perm(request.user, Host, host_id):
        raise Http404
    payload = jdumps(target.get_categories())
    return HttpResponse(payload, content_type="application/json")
def post(self):
    """Register an anonymous visitor: mint a random first name, persist
    the (token, name) pair, announce the arrival on Slack, and push the
    chosen name back to the client over its channel."""
    token = self.request.get('from')
    username = names.get_first_name()
    record = User(id=token)
    record.user = username
    record.token = token
    record.put()
    announce_to_slack(username)
    channel.send_message(token, jdumps({'user': username}))
def add(request, path='/'): stat = zkutils._get_stat(zh, path) if stat: return HttpResponse(jdumps({'error': 'Node already exists'})) control_masters = list(zkconfig.control_machines) control_masters.append("127.0.0.1") control_masters.append(socket.gethostbyname(socket.gethostname())) acl_set = zkutils.AclSet(["ip:%s:31" %(id) for id in control_masters]) if 'acl' in request.POST: acl_set.addmany(request.POST['acl'].split()) node_data = request.POST.get('data', "") try: ret = zk.create(zh, path, node_data, acl_set.to_dict()) if ret == path: return HttpResponse(jdumps({'status':'ok'})) else: return HttpResponse(jdumps({'error':str(ret)})) except IOError, errmsg: return HttpResponse(jdumps({'error': str(errmsg)}))
def category(request, host_id):
    """Get list of plugins of a category, as a JSON response.

    The category name comes from request.GET['category']; each plugin is
    reduced to its 'plugin'/'title' pair.
    """
    # NOTE: the host lookup and category read deliberately precede the
    # permission check, matching the original evaluation order
    host_obj = get_object_or_404(Host.objects.filter(id=host_id))
    plugins = host_obj.get_plugins_by_category(request.GET['category'])
    summary = [{'plugin': entry['Plugin'], 'title': entry['Title']} for entry in plugins]
    if not has_perm(request.user, Host, host_id):
        raise Http404
    return HttpResponse(jdumps(summary), content_type="application/json")
def get_data():
    """Ask the master process (over its unix domain socket) for the
    current data payload.

    Returns the 5-tuple (data, data_forecast, unread, news, cal) decoded
    from the JSON reply, or False when the reply is empty/falsy.
    """
    s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    try:
        s.connect("mastersocket")
        s.send(jdumps({"request": "get_data"}))
        # NOTE(review): a single recv() assumes the whole reply fits in
        # 8192 bytes and arrives in one chunk -- confirm with the master.
        data = s.recv(8192)
    finally:
        # bug fix: the original never closed the socket, leaking one fd
        # per call
        s.close()
    raw = jloads(data)
    if raw:
        (data, data_forecast, unread, news, cal) = raw
        return (data, data_forecast, unread, news, cal)
    else:
        return False
def uploadResult(self, sn, step, result, descobj=None):
    """POST a step result for device *sn* to the factory server.

    :param sn: device serial number
    :param step: step identifier (mapped through Setting.getStepInt)
    :param result: result code to report
    :param descobj: optional JSON-serializable description payload
    :returns: the parsed server response, or a BaseWork.FAILED dict when
        the server cannot be reached
    """
    # bug fix: `descobj={}` was a shared mutable default argument; use a
    # None sentinel instead (callers passing nothing see no change)
    if descobj is None:
        descobj = {}
    url = Setting.BASE_STEP_URL % {'sn': sn, 'step': Setting.getStepInt(step), 'result': result}
    Log.d(url)
    data = jdumps(descobj, ensure_ascii=False).encode('utf8')
    try:
        resp = requests.post(url, params={}, data=data)
    except Exception:
        # narrowed from a bare `except:` (which also swallowed
        # SystemExit/KeyboardInterrupt); still deliberately best-effort
        from factcore.works.workflow import BaseWork
        return {'ret': BaseWork.FAILED, 'desc': u'不能连接服务器'}
    return self.jsonloads(resp.text)
def encrypt(self, fromPath=None):
    """Write self.dic as JSON to *fromPath* (defaults to self.pathPlain),
    encrypt it by shelling out to kirmah-cli, delete the plaintext file
    and return the encrypted file's contents."""
    if fromPath is None:
        fromPath = self.pathPlain
    Sys.pwlog([(' Encrypt Index... ' , Const.CLZ_0, True)])
    Io.set_data(fromPath, jdumps(self.dic))
    # quiet/force flags; the unix build additionally uses 2 jobs
    flags = '-qfj2' if Sys.isUnix() else '-qf'
    cmd_parts = [Sys.executable, 'kirmah-cli.py', 'enc', flags,
                 fromPath, '-z', '-r', '-m',
                 '-o', fromPath + Kirmah.EXT,
                 '-k', self.keyPath]
    Sys.sysCall(' '.join(cmd_parts))
    Io.removeFile(fromPath)
    Sys.pwlog([(' done', Const.CLZ_2, True)])
    return Io.get_data(fromPath + Kirmah.EXT, True)
def get_twitter_media(self, entities, tw_id):
    """
    Method is used to extract media data from tweets.
    Called from infinity loop.
    In case, if url contains Instagram photo (not shown in 'media'),
    pushes data to queue:instagram Redis key.
    """
    def _esc(value):
        # minimal escaping for interpolation into a double-quoted SQL
        # literal; tweet-supplied URLs are untrusted input.
        # TODO(review): switch exec_mysql to parameterized queries --
        # string-built SQL remains injection-prone even with escaping.
        return str(value).replace('\\', '\\\\').replace('"', '\\"')

    if 'media' in entities:
        for item in entities['media']:
            q = 'INSERT INTO media(tweet_id, url) VALUES ("{}", "{}");'.format(
                _esc(tw_id), _esc(item['media_url_https']))
            exec_mysql(q, self.mysql)
    if 'urls' in entities:
        for url in entities['urls']:
            if 'instagram.com' in url['expanded_url']:
                self.redis.rpush('queue:instagram', jdumps([tw_id, url['expanded_url']]))
def serverlist(request):
    """Return the configured ZooKeeper servers as a JSON tree-node list,
    tagging each with a css class: leader / follower / failed."""
    def classify(stat):
        # stat is False when the server did not answer, otherwise the
        # raw 'stat' command output
        if stat == False:
            return 'failed'
        if "Mode: leader" in stat:
            return 'leader'
        if "Mode: follower" in stat:
            return 'follower'
        return None

    nodes = []
    for zkserver in zkconfig.servers:
        node = {"id": zkserver[0], "text": zkserver[0], "leaf": True}
        css = classify(zkutils.stat(zkserver))
        if css is not None:
            node['cls'] = css
        nodes.append(node)
    return HttpResponse(jdumps(nodes))
def show_menu(background, back_color):
    """Interactive clock/alarm adjustment screen.

    Repeatedly reads a minute *delta* and a confirm-switch state from the
    master process over its unix socket, shows first the current time,
    then the next scheduled event time, each adjusted by delta.  After
    both values are confirmed the system clock is set and the new event
    time is sent to the local web cron endpoint.
    """
    dt = datetime.today()
    # next scheduled event, as a unix timestamp under key "heure"
    data = jload(urlopen("http://127.0.0.1/functions/cron.php?next"))
    dr = datetime.fromtimestamp(data["heure"])
    i = 0  # 0: editing clock time, 1: editing event time, >=2: done
    while True :
        # ask the master process for the pending minute adjustment
        # (presumably driven by some input device -- not visible here)
        s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        s.connect("mastersocket")
        s.send(jdumps({"request": "get_delta"}))
        delta = int(s.recv(4096))
        # and for the confirm-switch state
        s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        s.connect("mastersocket")
        s.send(jdumps({"request": "get_sw_state"}))
        sw_state = int(s.recv(4096))
        background.fill(back_color)
        # header row: device IP address
        render.render(get_ip_address('eth0'), font, background, hex_to_rgb(conf["general"]["front_color"]), 0, 0, 320, 60)
        if i == 0 :
            render.render(dt.strftime("%H:%M"), font, background, hex_to_rgb(conf["general"]["front_color"]), 0, 60, 320, 120)
            # apply the delta, carrying minute overflow into the hour
            dt = dt.replace(minute = (dt.minute + delta) % 60, hour = dt.hour + int(floor((dt.minute + delta) / 60)))
        elif i == 1 :
            render.render(dr.strftime("%H:%M"), font, background, hex_to_rgb(conf["general"]["front_color"]), 0, 60, 320, 120)
            dr = dr.replace(minute = (dr.minute + delta) % 60, hour = dr.hour + int(floor((dr.minute + delta) / 60)))
        if sw_state :
            # switch pressed: confirm the current value, move to the next
            i+= 1
        screen.blit(background, (0, 0))
        display.flip()
        sleep(0.1)
        if i >= 2 :
            # both values confirmed: set the system clock ...
            _linux_set_time(dt.timetuple())
            # ... and report the new event time back to the web cron
            urlopen("http://127.0.0.1/functions/cron.php?change=" + dr.strftime("%s")).read()
            break
def _process(self):
    """Per-worker-thread loop that flushes completed jobs for this thread.

    Runs until the job store is drained and the previous thread in the
    pipeline has died.  Each pass selects the jobs addressed to this
    thread ("callback" == thread name), writes ready/error jobs out as
    compressed+encoded JSON lines, removes them from the store, invokes
    the batch callback and reports progress.
    """
    current_thread= currentThread()
    # the main thread never processes jobs
    if current_thread.name == 'MainThread':
        return
    # NOTE(review): previous_thread is assumed to have been attached to
    # the thread object by whoever spawned it -- confirm at the call site.
    previous_thread= current_thread.previous_thread
    while self.alive:
        self._thread_regulator(current_thread, previous_thread)
        with self._lock:
            # only jobs routed to this thread by their "callback" field
            jobs= filter(lambda job: job.get("callback") == current_thread.name, self.store.values())
            ready= filter(lambda job: job.get("status") == "ready", jobs)
            errors= filter(lambda job: job.get("status") == "error", jobs)
            for job in ready:
                # serialize -> compress -> encode, one job per output line
                self.ready[current_thread.name].write(encode(compress(jdumps(job, cls= JobEncoder))) + "\n")
                self.store.pop(job.get("id"))
            for job in errors:
                self.errors[current_thread.name].write(encode(compress(jdumps(job, cls= JobEncoder))) + "\n")
                self.store.pop(job.get("id"))
            if len(ready) or len(errors):
                # hand the processed batch to the user-supplied callback
                target(self, ready, errors)
                self._thread_progress(current_thread.name, "processed", len(ready) + len(errors))
        self._show_progress(current_thread)
        if len(self.store) == 0 and previous_thread != None and previous_thread.is_alive() == False:
            # store drained and upstream thread finished: shut down
            print "%s %s completed" % (datetime.utcnow(), current_thread.name)
            stdout.flush()
            self.alive= False
        sleep(0.01)
def data(request, id):
    """Serve randomly generated time-series data for the sources and
    resolution named in the query string ('ds', 'res')."""
    sources = request.GET['ds'].split(',')
    start_date = get_start_date(request.GET['res'])
    step = res[request.GET['res']]
    step_num = int((now() - start_date).total_seconds() / 60) / step
    payload = {
        "TS_start": int(start_date.strftime('%s')),
        "TS_step": step * 60,
        "DATAS": dict((source, []) for source in sources),
    }
    for source in sources:
        payload['DATAS'][source].extend(random_func(step_num))
    return HttpResponse(jdumps(payload))
def dumps(data, sort_keys=False):
    '''
    Abstraction on json.dumps to handle TransmissionData serialization.
    Takes a TransmissionData object and serializes it
    :param data: The data object to be serialized
    :param sort_keys: Optional to sort the json keys defaults to False
    :returns: A serialized TransmissionData string
    '''
    # BUG FIX: the original passed the *builtin* `sorted` (always truthy)
    # as sort_keys, so keys were sorted no matter what the caller asked
    # for; pass the parameter through instead.
    return jdumps(
        data,
        cls=_TransmissionDataSerializer,
        sort_keys=sort_keys,
    )
def megacli(array):
    '''Discovery device id

    Runs `megacli -pdlist` (via sudo) for *array* and prints the device
    ids found, in Zabbix low-level-discovery JSON format.
    '''
    megacli_path = get_utils('megacli_path', '/usr/sbin/megacli')
    sudo_path = get_utils('sudo_path', '/usr/bin/sudo')
    # NOTE(review): shell=True with interpolated *array* -- acceptable
    # only while array comes from trusted discovery configuration.
    ret = Popen('%s %s -pdlist -a%s' % (sudo_path, megacli_path, array),
                shell = True, stdout = PIPE).stdout.readlines()
    ids = { 'data': [ ] }
    prefix = 'Device Id: '
    for line in ret:
        # startswith() replaces the original's fragile
        # `len(i) > 11 and i[:11] == ...` / `i[10:-1]` index arithmetic
        if line.startswith(prefix):
            ids['data'].append(
                {
                    '{#ARRAY}': array,
                    '{#DEVICEID}': line[len(prefix):].strip(),
                }
            )
    print(jdumps(ids, sort_keys=True, indent=3, separators=(',', ': ')))
def send_to_slack(user, message):
    """Forward an anonymous website message to the Slack channel.

    Builds the inbound-webhook payload -- message text, the poster's
    anonymous name, and a ghost icon marking them as anonymous -- and
    POSTs it to the webhook URL held in the secrets store.

    :param user: the anonymous name of the poster
    :param message: the message of the poster
    :return: None
    """
    secret = import_secrets()
    body = jdumps({"text": message, "username": user, "icon_emoji": ":ghost:"})
    r = urlfetch.fetch(url=secret['URL'],
                       payload=body,
                       method=urlfetch.POST,
                       headers=HEADERS)
def data(request, id):
    """Serve random time-series data; when exactly two sources are
    requested, the second series is the delta of the first."""
    sources = request.GET['ds'].split(',')
    start_date = get_start_date(request.GET['res'])
    step = res[request.GET['res']]
    step_num = int((now() - start_date).total_seconds() / 60) / step
    payload = {
        "TS_start": int(start_date.strftime('%s')),
        "TS_step": step * 60,
        "DATAS": dict((source, []) for source in sources),
    }
    if len(sources) == 2:
        # first source: a random series; second: its point-to-point delta
        primary = payload['DATAS'][sources[0]]
        for val in random_func(step_num, offset_y=randint(0, 100), min_y=0):
            primary.append(val)
        for val in delta_of(primary):
            payload['DATAS'][sources[1]].append(val)
    else:
        for source in sources:
            payload['DATAS'][source].extend(random_func(step_num))
    return HttpResponse(jdumps(payload))
def send_to_client(user, message, sender):
    """Deliver a Slack /respond message to an anonymous website user.

    Looks up the recipient's channel token; when found, pushes the
    message (tagged with the sender's Slack ID) over the channel API.
    For example, /respond Goldwater thanks for your feedback! sends
    "thanks for your feedback!" to Goldwater.

    :param user: the anonymous recipient of the response
    :param message: the message for the anonymous recipient
    :param sender: the name of the sender (their Slack ID)
    :return: an echo from the slackbot on success, otherwise the ERRORS message
    """
    token = get_token_by_user(user)
    if token == ERRORS:
        # no live channel for this user (browser window closed?)
        return ERRORS
    channel.send_message(token, jdumps({"response": message, "from": sender}))
    return 'Message to ' + user + ' sent successfully: ' + message
"bik": "3424234324", "boss_fio": "\u0419\u0446\u0443\u043a \u0415\u043d\u0433 \u0418\u0447\u0435\u0448\u0443\u0442\u0441\u044f", "phone": "777771", "inn": "123123213123213", "proof": "scans/\u043f\u0430\u0441\u043f\u043e\u0440\u0442_HZzLWaq.png", "user": i, "orgn": "123213213213123", "orgn_emitter": "\u0439\u0446\u0443\u043a\u0435\u043d\u043a\u0443\u0446\u0439", "email": "*****@*****.**", "bill_numb": "1123213123" } }) entity['fields'] = order_dict(COMPANY_ORDER, dict({ 'name': vals[0], 'full_name': vals[0], 'workgraph': vals[1], 'company_type': 0 if int(vals[2]) <800 else 1, 'post_address': vals[3], 'legal_address': vals[3] }, **entity['fields'])) j += [user, entity] pp = PrettyPrinter(indent=4) print('s = ') pp.pprint(j) if(len(argv) > 1): jf = open(argv[1], 'w') jf.writelines(jdumps(j, indent=4)) jf.close()
def fast_add(request):
    """Create a new View, attach it to the POSTed multiview, and return
    the view's extended data as JSON (resolution defaults to 'Daily')."""
    multiview = get_object_or_404(Multiview.objects.filter(pk=request.POST['multiview_id']))
    view = View.objects.create(name=request.POST['view_name'])
    multiview.views.add(view)
    extended = view.get_extended_data(res=request.POST.get('res', 'Daily'))
    return HttpResponse(jdumps(extended), content_type="application/json")