Example no. 1
    def test_twitter_login_registration(self, verify_credentials):
        twitter_data = TWITTER_DATA.copy()
        twitter_data["id"] = uuid.uuid4().hex
        verify_credentials.return_value = type("U", (object,), {"AsDict": lambda s: twitter_data})()

        with self.app.test_client() as client:
            token_key, token_secret = "kkkkkkkk", "sssssss"
            for registered in True, False:
                r = client.post(
                    "/ws/login/external/",
                    headers=[get_client_auth_header()],
                    data=dict(external_system="twitter", external_token=token_key + ":" + token_secret),
                )
                self.assertEquals(200, r.status_code)
                creds = json.loads(r.data)
                self.assertEquals(creds["registered"], registered)

            r = client.get(
                creds["resource_url"], headers=[("Authorization", creds["token_type"] + " " + creds["access_token"])]
            )
            data = json.loads(r.data)
            self.assertEquals(data["username"], TWITTER_DATA["screen_name"])
            self.assertEquals(data["display_name"], TWITTER_DATA["name"])
            self.assertTrue(data["avatar_thumbnail_url"])

            token = ExternalToken.query.filter_by(user=creds["user_id"]).one()
            self.assertEquals(token.external_uid, twitter_data["id"])
            self.assertIn("read", token.permissions)
Example no. 2
    def test_where_3_way_logic(self):
        #             This column
        #                 ⇓
        #  ___________________
        # |     | col1 | col2 |
        #  -------------------
        # | r1  |  1   |      | ⇐ This row
        #  -------------------
        # | r2  |  1   |  2   |
        #  -------------------

        response = requests.get(self.dataset_url)
        datasets = json.loads(response.content)
        self.assertEqual(response.status_code, 200,
            msg="Couldn't get the list of datasets")
        self.assertEqual(datasets['status']['rowCount'], 2,
            msg="2 rows should be in the dataset. r1 and r2")

        params = {
            "select":'col1',
            "where":"'col2' < 2"
        }

        response = requests.get(self.dataset_url + '/query', params=params)
        content = json.loads(response.content)
        self.assertEqual(len(content), 0,
            msg="The query should have returned no results")
Example no. 3
File: api.py Project: heshunwq/hue
def get_logs(request):
  response = {'status': -1}

  notebook = json.loads(request.POST.get('notebook', '{}'))
  snippet = json.loads(request.POST.get('snippet', '{}'))

  startFrom = request.POST.get('from')
  startFrom = int(startFrom) if startFrom else None

  size = request.POST.get('size')
  size = int(size) if size else None

  db = get_api(request, snippet)

  logs = db.get_log(notebook, snippet, startFrom=startFrom, size=size)

  jobs = json.loads(request.POST.get('jobs', '[]'))

  # Get any new jobs from current logs snippet
  new_jobs = db.get_jobs(notebook, snippet, logs)

  # Append new jobs to known jobs and get the unique set
  if new_jobs:
    all_jobs = jobs + new_jobs
    jobs = dict((job['name'], job) for job in all_jobs).values()

  # Retrieve full log for job progress parsing
  full_log = request.POST.get('full_log', logs)

  response['logs'] = logs
  response['progress'] = db.progress(snippet, full_log) if snippet['status'] != 'available' and snippet['status'] != 'success' else 100
  response['jobs'] = jobs
  response['status'] = 0

  return JsonResponse(response)
Example no. 4
def decodejson(content):
    print "decode"
    decoded = json.loads(template_json)
    decoded.update(json.loads(str(content)))
    if decoded['v'] > 2:
        print decoded
    return decoded
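Since json.loads raises ValueError on malformed input, a guarded variant can fail gracefully. A minimal sketch under that assumption; TEMPLATE_JSON and the None fallback are illustrative choices, not part of the original module:

import json

TEMPLATE_JSON = '{"v": 1}'  # assumed default template; illustrative only

def decodejson_safe(content):
    decoded = json.loads(TEMPLATE_JSON)
    try:
        decoded.update(json.loads(str(content)))
    except ValueError:
        return None  # malformed payload; the caller decides how to recover
    return decoded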
Example no. 5
File: api.py Project: heshunwq/hue
def export_result(request):
  response = {'status': -1, 'message': _('Exporting result failed.')}

  # Passed by check_document_access_permission but unused by APIs
  notebook = json.loads(request.POST.get('notebook', '{}'))
  snippet = json.loads(request.POST.get('snippet', '{}'))
  data_format = json.loads(request.POST.get('format', 'hdfs-file'))
  destination = json.loads(request.POST.get('destination', ''))
  overwrite = json.loads(request.POST.get('overwrite', False))

  api = get_api(request, snippet)

  if data_format == 'hdfs-file':
    if overwrite and request.fs.exists(destination):
      if request.fs.isfile(destination):
        request.fs.do_as_user(request.user.username, request.fs.rmtree, destination)
      else:
        raise ValidationError(_("The target path is a directory"))
    response['watch_url'] = api.export_data_as_hdfs_file(snippet, destination, overwrite)
    response['status'] = 0
  elif data_format == 'hive-table':
    notebook_id = notebook['id'] or request.GET.get('editor', request.GET.get('notebook'))
    response['watch_url'] = reverse('notebook:execute_and_watch') + '?action=save_as_table&notebook=' + str(notebook_id) + '&snippet=0&destination=' + destination
    response['status'] = 0
  elif data_format == 'hdfs-directory':
    notebook_id = notebook['id'] or request.GET.get('editor', request.GET.get('notebook'))
    response['watch_url'] = reverse('notebook:execute_and_watch') + '?action=insert_as_query&notebook=' + str(notebook_id) + '&snippet=0&destination=' + destination
    response['status'] = 0

  return JsonResponse(response)
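Note that json.loads is applied to the fallback values too: a missing 'destination' produces json.loads(''), which raises ValueError, and a missing 'overwrite' passes False, which json.loads cannot parse at all. A minimal sketch of a safer accessor, assuming a Django-style request.POST; the helper name post_json is hypothetical:

import json

def post_json(request, key, default=None):
    raw = request.POST.get(key)
    # parse only when the key was actually posted; otherwise return the default as-is
    return json.loads(raw) if raw is not None else default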
Example no. 6
def get_servlet_stats(base_url, url, user, password):
    monitor_status = json.loads(retrieve_status_page(
                                user, password, base_url+url))
    serv_stats = {}

    attribute_array = monitor_status.get("attributes")
    servlet_url = servlet_get_url(attribute_array, "ResponseTimeDetails")
    servlet_status = json.loads(retrieve_status_page(
                                user, password, base_url+servlet_url))

    serv_stats["reqCount"] = servlet_status.get("value").get("count")
    serv_stats["responseMean"] = servlet_status.get("value").get("mean")
    serv_stats["responseMax"] = servlet_status.get("value").get("maximumValue")
    serv_stats["responseMin"] = servlet_status.get("value").get("minimumValue")

    servlet_url = servlet_get_url(attribute_array, "ServletName")
    servlet_status = json.loads(retrieve_status_page(
                                user, password, base_url + servlet_url))
    serv_stats["name"] = servlet_status.get("value")

    servlet_url = servlet_get_url(attribute_array, "AppName")
    servlet_status = json.loads(retrieve_status_page(
                                user, password, base_url + servlet_url))
    serv_stats["appName"] = servlet_status.get("value")
    return serv_stats
Example no. 7
    def test_13_admin_user_add_del(self):
        """Test ADMIN add/del user to admin group works"""
        self.register()
        self.signout()
        self.register(fullname="Juan Jose", name="juan",
                      email="*****@*****.**", password="******")
        self.signout()
        # Signin with admin user
        self.signin()
        # Add user.id=1000 (it does not exist)
        res = self.app.get("/admin/users/add/1000", follow_redirects=True)
        err = json.loads(res.data)
        assert res.status_code == 404, res.status_code
        assert err['error'] == "User not found", err
        assert err['status_code'] == 404, err


        # Add user.id=2 to admin group
        res = self.app.get("/admin/users/add/2", follow_redirects=True)
        assert "Current Users with Admin privileges" in res.data
        err_msg = "User.id=2 should be listed as an admin"
        assert "Juan Jose" in res.data, err_msg
        # Remove user.id=2 from admin group
        res = self.app.get("/admin/users/del/2", follow_redirects=True)
        assert "Current Users with Admin privileges" not in res.data
        err_msg = "User.id=2 should be listed as an admin"
        assert "Juan Jose" not in res.data, err_msg
        # Deleting a nonexistent user should return an error
        res = self.app.get("/admin/users/del/5000", follow_redirects=True)
        err = json.loads(res.data)
        assert res.status_code == 404, res.status_code
        assert err['error'] == "User.id not found", err
        assert err['status_code'] == 404, err
Example no. 8
def test_list_inconsistent_obj(ctx, manager, osd_remote, pg, acting, osd_id,
                               obj_name, obj_path):
    mon = manager.controller
    pool = 'rbd'
    omap_key = 'key'
    omap_val = 'val'
    manager.do_rados(mon, ['-p', pool, 'setomapval', obj_name,
                           omap_key, omap_val])
    messup = MessUp(manager, osd_remote, pool, osd_id, obj_name, obj_path,
                    omap_key, omap_val)
    for test in [messup.rm_omap, messup.add_omap, messup.change_omap,
                 messup.append, messup.truncate, messup.change_obj,
                 messup.remove]:
        with test() as checks:
            deep_scrub(manager, pg)
            cmd = 'rados list-inconsistent-pg {pool} ' \
                  '--format=json'.format(pool=pool)
            with contextlib.closing(StringIO()) as out:
                mon.run(args=cmd.split(), stdout=out)
                pgs = json.loads(out.getvalue())
            assert pgs == [pg]

            cmd = 'rados list-inconsistent-obj {pg} ' \
                  '--format=json'.format(pg=pg)
            with contextlib.closing(StringIO()) as out:
                mon.run(args=cmd.split(), stdout=out)
                objs = json.loads(out.getvalue())
            assert len(objs['inconsistents']) == 1

            checker = InconsistentObjChecker(osd_id, acting, obj_name)
            inc_obj = objs['inconsistents'][0]
            log.info('inc = %r', inc_obj)
            checker.basic_checks(inc_obj)
            for check in checks:
                checker.run(check, inc_obj)
Example no. 9
	def request(self, connection, url, headers, data=None, timeout=0):
		result = response = None
		try:
			if data: connection.request('POST', url, data, headers)
			else: connection.request('GET', url, headers=headers)
			response = self.timeout_response(connection, timeout)
			if not response:
				return None
			if response.status == httplib.UNAUTHORIZED:
				say_line('Wrong username or password for %s', self.server().name)
				self.authorization_failed = True
				raise NotAuthorized()
			r = self.max_redirects
			while response.status == httplib.TEMPORARY_REDIRECT:
				response.read()
				url = response.getheader('Location', '')
				if r == 0 or url == '': raise HTTPException('Too many redirects or bad redirect')
				connection.request('GET', url, headers=headers)
				response = self.timeout_response(connection, timeout)
				r -= 1
			self.long_poll_url = response.getheader('X-Long-Polling', '')
			self.switch.update_time = bool(response.getheader('X-Roll-NTime', ''))
			hostList = response.getheader('X-Host-List', '')
			self.stratum_header = response.getheader('x-stratum', '')
			if (not self.options.nsf) and hostList: self.switch.add_servers(loads(hostList))
			result = loads(response.read())
			if result['error']:
				say_line('server error: %s', result['error']['message'])
				raise RPCError(result['error']['message'])
			return (connection, result)
		finally:
			if not result or not response or (response.version == 10 and response.getheader('connection', '') != 'keep-alive') or response.getheader('connection', '') == 'close':
				connection.close()
				connection = None
Example no. 10
	def test_view_get_report_data(self):
		url = reverse('activities.views.get_report_data')

		self.client.login(username='******', password='******')

		resp = self.client.get(url, {"startdate": 1325376000000, "enddate": 13885344000000})
		response = json.loads(resp.content)
		self.assertEqual(response, {})

		resp = self.client.get(url, {"startdate": 1325376000000, "enddate": 1388534400000, "mode": "sports", "param": "eyJldmVudHMiOiBbM10sICJzcG9ydHMiOiBbMiwgM119"})
		response = json.loads(resp.content)
		self.assertIn('Laufen', response)
		self.assertIn('Rennrad', response)
		self.assertIn('Schwimmen', response)
		self.assertEquals(response['Schwimmen'], {u'total_time': 0, u'total_calories': 0, u'color': u'#66ccff', u'num_activities': 0, u'total_time_str': u'0:00:00', u'total_distance': 0.0, u'total_elev_gain': 0})
		self.assertEquals(response['Laufen'], {u'total_time': 11018, u'total_calories': 2493, u'color': u'#cc6600', u'num_activities': 2, u'total_time_str': u'3:03:38', u'total_distance': 37.808, u'total_elev_gain': 896})

		resp = self.client.get(url, {"startdate": 1325376000000, "enddate": 1388534400000, "mode": "weeks"})
		response = json.loads(resp.content)
		self.assertEqual(response, {u'count': [], u'distance': [], u'calories': [], u'time': []})

		resp = self.client.get(url, {"startdate": 1325376000000, "enddate": 1388534400000, "mode": "weeks", "param": "eyJldmVudHMiOiBbM10sICJzcG9ydHMiOiBbMiwgM119"})
		response = json.loads(resp.content)
		resp_time_bike = response["time"][1]  # by-week list of bike times
		self.assertEqual(len(resp_time_bike["data"]), 106)  # covering 106 weeks
		self.assertEqual(resp_time_bike["data"][0][1], 0)   # no activity in week 0
		self.assertEqual(resp_time_bike["data"][87][1], 80)  # activity in week 87
Example no. 11
def get_free_movies():
	html2 = make_request(url)
	html = json.loads(html2)
	content_id=''
	for r in html['contents']:
		if '1' in r['content_type_id']:
			content_id = r['content_id']
			name = r['title']
	if content_id:
		userurl = 'https://erosnow.com/secured/dologin'
		req = make_request_post(userurl)
		movieurl2 = 'http://erosnow.com/profiles/'+str(content_id)+'?platform=2&q=auto'
		html3 = make_request(movieurl2)
		html4 = json.loads(html3)
		req2 = json.loads(req)
		item2 = xbmcgui.ListItem(name)
		if (str(req2['success']['plan']) == 'False'):
			movie_link = html4['profiles']['ADAPTIVE_SD'][0]
		else:
			movie_link = html4['profiles']['ADAPTIVE_ALL'][0]
			subYes = Addon.getSetting('subType')
			if (subYes=='true') and (html4.get('subtitles')):
				closedcaption=[]
				closedcaption.append(html4['subtitles']['eng']['url'])
				subpath = convert_subtitles(closedcaption)
				item2.setSubtitles([subpath])
		
		item2.setProperty('IsPlayable', 'true')
		item2.setPath(movie_link['url'])
		xbmcplugin.setResolvedUrl(int(sys.argv[1]), True, item2)
	else:
		dialog.notification('Error', 'Movie may not be released yet.', xbmcgui.NOTIFICATION_INFO, 6000)
Example no. 12
    def test_ng_save_update(self):
        # CRUDTestViewWithFK
        request = self.factory.post('/crud/?pk=1',
                                    data=json.dumps({'pk': 1, 'name': 'John2'}),
                                    content_type='application/json')
        response = CRUDTestView.as_view()(request)
        data = json.loads(response.content.decode('utf-8'))
        self.assertEqual(data['name'], 'John2')

        request2 = self.factory.get('/crud/?pk=1')
        response2 = CRUDTestView.as_view()(request2)
        data2 = json.loads(response2.content.decode('utf-8'))
        self.assertEqual(data2['name'], 'John2')

        # CRUDTestViewWithSlug
        request3 = self.factory.post('/crud/?pk=1',
                                    data=json.dumps({'name': 'John', 'email': '*****@*****.**'}),
                                    content_type='application/json')
        response3 = CRUDTestViewWithSlug.as_view()(request3)
        data3 = json.loads(response3.content.decode('utf-8'))
        self.assertEqual(data3['name'], 'John')
        self.assertEqual(data3['email'], '*****@*****.**')

        request4 = self.factory.get('/crud/[email protected]')
        response4 = CRUDTestViewWithSlug.as_view()(request4)
        data4 = json.loads(response4.content.decode('utf-8'))
        self.assertEqual(data4['name'], 'John')

        request5 = self.factory.post('/crud/?pk=3',  # Modifying "Chris"
                                    data=json.dumps({'pk': 4, 'name': 'John2', 'email': '*****@*****.**'}),
                                    content_type='application/json')
        response5 = CRUDTestViewWithSlug.as_view()(request5)
        self.assertGreaterEqual(response5.status_code, 400)
        data5 = json.loads(response5.content.decode('utf-8'))
        self.assertTrue('detail' in data5 and 'email' in data5['detail'] and len(data5['detail']['email']) > 0)
Example no. 13
    def test_ng_delete(self):
        # CRUDTestViewWithFK
        request = self.factory.delete('/crud/?pk=1')
        response = CRUDTestViewWithFK.as_view()(request)
        data = json.loads(response.content.decode('utf-8'))
        deleted_name = data['name']

        request2 = self.factory.get('/crud/')
        response2 = CRUDTestViewWithFK.as_view()(request2)
        data2 = json.loads(response2.content.decode('utf-8'))
        for obj in data2:
            self.assertTrue(deleted_name != obj['name'])

        # CRUDTestViewWithSlug delete is not different from CRUDTestViewWithFK only testing error status codes
        request3 = self.factory.delete('/crud/[email protected]')  # Missing pk
        response3 = CRUDTestViewWithSlug.as_view()(request3)
        self.assertEqual(response3.status_code, 400)

        request4 = self.factory.delete('/crud/?pk=100')  # Invalid pk
        response4 = CRUDTestViewWithSlug.as_view()(request4)
        self.assertEqual(response4.status_code, 404)

        # Testing with m2m relationship
        request5 = self.factory.delete('/crud/?pk=%s' % self.m2m_model.pk)
        response5 = CRUDTestViewWithM2M.as_view()(request5)
        self.assertEqual(response5.status_code, 200)
Example no. 14
    def put(self, request, pk, subject, record, *args, **kwargs):
        '''
        Updates the subject record. Currently only changing the records label is
        supported.

        If successful the updated external record is returned
        '''
        try:
            pds = ProtocolDataSource.objects.get(pk=pk)
        except ObjectDoesNotExist:
            return Response({'error': 'Requested ProtocolDataSource not found'}, status=404)

        if pds.protocol.isUserAuthorized(request.user):
            ex_rec = json.loads(request.body.decode('utf-8'))
            rec = self.er_rh.get(id=ex_rec['id'])
            rec.label_id = ex_rec['label_id']
            rec.modified = datetime.now()
            res = self.er_rh.update(rec)[0]
            if res['success']:
                ex_rec = res['external_record']
                return Response(json.loads(ex_rec.json_from_identity(ex_rec)))
            else:
                return Response({
                    'success': res['success'],
                    'errors': res['errors']},
                    status=422)
        else:
            return Response(
                {"detail": "You are not authorized to view records from this protocol"},
                status=403
            )
Example no. 15
    def test_ng_save_create(self):
        # CRUDTestViewWithFK
        request = self.factory.post('/crud/',
                                    data=json.dumps({'name': 'Leonard'}),
                                    content_type='application/json')
        response = CRUDTestView.as_view()(request)
        data = json.loads(response.content.decode('utf-8'))
        pk = data['pk']

        request2 = self.factory.get('/crud/?pk={0}'.format(pk))
        response2 = CRUDTestView.as_view()(request2)
        data2 = json.loads(response2.content.decode('utf-8'))
        self.assertEqual(data2['name'], 'Leonard')

        # CRUDTestViewWithSlug
        request3 = self.factory.post('/crud/',
                                    data=json.dumps({'name': 'Leonard', 'email': '*****@*****.**'}),
                                    content_type='application/json')
        CRUDTestViewWithSlug.as_view()(request3)

        request4 = self.factory.get('/crud/?email={0}'.format('*****@*****.**'))
        response4 = CRUDTestViewWithSlug.as_view()(request4)
        data4 = json.loads(response4.content.decode('utf-8'))
        self.assertEqual(data4['name'], 'Leonard')

        request5 = self.factory.post('/crud/',
                                    data=json.dumps({'name': 'Leonard2', 'email': '*****@*****.**'}),
                                    content_type='application/json')
        response5 = CRUDTestViewWithSlug.as_view()(request5)
        self.assertGreaterEqual(response5.status_code, 400)
        data5 = json.loads(response5.content.decode('utf-8'))
        self.assertTrue('detail' in data5 and 'email' in data5['detail'] and len(data5['detail']['email']) > 0)
Example no. 16
    def pretty_log(self, args, kwargs, resp, body):
        if not _logger.isEnabledFor(logging.DEBUG):
            return

        string_parts = ['curl -i']
        for element in args:
            if element in ('GET', 'POST'):
                string_parts.append(' -X %s' % element)
            else:
                string_parts.append(' %s' % element)

        for element in kwargs['headers']:
            header = ' -H "%s: %s"' % (element, kwargs['headers'][element])
            string_parts.append(header)

        curl_cmd = "".join(string_parts)
        _logger.debug("REQUEST:")
        if 'body' in kwargs:
            _logger.debug("%s -d '%s'" % (curl_cmd, kwargs['body']))
            try:
                req_body = json.dumps(json.loads(kwargs['body']),
                                      sort_keys=True, indent=4)
            except (ValueError, TypeError):
                req_body = kwargs['body']
            _logger.debug("BODY: %s\n" % (req_body))
        else:
            _logger.debug(curl_cmd)

        try:
            resp_body = json.dumps(json.loads(body), sort_keys=True, indent=4)
        except (ValueError, TypeError):
            resp_body = body
        _logger.debug("RESPONSE HEADERS: %s" % resp)
        _logger.debug("RESPONSE BODY   : %s" % resp_body)
Example no. 17
    def test_particles(self):
        headers = {'Content-type': 'application/json', 'Accept': 'text/plain'}
        request = {
            'streams': [
                {
                    "node": "XX00X",
                    "stream": "ctdpf_ckl_wfp_instrument_recovered",
                    "subsite": "XX00XXXX",
                    "sensor": "00-CTDPFW100",
                    "method": "recovered",
                    "parameters": [1959]
                }
            ],
            'coefficients': {
                'CC_latitude': [{'value': 1.0, 'deployment': 1}],
                'CC_longitude': [{'value': 1.0, 'deployment': 1}],
            },
            'include_provenance': False,
            'include_annotations': False,
            'qcParameters': {}
        }

        r = self.app.post('/particles', data=json.dumps(request), headers=headers)
        data = json.loads(r.data)
        with open(TEST_DIR + '/test_particles.json', mode='r') as f:
            testdata = json.loads(f.read())
            assert _almost_equal(data, testdata)
Example no. 18
def urls(request):
    if request.method == "GET":
        urls = list(URL.objects.values())
        urls_json = json.dumps(urls)
        return HttpResponse(urls_json, content_type="application/json")
    elif request.method == "POST":
        try:
            payload = json.loads(request.body)
        except:
            return mock_server_error("Fail to unmarshal json string")
        if "name" not in payload or "pattern" not in payload:
            return mock_server_error("Lacking required field")
        try:
            re.compile(payload["pattern"])
        except:
            return mock_server_error("invalid regular expression")
        url = URL(name=payload["name"], pattern=payload["pattern"])
        url.save()
        return mock_server_success()
    elif request.method == "DELETE":
        try:
            payload = json.loads(request.body)
        except:
            return mock_server_error("Fail to unmarshal json string")
        if "id" not in payload:
            return mock_server_error("Lacking required field:id")
        try:
            url = URL.objects.get(id=int(payload["id"]))
        except:
            return mock_server_error("URL not found")
        url.delete()
        return mock_server_success()
    else:
        return mock_server_error("HTTP method not supported.")
Example no. 19
    def post(self, match_key):
        self._require_admin()
        alliances_json = self.request.get("alliances_json")
        alliances = json.loads(alliances_json)
        youtube_videos = json.loads(self.request.get("youtube_videos"))
        team_key_names = list()

        for alliance in alliances:
            team_key_names.extend(alliances[alliance].get('teams', None))

        match = Match(
            id=match_key,
            event=Event.get_by_id(self.request.get("event_key_name")).key,
            set_number=int(self.request.get("set_number")),
            match_number=int(self.request.get("match_number")),
            comp_level=self.request.get("comp_level"),
            team_key_names=team_key_names,
            alliances_json=alliances_json,
            # no_auto_update = str(self.request.get("no_auto_update")).lower() == "true", #TODO
        )
        match = MatchManipulator.createOrUpdate(match)
        match.youtube_videos = youtube_videos
        match.dirty = True  # hacky
        MatchManipulator.createOrUpdate(match)

        self.redirect("/admin/match/" + match.key_name)
Example no. 20
def _post_process(snapshot, bots_active, bots_inactive, tasks_active):
  """Completes the _Snapshot instance with additional data."""
  for dimensions_json, tasks in tasks_active.iteritems():
    snapshot.get_dimensions(dimensions_json).tasks_active = len(tasks)

  snapshot.bot_ids = sorted(bots_active)
  snapshot.bot_ids_bad = sorted(bots_inactive)
  for bot_id, dimensions in bots_active.iteritems():
    # Looks at the current buckets, do not create one.
    for bucket in snapshot.buckets:
      # If this bot matches these dimensions, mark it as a member of this group.
      if task_to_run.match_dimensions(
          json.loads(bucket.dimensions), dimensions):
        # This bot could be used for requests on this dimensions filter.
        if not bot_id in bucket.bot_ids:
          bucket.bot_ids.append(bot_id)
          bucket.bot_ids.sort()

  for bot_id, dimensions in bots_inactive.iteritems():
    # Looks at the current buckets, do not create one.
    for bucket in snapshot.buckets:
      # If this bot matches these dimensions, mark it as a member of this group.
      if task_to_run.match_dimensions(
          json.loads(bucket.dimensions), dimensions):
        # This bot could be used for requests on this dimensions filter.
        if not bot_id in bucket.bot_ids_bad:
          bucket.bot_ids_bad.append(bot_id)
          bucket.bot_ids_bad.sort()
Example no. 21
    def loadCanopyList(self):  # loads the canopy list once per mapper
        for line in open(r'C:\Users\Peter\workspace\mrjobTest\src\clusters\part-00000').readlines():
            lineArr = line.strip().split('\t')
            valuesList = json.loads(lineArr[1])
            colList = json.loads(lineArr[0])
            sprsCanopy = coo_matrix((array(valuesList), (zeros(len(colList)), array(colList))), shape=(1, self.options.numitems))
            self.canopyList.append(sprsCanopy)
Example no. 22
    def test_pubsub(self):

        check_pubsub = self.redis.pubsub()
        check_pubsub.psubscribe("gottwall:*")
        next(check_pubsub.listen())

        cli = RedisClient(
            private_key=private_key,
            public_key=public_key,
            project=project,
            host=HOST)
        ts = datetime.utcnow()

        self.assertEquals("gottwall:{0}:{1}:{2}".format(project, public_key, private_key), cli.channel)
        cli.incr(name="orders", value=2, timestamp=ts, filters={"current_status": "Completed"})

        message = next(check_pubsub.listen())

        self.assertEquals(message['channel'], 'gottwall:{0}:{1}:{2}'.format(project, public_key, private_key))

        notification_message = json.loads(message['data'])
        self.assertEquals(notification_message['type'], 'notification')

        data_dict = json.loads(self.redis.spop(cli.data_key))
        # assertTrue(value, msg) passes for any truthy value; assertEqual was intended
        self.assertEqual(data_dict['name'], 'orders')
        self.assertEqual(data_dict['timestamp'], ts.strftime("%Y-%m-%dT%H:%M:%S"))
        self.assertEqual(data_dict['filters']['current_status'], 'Completed')

        self.assertEquals(self.redis.scard(cli.data_key), 0)
Example no. 23
def lambda_handler(event, context):
    invoking_event = json.loads(event["invokingEvent"])
    configuration_item = invoking_event["configurationItem"]
    rule_parameters = json.loads(event["ruleParameters"])

    result_token = "No token found."
    if "resultToken" in event:
        result_token = event["resultToken"]

    evaluation = evaluate_compliance(configuration_item, rule_parameters)

    config = boto3.client("config")
    config.put_evaluations(
        Evaluations=[
            {
                "ComplianceResourceType":
                    configuration_item["resourceType"],
                "ComplianceResourceId":
                    configuration_item["resourceId"],
                "ComplianceType":
                    evaluation["compliance_type"],
                "Annotation":
                    evaluation["annotation"],
                "OrderingTimestamp":
                    configuration_item["configurationItemCaptureTime"]
            },
        ],
        ResultToken=result_token
    )
Example no. 24
    def test_post_optional_ttl(self):
        messages = [{'body': 239},
                    {'body': {'key': 'value'}, 'ttl': 200}]

        action = consts.MESSAGE_POST
        body = {"queue_name": "kitkat",
                "messages": messages}
        req = test_utils.create_request(action, body, self.headers)

        send_mock = mock.Mock()
        self.protocol.sendMessage = send_mock

        self.protocol.onMessage(req, False)

        resp = json.loads(send_mock.call_args[0][0].decode())
        self.assertEqual(201, resp['headers']['status'])
        msg_id = resp['body']['message_ids'][0]

        action = consts.MESSAGE_GET
        body = {"queue_name": "kitkat", "message_id": msg_id}

        req = test_utils.create_request(action, body, self.headers)

        self.protocol.onMessage(req, False)

        resp = json.loads(send_mock.call_args[0][0].decode())
        self.assertEqual(200, resp['headers']['status'])
        self.assertEqual(self.default_message_ttl,
                         resp['body']['messages']['ttl'])
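The follow-up GET confirms that a message posted without its own ttl comes back carrying the server-side default_message_ttl.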
Example no. 25
    def test_bad_client_id(self, text_id):
        action = consts.MESSAGE_POST
        body = {
            "queue_name": "kinder",
            "messages": [{"ttl": 60,
                          "body": ""}]
        }
        headers = {
            'Client-ID': text_id,
            'X-Project-ID': self.project_id
        }

        send_mock = mock.Mock()
        self.protocol.sendMessage = send_mock

        req = test_utils.create_request(action, body, headers)

        self.protocol.onMessage(req, False)

        resp = json.loads(send_mock.call_args[0][0].decode())
        self.assertEqual(400, resp['headers']['status'])

        action = consts.MESSAGE_GET
        body = {
            "queue_name": "kinder",
            "limit": 3,
            "echo": True
        }

        req = test_utils.create_request(action, body, headers)
        self.protocol.onMessage(req, False)

        resp = json.loads(send_mock.call_args[0][0].decode())
        self.assertEqual(400, resp['headers']['status'])
Example no. 26
def test_add_switch(host, add_switch, test_file):
	with open(test_file('list/switch_with_make_and_model_output.json')) as output:
		expected_output = output.read()

	result = host.run('stack list switch output-format=json')
	assert result.rc == 0
	assert json.loads(result.stdout) == json.loads(expected_output)
Example no. 27
def get_relation_types():
    relation_arg_count = Counter()
    
    for line in sys.stdin:
        if line[0] == '#':
            continue
        line = line.strip()
        if  line.strip() == "":
            continue
        line = line.split("\t")
        entities = json.loads(line[0])
        relations = json.loads(line[1])
        
        if type(entities) == type([]):
            entity1 = entities[0]
            entity2 = entities[1]
            if entity1[:2] == "m." and  entity2[:2] == "m.":
                for relation in relations:
                    relation = json.dumps(relation);
                    if str(relation).find("date") > -1:
                        continue
                    relation_arg_count["%s # %s # left_arg" %(entity1, relation)] += 1 
                    relation_arg_count["%s # %s # right_arg" %(entity2, relation)] += 1
        if len(relation_arg_count) > 50000:
            relation_arg_count = Counter(dict(relation_arg_count.most_common(30000)))
            # break
        
    relation_counter = Counter()
    for key, value in relation_arg_count.most_common(100):
        relation = key.split(" # ", 1)[1]
        relation_counter[relation] += value
        # print "%s\t%s" %(key, value)
        
    for key, value in relation_counter.most_common(10):
        print "%s\t%s" %(key, value)
Example no. 28
def parse_xobject(xobject, queue_name):
    """
        Parse a queue object from xqueue:
        { 'return_code': 0 (success), 1 (fail)
        'content': Message from xqueue (string)
        }
        """
    try:
        xobject = json.loads(xobject)
        
        header = json.loads(xobject['xqueue_header'])
        header.update({'queue_name': queue_name})
        body = json.loads(xobject['xqueue_body'])
        files = json.loads(xobject['xqueue_files'])
        
        content = {'xqueue_header': json.dumps(header),
            'xqueue_body': json.dumps(body),
            'xqueue_files': json.dumps(files)
        }
    except ValueError:
        error_message = "Unexpected reply from server."
        log.error(error_message)
        return (False, error_message)
    
    return True, content
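A quick usage sketch against a synthetic reply (field names per the docstring above; the sample values are made up):

import json

sample = json.dumps({
    'xqueue_header': json.dumps({'submission_id': 1}),
    'xqueue_body': json.dumps({'student_response': '42'}),
    'xqueue_files': json.dumps({}),
})
ok, content = parse_xobject(sample, 'test-queue')
# on success the header is re-serialized with queue_name folded in
assert ok and json.loads(content['xqueue_header'])['queue_name'] == 'test-queue'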
Example no. 29
def fetch_token_header_payload(token):
    """
    Fetch the header and payload out of the JWT token.
    :param token:
    :return: :raise jwt.DecodeError:
    """
    token = token.encode('utf-8')
    try:
        signing_input, crypto_segment = token.rsplit(b'.', 1)
        header_segment, payload_segment = signing_input.split(b'.', 1)
    except ValueError:
        raise jwt.DecodeError('Not enough segments')

    try:
        header = json.loads(jwt.utils.base64url_decode(header_segment).decode('utf-8'))
    except TypeError as e:
        current_app.logger.exception(e)
        raise jwt.DecodeError('Invalid header padding')

    try:
        payload = json.loads(jwt.utils.base64url_decode(payload_segment).decode('utf-8'))
    except TypeError as e:
        current_app.logger.exception(e)
        raise jwt.DecodeError('Invalid payload padding')

    return (header, payload)
Example no. 30
    def _call(self, method, params, json_rpc_call_context=None):
        arg_hash = {'method': method,
                    'params': params,
                    'version': '1.1',
                    'id': str(_random.random())[2:]
                    }
        if json_rpc_call_context:
            arg_hash['context'] = json_rpc_call_context
        body = json.dumps(arg_hash, cls=JSONObjectEncoder)
        ret = _requests.post(self.url, data=body, headers=self._headers,
                             timeout=self.timeout,
                             verify=not self.trust_all_ssl_certificates)
        if ret.status_code == _requests.codes.server_error:
            if 'content-type' in ret.headers and ret.headers['content-type'] == 'application/json':
                err = json.loads(ret.text)
                if 'error' in err:
                    raise ServerError(**err['error'])
                else:
                    raise ServerError('Unknown', 0, ret.text)
            else:
                raise ServerError('Unknown', 0, ret.text)
        if ret.status_code != _requests.codes.OK:
            ret.raise_for_status()
        resp = json.loads(ret.text)
        if 'result' not in resp:
            raise ServerError('Unknown', 0, 'An unknown server error occurred')
        return resp['result']
Example no. 31
import json
from glob import glob
filenames = glob("./segments/*.json")
print(filenames)

time_block = 0
blocks = []
for fn in filenames:
    with open(fn, 'r') as t:
        data = json.loads(t.read())
        for x in data['results']:
            best_alt = x['alternatives'][0]
            trans = best_alt['transcript']
            if len(blocks) > 0:
                if len(trans.split(' ')) < 10:
                    if len(blocks[-1]['text'].split(' ')) < 10:
                        blocks[-1]['text'] += ' ' + trans.strip()
                        continue
            block = {}
            block['time'] = best_alt['timestamps'][0][1] + time_block
            block['text'] = trans.strip()
            blocks.append(block)

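    # assumes each input segment covers 300 seconds (5 minutes) of audio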
    time_block += 300

with open('transcript.txt', 'w') as f:
    for block in blocks:
        time = str(int(block['time'] // 60)) + "m" + str(int(block['time'] % 60)) + "s"
        f.write(time + ': ' + block['text'] + '\n')
Example no. 32
def rasa_default_train_data():
    with io.open(
        "data/examples/rasa/demo-rasa.json", encoding="utf-8-sig"
    ) as train_file:
        return json.loads(train_file.read())
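Note: json.load(train_file) would parse the stream in one step; json.loads(train_file.read()) is functionally equivalent here.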
Example no. 33
def redirect_fb(request):
	if request.method=='GET':
		try:
			code=request.GET['code']
			state=request.GET['state']
		except Exception as e:
			if request.session.get("url",None):
				return HttpResponseRedirect("/place/"+request.session["url"]+"/")
			else:
				print("q1")
				return HttpResponseRedirect("/")
		else:
			if True:
				authorization_base_url = 'https://www.facebook.com/dialog/oauth/?scope=user_friends,email,public_profile'
				token_url = 'https://graph.facebook.com/oauth/access_token'
				facebook = OAuth2Session(client_id, redirect_uri=redirect_uri)
				facebook = facebook_compliance_fix(facebook)
				facebook.fetch_token(token_url, client_secret=client_secret, code=code)
				r1 = facebook.get('https://graph.facebook.com/v2.8/me/invitable_friends/?limit=500')
				r3 = facebook.get('https://graph.facebook.com/v2.8/me/friends/?limit=500')
				r2 = facebook.get('https://graph.facebook.com/v2.8/me?fields=id,name,email')
				entry = json.loads(r2.content.decode("utf-8"))
				context={}
				try:
					context['id']=entry['id']
					context['name']=entry['name']
					context['pic']="https://graph.facebook.com/"+entry['id']+"/picture?type=large"
					context['email']=entry['email']
					
					print("ok")
				except Exception as e:
					context['email']=""
				try:
					if context["email"]:
						people_object=get_object_or_404(People, email = context['email'])
					else:
						context['email']="*****@*****.**"
						people_object=get_object_or_404(People, userid = context['id'])
					request.session['sessionid'] = people_object.peopleid
					request.session['sessionpic'] = context['pic']
					print(request.session['sessionpic'])
					messages.info(request, 'Login success')
					if request.session.get("url",None):
						return HttpResponseRedirect("/place/"+request.session["url"]+"/")
					else:
						print("q2")
						return HttpResponseRedirect("/profile/"+people_object.username)    
				except Exception as e:
					hash = pbkdf2_sha256.encrypt(context['id'],rounds=500000, salt_size=32)
					entry1 = People(
						username=context['name'],
						email=context['email'],
						password=hash,
						photo=context['pic'],
						userid=context['id'],
						)
					entry1.save()
					people_object1=get_object_or_404(People,email=context['email'])
					request.session['sessionid']=people_object1.peopleid
					request.session['sessionpic']=context['pic']
					if request.session.get("url",None):
						return HttpResponseRedirect("/place/"+request.session["url"]+"/")
					else:
						print("q3")
						return HttpResponseRedirect("/profile/"+context['name'])
Example no. 34
def redirect_google(request):
	client_id = '762392110250-pj80v4b83mnqruv81mi9t6hv84234ol7.apps.googleusercontent.com'
	client_secret = 'SUTtdmLL60TwS_TDTFNd_jW5'
	redirect_uri = 'http://localhost:8000/login/gologin/'
	token_url = "https://www.googleapis.com/oauth2/v4/token"
	scope = [
		"https://www.googleapis.com/auth/userinfo.email",
		"https://www.googleapis.com/auth/userinfo.profile"
		]
	if request.method=='GET':
		try:
			code=request.GET['code']
			state=request.GET['state']
		except Exception as e:
			if request.session.get("url",None):
				return HttpResponseRedirect("/place/"+request.session["url"]+"/")
			else:
				print("q1")
				return HttpResponseRedirect("/")
		else:
			if True:
				#redirect_response = input('Paste the full redirect URL here:')

				# Fetch the access token
				google = OAuth2Session(client_id, scope=scope, redirect_uri=redirect_uri)
				google.fetch_token(token_url, client_secret=client_secret,code=code)

				# Fetch a protected resource, i.e. user profile
				r = google.get('https://www.googleapis.com/oauth2/v1/userinfo')
				entry = json.loads(r.content.decode("utf-8"))
				context = {}
				try:
					context['id'] = entry['id']
					context['name'] = entry['name']
					context['email'] = entry['email']
					context['pic'] = entry['picture']
				except Exception as e:
					return HttpResponse("<script>alert('email Required');</script>")
				#return render(request,"showdata.html",context)
				try:
					people_object = get_object_or_404(People, email=context['email'])
					request.session['sessionid'] = people_object.peopleid
					request.session['sessionpic'] = context['pic']
					messages.info(request, 'Login success')
					if request.session.get("url",None):
						q=request.session['url']
						del request.session['url']
						return HttpResponseRedirect("/place/"+q+"/")

					else:
						print("q1")
						return HttpResponseRedirect("/profile/"+people_object.username)
					# messages.add_message(request, messages.ERROR, 'Email already taken.')
				except Exception as e:
					hash = pbkdf2_sha256.encrypt(context['id'], rounds=500000, salt_size=32)
					entry2 = People(
						username=context['name'],
						email=context['email'],
						password=hash,
						photo=context['pic'],
					)
					entry2.save()
					people_object1 = get_object_or_404(People, email=context['email'])
					request.session['sessionid'] = people_object1.peopleid
					request.session['sessionpic'] = context['pic']
					#return HttpResponseRedirect(request, "home_page.html")

					if request.session.get("url",None):

						q=request.session['url']
						del request.session['url']
						return HttpResponseRedirect("/place/"+q+"/")
					else:
						print("q1")
						return HttpResponseRedirect("/profile/"+context['name'])
Example no. 35
RIK_url = 'http://l3.cloudskep.com/cybcsat/abr/playlist.m3u8'
RIK_proto = 'http://r1.cloudskep.com/cybcr/cybc1/playlist.m3u8'
RIK_trito = 'http://r1.cloudskep.com/cybcr/cybc3/playlist.m3u8'
# SIGMA_url = 'http://81.21.47.74/hls/live.m3u8'
# CEWR_url = 'http://147.135.252.4:10221/live'
YT_Channel = 'UCKXFDK9dRGcnwr7mWmzoY2w'
mags_base_url = 'https://alivegr.net/bci_mags/index.txt'

# NETV_Toronto_url = ('https://www.netvtoronto.com/', 'Ahr0Chm6lY9SAxzLlNn0CMvHBxmUB3zOl1q0ndrutMfWv1ryEtrWl1q0ndrutMfWv1ryEtrWl3bSyxLSAxn0lM0ZDtG=')
# Cannali_url = ('https://www.cannalimusic.com/', 'Ahr0Chm6lY9SAxzLlNn0CMvHBxmUB3zOl3nLuuD4sdzTngeVC2vrr3HinM00ys9JAhvUA2XPC3rFDZeZntCWmJmWmY5Tm3u4')
# Life_url = ('https://www.lifehd.magicstreams.net/', 'Ahr0Chm6lY9SAxzLlNn0CMvHBxmUB3zOlZHnBw1uwMPAsfaVoe1TBvrAALPiuc9JAhvUA2XPC3rFDZe5mJeXmJmWmdiUBtn1oa==')


scramble = (
    'eJwVzM0OgiAAAOBXcZzLpaiwblmt2cHNXHlshoSm/AREWuvdmw/wfV/QNWDtgRAhjCMYJzAMlzJY6TbRSpgWUx3A2A1INOZppUNxyx5+rZTxmZRsoC'
    '9DNZHCUmF9IjlYeKBW3bWn09xusk9dTinKmzHYVq6fduKENWHBLXsXZKyY40c+nmdlKNHUziiP9gfMLrBitHAFx6S7K8zSEvz+QP85Rw=='
)


NETV_Toronto_url = 'https://live.streams.ovh:443/NetvToronto/NetvToronto/playlist.m3u8'
# NETV_Toronto_2_url = 'http://162.219.176.210/live/eugo242017p1a/playlist.m3u8'
# Eugo24_url = 'http://162.219.176.210:18935/live/eugo242017p1a/playlist.m3u8'
Cannali_url = 'https://live.streams.ovh:443/cannali/cannali/playlist.m3u8'
NEWS_url = 'https://live.streams.ovh:443/netmedia/netmedia/playlist.m3u8'
FOOD_url = 'https://channel.streams.ovh:19360/foodchannel/foodchannel.m3u8'
CETN_url = 'https://channel.streams.ovh:19360/cetn/cetn.m3u8'
HEALTH_url = 'https://eco.streams.ovh/healthchannel/healthchannel/playlist.m3u8'


keys = json.loads(decompress(b64decode(scramble)))
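This fragment presumably relies on from base64 import b64decode and a decompress import (likely zlib.decompress, given the compressed blob) defined outside the excerpt; treat those imports as an assumption.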
Example no. 36
##################user input part
cluster_num = cluster_ip_setup.input_cluster_num()
worker_num = cluster_ip_setup.input_worker_num()
master_ip = cluster_ip_setup.input_master_ip()
worker_ip = list()

for i in range (0, int(worker_num)):
  append_worker_ip = cluster_ip_setup.input_worker_ip()
  worker_ip.append(append_worker_ip)

#################### computing part how many number of nodes in the cluster
request_message = 'http://%s:30000/api/v1/query?query=kubelet_running_pod_count' % master_ip
r=requests.get(request_message)
c=r.content.decode('utf-8')
json_data=json.loads(c)

Num_of_Node = len(json_data['data']["result"])

################### each nodes name store into the Node_name list

Node_name = list()


for i in range (0, Num_of_Node):
  Node_name_input = json_data["data"]["result"][i]["metric"]["instance"]
  Node_name.append(Node_name_input)
  Pod_count= json_data["data"]["result"][i]["value"][1]
  print(json.dumps(json_data, indent = 4, sort_keys=True))
  print(Node_name)
  print(Pod_count)
Example no. 37
import zmq
import redis
import json

context = zmq.Context()
zmqSock = context.socket(zmq.SUB)
zmqSock.bind("tcp://127.0.0.1:5000")
zmqSock.setsockopt(zmq.SUBSCRIBE, "0")

redisServer = redis.Redis("localhost")

while True:
    reply = zmqSock.recv()
    keys = redisServer.keys()
    if keys != []:
        for i in range(len(keys)):
            keys[i] = int(keys[i])
        i = max(keys) + 1
    else:
        i = 0
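    # strip the leading subscription-topic byte ('0') before parsing the JSON payload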
    reply = json.loads(reply[1:])
    reply.append(i)
    reply = json.dumps(reply)
    redisServer.set(i, reply)
Example no. 38
 def save_group_distance_by_term(self,term,group_dists):
     js = json.loads(group_dists.to_json(orient="records"))
     jobj = {"_id" : term , self.TERM : term , self.DIST_MAT : js}
     col = self.dbconn.getCollection(self.group_dist_by_term_collection)
     col.update_one({"_id" : term} , {"$set" : jobj} , True)
Example no. 39
def load_data(path):
    with open(path) as f:
        return json.loads(f.read())
Example no. 40
def read_index(path: str) -> Dict:
    """Reads the data index, removing the initial stages of nesting."""
    path = Path(path)
    kind = os.path.splitext(path.name)[0]
    data = json.loads(path.read_text())
    return data["protocols"][kind]["subjects"]
Example no. 41
handle=None

filenum = 0

rootdir = "./UsersReversed/"

for current_directory, directories, files in os.walk(rootdir):
	
	for file in files:
		filenum += 1
		filepath = os.path.join(current_directory,file)
		
		with open(filepath) as infile:
			for line in infile:
				count+=1
				parsed=json.loads(line)

				name=parsed['name']

				# only change the file handle if lastName differs from this name
				if(lastName!=name):
					if name in fileHandles:
					    handle=fileHandles[name]
					else:
					    #this is the first time we are seeing this name. If a corresponding file already exists, delete it.
					    filename= "./CSVSeparated/" + name.lower().replace(" ","-")+".csv"
					    handle=fileHandles[name]=open(filename, 'wb')

				del parsed['cX']
				del parsed['cY']
Example no. 42
            for word, count in vocab_counter.most_common(VOCAB_SIZE):
                writer.write(word + ' ' + str(count) + '\n')
        print("Finished writing vocab file")


if __name__ == '__main__':
    if not os.path.exists(finished_files_dir): os.makedirs(finished_files_dir)
    if not os.path.exists(raw_files_dir): os.makedirs(raw_files_dir)
    if not os.path.exists(split_files_dir): os.makedirs(split_files_dir)
    if not os.path.exists(tokenized_files_dir):
        os.makedirs(tokenized_files_dir)

    for fname in os.listdir(raw_files_dir):
        with open("%s/%s" % (raw_files_dir, fname)) as f, open(
                "%s/%s" % (split_files_dir, fname), 'w') as fc:
            for cnt, line in enumerate(f):
                obj = json.loads(line)
                print(obj['content'], file=fc)
                print("ychzhou " + obj.get('title', ""), file=fc)

    # Run stanford tokenizer on both stories dirs, outputting to tokenized stories directories
    tokenize_stories(split_files_dir, tokenized_files_dir)

    # Read the tokenized stories, do a little postprocessing then write to bin files
    #write_to_bin(test_files, os.path.join(finished_files_dir, "test.bin"))
    write_to_bin(val_files, os.path.join(finished_files_dir, "val.bin"))
    #write_to_bin(train_files, os.path.join(finished_files_dir, "train.bin"), makevocab=True)

    # Chunk the data. This splits each of train.bin, val.bin and test.bin into smaller chunks, each containing e.g. 1000 examples, and saves them in finished_files/chunks
    chunk_all()
Example no. 43
def get_packages(evaluation, evaluation_builds):
    logger.debug(
        f"get_packages: Retriving list of packages for '{evaluation['git_revision']}' revision"
    )
    result = subprocess.run(
        shlex.split(
            f"nix-env -f '<nixpkgs>' -I nixpkgs=https://github.com/NixOS/nixpkgs-channels/archive/{evaluation['git_revision']}.tar.gz --arg config 'import {CURRENT_DIR}/packages-config.nix' -qa --json"
        ),
        stdout=subprocess.PIPE,
        check=True,
    )
    packages = json.loads(result.stdout).items()
    packages = list(packages)

    def gen():
        for attr_name, data in packages:

            position = data["meta"].get("position")
            if position and position.startswith("/nix/store"):
                position = position[44:]

            licenses = data["meta"].get("license")
            if licenses:
                if type(licenses) == str:
                    licenses = [dict(fullName=licenses)]
                elif type(licenses) == dict:
                    licenses = [licenses]
                licenses = [
                    type(license) == str
                    and dict(fullName=license, url=None)
                    or dict(fullName=license.get("fullName"), url=license.get("url"),)
                    for license in licenses
                ]
            else:
                licenses = []

            maintainers = get_maintainer(data["meta"].get("maintainers", []))

            platforms = [
                type(platform) == str and platform or None
                for platform in data["meta"].get("platforms", [])
            ]

            attr_set = None
            if "." in attr_name:
                attr_set = attr_name.split(".")[0]
                if (
                    not attr_set.endswith("Packages")
                    and not attr_set.endswith("Plugins")
                    and not attr_set.endswith("Extensions")
                ):
                    attr_set = None

            hydra = None
            if data["name"] in evaluation_builds:
                hydra = []
                for platform, build in evaluation_builds[data["name"]].items():
                    hydra.append(
                        {
                            "build_id": build["id"],
                            "build_status": build["buildstatus"],
                            "platform": build["system"],
                            "project": build["project"],
                            "jobset": build["jobset"],
                            "job": build["job"],
                            "path": [
                                {"output": output, "path": item["path"]}
                                for output, item in build["buildoutputs"].items()
                            ],
                            "drv_path": build["drvpath"],
                        }
                    )

            yield dict(
                type="package",
                package_hydra=hydra,
                package_attr_name=attr_name,
                package_attr_name_query=list(parse_query(attr_name)),
                package_attr_set=attr_set,
                package_pname=remove_attr_set(data["pname"]),
                package_pversion=data["version"],
                package_description=data["meta"].get("description"),
                package_longDescription=data["meta"].get("longDescription", ""),
                package_license=licenses,
                package_maintainers=maintainers,
                package_platforms=[i for i in platforms if i],
                package_position=position,
                package_homepage=data["meta"].get("homepage"),
                package_system=data["system"],
            )

    logger.debug(f"get_packages: Found {len(packages)} packages")
    return len(packages), gen
Example no. 44
import json
from code_gen.lib.basic_troop_service_entity_handler import BasicTroopServiceEntityHandler

DOMAIN_NAME = u'10.199.111.2'
URL = u'http://%s/V1/services/custinfo/getMemberLevelRightsDetail'
BODY_DATA = u'{}'
_BODY_DATA = ''
if BODY_DATA:
    _BODY_DATA = json.loads(BODY_DATA)
QUERY_DATA = ''
METHOD_TYPE = u'post'
CONTENT_TYPE = 'json'
REQUEST_DATA = (_BODY_DATA or QUERY_DATA)

HAS_DATA_PATTERN = True
DATA_PATTERN = {"rightsId": "NORMAL_CUSTOMER_SERVICE", "categoryId": "",
                "timestamp": "1501673303461", "noncestr": "2375uca7l5vrvwkr",
                "signature": "CCC0108F1FCE5DF2F2423524EDA1A90F91CF5DBF"}


class V1ServicesCustinfoGetmemberlevelrightsdetailEntity(BasicTroopServiceEntityHandler):
    """
    accessible attribute list for response data:
    %s
    ==================
    kwargs for request:
    Please refer to the constants BODY_DATA or QUERY_DATA request parameters
    """

    def __init__(self, domain_name=DOMAIN_NAME, token=None, **kwargs):
        super(V1ServicesCustinfoGetmemberlevelrightsdetailEntity, self).__init__(domain_name=domain_name,
Example no. 45
    def __eq__(self, that):
        to_cmp = json.loads(str(self))
        return to_cmp == json.loads(str(that))
Example no. 46
    itunesConnectPassword = sensitiveData["iTunesConnectPassword"]

    credentialsData = sensitiveData["googleServiceInfoPlist"].split(",")
    splitservices = credentialsData[1]
    print "piyush"
    print splitservices
    # a single context-managed write replaces the redundant double open of crfile.txt
    with open(CurrentDir + '/crfile.txt', 'w') as crfile:
        crfile.write(splitservices.encode("utf-8"))


getSensitiveData()

response = urllib.urlopen(JSON_URL)
data = json.loads(response.read(), object_pairs_hook=OrderedDict)
print data

#Update build Status from QUEUE to INITIATED
#subprocess.call('sh '+ CurrentDir +'/fastlane/PostBuildStatus.sh ' + BUILD_ID +' '+ POST_URL +' "STARTED" "Build Initiated"', shell=True)

try:

    param = ""
    for k in data.keys():
        param = param + k + "='" + str(data[k]) + "' "

    print param

    appleTeamId = "AC3B332EWB"
    ituneTeamId = "691013"
Example no. 47
from datetime import datetime
import numpy as np
import json
import requests
import sys

output_passed = {}
output_list = []

try:  #File_Input
    file_path = str(sys.argv[1])
    df = pd.read_csv(file_path)

    try:  #Class Duration Entered by User
        input_data = sys.argv[2]
        input_json = json.loads(input_data)
        x = df.iloc[0:, 1]
        min_class_gap = float(input_json["input"][0])
        max_class_gap = float(input_json["input"][1])

        if (
                min_class_gap < max_class_gap
        ):  #Checking whether the max_class_gap is greater than min_class_gap or not

            try:  #Finding Minimum & Maximum WBT
                wbt_data = list(map(float, df.iloc[0:, 1].tolist()))
                wbt_data.sort()
                max_wbt = wbt_data[-1]
                min_wbt = wbt_data[0]

                try:  #Computing Class Gap
Example no. 48
    num_of_tokens = len(tokens)
    if num_of_tokens != 3:
        return dict(cluster_fraction=tokens[0])
    else:
        return dict(cluster_fraction=tokens[1], cluster_confidence=tokens[2])

seqs = []
## Parse args
if '-file' in sys.argv:
    TargetFile = sys.argv[sys.argv.index('-file') + 1]
    basename = os.path.splitext(os.path.basename(TargetFile))[0]
    with open(TargetFile) as f:
        for line in f:
            if line.strip() == "":
                continue
            d = json.loads(line.strip())
            # Bryan has a pair object
            seq = sequence.Sequence(d)
            seqs.append(seq)

force_all_heavy_as_vrc01class = False
blankrun = False

if '-forcevrc01' in sys.argv:
    force_all_heavy_as_vrc01class = True

if '-blankrun' in sys.argv:
    blankrun = True

colortouse = '#45bc70'
if '-plotcolor' in sys.argv:
Example no. 49
0
def active_screens():
    """Get a list of the active screens from i3. (Screens are dicts.)"""
    result = subprocess.run(["i3-msg", "-t", "get_outputs"],
                            capture_output=True)
    screens = json.loads(result.stdout)
    return [s for s in screens if s["active"]]
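
A minimal usage sketch (assuming a live i3 session with i3-msg on PATH; the
name/rect fields follow i3's get_outputs schema):

if __name__ == "__main__":
    # Print the name and geometry of each active output.
    for screen in active_screens():
        rect = screen["rect"]
        print("{}: {}x{}+{}+{}".format(screen["name"], rect["width"],
                                       rect["height"], rect["x"], rect["y"]))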
Example no. 50
0
def getData(day_dt_stat_date, day_dt_end_date):
    global sources, detail

    count = ['total', 'avg']
    uvpv = ['uv', 'pv']

    # dict.fromkeys(keys, {}) would make every key alias ONE shared dict
    # (so 'total' and 'avg' would clobber each other below); build
    # independent nested dicts instead.
    dic = {}
    for source in sources:
        dic[source] = {c: {u: 0 for u in uvpv} for c in count}

    for source in sources:
        sourceParams = {
            'page': '1',
            'para_filter': '"day_dt_stat_date":"' + day_dt_stat_date +
                           '","day_dt_end_date":"' + day_dt_end_date +
                           '","city_key":"0","category_key":"7","source":"' +
                           source + '"',
            'size': '10000000'
        }

        if detail and detail != 'All':
            sourceParams['para_filter'] = sourceParams[
                'para_filter'] + ',"detail":"' + detail + '"'

        #print sourceParams
        #print sourceURL,sourceParams

        getR = stat.post(sourceURL, sourceParams, "")

        #print type(getR),getR

        sourceData = json.loads(getR)

        for i in xrange(0, sourceData['total']):
            row = sourceData['data'][i]
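            # NB: this assignment reuses (and clobbers) the module-level
            # `detail` that was read above when building para_filter.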
            detail = row['detail']
            day_name = row['day_name']
            uv = row['uv']
            pv = row['pv']
            if detail in dic[source]:
                if day_name in dic[source][detail]:
                    dic[source][detail][day_name]['uv'] += int(uv)
                    dic[source][detail][day_name]['pv'] += int(pv)
                else:
                    dic[source][detail][day_name] = {'uv': int(uv), 'pv': int(pv)}
            else:
                dic[source][detail] = {day_name: {'uv': int(uv), 'pv': int(pv)}}

        for detail in dic[source]:
            if detail in count:
                continue

            if 'total' not in dic[source][detail]:
                dic[source][detail]['total'] = {}
                dic[source][detail]['total']['uv'] = 0
                dic[source][detail]['total']['pv'] = 0
                dic[source][detail]['avg'] = {}
                dic[source][detail]['avg']['uv'] = 0
                dic[source][detail]['avg']['pv'] = 0

            #print detail,len(dic[source][detail])
            for day_name in dic[source][detail]:
                try:
                    if day_name in count:
                        continue
                    dic[source][detail]['total']['uv'] += dic[source][detail][day_name]['uv']
                    dic[source][detail]['total']['pv'] += dic[source][detail][day_name]['pv']
                    dic[source]['total']['uv'] += dic[source][detail][day_name]['uv']
                    dic[source]['total']['pv'] += dic[source][detail][day_name]['pv']
                except Exception, e:
                    print e

            dic[source][detail]['avg']['uv'] = dic[source][detail]['total']['uv'] / float(len(dic[source][detail]) - 2)
            dic[source][detail]['avg']['pv'] = dic[source][detail]['total']['pv'] / float(len(dic[source][detail]) - 2)
        try:
            dic[source]['avg']['uv'] = dic[source]['total']['uv'] / float(len(dic[source][detail]) - 2)
            dic[source]['avg']['pv'] = dic[source]['total']['pv'] / float(len(dic[source][detail]) - 2)
        except Exception, e:
            #print e
            pass
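
A minimal sketch of the fromkeys aliasing pitfall that the comprehension
above avoids (py2 print syntax to match the snippet):

shared = dict.fromkeys(['total', 'avg'], {'uv': 0, 'pv': 0})
shared['total']['uv'] += 1
print shared['avg']['uv']    # 1 -- 'total' and 'avg' alias a single dict

independent = {k: {'uv': 0, 'pv': 0} for k in ['total', 'avg']}
independent['total']['uv'] += 1
print independent['avg']['uv']    # 0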
Example no. 51
0
def get_instance_id(path):
    ssm = boto3.client('ssm')
    args = {"Name": path, "WithDecryption": True}
    param = ssm.get_parameter(**args)

    return json.loads(param['Parameter']['Value'])
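
A usage sketch; the parameter path is hypothetical, and AWS credentials are
assumed to be available in the environment:

if __name__ == '__main__':
    # Hypothetical SecureString parameter holding a JSON-encoded value.
    print(get_instance_id('/myapp/prod/instance_id'))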
Example no. 52
0
 def form_invalid(self, form):
     if self.request.is_ajax():
         result = json.loads(form.errors.as_json())
         return JsonResponse(result, safe=False)
     else:
         return self.render_to_response(self.get_context_data(form=form))
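
Note: HttpRequest.is_ajax() was deprecated in Django 3.1 and removed in 4.0;
a sketch of the equivalent check under newer Django versions:

is_ajax = self.request.headers.get('x-requested-with') == 'XMLHttpRequest'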
Example no. 53
0
    def handle(self, *args, **options):
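        # NB: `%` binds tighter than `+`, so self.args fills only the second
        # string literal; the remaining leading `%s` is filled later by each
        # `error_text % message` below.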
        error_text = ('%s\nTry calling dump_object with --help argument or ' +
                      'use the following arguments:\n %s' % self.args)
        try:
            #verify input is valid
            try:
                (app_label, model_name) = options['model'].split('.')
            except AttributeError:
                raise CommandError("Specify model as `appname.modelname")
            query = options['query']
            ids = options['ids']
            config_name = options['config_name']

            if ids and query:
                raise CommandError(error_text %
                                   'either use query or id list, not both')
            if not (ids or query):
                raise CommandError(error_text %
                                   'must pass list of --ids or a json --query')
        except IndexError:
            raise CommandError(error_text %
                               'No object_class or filter clause supplied.')
        except ValueError:
            raise CommandError(error_text %
                               ("object_class must be provided in"
                                " the following format: app_name.model_name"))
        except AssertionError:
            raise CommandError(error_text % 'No filter argument supplied.')

        dump_me = loading.get_model(app_label, model_name)
        if query:
            objs = dump_me.objects.filter(**json.loads(query))
        else:
            if ids[0] == '*':
                objs = dump_me.objects.all()
            else:
                try:
                    parsers = int, long, str
                except NameError:
                    parsers = int, str
                for parser in parsers:
                    try:
                        objs = dump_me.objects.filter(pk__in=map(parser, ids))
                    except ValueError:
                        pass
                    else:
                        break

        if options.get('kitchensink'):
            if django.VERSION >= (1, 8):
                fields = (
                    f for f in dump_me._meta.get_fields()
                    if (f.one_to_many or f.one_to_one) and f.auto_created)
            else:
                fields = dump_me._meta.get_all_related_objects()

            related_fields = [rel.get_accessor_name() for rel in fields]

            for obj in objs:
                for rel in related_fields:
                    try:
                        if hasattr(getattr(obj, rel), 'all'):
                            add_to_serialize_list(getattr(obj, rel).all())
                        else:
                            add_to_serialize_list([getattr(obj, rel)])
                    except FieldError:
                        pass
                    except ObjectDoesNotExist:
                        pass

        try:
            dump_settings = settings.CUSTOM_DUMPS[config_name]
        except Exception:
            dump_settings = None

        add_to_serialize_list(objs)
        serialize_fully()
        data = serialize(
            options.get('format', 'json'),
            [o for o in serialize_me if o is not None],
            indent=4,
            use_natural_foreign_keys=options.get('natural', False),
            use_natural_primary_keys=options.get('natural', False))

        if dump_settings and dump_settings.get('order', []):
            data = reorder_json(json.loads(data),
                                dump_settings.get('order', []),
                                ordering_cond=dump_settings.get(
                                    'order_cond', {}))
        self.stdout.write(json.dumps(data))
        # Clear the list. Useful for when calling multiple dump_object commands with a single execution of django
        del serialize_me[:]
        seen.clear()
Example no. 54
0
 def getJSONFromString(self, raw):
     # `raw` rather than `str`, which would shadow the builtin
     return json.loads(raw)
Example no. 55
0
 def received_message(self, m):
     with self._closing_lock:
         if not self._closing:
             self.on_event(json.loads(str(m)))
Example no. 56
0
    def prepare(self,mReader, directories, mode='init'):
        try:
            #from Configuration.PyReleaseValidation.relval_steps import wmsplit
            wmsplit = {}
            wmsplit['DIGIHI']=5
            wmsplit['RECOHI']=5
            wmsplit['HLTD']=5
            wmsplit['RECODreHLT']=2  
            wmsplit['DIGIPU']=4
            wmsplit['DIGIPU1']=4
            wmsplit['RECOPU1']=1
            wmsplit['DIGIUP15_PU50']=1
            wmsplit['RECOUP15_PU50']=1
            wmsplit['DIGIUP15_PU25']=1
            wmsplit['RECOUP15_PU25']=1
            wmsplit['DIGIHISt3']=5
            wmsplit['RECODSplit']=1
            wmsplit['SingleMuPt10_ID']=1
            wmsplit['DIGI_ID']=1
            wmsplit['RECO_ID']=1
            wmsplit['TTbar_ID']=1
            wmsplit['SingleMuPt10FS_ID']=1
            wmsplit['TTbarFS_ID']=1
                                    
            #import pprint
            #pprint.pprint(wmsplit)            
        except:
            print "Not set up for step splitting"
            wmsplit={}

        acqEra=False
        for (n,dir) in directories.items():
            chainDict=copy.deepcopy(self.defaultChain)
            print "inspecting",dir
            nextHasDSInput=None
            for (x,s) in mReader.workFlowSteps.items():
                #x has the format (num, prefix)
                #s has the format (num, name, commands, stepList)
                if x[0]==n:
                    #print "found",n,s[3]
                    #chainDict['RequestString']='RV'+chainDict['CMSSWVersion']+s[1].split('+')[0]
                    index=0
                    splitForThisWf=None
                    thisLabel=self.speciallabel
                    processStrPrefix=''
                    setPrimaryDs=None
                    for step in s[3]:
                        
                        if 'INPUT' in step or (not isinstance(s[2][index],str)):
                            nextHasDSInput=s[2][index]

                        else:

                            if (index==0):
                                #first step and not input -> gen part
                                chainDict['nowmTasklist'].append(copy.deepcopy(self.defaultScratch))
                                try:
                                    chainDict['nowmTasklist'][-1]['nowmIO']=json.loads(open('%s/%s.io'%(dir,step)).read())
                                except:
                                    print "Failed to find",'%s/%s.io'%(dir,step),".The workflows were probably not run on cfg not created"
                                    return -15

                                chainDict['nowmTasklist'][-1]['PrimaryDataset']='RelVal'+s[1].split('+')[0]
                                if not '--relval' in s[2][index]:
                                    print 'Cannot create a task from scratch without the splitting information passed via --relval'
                                    return -12
                                else:
                                    arg=s[2][index].split()
                                    ns=map(int,arg[arg.index('--relval')+1].split(','))
                                    chainDict['nowmTasklist'][-1]['RequestNumEvents'] = ns[0]
                                    chainDict['nowmTasklist'][-1]['EventsPerJob'] = ns[1]
                                if 'FASTSIM' in s[2][index] or '--fast' in s[2][index]:
                                    thisLabel+='_FastSim'
                                if 'lhe' in s[2][index]:
                                    chainDict['nowmTasklist'][-1]['LheInputFiles'] =True

                            elif nextHasDSInput:
                                chainDict['nowmTasklist'].append(copy.deepcopy(self.defaultInput))
                                try:
                                    chainDict['nowmTasklist'][-1]['nowmIO']=json.loads(open('%s/%s.io'%(dir,step)).read())
                                except:
                                    print "Failed to find",'%s/%s.io'%(dir,step),".The workflows were probably not run on cfg not created"
                                    return -15
                                chainDict['nowmTasklist'][-1]['InputDataset']=nextHasDSInput.dataSet
                                splitForThisWf=nextHasDSInput.split
                                chainDict['nowmTasklist'][-1]['LumisPerJob']=splitForThisWf
                                if step in wmsplit:
                                    chainDict['nowmTasklist'][-1]['LumisPerJob']=wmsplit[step]
                                # get the run numbers or #events
                                if len(nextHasDSInput.run):
                                    chainDict['nowmTasklist'][-1]['RunWhitelist']=nextHasDSInput.run
                                #print "what is s",s[2][index]
                                if '--data' in s[2][index] and nextHasDSInput.label:
                                    thisLabel+='_RelVal_%s'%nextHasDSInput.label
                                if 'filter' in chainDict['nowmTasklist'][-1]['nowmIO']:
                                    print "This has an input DS and a filter sequence: very likely to be the PyQuen sample"
                                    processStrPrefix='PU_'
                                    setPrimaryDs = 'RelVal'+s[1].split('+')[0]
                                    if setPrimaryDs:
                                        chainDict['nowmTasklist'][-1]['PrimaryDataset']=setPrimaryDs
                                nextHasDSInput=None
                            else:
                                #not first step and no inputDS
                                chainDict['nowmTasklist'].append(copy.deepcopy(self.defaultTask))
                                try:
                                    chainDict['nowmTasklist'][-1]['nowmIO']=json.loads(open('%s/%s.io'%(dir,step)).read())
                                except:
                                    print "Failed to find",'%s/%s.io'%(dir,step),".The workflows were probably not run on cfg not created"
                                    return -15
                                if splitForThisWf:
                                    chainDict['nowmTasklist'][-1]['LumisPerJob']=splitForThisWf
                                if step in wmsplit:
                                    chainDict['nowmTasklist'][-1]['LumisPerJob']=wmsplit[step]

                            #print step
                            chainDict['nowmTasklist'][-1]['TaskName']=step
                            if setPrimaryDs:
                                chainDict['nowmTasklist'][-1]['PrimaryDataset']=setPrimaryDs
                            chainDict['nowmTasklist'][-1]['ConfigCacheID']='%s/%s.py'%(dir,step)
                            chainDict['nowmTasklist'][-1]['GlobalTag']=chainDict['nowmTasklist'][-1]['nowmIO']['GT'] # copy to the proper parameter name
                            chainDict['GlobalTag']=chainDict['nowmTasklist'][-1]['nowmIO']['GT'] #set in general to the last one of the chain
                            if 'pileup' in chainDict['nowmTasklist'][-1]['nowmIO']:
                                chainDict['nowmTasklist'][-1]['MCPileup']=chainDict['nowmTasklist'][-1]['nowmIO']['pileup']
                            if '--pileup ' in s[2][index]:      # catch --pileup (scenario) and not --pileup_ (dataset to be mixed) => also works when making PRE-MIXed datasets
                                processStrPrefix='PU_'          # take care of pu overlay done with GEN-SIM mixing
                                if (  s[2][index].split()[  s[2][index].split().index('--pileup')+1 ]  ).find('25ns')  > 0 :
                                    processStrPrefix='PU25ns_'
                                elif   (  s[2][index].split()[  s[2][index].split().index('--pileup')+1 ]  ).find('50ns')  > 0 :
                                    processStrPrefix='PU50ns_'
                            if 'DIGIPREMIX_S2' in s[2][index] : # take care of pu overlay done with DIGI mixing of premixed events
                                if s[2][index].split()[ s[2][index].split().index('--pileup_input')+1  ].find('25ns')  > 0 :
                                    processStrPrefix='PUpmx25ns_'
                                elif s[2][index].split()[ s[2][index].split().index('--pileup_input')+1  ].find('50ns')  > 0 :
                                    processStrPrefix='PUpmx50ns_'

                            if acqEra:
                                #chainDict['AcquisitionEra'][step]=(chainDict['CMSSWVersion']+'-PU_'+chainDict['nowmTasklist'][-1]['GlobalTag']).replace('::All','')+thisLabel
                                chainDict['AcquisitionEra'][step]=chainDict['CMSSWVersion']
                                chainDict['ProcessingString'][step]=processStrPrefix+chainDict['nowmTasklist'][-1]['GlobalTag'].replace('::All','')+thisLabel
                            else:
                                #chainDict['nowmTasklist'][-1]['AcquisitionEra']=(chainDict['CMSSWVersion']+'-PU_'+chainDict['nowmTasklist'][-1]['GlobalTag']).replace('::All','')+thisLabel
                                chainDict['nowmTasklist'][-1]['AcquisitionEra']=chainDict['CMSSWVersion']
                                chainDict['nowmTasklist'][-1]['ProcessingString']=processStrPrefix+chainDict['nowmTasklist'][-1]['GlobalTag'].replace('::All','')+thisLabel

                        index+=1
                    #end of loop through steps
                    chainDict['RequestString']='RV'+chainDict['CMSSWVersion']+s[1].split('+')[0]
                    if processStrPrefix or thisLabel:
                        chainDict['RequestString']+='_'+processStrPrefix+thisLabel

                        
                        
            #wrap up for this one
            import pprint
            #print 'wrapping up'
            #pprint.pprint(chainDict)
            #loop on the task list
            for i_second in reversed(range(len(chainDict['nowmTasklist']))):
                t_second=chainDict['nowmTasklist'][i_second]
                #print "t_second taskname", t_second['TaskName']
                if 'primary' in t_second['nowmIO']:
                    #print t_second['nowmIO']['primary']
                    primary=t_second['nowmIO']['primary'][0].replace('file:','')
                    for i_input in reversed(range(0,i_second)):
                        t_input=chainDict['nowmTasklist'][i_input]
                        for (om,o) in t_input['nowmIO'].items():
                            if primary in o:
                                #print "found",primary,"procuced by",om,"of",t_input['TaskName']
                                t_second['InputTask'] = t_input['TaskName']
                                t_second['InputFromOutputModule'] = om
                                #print 't_second',pprint.pformat(t_second)
                                if t_second['TaskName'].startswith('HARVEST'):
                                    chainDict.update(copy.deepcopy(self.defaultHarvest))
                                    chainDict['DQMConfigCacheID']=t_second['ConfigCacheID']
                                    ## the info are not in the task specific dict but in the general dict
                                    #t_input.update(copy.deepcopy(self.defaultHarvest))
                                    #t_input['DQMConfigCacheID']=t_second['ConfigCacheID']
                                break

            ## there is in fact only one acquisition era
            #if len(set(chainDict['AcquisitionEra'].values()))==1:
            #    print "setting only one acq"
            if acqEra:
                chainDict['AcquisitionEra'] = chainDict['AcquisitionEra'].values()[0]
                
            ## clean things up now
            itask=0
            if self.keep:
                for i in self.keep:
                    if type(i)==int and i < len(chainDict['nowmTasklist']):
                        chainDict['nowmTasklist'][i]['KeepOutput']=True
            for (i,t) in enumerate(chainDict['nowmTasklist']):
                if t['TaskName'].startswith('HARVEST'):
                    continue
                if not self.keep:
                    t['KeepOutput']=True
                elif t['TaskName'] in self.keep:
                    t['KeepOutput']=True
                t.pop('nowmIO')
                itask+=1
                chainDict['Task%d'%(itask)]=t


            ## 


            ## provide the number of tasks
            chainDict['TaskChain'] = itask  # not len(chainDict['nowmTasklist']): HARVEST tasks were skipped above
            
            chainDict.pop('nowmTasklist')
            self.chainDicts[n]=chainDict

            
        return 0
Example no. 57
0
            for line in in_file:
                ls = line.strip().split('\t')
                sent_id = ls[0]
                document = ls[1].strip()

                # The code expects the document to contains exactly one sentence.
                # document = 'The men, crowded upon each other, stared stupidly like a flock of sheep.'
                # print('document: {0}'.format(document))

                # Parse the text
                annotations = get_stanford_annotations(
                    document,
                    port=9000,
                    annotators='tokenize,ssplit,pos,lemma,depparse')
                # The `encoding` kwarg of json.loads is ignored on Python 3
                # (and was removed in 3.9); strict=False tolerates control
                # characters inside strings.
                annotations = json.loads(annotations, strict=False)
                tokens = annotations['sentences'][0]['tokens']

                # Load Stanford CoreNLP's dependency tree into a networkx graph
                edges = []
                dependencies = {}
                root_index = annotations['sentences'][0]['basic-dependencies'][0]["dependent"]
                for edge in annotations['sentences'][0]['basic-dependencies']:
                    edges.append((edge['governor'], edge['dependent']))
                    dependencies[(edge['governor'], edge['dependent'])] = edge

                graph = nx.DiGraph(edges)

                # Find the shortest path
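
A plausible continuation of the final comment (nx.shortest_path over the
dependency graph; the target token index is a hypothetical stand-in):

                # Hypothetical target: the last token of the sentence.
                target_index = len(tokens)
                path = nx.shortest_path(graph, source=root_index, target=target_index)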
Example no. 58
0
    def generate(self):
        self.logger.debug("Generating %s events" % self.incident_id)

        service = client.Service(token=self.metadata.searchinfo.session_key)

        # Check if configuration exists for collect_data_results
        try:
            collect_data_results = service.confs['alert_manager']['settings']['collect_data_results']
        except:
            raise RuntimeWarning('Specified setting ""collect_data_results" in "alert_manager.conf" does not exist.')

        # Check if configuration exists for index_data_results
        try:
            index_data_results = service.confs['alert_manager']['settings']['index_data_results']
        except:
            raise RuntimeWarning('Specified setting ""index_data_results" in "alert_manager.conf" does not exist.')

        # Fetch Results from KV Store by default if enabled
        if collect_data_results == '1':
            service.namespace['owner'] = "Nobody"

            collection_name = "incident_results"
            collection = service.kvstore[collection_name]

            query_dict = {}
            query_dict['incident_id'] = self.incident_id
            query = json.dumps(query_dict)

            data = collection.data.query(query=query)
                                        
            for fields in data[0].get("fields"):
                yield fields

        # If KV Store Data is not enabled, get indexed data
        elif index_data_results == '1' and collect_data_results == '0':
            # Get index location
            try:
                index = service.confs['alert_manager']['settings']['index']
            except:
                raise RuntimeWarning('Specified setting ""index_data_results" in "alert_manager.conf" does not exist.')

            # Get earliest time first for incident results
            service.namespace['owner'] = "Nobody"

            collection_name = "incidents"
            collection = service.kvstore[collection_name]

            query_dict = {}
            query_dict['incident_id'] = self.incident_id
            query = json.dumps(query_dict)

            data = collection.data.query(query=query)
            earliest_time = data[0].get("alert_time")

            # Fetch events
            events = []

            kwargs_oneshot = {"earliest_time": earliest_time, "latest_time": "now"}

            searchquery_oneshot = "search index={} sourcetype=alert_data_results incident_id={} |dedup incident_id".format(index, self.incident_id)
            oneshotsearch_results = service.jobs.oneshot(searchquery_oneshot, **kwargs_oneshot)
            reader = results.ResultsReader(oneshotsearch_results)

            for result in reader:
                for k, v in result.items():
                    if k == '_raw':
                        events.append(json.loads(v))

            for event in events:
                event_fields = event.get("fields")
                
                for fields in event_fields:
                    yield(fields)

        else:
            yield({'Error': 'Indexing/KV Store Collection of Results is not enabled. Please enable under Global Settings.'})             
Example no. 59
0
 logger.info('PATCH: user agreement updates: %r', ua_updates)
 
 headers = {
     'Content-Type': 'application/json',
     HEADER_APILOG_COMMENT_CLIENT: args.comment
 }
 logger.info('headers: %r', headers)
 _url = useragreement_url
 if args.test_only is True:
     _url += '?test_only=true'
 r = django_requests.patch(
     _url, session,
     data = json.dumps(ua_updates),
     headers = headers)
 if r.status_code not in [200]:
     content = json.loads(r.content)
     if args.test_only is True:
         logger.info('"test_only" run: response: %r', content)
     else:
         raise Exception("Error: status: %s, %s" 
             % (r.status_code, content))
 else:
     content = json.loads(r.content)
     logger.info('PATCH result: %r', content)
     logger.info('content: %r', content.keys())
     logger.info('meta: %r', content[SCHEMA.API_RESULT_META])
     
 # Send the Admin email
 (msg_subject, msg_body_lines) = \
     EMAIL_MESSAGE_TEMPLATES['msg_admin_notification']
 msg_subject = fill_parms(msg_subject)
Example no. 60
0
wkt1= wkt1.replace("Point (", "").replace(")", "").replace(" ","%20")
wkt2 = wkt2.replace("Point (", "").replace(")", "").replace(" ","%20")
print "http://localhost:8080/routing/shortestSubPath/"+wkt1+"/"+wkt2+"";

if wkt1 == "" or wkt2 == "" :
    sys.exit('Select the features')

response = requests.get("http://localhost:8080/routing/shortestSubPath/"+wkt1+"/"+wkt2+"");
print response.status_code

if response.status_code != 200 :
    print response.content
    sys.exit( response.content)

#print response.content
obj =  json.loads( response.content)
wkt = str(obj["geometry"]["coordinates"])

layer =  QgsVectorLayer('LineString?crs=epsg:4326', 'lines' , "memory")
pr = layer.dataProvider() 
# create the feature that will hold the routed line
pt = QgsFeature()

wkt = wkt.replace("[", "").replace("],", "*").replace(",", " ").replace("*", ",").replace("]]","")

wkt = "LINESTRING ("+wkt+")" 

pt.setGeometry(QgsGeometry.fromWkt(wkt))
pr.addFeatures([pt])
# update extent of the layer
layer.updateExtents()
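
The chain of string replaces above is fragile; a sketch of the same
coordinates-to-WKT conversion done directly on the parsed JSON (assuming
"coordinates" is a list of [x, y] pairs):

coords = obj["geometry"]["coordinates"]
wkt = "LINESTRING (" + ", ".join("%s %s" % (x, y) for x, y in coords) + ")"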