def add_note(request):
    """
    An ajax view for adding notes to the clients in admin.

    Expects an AJAX POST carrying ``ClientNoteForm`` fields; responds with a
    serialized dict holding either an ``error`` message or the saved note.
    """
    if not request.method == 'POST' or not request.is_ajax():
        raise Http404
    form = ClientNoteForm(request.POST)
    if not form.is_valid():
        # Map the first recognised field error to a user-facing message.
        if 'datetime' in form.errors:
            err_msg = _(u'Zadejte prosím platné datum a čas.')
        elif 'text' in form.errors:
            err_msg = _(u'Zadejte prosím neprázdný text.')
        elif 'client' in form.errors:
            err_msg = _(u'Zadaný klient neexistuje. (Nebyl mezitím smazán?)')
        else:
            # BUGFIX: previously err_msg stayed unbound (NameError) when the
            # form failed on any other field; provide a generic fallback.
            err_msg = _(u'Formulář obsahuje chyby.')
        return HttpResponse(serialize({'error': err_msg}))
    client_note = form.save(commit=False)
    client_note.author = request.user
    client_note.save()
    ret = {
        'id': client_note.pk,
        'author': client_note.author.username,
        'datetime': format(client_note.datetime, get_format('DATETIME_FORMAT')),
        'text': client_note.text,
    }
    return HttpResponse(serialize(ret))
def get_build_jobs(request, uid=None):
    """
    URL handler returning the list of jobs spawned by a specific build uid.

    uid - the 32 digit alphanumeric build uid
    """
    if not uid:
        payload = json.serialize({'message': 'valid uid required'})
        return HttpResponseBadRequest(payload, content_type="application/json")
    try:
        client = redis_client('briar-patch')
        hashes = client.smembers("build:%s" % uid)
    except RedisError as exc:
        logger.error('redis error: %s', exc)
        hashes = set()
    result = []
    for build_hash in hashes:
        # Hash layout: "<type>:<uid>.<master>.<build_number>"
        job_type, job_info = build_hash.split(":")
        job_uid, master, build_number = job_info.split(".")
        result.append({'type': job_type,
                       'uid': job_uid,
                       'master': master,
                       'build_number': build_number})
    return HttpResponse(json.serialize(result), content_type="application/json")
def instance_activate(request):
    """Called when an instance is done provisioning.

    Requires two GET parameters:
     - site_api_key: for security reasons
     - instance: ID of the instance to activate
    """
    if request.GET.get('site_api_key', '') != settings.SITE_API_KEY:
        failure = {"status": "failure", "reason": 'wrong key'}
        return HttpResponse(serialize(failure), mimetype="application/json")
    instance = get_object_or_404(Instance, pk=request.GET['instance'])
    instance.active = True
    instance.save()
    success = {"status": "success", "retval": instance.domain}
    return HttpResponse(serialize(success), mimetype="application/json")
def get_machine_events(request, event_type=None):
    """
    URL handler that returns machine events and counts for how often
    they have occured.

    event_type - restrict event types to one of `connect`, `disconnect`, `build`
    """
    if event_type not in ('connect', 'disconnect', 'build'):
        # BUGFIX: the message previously said `builds` although only
        # `build` is accepted.
        return HttpResponseBadRequest(
            json.serialize({'message': ('event_type must be one of',
                                        '`connect`, `disconnect`, or `build`')}),
            content_type="application/json")
    redis_key = "metrics:%s" % (event_type)
    try:
        # BUGFIX: client name was 'brair-patch', inconsistent with the
        # 'briar-patch' used by get_build_jobs — presumed typo; confirm
        # against the redis client registry.
        r = redis_client('briar-patch')
        events = r.hgetall(redis_key)
    except RedisError as e:
        logger.error('redis error: %s', e)
        events = {}
    metrics = []
    for k, v in events.iteritems():
        event, machine = k.rsplit(':', 1)
        # BUGFIX: use equality, not identity (`is`), to compare strings.
        if event_type == 'build':
            # build keys carry an extra "<type>:" prefix
            t, event = event.split(':')
        else:
            t = "machine"
        metrics.append({'type': t, 'event': event, 'count': v,
                        'machine_name': machine})
    return HttpResponse(json.serialize(metrics), content_type="application/json")
def make_periodic_interval_task(name, task, every_sec, *args, **kwargs):
    '''creates a new periodic interval task, with name=name, updates fields if already exists'''
    pt, created = PeriodicTask.objects.get_or_create(name=name)
    # Switching from a crontab-based schedule: drop the crontab link and,
    # when it is no longer referenced, the crontab row itself.
    if pt.crontab:
        print("Warning, old task entry had a crontab")
        if pt.crontab.periodictask_set.count() == 1:
            print("Was the only one used , deleting it")
            old_crontab = pt.crontab
            old_crontab.delete()
        pt.crontab = None
    # Reuse the existing interval row when present, else start a fresh one.
    schedule = pt.interval if pt.interval else IntervalSchedule()
    schedule.period = "seconds"
    schedule.every = str(every_sec)
    schedule.save()
    pt.interval = schedule
    pt.task = task
    pt.args = anyjson.serialize(args)
    pt.kwargs = anyjson.serialize(kwargs)
    pt.save()
    print("saved task: %s, created: %s" % (str(pt), str(created)))
def instance_activate(request):
    """Called when an instance is done provisioning.

    :param request: the request object

    Requires two GET Parameters:
     - site_api_key: for security reasons
     - instance: ID of the instance to activate

    :returns: a http response with data
    """
    supplied_key = request.GET.get('site_api_key', '')
    if supplied_key != settings.SITE_API_KEY:
        payload = serialize({"status": "failure", "reason": 'wrong key'})
        return HttpResponse(payload, mimetype="application/json")
    instance = get_object_or_404(Instance, pk=request.GET['instance'])
    instance.active = True
    instance.save()
    payload = serialize({"status": "success", "retval": instance.domain})
    return HttpResponse(payload, mimetype="application/json")
def make_periodic_crontab_task(name, task, minute, hour, day_of_week, *args, **kwargs):
    '''creates a new periodic crontab task, with name=name, updates fields if already exists'''
    pt, created = PeriodicTask.objects.get_or_create(name=name)
    # Switching from an interval-based schedule: drop the interval link and,
    # when it is no longer referenced, the interval row itself.
    if pt.interval:
        print("Warning, old task entry had an interval")
        if pt.interval.periodictask_set.count() == 1:
            print("Was the only one, deleting it")
            old_interval = pt.interval
            old_interval.delete()
        pt.interval = None
    # Reuse the existing crontab row when present, else start a fresh one.
    schedule = pt.crontab if pt.crontab else CrontabSchedule()
    schedule.minute = minute
    schedule.hour = hour
    schedule.day_of_week = day_of_week
    schedule.save()
    pt.crontab = schedule
    pt.task = task
    pt.args = anyjson.serialize(args)
    pt.kwargs = anyjson.serialize(kwargs)
    pt.save()
    print("saved task: %s, created: %s" % (str(pt), str(created)))
def post(self, request, *args, **kwargs):
    """
    Overloading post to update the stage and closed_date attributes for a Deal object.

    Expects POST data containing exactly one key, 'stage'. Stages 1 and 3
    are closed states (closed_date stamped); 0 and 2 are open (cleared).
    Responds with the serialized stage name and, when set, closed_date.
    """
    try:
        object_id = kwargs.pop('pk')
        instance = Deal.objects.get(pk=object_id)
        if 'stage' in request.POST.keys() and len(request.POST.keys()) == 1:
            instance.stage = int(request.POST['stage'])
            if instance.stage in [1, 3]:
                instance.closed_date = datetime.datetime.utcnow().replace(tzinfo=utc)
            elif instance.stage in [0, 2]:
                instance.closed_date = None
            instance.save()
        else:
            messages.error(self.request, _('Stage could not be changed'))
            raise Http404()
    # BUGFIX: was a bare `except:` which also re-caught the Http404 raised
    # just above, adding the error message twice before re-raising.
    except (KeyError, ValueError, IndexError, Deal.DoesNotExist):
        messages.error(self.request, _('Stage could not be changed'))
        raise Http404()
    else:
        message = _('Stage has been changed to') + ' ' + unicode(Deal.STAGE_CHOICES[instance.stage][1])
        messages.success(self.request, message)
        stage = unicode(Deal.STAGE_CHOICES[instance.stage][1])
        # Return response
        if instance.closed_date is None:
            return HttpResponse(anyjson.serialize({'stage': stage}), content_type='application/json')
        else:
            # Render the close timestamp in the configured local timezone.
            closed_date_local = instance.closed_date.astimezone(timezone(settings.TIME_ZONE))
            response = anyjson.serialize({'closed_date': closed_date_local.strftime('%d %b %y %H:%M'),
                                          'stage': stage})
            return HttpResponse(response, content_type='application/json')
def add_note(request):
    """
    An ajax view for adding notes to the clients in admin.

    Like the sibling add_note view but also returns the note timestamp in
    ISO format alongside the locale-formatted one.
    """
    if not request.method == 'POST' or not request.is_ajax():
        raise Http404
    form = ClientNoteForm(request.POST)
    if not form.is_valid():
        if 'datetime' in form.errors:
            # BUGFIX: grammar — 'platné' agrees with the neuter noun 'datum'
            # (matches the sibling view's wording).
            err_msg = _(u'Zadejte prosím platné datum a čas.')
        elif 'text' in form.errors:
            err_msg = _(u'Zadejte prosím neprázdný text.')
        elif 'client' in form.errors:
            err_msg = _(u'Zadaný klient neexistuje. (Nebyl mezitím smazán?)')
        else:
            # BUGFIX: previously err_msg stayed unbound (NameError) when the
            # form failed on any other field; provide a generic fallback.
            err_msg = _(u'Formulář obsahuje chyby.')
        return HttpResponse(serialize({'error': err_msg}))
    client_note = form.save(commit=False)
    client_note.author = request.user
    client_note.save()
    ret = {
        'id': client_note.pk,
        'author': client_note.author.username,
        'datetime_iso': client_note.datetime.isoformat(),
        'datetime_formatted': format(client_note.datetime, get_format('DATETIME_FORMAT')),
        'text': client_note.text,
    }
    return HttpResponse(serialize(ret))
def post(self, request, *args, **kwargs):
    """
    Overloading post to update the stage and closed_date attributes for a Deal object.

    Expects POST data containing exactly one key, 'stage'. Stages 1 and 3
    are closed states (closed_date stamped); 0 and 2 are open (cleared).
    Responds with the serialized stage name and, when set, closed_date.
    """
    try:
        object_id = kwargs.pop('pk')
        instance = Deal.objects.get(pk=object_id)
        if 'stage' in request.POST.keys() and len(request.POST.keys()) == 1:
            instance.stage = int(request.POST['stage'])
            if instance.stage in [1, 3]:
                instance.closed_date = datetime.datetime.utcnow().replace(tzinfo=utc)
            elif instance.stage in [0, 2]:
                instance.closed_date = None
            instance.save()
        else:
            messages.error(self.request, _('Stage could not be changed'))
            raise Http404()
    # BUGFIX: was a bare `except:` which also re-caught the Http404 raised
    # just above, adding the error message twice before re-raising.
    except (KeyError, ValueError, IndexError, Deal.DoesNotExist):
        messages.error(self.request, _('Stage could not be changed'))
        raise Http404()
    else:
        message = _('Stage has been changed to') + ' ' + Deal.STAGE_CHOICES[instance.stage][1]
        messages.success(self.request, message)
        stage = Deal.STAGE_CHOICES[instance.stage][1]
        # Return response
        if instance.closed_date is None:
            return HttpResponse(anyjson.serialize({'stage': stage}), content_type='application/json')
        else:
            # Render the close timestamp in the configured local timezone.
            closed_date_local = instance.closed_date.astimezone(timezone(settings.TIME_ZONE))
            response = anyjson.serialize({'closed_date': closed_date_local.strftime('%d %b %y %H:%M'),
                                          'stage': stage})
            return HttpResponse(response, content_type='application/json')
def _notify(self, priority, ctxt, event_type, payload):
    payload = self._serializer.serialize_entity(ctxt, payload)
    # NOTE(sileht): simulate the kombu serializer — raises if anything in
    # the payload was not serialized correctly.
    anyjson.serialize(payload)
    NOTIFICATIONS.append(
        FakeMessage(self.publisher_id, priority, event_type, payload))
def test_queue_is_empty_after_purge(self):
    # Purging a queue with pending items must leave it empty.
    conn = create_connection(1)
    queue = conn.Queue("test_queue")
    queue.put(serialize({"name": "George Constanza"}))
    queue.put(serialize({"name": "George Constanza"}))
    queue.purge()
    self.assertRaises(Empty, queue.get)
def handle_new(self, tweet):
    # Persist a newly received tweet: pick the full text for extended
    # tweets, upsert the row in `tweets`, and re-flag the tweet if its
    # delete notice arrived before the tweet itself.
    if 'extended_tweet' in tweet:
        log.info("Extended tweet {0}", tweet.get('extended_tweet'))
        tweet_text = tweet.get('extended_tweet', {}).get('full_text')
    else:
        tweet_text = tweet.get('text')
    log.notice("New tweet {tweet} from user {user_id}/{screen_name}",
               tweet=tweet.get('id'),
               user_id=tweet.get('user', {}).get('id'),
               screen_name=tweet.get('user', {}).get('screen_name'))
    log.notice("Full text: {0}", tweet_text)
    self.handle_possible_rename(tweet)
    cursor = self.database.cursor()
    cursor.execute("""SELECT COUNT(*), `deleted` FROM `tweets` WHERE `id` = %s""", (tweet['id'],))
    info = cursor.fetchone()
    num_previous = info[0]
    # `deleted` is NULL unless a delete notice was stored beforehand.
    if info[1] is not None:
        was_deleted = (int(info[1]) == 1)
    else:
        was_deleted = False
    retweeted_id = None
    retweeted_content = None
    retweeted_user_name = None
    if 'retweeted_status' in tweet:
        retweeted_id = tweet['retweeted_status']['id']
        retweeted_content = replace_highpoints(tweet['retweeted_status']['text'])
        retweeted_user_name = tweet['retweeted_status']['user']['screen_name']
    if num_previous > 0:
        # Row already exists (tweet seen before) — update it in place.
        cursor.execute("""UPDATE `tweets` SET `user_name` = %s, `politician_id` = %s, `content` = %s, `tweet`=%s, `retweeted_id`=%s, `retweeted_content`=%s, `retweeted_user_name`=%s, `modified`= NOW() WHERE id = %s""",
                       (tweet['user']['screen_name'], self.users[tweet['user']['id']],
                        replace_highpoints(tweet_text, ""),
                        replace_highpoints(anyjson.serialize(tweet), ""),
                        retweeted_id, retweeted_content, retweeted_user_name,
                        tweet['id']))
        log.info("Updated tweet {0}", tweet.get('id'))
    else:
        cursor.execute("""INSERT INTO `tweets` (`id`, `user_name`, `politician_id`, `content`, `created`, `modified`, `tweet`, retweeted_id, retweeted_content, retweeted_user_name) VALUES(%s, %s, %s, %s, NOW(), NOW(), %s, %s, %s, %s)""",
                       (tweet['id'], tweet['user']['screen_name'],
                        self.users[tweet['user']['id']],
                        replace_highpoints(tweet_text, ""),
                        replace_highpoints(anyjson.serialize(tweet), ""),
                        retweeted_id, retweeted_content, retweeted_user_name))
        # NOTE(review): this log message was split across physical lines in
        # the extracted source; reconstructed as a single string.
        log.info("Inserted new tweet {0}", tweet.get('id'))
    if was_deleted:
        log.warn("Tweet deleted {0} before it came!", tweet.get('id'))
        self.copy_tweet_to_deleted_table(tweet['id'])
def from_entry(cls, name, skip_fields=("relative", "options"), **entry):
    # Build a model-backed entry from a schedule-config dict: strip the
    # unsupported fields, translate the schedule to its model counterpart,
    # and JSON-encode the task arguments.
    fields = dict(entry)
    for unwanted in skip_fields:
        fields.pop(unwanted, None)
    model_schedule, model_field = cls.to_model_schedule(fields.pop("schedule"))
    fields[model_field] = model_schedule
    fields["args"] = serialize(fields.get("args") or [])
    fields["kwargs"] = serialize(fields.get("kwargs") or {})
    return cls(PeriodicTask._default_manager.update_or_create(name=name,
                                                              defaults=fields))
def post(self, request, integration_type):
    """
    Get the authentication URL for the given integration type.
    """
    client_id = request.POST.get('client_id')
    client_secret = request.POST.get('client_secret')
    integration_context = request.POST.get('integration_context')
    if integration_context:
        integration_context = anyjson.loads(integration_context)
    # Validate the credentials posted by the user.
    errors = {}
    if not client_id:
        errors['client_id'] = ['Please enter a valid client ID']
    if not client_secret:
        errors['client_secret'] = ['Please enter a valid client secret']
    if errors:
        return HttpResponseBadRequest(anyjson.serialize(errors),
                                      content_type='application/json')
    integration_type = IntegrationType.objects.get(name__iexact=integration_type)
    redirect_uri = request.build_absolute_uri()
    params = {
        'client_id': client_id,
        'client_secret': client_secret,
        'redirect_uri': redirect_uri,
        'scope': integration_type.scope,
        'response_type': 'code',
    }
    # Persist the credentials so the OAuth callback can retrieve them.
    details, created = IntegrationDetails.objects.get_or_create(type=integration_type)
    storage = Storage(IntegrationCredentials, 'details', details, 'credentials')
    storage.put(LilyOAuthCredentials(
        client_id=client_id,
        client_secret=client_secret,
        redirect_uri=redirect_uri,
        integration_context=integration_context,
    ))
    auth_url = integration_type.auth_url + urllib.urlencode(params)
    return HttpResponse(anyjson.serialize({'url': auth_url}),
                        content_type='application/json')
def update(self, instance, validated_data):
    # Update a Case: auto-archive on 'Closed', maintain the newly_assigned
    # flag, and broadcast (un)assignment events to the tenant's websocket
    # group. Order of the flag updates below matters — later blocks can
    # overwrite newly_assigned set by earlier ones.
    user = self.context.get('request').user
    status_id = validated_data.get('status', instance.status_id)
    assigned_to = validated_data.get('assigned_to')
    if assigned_to:
        assigned_to = assigned_to.get('id')
    # `status` may arrive as a nested dict ({'id': ...}) instead of a pk.
    if isinstance(status_id, dict):
        status_id = status_id.get('id')
    status = CaseStatus.objects.get(pk=status_id)
    # Automatically archive the case if the status is set to 'Closed'.
    if status.name == 'Closed' and 'is_archived' not in validated_data:
        validated_data.update({'is_archived': True})
    # Check if the case being reassigned. If so we want to notify that user.
    if assigned_to and assigned_to != user.pk:
        validated_data.update({
            'newly_assigned': True,
        })
    elif 'assigned_to' in validated_data and not assigned_to:
        # Case is unassigned, so clear newly assigned flag.
        validated_data.update({
            'newly_assigned': False,
        })
    if (('status' in validated_data and status.name == 'Open') or
            ('is_archived' in validated_data and not validated_data.get('is_archived'))):
        # Case is reopened or unarchived, so we want to notify the user again.
        validated_data.update({
            'newly_assigned': True,
        })
    if 'assigned_to' in validated_data or instance.assigned_to_id:
        # Notify the tenant group that an assignment (change) happened.
        Group('tenant-%s' % user.tenant.id).send({
            'text': anyjson.serialize({
                'event': 'case-assigned',
            }),
        })
    if (not instance.assigned_to_id or
            instance.assigned_to_id and 'assigned_to' in validated_data and
            not validated_data.get('assigned_to')):
        # Case had no assignee, or the assignee is being cleared.
        Group('tenant-%s' % user.tenant.id).send({
            'text': anyjson.serialize({
                'event': 'case-unassigned',
            }),
        })
    return super(CaseSerializer, self).update(instance, validated_data)
def _notify(self, priority, ctxt, event_type, payload):
    payload = self._serializer.serialize_entity(ctxt, payload)
    # NOTE(sileht): simulate the kombu serializer — raises if anything in
    # the payload was not serialized correctly.
    anyjson.serialize(payload)
    self.notifications.append(dict(publisher_id=self.publisher_id,
                                   priority=priority,
                                   event_type=event_type,
                                   payload=payload))
def index(self, doc, index, docType, id=None, parent=None, forceInsert=None,
          bulk=False, version=None, querystringArgs=None):
    """
    Index a dict into a specific index and make it searchable
    """
    self.refreshed = False
    if bulk:
        # Bulk mode: queue an action line plus the document source and
        # let flushBulk decide when to send.
        action = "create" if forceInsert else "index"
        meta = {"_index": index, "_type": docType}
        if parent:
            meta["_parent"] = parent
        if version:
            meta["_version"] = version
        if id:
            meta["_id"] = id
        self.bulkData.append(anyjson.serialize({action: meta}) + '\n'
                             + anyjson.serialize(doc) + '\n')
        return self.flushBulk()
    querystringArgs = querystringArgs or {}
    if forceInsert:
        querystringArgs["opType"] = "create"
    if parent:
        querystringArgs["parent"] = parent
    if version:
        querystringArgs["version"] = version
    # PUT when the caller supplies an id, POST to let ES generate one.
    requestMethod = "PUT" if id else "POST"
    path = self._makePath([index, docType, id])
    return self._sendRequest(requestMethod, path, body=doc,
                             params=querystringArgs)
def create(self, validated_data):
    """Create a Deal, stamping closed_date for won/lost statuses and
    broadcasting (un)assignment events to the tenant websocket group."""
    user = self.context.get('request').user
    status_id = validated_data.get('status').get('id')
    status = DealStatus.objects.get(pk=status_id)
    closed_date = validated_data.get('closed_date')
    if status.is_won or status.is_lost:
        # Set closed_date if status is lost/won and not manually provided.
        if not closed_date:
            closed_date = datetime.datetime.utcnow().replace(tzinfo=utc)
    else:
        # BUGFIX: previously the else branch also discarded a manually
        # provided closed_date on won/lost deals; only clear it for
        # open statuses.
        closed_date = None
    validated_data.update({
        'created_by_id': user.pk,
        'closed_date': closed_date,
    })
    assigned_to = validated_data.get('assigned_to')
    if assigned_to:
        Group('tenant-%s' % user.tenant.id).send({
            'text': anyjson.serialize({
                'event': 'deal-assigned',
            }),
        })
        # Flag the deal when it is assigned to someone other than its creator.
        if assigned_to.get('id') != user.pk:
            validated_data.update({
                'newly_assigned': True,
            })
    else:
        Group('tenant-%s' % user.tenant.id).send({
            'text': anyjson.serialize({
                'event': 'deal-unassigned',
            }),
        })
    instance = super(DealSerializer, self).create(validated_data)
    # Track newly ceated accounts in segment.
    if not settings.TESTING:
        analytics.track(
            user.id,
            'deal-created', {
                'assigned_to_id': instance.assigned_to_id if instance.assigned_to else '',
                'status': instance.status.name,
                'next_step': instance.next_step.name,
                'creation_type': 'automatic' if is_external_referer(self.context.get('request')) else 'manual',
            },
        )
    return instance
def handle_new(self, tweet):
    # Persist a newly received tweet (insert or update), bump the stathat
    # counter, and re-flag the tweet if its delete notice arrived first.
    log.notice("New tweet {tweet} from user {user_id}/{screen_name}",
               tweet=tweet.get('id'),
               user_id=tweet.get('user', {}).get('id'),
               screen_name=tweet.get('user', {}).get('screen_name'))
    self.handle_possible_rename(tweet)
    cursor = self.database.cursor()
    cursor.execute(
        """SELECT COUNT(*), `deleted` FROM `tweets` WHERE `id` = %s""",
        (tweet['id'], ))
    info = cursor.fetchone()
    num_previous = info[0]
    # `deleted` is NULL unless a delete notice was stored beforehand.
    if info[1] is not None:
        was_deleted = (int(info[1]) == 1)
    else:
        was_deleted = False
    # cursor.execute("""SELECT COUNT(*) FROM `tweets`""")
    # total_count = cursor.fetchone()[0]
    # self._debug("Total count in table: %s" % total_count)
    retweeted_id = None
    retweeted_content = None
    retweeted_user_name = None
    # Python 2 idiom; equivalent to `'retweeted_status' in tweet`.
    if tweet.has_key('retweeted_status'):
        retweeted_id = tweet['retweeted_status']['id']
        retweeted_content = tweet['retweeted_status']['text']
        retweeted_user_name = tweet['retweeted_status']['user'][
            'screen_name']
    if num_previous > 0:
        # Row already exists (tweet seen before) — update it in place.
        cursor.execute(
            """UPDATE `tweets` SET `user_name` = %s, `politician_id` = %s, `content` = %s, `tweet`=%s, `retweeted_id`=%s, `retweeted_content`=%s, `retweeted_user_name`=%s, `modified`= NOW() WHERE id = %s""",
            (tweet['user']['screen_name'], self.users[tweet['user']['id']],
             tweet['text'], anyjson.serialize(tweet), retweeted_id,
             retweeted_content, retweeted_user_name, tweet['id']))
        log.info("Updated tweet {0}", tweet.get('id'))
    else:
        cursor.execute(
            """INSERT INTO `tweets` (`id`, `user_name`, `politician_id`, `content`, `created`, `modified`, `tweet`, retweeted_id, retweeted_content, retweeted_user_name) VALUES(%s, %s, %s, %s, NOW(), NOW(), %s, %s, %s, %s)""",
            (tweet['id'], tweet['user']['screen_name'],
             self.users[tweet['user']['id']], tweet['text'],
             anyjson.serialize(tweet), retweeted_id, retweeted_content,
             retweeted_user_name))
        log.info("Inserted new tweet {0}", tweet.get('id'))
    if was_deleted:
        log.warn("Tweet deleted {0} before it came!", tweet.get('id'))
        self.copy_tweet_to_deleted_table(tweet['id'])
    self.stathat_add_count('tweets')
def index(self, doc, index, doc_type, id=None, parent=None, force_insert=None,
          bulk=False, version=None, **query_params):
    """Index a dict into an index."""
    self.refreshed = False
    if bulk:
        # Bulk mode: queue an action line plus the document source and
        # let flush_bulk decide when to send.
        action = 'create' if force_insert else 'index'
        meta = {'_index': index, '_type': doc_type}
        if parent:
            meta['_parent'] = parent
        if version:
            meta['_version'] = version
        if id:
            meta['_id'] = id
        if 'routing' in query_params:
            meta['_routing'] = query_params['routing']
        self.bulk_data.append(anyjson.serialize({action: meta}) + '\n'
                              + anyjson.serialize(doc) + '\n')
        return self.flush_bulk()
    if force_insert:
        query_params['op_type'] = 'create'
    if parent:
        query_params['parent'] = parent
    if version:
        query_params['version'] = version
    # PUT when the caller supplies an id, POST to let ES generate one.
    request_method = 'PUT' if id else 'POST'
    path = make_path([index, doc_type, id])
    return self._send_request(request_method, path, body=doc,
                              params=query_params)
def delete(self, environ, start_response, route_vars):
    """DELETE /id: Delete specific Inception Cloud."""
    # Python 2 WSGI handler: looks up the cloud, imports the caller's
    # OpenStack auth environment, and kicks off an async destroy thread.
    id = route_vars['id']
    session = _SESSION()
    inception_cloud = session.query(InceptionCloud).get(id)
    if inception_cloud is None:
        status = '404 Not Found'
        response_headers = [('Content-type', 'text/json')]
        start_response(status, response_headers)
        return [anyjson.serialize({})]
    request_body = _read_request_body(environ)
    auth_env = anyjson.deserialize(request_body)
    opt_dict = inception_cloud.to_dict()
    response_headers = [('Content-type', 'text/json')]
    status = '200 OK'
    result = {}
    try:
        # Copy request authorization environment to local environment
        for kw in OS_AUTH_KEYWORDS:
            os.environ[kw] = auth_env[kw]
        # detach inception_cloud from our session
        ao = OrchestratorThread(opt_dict, 'destroy', inception_cloud.id)
        ao.start()
        result = {
            'action': 'delete',
            'id': opt_dict['id'],
            'prefix': opt_dict['prefix']
        }
    except KeyError as ke:
        # KeyError almost certainly means the OpenStack authorization
        # environment (OS_*) wasn't provided making this a bad request
        t, v, tb = sys.exc_info()  # type, value, traceback
        status = '400 Bad Request'
        result = {
            'exception': {
                'type': t.__name__,
                'value': v.args,
            },
        }
    except Exception:
        t, v, tb = sys.exc_info()  # type, value, traceback
        # Python 2 print statement; dumps the traceback for debugging.
        print traceback.format_tb(tb)
        status = '500 Internal Server Error'
    finally:
        # Status/headers are finalized here whichever branch ran.
        start_response(status, response_headers)
    return [anyjson.serialize(result)]
def calc_rival_nonrival_matrics_dist_norm():
    # Compute rival vs non-rival visitation metrics for each gang using a
    # distance-normalised visitation matrix, then store both measures as
    # JSON under data/<DATA_FOLDER>/metrics_dist-norm/.
    tty_polys, hbk_poly = load.loadLocPoly()
    hbk_all_tweets = load.loadAllTweets()
    hbk_user_home_loc = load.loadAllHomeLoc(hbk_poly)
    hbk_users_in_gang_t = load.loadUsersInGangTty(tty_polys, hbk_user_home_loc)
    # Different distance norm functions
    #dist_norm = calcDistNorm()
    dist_norm = calcDistNormCDF()
    visit_mat = calcVisitationMat(hbk_all_tweets, tty_polys, hbk_users_in_gang_t, dist_norm, hbk_user_home_loc)
    #print visit_mat
    # Normalisation uses the raw (un-weighted) visitation matrix.
    norm = calcNorm(calcVisitationMat(hbk_all_tweets, tty_polys, hbk_users_in_gang_t))
    measure1 = {}  # fraction of non-home visits per territory
    measure2 = {}  # same fraction, normalised by the territory's norm
    for gang_id in my.HBK_GANG_AND_RIVAL_IDS:
        measure1[gang_id] = {'rival': [], 'nonrival': []}
        measure2[gang_id] = {'rival': [], 'nonrival': []}
        # All visits by this gang excluding its own home territory.
        non_home_sum = sum(visit_mat[gang_id].values()) - visit_mat[gang_id][gang_id]
        for rival_id in my.HBK_GANG_AND_RIVAL_IDS[gang_id]:
            if gang_id != rival_id and visit_mat[gang_id][rival_id] != 0:
                frac = visit_mat[gang_id][rival_id] / float(non_home_sum)
                measure1[gang_id]['rival'].append(round(frac, 5))
                measure2[gang_id]['rival'].append(round(frac / norm[rival_id], 5))
        for non_rival_id in my.HBK_GANG_ID_LIST:
            if gang_id != non_rival_id and non_rival_id not in my.HBK_GANG_AND_RIVAL_IDS[gang_id]:
                # Skip zero entries to avoid division by zero in measure2.
                if visit_mat[gang_id][non_rival_id] != 0 and norm[non_rival_id] != 0:
                    frac = visit_mat[gang_id][non_rival_id] / float(non_home_sum)
                    measure1[gang_id]['nonrival'].append(round(frac, 5))
                    measure2[gang_id]['nonrival'].append(round(frac / norm[non_rival_id], 5))
    # Store metrics
    if not os.path.exists('data/' + my.DATA_FOLDER + 'metrics_dist-norm/'):
        os.makedirs('data/' + my.DATA_FOLDER + 'metrics_dist-norm/')
    with open('data/' + my.DATA_FOLDER + 'metrics_dist-norm/' + 'measure1.json', 'wb') as fp2:
        fp2.write(anyjson.serialize(measure1))
    with open('data/' + my.DATA_FOLDER + 'metrics_dist-norm/' + 'measure2.json', 'wb') as fp2:
        fp2.write(anyjson.serialize(measure2))
def update_layout(request):
    """This view updates the page."""
    raw_layout = request.POST.get('layout', None)
    if raw_layout is None:
        return http.HttpResponseBadRequest()
    try:
        layout = json.deserialize(raw_layout)
    except ValueError:
        # No JSON object could be decoded
        return http.HttpResponseBadRequest()
    # temporary from [u'1', u'1'] -> [1, 1]
    layout['containers'] = check_containers(layout['containers'])
    page = get_page(request.user, request.session, for_update=True)
    # Reject stale submissions (older than the stored layout).
    if layout['timestamp'] <= page.layout['timestamp']:
        return http.HttpResponseForbidden(json.serialize(['KO', 'Expired']))
    page.layout.arrange_widgets(layout['containers'])
    page.layout['timestamp'] = layout['timestamp']
    page.save()
    return http.HttpResponse()
def _request(method, url, params={}, data={}, headers={}):
    # Python 2 low-level HTTP helper: strips credentials out of the URL
    # netloc, applies basic auth, sends the request and JSON-decodes the
    # body on 2xx responses.
    # NOTE(review): mutable default arguments ({}) are shared across calls
    # — left untouched in this documentation-only pass.
    splits = urlparse.urlsplit(url)
    netloc = splits[1]
    if '@' in netloc:
        netloc_noauth = netloc.split('@')[1]
    else:
        netloc_noauth = netloc
    scheme = splits[0]
    path = splits[2]
    query = splits[3]
    fragment = splits[4]
    username = ''
    password = ''
    if '@' in netloc:
        # NOTE(review): only a password is extracted (with its first char
        # dropped) and username stays '' — looks suspicious; confirm the
        # expected URL credential format against callers.
        password = netloc.split('@')[0][1:]
    if ':' in netloc_noauth:
        netloc_noauth, port = netloc_noauth.split(':')
    else:
        port = 80
    # Rebuild the URL without the credentials part.
    url = urlparse.urlunsplit((scheme, netloc_noauth, path, query, fragment))
    if method in ['GET', 'DELETE']:
        params = urllib.urlencode(params, True)
        if params:
            if '?' not in url:
                url += '?' + params
            else:
                url += '&' + params
    connection = httplib.HTTPConnection(netloc_noauth, port)
    if username or password:
        credentials = "%s:%s" % (username, password)
        base64_credentials = base64.encodestring(credentials)
        # encodestring appends a trailing newline; strip it.
        authorization = "Basic %s" % base64_credentials[:-1]
        headers['Authorization'] = authorization
    headers['User-Agent'] = __USER_AGENT
    if data:
        body = anyjson.serialize(data)
    else:
        body = ''
    connection.request(method, url, body, headers)
    response = connection.getresponse()
    response.body = response.read()
    if _is_ok(response.status):
        if response.body:
            try:
                response.body = anyjson.deserialize(response.body)
            except ValueError, e:
                raise InvalidResponseFromServer(
                    'The JSON response could not be parsed: %s.\n%s' % (e, response.body))
        ret = response.status, response.body
    else:
        ret = response.status, None
    # NOTE(review): `ret` is built but no `return ret` is visible in this
    # extract — possibly truncated source; confirm the function returns it.
def from_entry(cls, name, skip_fields=('relative', 'options'), **entry):
    # Build a model-backed entry from a schedule-config dict: strip the
    # unsupported fields, translate the schedule to its model counterpart,
    # JSON-encode the task arguments and carry over the routing options.
    options = entry.get('options') or {}
    fields = dict(entry)
    for unwanted in skip_fields:
        fields.pop(unwanted, None)
    model_schedule, model_field = cls.to_model_schedule(fields.pop('schedule'))
    fields[model_field] = model_schedule
    fields['args'] = serialize(fields.get('args') or [])
    fields['kwargs'] = serialize(fields.get('kwargs') or {})
    for option in ('queue', 'exchange', 'routing_key'):
        fields[option] = options.get(option)
    return cls(PeriodicTask._default_manager.update_or_create(
        name=name, defaults=fields,
    ))
def __init__(self):
    # Heartbeat writer: records this process's pid and start time to a
    # file named after the running script inside `heartbeats_directory`.
    self.last_beat = datetime.datetime.now()
    config = tweetsclient.Config().get()
    try:
        self.interval = datetime.timedelta(seconds=float(config.get('tweets-client', 'heartbeat_interval')))
    except:
        logbook.warning("No heartbeat_interval configuration parameter, skipping heartbeat.")
        # NOTE(review): raising StopIteration from __init__ is unusual —
        # presumably the caller treats it as "heartbeat disabled"; confirm.
        raise StopIteration
    try:
        directory = config.get('tweets-client', 'heartbeats_directory')
    except:
        logbook.warning("No heartbeats_directory configuration parameter, skipping heartbeat.")
        raise StopIteration
    if not os.path.isdir(directory):
        logbook.warning("The heartbeats_directory parameter ({0}) is not a directory.", directory)
        raise StopIteration
    scriptname = os.path.basename(sys.argv[0])
    self.filepath = os.path.join(directory, scriptname)
    start_time = datetime.datetime.now().isoformat()
    self.pid = os.getpid()
    # Python 2 `file` builtin; writes the initial heartbeat descriptor.
    with file(self.filepath, 'w') as fil:
        fil.write(anyjson.serialize({
            'pid': self.pid,
            'started': start_time
        }))
def post(self, request, *args, **kwargs):
    """
    Set case to archived and status to last position (probably closed status)

    Arguments:
        archive (boolean): True if object should be archived, False to unarchive.
    """
    try:
        if 'id' in request.POST.keys():
            new_status = CaseStatus.objects.last()
            instance = Case.objects.get(pk=int(request.POST['id']))
            instance.is_archived = True
            instance.status = new_status
            instance.save()
        else:
            messages.error(self.request, _('Case could not be archived'))
            raise Http404()
    # BUGFIX: was a bare `except:` which also re-caught the Http404 raised
    # just above, adding the error message twice before re-raising.
    except (KeyError, ValueError, Case.DoesNotExist):
        messages.error(self.request, _('Case could not be archived'))
        raise Http404()
    else:
        message = _('Case has been archived')
        messages.success(self.request, message)
        return HttpResponse(anyjson.serialize({'archived': 'true'}),
                            content_type='application/json')
def force_build(argv=None, config=None, do_exit=True):
    # CLI helper (Python 2): POSTs a serialized {branch, changeset} payload
    # to the assignment's /force/ endpoint to trigger a build.
    # NOTE(review): `config` and `do_exit` are accepted but unused here.
    from anyjson import serialize
    parser = ArgumentParser(description='CI tools CthulhuBot Build Forcer')
    parser.add_argument(
        '--branch', type=unicode,
        help=u"What branch would You like to build"
    )
    parser.add_argument(
        '--changeset', type=unicode,
        help=u"Which hangeset would You like build"
    )
    parser.add_argument(
        'uri', type=unicode,
        help=u"Cut & paste URI of an assignment to be forced from Your CthulhuBot web interface"
    )
    namespace = parser.parse_args(argv)
    uri = urljoin(namespace.uri, "force") + "/"
    args = {}
    for i in ['branch', 'changeset']:
        if hasattr(namespace, i):
            args[i] = getattr(namespace, i)
    f = None
    try:
        # Payload is serialized JSON, quoted into a single "data" form field.
        f = urlopen(uri, data=urlencode([("data", quote_plus(serialize(args)))]))
        print f.read()
    except URLError, e:
        # Print the server's error body before propagating.
        print e.fp.read()
        raise
def test_from_message_missing_required_fields(self):
    # An empty task body must raise KeyError when decoded into a request.
    empty_body = {}
    message = Message(None, body=anyjson.serialize(empty_body),
                      backend="foo",
                      content_type="application/json",
                      content_encoding="utf-8")
    with self.assertRaises(KeyError):
        TaskRequest.from_message(message, message.decode())
def test_put__get(self):
    # A serialized item put on the queue must round-trip through get().
    conn = create_connection(1)
    queue = conn.Queue("testing")
    expected = {"name": "George Constanza"}
    queue.put(serialize(expected))
    self.assertEquals(deserialize(queue.get()), expected)
def get(self, request, integration_type, format=None):
    """
    Exchange a authorization code for an access token for the given integration type.
    """
    code = str(request.GET.get('code'))
    error = request.GET.get('error')
    if error:
        # add_message needs an HttpRequest object
        messages.error(
            self.request._request,
            _('Sorry, Please authorize Lily to use the integration.')
        )
        return HttpResponseRedirect('/#/preferences/admin/integrations/%s' % integration_type)
    credentials = get_credentials(integration_type)
    if not credentials:
        payload = anyjson.serialize({'error': 'No credentials found. Please enter your credentials again'})
        return HttpResponse(payload, content_type='application/json')
    get_access_token(credentials, integration_type, code)
    # add_message needs an HttpRequest object
    messages.success(
        self.request._request,
        _('Your credentials have been saved.')
    )
    return HttpResponseRedirect('/#/preferences/admin/integrations')
def pushEvent(self, event, ui=True, existing=None):
    """
    pushEvent(self, event)

    Creates redis transaction for:
    - Add event to beginning of list {0} as json string
    - Trim list to 100 events

    :param event: trigger state changing event
    :type event: dict
    """
    # NOTE(review): `existing` is accepted but unused in this extract.
    event_json = anyjson.serialize(event)
    # All writes happen inside one MULTI transaction.
    t = yield self.rc.multi()
    # Global event log (newest first).
    yield t.lpush(EVENTS, event_json)
    trigger_id = event.get("trigger_id")
    if trigger_id is not None:
        # Per-trigger sorted set scored by timestamp; expire entries older
        # than TRIGGER_EVENTS_TTL seconds.
        yield t.zadd(TRIGGER_EVENTS.format(event["trigger_id"]), event["timestamp"], event_json)
        yield t.zremrangebyscore(TRIGGER_EVENTS.format(trigger_id), min="-inf",
                                 max=int(time.time() - TRIGGER_EVENTS_TTL))
    if ui:
        # UI feed keeps only the ~100 most recent events.
        yield t.lpush(EVENTS_UI, event_json)
        yield t.ltrim(EVENTS_UI, 0, 100)
    yield t.commit()
def request(self, method, url, **kwargs):
    """Send an HTTP request, JSON-encoding ``body`` and decoding the reply.

    Returns a ``(response, body)`` tuple where ``body`` is the decoded
    JSON payload, or ``None`` when the response is empty or undecodable.
    """
    # Fix up request headers
    hdrs = kwargs.get('headers', {})
    hdrs['Accept'] = 'application/json'
    hdrs['User-Agent'] = self.USER_AGENT
    # If request has a body, treat it as JSON
    if 'body' in kwargs:
        hdrs['Content-Type'] = 'application/json'
        kwargs['data'] = anyjson.serialize(kwargs['body'])
        del kwargs['body']
    kwargs['headers'] = hdrs
    resp = requests.request(method,
                            (self.endpoint + self.project_id) + url,
                            **kwargs)
    body = None
    if resp.text:
        if resp.status_code == 400:
            if ('Connection refused' in resp.text or
                    'actively refused' in resp.text):
                raise exceptions.ConnectionRefused(resp.text)
        try:
            body = anyjson.deserialize(resp.text)
        except ValueError:
            # CLEANUP: removed a dead `pass` that preceded this assignment;
            # undecodable bodies simply yield None.
            body = None
    return resp, body
def test_from_message(self):
    # Round-trips a task message (with a non-ASCII kwargs key) through
    # TaskRequest.from_message and checks the request fields survive.
    us = u"æØåveéðƒeæ"
    body = {"task": mytask.name, "id": uuid(),
            "args": [2], "kwargs": {us: "bar"}}
    m = Message(None, body=anyjson.serialize(body), backend="foo",
                content_type="application/json",
                content_encoding="utf-8")
    tw = TaskRequest.from_message(m, m.decode())
    self.assertIsInstance(tw, Request)
    self.assertEqual(tw.task_name, body["task"])
    self.assertEqual(tw.task_id, body["id"])
    self.assertEqual(tw.args, body["args"])
    us = from_utf8(us)
    # On Python < 2.6 JSON keys come back as bytestrings, not unicode.
    if sys.version_info < (2, 6):
        self.assertEqual(tw.kwargs.keys()[0], us)
        self.assertIsInstance(tw.kwargs.keys()[0], str)
    self.assertTrue(tw.logger)
def testUserSubscriptions(self):
    """
    End-to-end lifecycle test for subscriptions over the HTTP API:
    create a contact, create a subscription with two tags, test-fire it,
    verify it appears in listings / user settings / tag indexes, then
    strip one tag and finally delete it, checking the tag indexes empty.
    """
    contact = {'value': '*****@*****.**', 'type': 'email'}
    response, contact = yield self.request('PUT', 'contact', anyjson.dumps(contact))
    response, sub = yield self.request(
        'PUT', 'subscription', anyjson.dumps({
            "contacts": [contact["id"]],
            "tags": ["devops", "tag1"]
        }))
    # Fire a test notification for the freshly created subscription.
    response, body = yield self.request(
        'PUT', 'subscription/' + str(sub["id"]) + "/test")
    response, subscriptions = yield self.request('GET', 'subscription')
    self.assertEqual(sub['id'], subscriptions["list"][0]["id"])
    response, settings = yield self.request('GET', 'user/settings')
    self.assertEqual(sub['id'], settings["subscriptions"][0]["id"])
    # Both tags must index the subscription.
    subs = yield self.db.getTagSubscriptions("devops")
    self.assertEqual(sub["id"], subs[0]["id"])
    subs = yield self.db.getTagSubscriptions("tag1")
    self.assertEqual(sub["id"], subs[0]["id"])
    # Removing a tag from the subscription drops it from that tag's index.
    sub["tags"].remove("tag1")
    response, updated_sub = yield self.request('PUT', 'subscription', anyjson.serialize(sub))
    subs = yield self.db.getTagSubscriptions("tag1")
    self.assertEqual(len(subs), 0)
    # Deleting the subscription clears the remaining tag index as well.
    response, updated_sub = yield self.request(
        'DELETE', 'subscription/' + str(sub["id"]))
    subs = yield self.db.getTagSubscriptions("devops")
    self.assertEqual(len(subs), 0)
def getdata(request):
    """
    Render customer marker data for the map view.

    Filters customers via ``filter_data(request)`` and builds one marker
    dict per customer for the ``mapping/data.html`` template.

    :param request: Django HttpRequest; GET params drive filter_data
    :returns: HttpResponse with the rendered template
    """
    logger.debug(request.GET)
    data = filter_data(request)
    rows = []
    for d in data:
        rows.append({
            'name': "%s %s" % (d.first_name, d.last_name),
            'long': d.gps_longitude,
            'lat': d.gps_latitude,
            'id': str(d.customer_id),
            # NOTE(review): data1/data2 were hard-coded placeholders in the
            # original; kept for template compatibility.
            'data1': str(1),
            'data2': str(2),
            'billing': str(d.billing_active),
            'ip': str(d.ip),
            'voip': str(d.voip_number),
        })
    # The serialized ``markers`` dict was built but never passed to the
    # template, so the dead anyjson.serialize call (and the commented-out
    # alternative queries) have been removed.
    return render(request, 'mapping/data.html', {"data": rows})
def insert(self, event, data, dimension_map):
    """
    Insert an event instance.

    Allocates the next sequential id for *event*, stores the serialized
    *data* under "<event>:<id>", and registers the id in every dimension
    bucket listed in *dimension_map*, all via a single redis pipeline.

    :param event: event type name, used as the redis key prefix
    :param data: event payload dict; mutated to carry the allocated 'id'
    :param dimension_map: mapping of dimension name -> ordered values,
        most significant value first (a drill-down path per dimension)
    """
    id = self._get_next_id(event)
    key = '%s:%d' % (event, id)
    data['id'] = id
    pipe = self.redis.pipeline()
    pipe.set(key, anyjson.serialize(data))
    for dimension, values in dimension_map.items():
        # no match for this dimension
        if not values:
            continue
        # add the id to individual dimension buckets
        for v in values:
            pipe.sadd('%s:%s:%s' % (event, dimension, v), id)
        top_v = values[0]
        # store the top level dimension
        pipe.sadd('%s:%s' % (event, dimension), top_v)
        # report dependencies between subbuckets: each value is recorded as
        # a subkey of the value before it, forming a parent->child chain
        for v in values[1:]:
            pipe.sadd('%s:%s:%s:subkeys' % (event, dimension, top_v), v)
            top_v = v
    pipe.execute()
def test_from_message_missing_required_fields(self):
    """An empty message body must be rejected as an invalid task."""
    empty_payload = {}
    message = Message(None, body=anyjson.serialize(empty_payload),
                      backend="foo",
                      content_type="application/json",
                      content_encoding="utf-8")
    with self.assertRaises(InvalidTaskError):
        TaskRequest.from_message(message, message.decode())
def post_intercom_event(event_name, user_id):
    """
    Sends a request to Intercom to track the given event.

    Args:
        event_name (str): Name of the event that we want to track.
        user_id (int): ID of the Lily user.

    Returns:
        response (Response): Object containing the response information,
        or None when running with DEBUG enabled.
    """
    if settings.DEBUG:
        # Never report tracking events from development environments.
        return None

    event_data = {
        'event_name': event_name,
        'user_id': user_id,
        'created_at': int(time()),
    }
    return requests.post(
        url='https://api.intercom.io/events',
        data=anyjson.serialize(event_data),
        auth=(settings.INTERCOM_APP_ID, settings.INTERCOM_KEY),
        headers={'Content-Type': 'application/json'},
    )
def get(self, request, ticket_id):
    """Return the child tickets of *ticket_id* as a JSON array (404 if sterile)."""
    try:
        child_tickets = tracker.list_children(ticket_id)
    except IAmSterile:
        return HttpResponse(status=404)
    payload = [child.as_dict() for child in child_tickets]
    return HttpResponse(anyjson.serialize(payload))
def check_feeds(self):
    """
    Sweep recently-created, non-deleted feeds and queue the reachable ones.

    For each feed created within the last two days, try to fetch it via the
    Facebook API and enqueue the JSON payload on beanstalk.  When the API
    call fails, mark the row unaccessable and — unless the stored feed
    looks like a like/comment activity — probe the feed URL directly; a
    404 page whose title indicates deletion triggers handle_deletion().
    """
    cursor = self.database.cursor()
    chk_day = datetime.datetime.today() - datetime.timedelta(days=2)
    #cursor.execute("SELECT `id`, `url`, `feed` FROM `feeds` WHERE `deleted` =0 and politician_id=50")
    cursor.execute("SELECT `id`, `url`, `feed` FROM `feeds` WHERE `deleted` = 0 and created>%s", chk_day.strftime("%Y/%m/%d"))
    feeds = cursor.fetchall()
    log.notice(u"counts:{0}", len(feeds))
    for data in feeds:
        time.sleep(0.1)  #delay a tick.
        try:
            # feed exist, put into for work.
            feed = self.fb_api.get_object(data[0])
            #log.notice(u"from {0}", feed['from']['name'])
            self.beanstalk.put(anyjson.serialize(feed))
        except Exception as e:
            # can't access feed by api, try through url.
            # NOTE(review): `e` is unused and the broad Exception swallows
            # unrelated failures — confirm intent before narrowing.
            cursor.execute("""UPDATE `feeds` SET `unaccessable`=1 WHERE id = %s""", data[0])
            raw_feed = anyjson.deserialize(data[2])
            # Activities (likes/comments) are skipped from the URL probe.
            isactivity = True if u"likes a" in raw_feed.get('story', '') or u"like a" in raw_feed.get('story', '') or u"commented on" in raw_feed.get('story', '') or u"a activity" in data[1] else False
            log.notice(u"raw_story:{0}, isactivity:{1}, raw_url:{2}", raw_feed.get('story', ''), isactivity, data[1])
            if not isactivity:
                html = requests.get(data[1], allow_redirects=True)
                log.notice("status code:{0}", html.status_code)
                if html.status_code == requests.codes.not_found:
                    title = BeautifulSoup(html.text).title.string
                    time.sleep(0.5)  #sleep a I/O tick.
                    log.notice(u"Title:{0}, url:{1}", title, data[1])
                    if u"找不到網頁" in title or u"Page Not Found" in title:
                        # be deleted.
                        self.handle_deletion(data[0])
def test_get_instances_returns_correct_data(self):
    """Intersecting two dimension slices yields only the shared instances."""
    self.redis.sadd('error:dimensions', 'time')
    self.redis.sadd('error:dimensions', 'name')
    for member in ('1', '2', '3'):
        self.redis.sadd('error:time:201008', member)
    for member in ('1', '3', '4'):
        self.redis.sadd('error:name:ValueError', member)
    for ident in ('1', '2', '3', '4'):
        self.redis.set('error:%s' % ident,
                       anyjson.serialize({'event': 'error:%s' % ident}))

    keys = self.olap.get_keys('error', time='201008', name='ValueError')
    found = set(map(anyjson.serialize,
                    self.olap.get_instances('error', keys)))

    self.assertEquals(2, len(found))
    expected = set([anyjson.serialize({'event': 'error:1'}),
                    anyjson.serialize({'event': 'error:3'})])
    self.assertEquals(expected, found)
def httptest(request, x, y): z = x + y print "===start=====" print z print "===end=====" response = {'status': 'success', 'retval': z} return HttpResponse(serialize(response), mimetype='application/json')
def test_get_instances_can_union_more_than_one_slice_from_each_dimension(self):
    """A time__union across two buckets still intersects correctly with name."""
    self.redis.sadd('error:dimensions', 'time')
    self.redis.sadd('error:dimensions', 'name')
    self.redis.sadd('error:time:201008', '1')
    self.redis.sadd('error:time:201008', '2')
    self.redis.sadd('error:time:201009', '3')
    for member in ('1', '3', '4'):
        self.redis.sadd('error:name:ValueError', member)
    for ident in ('1', '2', '3', '4'):
        self.redis.set('error:%s' % ident,
                       anyjson.serialize({'event': 'error:%s' % ident}))

    keys = self.olap.get_keys('error', time__union=('201008', '201009'),
                              name='ValueError')
    found = set(map(anyjson.serialize,
                    self.olap.get_instances('error', keys)))

    self.assertEquals(2, len(found))
    self.assertEquals(set([anyjson.serialize({'event': 'error:1'}),
                           anyjson.serialize({'event': 'error:3'})]), found)
def getsectorjson(request):
    """Render all sectors as serialized marker JSON for the map overlay."""
    logger.debug('getsectorjson')
    logger.debug(request.GET)
    display_sectors = request.GET['display_sectors']

    rows = []
    for sector in Sector.objects.all():
        rows.append({
            'name': "%s" % (sector.name),
            'long': sector.gps_longitude,
            'lat': sector.gps_latitude,
            'direction': sector.direction,
            'angle': sector.angle,
            'distance': sector.distance,
            'color': sector.color,
        })

    markers = anyjson.serialize({'count': len(rows), 'markers': rows})
    if display_sectors == 'off':
        # Sector display disabled: hand the template an empty list instead.
        markers = []
    return render(request, 'mapping/json.html', {"json": markers})
def get(self, request):
    """Start the OAuth flow, carrying a signed user token in the state param."""
    token = generate_token(settings.SECRET_KEY, request.user.pk)
    state = b64encode(anyjson.serialize({'token': token}))
    return HttpResponseRedirect(FLOW.step1_get_authorize_url(state=state))
def saveUserSubscription(self, login, sub, existing=None):
    """
    Persist *sub* for *login* inside one redis MULTI transaction:
        - mint a fresh uuid id when no existing subscription is given
        - drop the id from the tag sets of *existing*'s old tags
        - add the id to the tag set of every current tag
        - register the id in the user's subscription set
        - store the subscription json under its own key

    (Removed a no-op ``existing = existing`` self-assignment.)

    :param login: user login
    :type login: string
    :param sub: subscription data
    :type sub: json dict
    :param existing: previously stored subscription, if updating
    :type existing: json dict or None
    :rtype: json dict
    """
    sub_id = sub.get("id")
    if existing is None:
        # New subscription: mint an id instead of trusting the payload's.
        sub_id = str(uuid4())
    t = yield self.rc.multi()
    sub["user"] = login
    sub["id"] = sub_id
    sub_tags = sub.get("tags", [])
    if existing is not None:
        # Detach the subscription from tags it previously carried.
        for tag in existing.get("tags", []):
            yield t.srem(TAG_SUBSCRIPTIONS_PREFIX.format(tag), sub_id)
    for tag in sub_tags:
        yield t.sadd(TAG_SUBSCRIPTIONS_PREFIX.format(tag), sub_id)
    yield t.sadd(USER_SUBSCRIPTIONS_PREFIX.format(login), sub_id)
    yield t.set(SUBSCRIPTION_PREFIX.format(sub_id), anyjson.serialize(sub))
    yield t.commit()
    defer.returnValue(sub)
def access(self, remote_ip):
    """Record the caller's ip and a microsecond timestamp as last_access."""
    record = {
        'remote_ip': remote_ip,
        'time': '%.6f' % time.time(),
    }
    self._store.set_session(self._sessionid, 'last_access', serialize(record))
def delete(self, request, *args, **kwargs):
    """
    Overloading super().delete to remove the related models and the instance itself.

    Refuses (404) to delete an account that still has linked users or an
    account_admin user, detaches related objects, deletes the account's
    functions and finally the account.  Returns JSON with the redirect url
    for ajax callers, a plain redirect otherwise.
    """
    self.object = self.get_object()

    # Prevents deleting an account with users and checks if the account has a user that's linked to an admin group
    if self.object.user.exists() or has_user_in_group(self.object, 'account_admin'):
        raise Http404()

    # NOTE(review): related-manager .remove() called with no arguments
    # removes nothing — .clear() (or .all().delete()) was probably
    # intended; confirm against the data model before changing.
    self.object.email_addresses.remove()
    self.object.addresses.remove()
    self.object.phone_numbers.remove()
    self.object.tags.remove()

    functions = Function.objects.filter(account=self.object)
    functions.delete()

    # Show delete message
    messages.success(self.request, _('%s (Account) has been deleted.') % self.object.name)

    self.object.delete()

    # TODO: check for contacts and websites ..

    redirect_url = self.get_success_url()
    if is_ajax(request):
        response = anyjson.serialize({
            'error': False,
            'redirect_url': redirect_url
        })
        return HttpResponse(response, content_type='application/json')

    return redirect(redirect_url)
def deleteUserContact(self, contact_id, login, existing=None):
    """
    Delete a user's contact and scrub references to it:
        - remove the contact key
        - remove *contact_id* from the user's contact set
        - rewrite every subscription of *login* that referenced the contact

    :param contact_id: contact id
    :type contact_id: string
    :param login: user login
    :type login: string
    :param existing: unused — NOTE(review): appears to be a leftover parameter
    """
    # Collect the subscriptions still referencing the contact first, so
    # they can be rewritten inside the same transaction as the deletion.
    changed_subs = []
    subs = yield self.getUserSubscriptions(login)
    for sub_id in subs:
        sub = yield self.getSubscription(sub_id)
        if sub and contact_id in sub["contacts"]:
            sub["contacts"].remove(contact_id)
            changed_subs.append(sub)
    t = yield self.rc.multi()
    yield t.delete(CONTACT_PREFIX.format(contact_id))
    yield t.srem(USER_CONTACTS_PREFIX.format(login), contact_id)
    for sub in changed_subs:
        yield t.set(SUBSCRIPTION_PREFIX.format(sub["id"]), anyjson.serialize(sub))
    yield t.commit()