def response_for_exception(request, exception): response = base_response_for_exception(request, exception) if isinstance(exception, ValidationError): dont_flash = [ 'password', 'password_confirmation', ] old_input = pydash.merge(parser.parse(request.GET.urlencode()), parser.parse(request.POST.urlencode())) errors = exception.message_dict if hasattr( exception, 'error_dict') else { '__all__': exception.messages } response = HttpRedirector(request) \ .back() \ .with_input(pydash.omit(old_input, dont_flash)) \ .with_errors(errors) if request.match('api/*') and not isinstance(response, JsonResponse): message = str(exception) data = {} if isinstance(exception, ValidationError): status_code = 422 message = _('Invalid data!') data['errors'] = exception.message_dict if hasattr( exception, 'error_dict') else { '__all__': exception.messages } elif isinstance(exception, Http404): status_code = 404 elif isinstance(exception, BadRequest): status_code = 400 elif isinstance(exception, AuthenticationException): status_code = 401 elif isinstance(exception, PermissionDenied): status_code = 403 else: status_code = 500 if not settings.DEBUG: message = _('Something went wrong') if len(message.strip()) == 0: message = _(status_codes._codes[status_code][0]) return JsonResponse(pydash.merge({'message': message}, data), status=status_code) return response
def callback(request, auth_name): if auth_name == 'dropbox': post_dict = parser.parse(request.POST.urlencode()) code = str(post_dict['code']) userid = str(post_dict['userid']) json_response = dbauth.handleCallback(userid, code, request) return HttpResponse(json_response) if auth_name == 'google': post_dict = parser.parse(request.POST.urlencode()) code = str(post_dict['code']) json_response = gauth.handleCallback(code, request) return HttpResponse(json_response) return HttpResponse('Invalid URL')
def process_query_parameters(url_data, provider_serializer, tag_keys=None, **kwargs): """Process query parameters and raise any validation errors. Args: url_data (String): the url string Returns: (Boolean): True if query params are valid, False otherwise (Dict): Dictionary parsed from query params string """ try: query_params = parser.parse(url_data) except parser.MalformedQueryStringError: LOG.error('Invalid query parameter format %s.', url_data) error = {'details': _(f'Invalid query parameter format.')} raise ValidationError(error) if tag_keys: tag_keys = process_tag_query_params(query_params, tag_keys) qps = provider_serializer(data=query_params, tag_keys=tag_keys, context=kwargs) else: qps = provider_serializer(data=query_params, context=kwargs) output = None validation = qps.is_valid() if not validation: output = qps.errors else: output = qps.data return (validation, output)
def get(self, request, question_slug, *args, **kwargs): try: arguments = parser.parse(request.GET.urlencode()) size = int(arguments.pop('size', 20)) index = int(arguments.pop('index', 0)) size, index = permissions.pagination_permission( request.user, size, index) size = index + size sort = None if arguments.get('order_by'): sort = arguments.pop('order_by') question = self.model.objects.get(question_slug=question_slug, is_deleted=False) if not isinstance(request.user, AnonymousUser): question.view.add(request.user) question.total_view += 1 question.save() result = question.answer_set result = result.filter(is_deleted=False) if sort: result = result.order_by(sort) result = result.all() total = len(result) result = result[index:size] data = self.get_serializer(result, many=True) return responses.SuccessResponse(data.data, index=index, total=total).send() except FieldError as e: return responses.ErrorResponse(message=str(e)).send()
def get_model_version_artifact_handler(): query_string = request.query_string.decode('utf-8') request_dict = parser.parse(query_string, normalized=True) name = request_dict.get('name') version = request_dict.get('version') artifact_uri = _get_model_registry_store().get_model_version_download_uri(name, version) return _send_artifact(get_artifact_repository(artifact_uri), request_dict['path'])
def page_report(request, report, fmt='csv', conf=None): get_dict = parser.parse(request.GET.urlencode()) objs = get_queryset(request, report, get_dict)[:100] if objs.count()==0: return HttpResponse('No Data') # if(isinstance(objs, list)): # conn = DictListConnector(objs, expand_obs=True) # else: # conn = DjangoQuerySetConnector(objs) # if report in REPORTS: # cls = REPORTS[report] # if cls.Meta.fields: # conn.header = cls.Meta.fields # elif cls.Meta.exclude: # conn.header = Set(conn.header) - Set(cls.Meta.exclude) # elif cls.Meta.sequence: # conn.header = Set(cls.Meta.sequence) | Set(conn.header) tf = tempfile.NamedTemporaryFile() fn = tf.name fp = open(fn, "w+") DataProvider.WriteData(objs, fmt, fn) fp.close() response = StreamingHttpResponse(open(fn), content_type='text/csv') response['Content-Disposition'] = 'attachment; filename=' + report + '.csv' return response
def delete_equipment(request): if request.method == 'POST': post = parser.parse(request.POST.urlencode()) e = Equipment.objects.get(pk=int(post['equip'])) print 'vai deletar aqui' #e.delete() return HttpResponse("ok", mimetype='application/json')
def create_or_update_specified_term_with_provided_json(request): logger.info( "[administration/views.py create_or_update_specified_term_with_provided_json()] " f"[create_or_update_specified_term_with_provided_json] request.POST={request.POST}" ) groups = list(request.user.groups.values_list('name', flat=True)) context = { 'tab': 'administration', 'authenticated': request.user.is_authenticated, 'Exec': ('Exec' in groups), 'ElectionOfficer': ('ElectionOfficer' in groups), 'Staff': request.user.is_staff, 'Username': request.user.username, 'URL_ROOT': settings.URL_ROOT } if not (request.user.is_staff or 'Exec' in groups): return render(request, 'administration/invalid_access.html', context) if JSON_INPUT_POST_KEY in request.POST: logger.info( "[administration/views.py create_or_update_specified_term_with_provided_json()] creating new election" ) post_dict = parser.parse(request.POST.urlencode()) post_dict = json.loads(request.POST['input_json']) logger.info( f"[administration/views.py create_or_update_specified_term_with_provided_json()] post_dict={post_dict}" ) term = get_term_json(json.loads(request.POST['input_json'])) # post_dict = parser.parse(request.POST.urlencode()) logger.info( f"[administration/views.py create_or_update_specified_term_with_provided_json()] post_dict={post_dict}" ) save_execs_from_json(post_dict[JSON_EXEC_KEY], term) return render(request, 'administration/update_officers_for_term_json.html', context) return render(request, 'administration/update_officers_for_term_json.html', context)
def _validate(self): """Validate query parameters. Raises: ValidationError Returns: (Boolean): True if query params are valid, False otherwise (Dict): Dictionary parsed from query params string """ try: query_params = parser.parse(self.url_data) except parser.MalformedQueryStringError: LOG.info('Invalid query parameter format %s.', self.url_data) error = {'details': _(f'Invalid query parameter format.')} raise ValidationError(error) if self.tag_keys: self.tag_keys = self._process_tag_query_params(query_params) qps = self.serializer(data=query_params, tag_keys=self.tag_keys, context={'request': self.request}) else: qps = self.serializer(data=query_params, context={'request': self.request}) if not qps.is_valid(): raise ValidationError(detail=qps.errors) self.parameters = qps.data
def get(self): # parse the url args into a dict parsed = parser.parse(request.query_string) # column names for this table dtcols = get_columns(Table, parsed) #for col in dtcols: # print col # pre build the query so we can add filters to it here try: query = Table.query # Flask-SQLAlchemy except: query = Session.query(Table) # vanilla SQLALchemy # check if we are filtering the rows some how # this uses the restless view code if 'q' in parsed.keys(): query = views.search(Session, Table, parsed) log_debug(str(query)) # get our DataTable object dtobj = DataTable( parsed, Table, query, dtcols) # return the query result in json return dtobj.json()
def post(self, request, **kwargs): # Minidump request payloads do not have the same structure as # usual events from other SDKs. Most notably, the event needs # to be transfered in the `sentry` form field. All other form # fields are assumed "extra" information. The only exception # to this is `upload_file_minidump`, which contains the minidump. if any(key.startswith('sentry[') for key in request.POST): # First, try to parse the nested form syntax `sentry[key][key]` # This is required for the Breakpad client library, which only # supports string values of up to 64 characters. extra = parser.parse(request.POST.urlencode()) data = extra.pop('sentry', {}) else: # Custom clients can submit longer payloads and should JSON # encode event data into the optional `sentry` field. extra = request.POST json_data = extra.pop('sentry', None) data = json.loads(json_data[0]) if json_data else {} # Merge additional form fields from the request with `extra` # data from the event payload and set defaults for processing. extra.update(data.get('extra', {})) data['extra'] = extra # Assign our own UUID so we can track this minidump. We cannot trust the # uploaded filename, and if reading the minidump fails there is no way # we can ever retrieve the original UUID from the minidump. event_id = data.get('event_id') or uuid.uuid4().hex data['event_id'] = event_id # At this point, we only extract the bare minimum information # needed to continue processing. This requires to process the # minidump without symbols and CFI to obtain an initial stack # trace (most likely via stack scanning). If all validations # pass, the event will be inserted into the database. 
try: minidump = request.FILES['upload_file_minidump'] except KeyError: raise APIError('Missing minidump upload') if settings.SENTRY_MINIDUMP_CACHE: if not os.path.exists(settings.SENTRY_MINIDUMP_PATH): os.mkdir(settings.SENTRY_MINIDUMP_PATH, 0o744) with open('%s/%s.dmp' % (settings.SENTRY_MINIDUMP_PATH, event_id), 'wb') as out: for chunk in minidump.chunks(): out.write(chunk) merge_minidump_event(data, minidump) response_or_event_id = self.process(request, data=data, **kwargs) if isinstance(response_or_event_id, HttpResponse): return response_or_event_id # Return the formatted UUID of the generated event. This is # expected by the Electron http uploader on Linux and doesn't # break the default Breakpad client library. return HttpResponse(six.text_type(uuid.UUID(response_or_event_id)), content_type='text/plain')
def get(self, request, *args, **kwargs): try: arguments = parser.parse(request.GET.urlencode()) size = int(arguments.pop('size', 20)) index = int(arguments.pop('index', 0)) size, index = permissions.pagination_permission( request.user, size, index) size = index + size sort = None if arguments.get('order_by'): sort = arguments.pop('order_by') result = self.model.objects.filter(**arguments) if self.model in MODELS_HAVE_IS_DELETED: result = result.filter(is_deleted=False) if self.model == CompanyReview or self.model == Interview: result = result.filter(approved=True) if sort: result = result.order_by(sort) if self.model in [City, Pros, Cons]: result = result.order_by('-priority') result = result.all() total = len(result) result = result[index:size] data = self.get_serializer(result, many=True) return responses.SuccessResponse(data.data, index=index, total=total).send() except FieldError as e: return responses.ErrorResponse(message=str(e)).send()
def decaf_train(request): post_dict = parser.parse(request.POST.urlencode()) try: if 'urls' not in post_dict: data = {'error': 'NoFileSelected'} else: data = {'info': 'ProcessingImages'} # Download these images. Run Feature Extraction. Post results. uuid, image_path = downloadAndSaveImages(post_dict['urls'], post_dict['socketid']) output_path = os.path.join(image_path, 'results') if not os.path.exists(output_path): os.makedirs(output_path) decaf_wrapper_local(image_path, output_path, post_dict['socketid'], os.path.join(conf.PIC_URL, uuid)) log_to_terminal('Processing Images Now', post_dict['socketid']) response = JSONResponse(data, {}, response_mimetype(request)) response['Content-Disposition'] = 'inline; filename=files.json' return response except: data = {'result': str(traceback.format_exc())} response = JSONResponse(data, {}, response_mimetype(request)) response['Content-Disposition'] = 'inline; filename=files.json' return response
def get(self): args = parser.parse(request.query_string) options = { "start": args.get("start", 0), "limit": args.get("length", 10), "search": args.get("search", {}).get("value"), } columns = [] for i in range(len(args["columns"])): columns.append(args["columns"][i]) order = [] for i in range(len(args["order"])): column_id = args["order"][i]["column"] order.append({"column": columns[column_id]["data"], "dir": args["order"][i]["dir"]}) options["order"] = order rows, total = self.get_queryset(**options) data = [] for row in rows: row_data = {} for column in columns: callback = getattr(self, "process_%s" % column["data"], lambda row, val: val) val = getattr(row, column["data"], None) row_data[column["data"]] = callback(row, val) data.append(row_data) return jsonify(recordsTotal=total, recordsFiltered=total, data=data)
def parse(self, stream, media_type=None, parser_context=None): from querystring_parser import parser from django.conf import settings encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET) encoded_data = stream.read().decode(encoding) return parser.parse(encoded_data)
def parse(self, stream, media_type=None, parser_context=None): import urllib from querystring_parser import parser from django.conf import settings encoding = parser_context.get("encoding", settings.DEFAULT_CHARSET) return parser.parse(stream.read(), unquote=True, encoding=encoding)
def get_bookmarks(request): #sync the client-side bookmarks with the server side bookmarks #update the server-side bookmarks and return the new list try: bookmark_dict = parser.parse(request.POST.urlencode())['bookmarks'] except: bookmark_dict = {} try: #loop through the list from the client #if user, bm_name, and bm_state match then skip #otherwise, add to the db for key,bookmark in bookmark_dict.items(): try: Bookmark.objects.get(user=request.user, name=bookmark['name'], url_hash=bookmark['hash']) except: new_bookmark = Bookmark(user=request.user, name=bookmark['name'], url_hash=bookmark['hash']) new_bookmark.save() #grab all bookmarks belonging to this user #serialize bookmarks into 'name', 'hash' objects and return simplejson dump content = [] bookmark_list = Bookmark.objects.filter(user=request.user) for bookmark in bookmark_list: content.append({'name': bookmark.name, 'hash': bookmark.url_hash}) return HttpResponse(simplejson.dumps(content), mimetype="application/json", status=200) except: return HttpResponse(status=304)
def timing_vacation(request): ''' vacations dates routes for clinician ''' c = Clinician.objects.filter(user__email=request.session['email']).first() if request.method == "POST": p_dict = parser.parse(request.POST.urlencode()) try: vacs = p_dict['timings'][""] vacs = [ list( map( lambda x: datetime.datetime.strptime(x, '%Y-%m-%d'). date(), [i, j])) for i, j in zip(vacs[::2], vacs[1::2]) ] c.vacations = vacs c.save() except: pass vacation_timings = [] for vacation in c.vacations: vacation_timings.append({ 'start': vacation[0].isoformat(), 'end': vacation[-1].isoformat(), }) return render(request, 'clinician/dashboard/timings/vacationtime.html.j2', context={ 'title': "Timings - vacation", 'clinician': c, 'timings': vacation_timings, })
def post(self, request, **kwargs): # Minidump request payloads do not have the same structure as # usual events from other SDKs. Most notably, the event needs # to be transfered in the `sentry` form field. All other form # fields are assumed "extra" information. The only exception # to this is `upload_file_minidump`, which contains the minidump. extra = parser.parse(request.POST.urlencode()) data = extra.pop('sentry', {}) extra.update(data.get('extra', {})) data['extra'] = extra data['platform'] = 'native' # At this point, we only extract the bare minimum information # needed to continue processing. This requires to process the # minidump without symbols and CFI to obtain an initial stack # trace (most likely via stack scanning). If all validations # pass, the event will be inserted into the database. try: minidump = request.FILES['upload_file_minidump'] except KeyError: raise APIError('Missing minidump upload') merge_minidump_event(data, minidump) response_or_event_id = self.process(request, data=data, **kwargs) if isinstance(response_or_event_id, HttpResponse): return response_or_event_id return HttpResponse(json.dumps({'id': response_or_event_id}), content_type='application/json')
def show_deploy(request, cluster_id): #get_dict = parser.parse( unicode(request.GET.urlencode()).encode("utf-8") ) #return uncache_response(HttpResponse(json.dumps(get_dict, indent=2), content_type="text/plain")) #return uncache_response(HttpResponse( get_dict['password'] , content_type="text/plain")) context = {} password = None try: cluster = ClusterDefinition.objects.get( id=cluster_id ) if cluster.encryption_checksum != '': try: get_dict = parser.parse( unicode(request.GET.urlencode()).encode("utf-8") ) password = get_dict['password'] cluster.decrypt(password) except Exception as e: #messages.error(request, "The password provided is wrong: %s" % e) messages.error(request, "The password provided is wrong") return render(request, "cluster/deploy.html", {}) except ClusterDefinition.DoesNotExist, Http404: messages.error(request, 'Cluster does not exist') return render(request, "cluster/deploy.html", {})
def __parsePacket(packet, check_data=True): if packet is None: debug("__parsePacket", "Packet is empty") return None else: #packet = simplejson.loads(packet) packet = parser.parse(packet) print(packet) return packet if TOKEN_TRANS not in packet: debug("__parsePacket", "Packet doesn't contain idToken") return None else: cred = apiPri.__verify_idToken(packet[TOKEN_TRANS]) if cred is None: debug("__parsePacket", "IdToken is not valid") return None elif check_data: if DATA_TRANS not in packet: debug("__parsePacket", "Data is empty") return None else: data = packet[DATA_TRANS] return data else: return dict()
def get(self, request, *args, **kwargs): try: arguments = parser.parse(request.GET.urlencode()) size = int(arguments.pop('size', 20)) index = int(arguments.pop('index', 0)) size, index = permissions.pagination_permission( request.user, size, index) size = index + size sort = None if arguments.get('order_by'): sort = arguments.pop('order_by') result = self.model.objects.filter(**arguments) if sort: result = result.order_by(sort) result = result.all() total = len(result) result = result[index:size] data = self.get_serializer(result, many=True) return responses.SuccessResponse(data.data, index=index, total=total).send() except FieldError as e: return responses.ErrorResponse(message=str(e)).send()
def trainamodel(request): data = {} try: post_dict = parser.parse(request.POST.urlencode()) socketid = post_dict['socketid'] log_to_terminal('Beginning training a new model', post_dict['socketid']) old_save_dir = conf.PIC_DIR folder_name = str(socketid) save_dir = os.path.join(conf.PIC_DIR, folder_name) train_dir = os.path.join(save_dir, 'train') test_dir = os.path.join(save_dir, 'test') util_dir = os.path.join(save_dir, 'util') if not os.path.exists(os.path.join(old_save_dir, folder_name)): raise Exception('No training images has been provided for this job.') trainModel(save_dir, post_dict['socketid']) data['info'] = 'completed' response = JSONResponse(data, {}, response_mimetype(request)) response['Content-Disposition'] = 'inline; filename=files.json' log_to_terminal('Finished training your model with the new categories. Now, upload some test images to test this model. ', socketid) return response except Exception as e: data['error'] = str(traceback.format_exc()) log_to_terminal(str(traceback.format_exc()), socketid) response = JSONResponse(data, {}, response_mimetype(request)) response['Content-Disposition'] = 'inline; filename=files.json' return response
def appointment_past(request): ''' complete history of past appointments''' seeker = Seeker.objects.filter(user__email=request.session['email']).first() if request.method == "POST": # add feedback if valid params = parser.parse(request.POST.urlencode()) print(params) try: ratings = {} [ratings.update({_ : float(params['categories'][_])}) for _ in params['categories']] apt = Appointment.objects.get(uid=params['apt_id']) provider = Provider.objects.get(pk=params['hosp_id']) clinician = provider.clinicians.get(pk=params['clinician_id']) feedback = Feedback(user=seeker, provider=provider, clinician=clinician, appointment=apt, categories=ratings, message=params['message']) feedback.save() except Exception as e: print("Error in feedback | %s" % str(e)) records = seeker.appointments.filter(Q(status='completed') | Q(status='cancelled')).order_by('-from_timestamp') return render(request, 'healthseeker/appointment/appointment.html.j2', { 'title' : 'Appointment - History', 'type' : 'past', 'records' : records, 'review_cats' : Feedback.cat_keys.keys })
def geocoder(request): parsed_dict = parser.parse(request.POST.urlencode()) r = Geocode(parsed_dict['addresses']) return HttpResponse(r, mimetype='application/json')
def _get_request_message(request_message, flask_request=request): if flask_request.method == 'GET' and len(flask_request.query_string) > 0: # This is a hack to make arrays of length 1 work with the parser. # for example experiment_ids%5B%5D=0 should be parsed to {experiment_ids: [0]} # but it gets parsed to {experiment_ids: 0} # but it doesn't. However, experiment_ids%5B0%5D=0 will get parsed to the right # result. query_string = re.sub('%5B%5D', '%5B0%5D', flask_request.query_string.decode("utf-8")) request_dict = parser.parse(query_string, normalized=True) parse_dict(request_dict, request_message) return request_message request_json = _get_request_json(flask_request) # Older clients may post their JSON double-encoded as strings, so the get_json # above actually converts it to a string. Therefore, we check this condition # (which we can tell for sure because any proper request should be a dictionary), # and decode it a second time. if isinstance(request_json, six.string_types): request_json = json.loads(request_json) # If request doesn't have json body then assume it's empty. if request_json is None: request_json = {} parse_dict(request_json, request_message) return request_message
def getargs(request): '''Returns the arguments sent in the request as a dictionary in this format: args = { 'skip': the number of employees to skip or '' if the argument was not recieved. 'take': the number of employees to return or '' if the argument was not recieved. 'filters': { index: { 'field': the name of the field to use for filtering. 'value': the value used to filter the field. 'operator': a string representing the logical operator used to filter the field. e.g. eq for equals to, lt for less than, ect. } } ''' args = argparser.parse(request.GET.urlencode()) skip = args.get('skip', '') if skip != '': skip = int(skip) take = args.get('take', '') if take != '': take = int(take) sort = args.get('sort', '') if sort != '': sort = sort[0] filters = args.get('filter', '') if filters != '': filters = filters.get('filters', '') return {'skip': skip, 'take': take, 'filters': filters, 'sort': sort}
def experience_training(request): ''' account route for experience and training''' c = Clinician.objects.filter(user__email=request.session['email']).first() class ExperienceForm(forms.Form): type_choices = ( ('experience', 'experience'), ('training', 'training'), ('other', 'other'), ) year = forms.CharField(required=False) position = forms.CharField(max_length=30) description = forms.CharField(max_length=100, widget=forms.widgets.Textarea()) type = forms.ChoiceField(choices=type_choices) experience_form = ExperienceForm() if request.method == "POST": if request.POST['btn-type'] == "1": experience_form = ExperienceForm(request.POST, request.FILES) if experience_form.is_valid(): e = experience_form.cleaned_data c.experience_training.append(e) experience_form = ExperienceForm() elif request.POST['btn-type'] == "0": post_params = parser.parse(request.POST.urlencode()) fields = ['year', 'position', 'description', 'type'] cur_all = [] if any([_ in post_params for _ in fields]): for _ in fields: if not isinstance(post_params[_], list): post_params[_] = [post_params[_]] cur_all = zip(post_params[fields[0]]) for field in fields[1:]: cur_all = zip(*cur_all) cur_all = zip(*cur_all, post_params[field]) cur_all = list(cur_all) records = [] for r in cur_all: x = {} [x.update({j: i}) for i, j in zip(r, fields)] records.append(x) print(records) c.experience_training = records c.save() return render(request, 'clinician/dashboard/account/experience.html.j2', context={ 'title': "Account - Experience & Training", 'clinician': c, 'all_exp': c.experience_training, 'experience_form': experience_form, })
def make_folders_public_gdrive(request): """ makes a list of requested google drive folders publicly available """ logger.info( f"[resource_management/gdrive_views.py make_folders_public_gdrive()] request.POST={request.POST}") (render_value, error_message, context) = verify_access_logged_user_and_create_context(request, TAB_STRING) if context is None: # if the user accessing the page is not authorized to access it request.session[ERROR_MESSAGE_KEY] = '{}<br>'.format(error_message) return render_value gdrive = GoogleDrive(settings.GDRIVE_TOKEN_LOCATION, settings.GDRIVE_ROOT_FOLDER_ID) if gdrive.connection_successful: post_dict = parser.parse(request.POST.urlencode()) if there_are_multiple_entries(post_dict, GOOGLE_DRIVE_USERS_FILE_ID_KEY): number_of_entries = len(post_dict[GOOGLE_DRIVE_USERS_FILE_ID_KEY]) logger.info( f"[resource_management/gdrive_views.py make_folders_public_gdrive()] {number_of_entries} " "total multiple entries detected" ) for index in range(number_of_entries): success, result = make_folder_public_gdrive(gdrive, post_dict[GOOGLE_DRIVE_USERS_FILE_ID_KEY][index]) if not success: if ERROR_MESSAGE_KEY in request.session: request.session[ERROR_MESSAGE_KEY] += '{}<br>'.format(result) else: request.session[ERROR_MESSAGE_KEY] = '{}<br>'.format(result) else: success, result = make_folder_public_gdrive(gdrive, post_dict[GOOGLE_DRIVE_USERS_FILE_ID_KEY]) if not success: if ERROR_MESSAGE_KEY in request.session: request.session[ERROR_MESSAGE_KEY] += '{}<br>'.format(result) else: request.session[ERROR_MESSAGE_KEY] = '{}<br>'.format(result) return HttpResponseRedirect(f'{settings.URL_ROOT}resource_management/gdrive/')
def get(self, request, **kwargs): arguments = parser.parse(request.GET.urlencode()) take = arguments.get('take', 10) skip = arguments.get('skip', 0) total = skip+take filter_arg = dict() sort_arg = list() filter_logic = 'and' if(arguments.has_key('filter') and arguments['filter'].has_key('filters')): filter_arg = self._build_filters(arguments['filter']['filters'], filter_arg) filter_logic = arguments['filter']['logic'].upper() if(arguments.has_key('group')): sort_arg = self._build_sorts(arguments['group'], sort_arg) if(arguments.has_key('sort')): sort_arg = self._build_sorts(arguments['sort'], sort_arg) output = dict() try: filters = Q(**filter_arg) filters.connector = filter_logic items = self.model.objects.filter(filters).order_by(*sort_arg) if(self.distinct): items = items.distinct() self.queryset = items[skip:total] output = {'result':1, 'count':items.count(), 'payload':self.get_queryset()} except FieldError: output = {'result':0, 'error':'Invalid request. Tried to filter or sort using invalid field.'} return response_json(request, output)
def set_led_values(device_name): check_device(device_name) dev = devices[device_name] args = parser.parse(request.get_data()) for led, value in args['led'].items(): dev.set_pwm(int(led), int(value)) return create_response({'result': 'ok'})
def get(self, request, id, *args, **kwargs): try: arguments = parser.parse(request.GET.urlencode()) size = int(arguments.pop('size', 20)) index = int(arguments.pop('index', 0)) size, index = permissions.pagination_permission(request.user, size, index) size = index + size instance = self.model.objects.get(id=id, is_deleted=False) if not instance.has_legal_issue: if not instance.approved: if request.user == instance.creator or (not request.user.is_anonymous and request.user.is_staff): pass else: raise self.model.DoesNotExist serialize_data = self.get_serializer(instance.interviewcomment_set.filter( is_deleted=False, approved=True ).all(), many=True) data = serialize_data.data data = sorted(data, key=lambda x: x['vote_count'], reverse=True) else: data = [] return responses.SuccessResponse(data[index:size], index=index, total=len(data)).send() except self.model.DoesNotExist as e: return responses.ErrorResponse(message='Instance does not Found.', status=404).send() except ValidationError as e: return responses.ErrorResponse(message=e.detail, status=e.status_code).send()
def create_abstract(request): post_dict = parser.parse(unicode(request.POST.urlencode()).encode("utf-8")) # We are interested in values, enabled and abstract. Let"s insert empty # values in case some of them are null (values and abstract are never null) if post_dict.get("enabled") == None: post_dict["enabled"] = {} # There is no specific model for the abstract context, so we will just use # the ContextDefinition model. Since this context is abstract, no rendered # version will be saved in ContextStorage c_uuid = gen_context_key() c_data = pickle.dumps({ "values": post_dict["values"], "enabled": post_dict["enabled"], "abstract": post_dict["abstract"] }) # For debug # return uncache_response(HttpResponse(json.dumps(post_dict, indent=2), \ # content_type="text/plain")) ContextDefinition.objects.create( id=c_uuid, name=tou(post_dict["values"]["name"]), description=u"", # TODO owner=request.user, key=u"", public=False, # TODO data=c_data, checksum=0, # TODO inherited=False, abstract=True) return redirect("dashboard")
def _validate_for_save(request): """ Validates the HttpRequest. It uses the forms defined in forms.py to check all the fields and runs some additional checks. It returns an HttpResponse instance if request is invalid and the dictionary of clean date otherwise. """ data = parser.parse(request.POST.urlencode()) clean_data = {} # Cluster section cluster_f = ClusterForm(data.get("cluster", {})) if not cluster_f.is_valid(): for label, msg in cluster_f.errors_list: messages.error(request, "Cluster %s: %s" % (label, msg)) return _show_cluster_def(request, data) clean_data["cluster"] = cluster_f.clean() # Check that contexts belong to user or is public for context_field_code in ["master_context_id", "worker_context_id"]: c = ContextDefinition.objects.get( id=clean_data["cluster"][context_field_code] ) if not c.public and c.owner != request.user: messages.error( request, "Context with id '%s' is not public and does not belong to\ you" % (c.id) ) return _show_cluster_def(request, data) #if c.is_encrypted: # messages.error(request, "Context '%s' is encrypted!" % (c.name)) # return _show_cluster_def(request, data) # elastiq section elastiq_f = ElastiqForm(data.get("elastiq", {})) if not elastiq_f.is_valid(): for label, msg in elastiq_f.errors_list: messages.error(request, "elastiq %s: %s" % (label, msg)) return _show_cluster_def(request, data) clean_data["elastiq"] = elastiq_f.clean() # EC2 section ec2_f = EC2Form(data.get("ec2", {})) if not ec2_f.is_valid(): for label, msg in ec2_f.errors_list: messages.error(request, "EC2 %s: %s" % (label, msg)) return _show_cluster_def(request, data) clean_data["ec2"] = ec2_f.clean() # Quota section quota_f = QuotaForm(data.get("quota", {})) if not quota_f.is_valid(): for label, msg in quota_f.errors_list: messages.error(request, "Quota %s: %s" % (label, msg)) return _show_cluster_def(request, data) clean_data["quota"] = quota_f.clean() return clean_data
def update_cart(self):
    """Apply the cart form: set the new item quantities and drop any items
    the user marked for removal."""
    form_data = parser.parse(self.request.POST.urlencode())
    cart = self.request.cart
    cart.update_quantities(form_data["item_quantity"])
    if "remove_item" in form_data:
        cart.remove_items(list(form_data["remove_item"]))
def decafDemo(request):
    """Publish a demo-image processing job on the decaf Redis channel.

    When 'src' is present and non-empty the job names the specific file;
    otherwise the whole demo directory is submitted.
    """
    params = parser.parse(request.POST.urlencode())
    socketid = params['socketid']
    log_to_terminal('Processing Demo Images Now', socketid)

    payload = {'dir': DEMO_IMAGE_PATH, 'flag': '2', 'socketid': socketid}
    src = params.get('src', '')
    if src != '':
        payload['demo'] = 'True'
        payload['filename'] = basename(urlparse(src).path)
    redis_obj.publish(decaf_channel_name, json.dumps(payload))
def get_artifact_handler():
    """Serve the artifact named in the query string for the given run."""
    from querystring_parser import parser

    params = parser.parse(request.query_string.decode("utf-8"), normalized=True)
    # Accept the current "run_id" key, falling back to the legacy "run_uuid".
    run_identifier = params.get("run_id")
    if not run_identifier:
        run_identifier = params.get("run_uuid")
    run = _get_tracking_store().get_run(run_identifier)
    artifact_repo = _get_artifact_repo(run)
    return _send_artifact(artifact_repo, params["path"])
def process_new_election_information_from_webform(request):
    """Processes the user's input from the WebForm page for creating a new election"""
    logger.info(
        f"[elections/election_management.py process_new_election_information_from_webform()] "
        f"request.POST={request.POST}")
    # Access check: when the logged-in user may not manage elections,
    # context is None and render_value is the page to return instead.
    (render_value, error_message, context) = verify_access_logged_user_and_create_context(
        request, TAB_STRING)
    if context is None:
        request.session[ERROR_MESSAGE_KEY] = '{}<br>'.format(error_message)
        return render_value
    # parser.parse() turns the urlencoded POST body into a nested dict
    # (repeated form fields become indexed sub-structures).
    updated_elections_information = parser.parse(request.POST.urlencode())
    # Every election- and nominee-field must be present before anything is saved.
    if ELECTION_TYPE_POST_KEY in updated_elections_information and \
            ELECTION_DATE_POST_KEY in updated_elections_information and \
            ELECTION_TIME_POST_KEY in updated_elections_information and \
            ELECTION_WEBSURVEY_LINK_POST_KEY in updated_elections_information and \
            NOM_NAME_POST_KEY in updated_elections_information and \
            NOM_POSITION_POST_KEY in updated_elections_information and \
            NOM_SPEECH_POST_KEY in updated_elections_information and \
            NOM_FACEBOOK_POST_KEY in updated_elections_information and \
            NOM_LINKEDIN_POST_KEY in updated_elections_information and \
            NOM_EMAIL_POST_KEY in updated_elections_information and \
            NOM_DISCORD_USERNAME_POST_KEY in updated_elections_information:
        logger.info(
            f"[elections/election_management.py "
            f"process_new_election_information_from_webform()] "
            f"updated_elections_information={updated_elections_information}")
        election = _create_new_election_from_webform(
            updated_elections_information)
        if there_are_multiple_entries(updated_elections_information, NOM_NAME_POST_KEY):
            # Several nominees were submitted: bulk-save them.
            _save_nominees_for_new_election_from_webform(
                election, updated_elections_information)
        else:
            # Single nominee: validate and save it directly (position index 0).
            success, nominee, error_message = _validate_and_return_new_nominee(
                updated_elections_information[NOM_NAME_POST_KEY],
                updated_elections_information[NOM_POSITION_POST_KEY],
                updated_elections_information[NOM_SPEECH_POST_KEY],
                updated_elections_information[NOM_FACEBOOK_POST_KEY],
                updated_elections_information[NOM_LINKEDIN_POST_KEY],
                updated_elections_information[NOM_EMAIL_POST_KEY],
                updated_elections_information[NOM_DISCORD_USERNAME_POST_KEY], 0)
            if success and nominee is not None:
                nominee.nomination_page = election
                nominee.save()
                logger.info(
                    "[elections/election_management.py save_new_nominee()] saved user "
                    f"full_name={nominee.name} position_index={nominee.position_index}"
                    f" facebook_link={nominee.facebook} linkedin_link={nominee.linked_in} "
                    f"email_address={nominee.email} discord_username={nominee.discord}"
                )
            # NOTE(review): when validation fails, error_message is not
            # surfaced and we still redirect below — confirm this is intended.
        return HttpResponseRedirect(
            f'{settings.URL_ROOT}elections/{election.slug}/')
    else:
        request.session[ERROR_MESSAGE_KEY] = '{}<br>'.format(
            "Not all necessary fields were detected in your input")
        return render_value
def user_availability_create(request):
    """Replace the current user's availability from the POSTed form.

    The form posts "day_period" tokens (e.g. "2_morning"); each named period
    expands to a list of slot indices on a 48-slot week grid.  Slots are then
    shifted by the user's timezone before saving, wrapping across day/week
    boundaries.

    Returns the saved availability (converted back to the user's timezone)
    as a JSON HttpResponse.
    """
    data = parser.parse(request.POST.urlencode())
    user = get_current_user(request)

    if data.get('availability'):
        availability_raw = data['availability']['']
    else:
        availability_raw = []
    # A single checkbox arrives as a scalar; normalise to a list.
    if not isinstance(availability_raw, list):
        availability_raw = [availability_raw]

    # Slot indices for each named period of the day.
    period_slots = {
        'morning': [16, 18, 20, 22],
        'afternoon': [24, 26, 28, 30],
        'night': [32, 34, 36, 38],
        'late-night': [40, 42, 44],
        'night-owl': [46, 48, 50],
    }

    availability = []
    for available_raw_item in availability_raw:
        # BUG FIX: was string.split(...) (removed in Python 3), and an
        # unrecognized period previously reused the previous iteration's
        # time_array (or raised NameError on the first item).  Skip instead.
        avail_data = available_raw_item.split('_')
        time_array = period_slots.get(avail_data[1])
        if time_array is None:
            continue
        for time in time_array:
            availability.append({
                'day': int(avail_data[0]),
                'start_time': int(time),
                'end_time': int(time) + 2
            })

    availability_array = []
    for available_slot in availability:
        newSlot = available_slot
        # Shift from the user's local grid into the storage grid
        # (user.timezone is in hours; the grid uses half-hour slots).
        newSlot['start_time'] -= user.timezone * 2
        newSlot['end_time'] -= user.timezone * 2
        # Wrap slot/day indices that fell off either end of the day/week.
        if newSlot['start_time'] < 0:
            newSlot['day'] = newSlot['day'] - 1
            newSlot['start_time'] = 48 + newSlot['start_time']
        if newSlot['end_time'] < 0:
            newSlot['end_time'] = 48 + newSlot['end_time']
        if newSlot['start_time'] > 47:
            newSlot['start_time'] = newSlot['start_time'] - 48
            newSlot['day'] = newSlot['day'] + 1
        if newSlot['end_time'] > 47:
            newSlot['end_time'] = newSlot['end_time'] - 48
        if newSlot['day'] < 0:
            newSlot['day'] = 7 + newSlot['day']
        if newSlot['day'] > 6:
            newSlot['day'] = newSlot['day'] - 7
        availability_array.append(UserAvailability(
            day=newSlot['day'],
            start_time=newSlot['start_time'],
            end_time=newSlot['end_time']
        ))

    user.availability = availability_array
    user.save()
    return HttpResponse(model_encode(user.availability_tz()), mimetype="application/json")
def course_list_ajax_handler(request):
    """DataTables server-side endpoint for the course list.

    Reads draw/start/length/search/order from the query string, applies
    filtering, ordering and paging to Course, and returns the DataTables
    JSON envelope.
    """
    request_dict = querystring_parser.parse(request.GET.urlencode())
    # BUG FIX: querystring values arrive as strings; only 'draw' was being
    # coerced, so the start/length comparisons and slices below misbehaved.
    draw = int(request_dict['draw'])
    start = int(request_dict['start'])
    length = int(request_dict['length'])
    search = request_dict.get('search', None)

    objects = Course.objects.all()
    if search and search['value']:
        objects = objects.filter(Q(name__icontains=search['value']) |
                                 Q(school__name__icontains=search['value']) |
                                 Q(department__school__name__icontains=search['value']))

    # Sortable DataTables column index -> model ordering field.
    column_fields = {1: 'updated_at', 2: 'file_count', 3: 'thank_count'}
    order_fields = []
    for order_index in request_dict['order']:
        order = request_dict['order'][order_index]
        # Coerce the column index too (string in the parsed querystring).
        order_field = column_fields.get(int(order['column']))
        # BUG FIX: only prefix '-' when a field matched; previously a desc
        # request on an unmapped column raised TypeError ('-' + None).
        if order_field:
            if order['dir'] == 'desc':
                order_field = '-' + order_field
            order_fields.append(order_field)
    objects = objects.order_by(*order_fields)

    displayRecords = objects.count()
    if start > 0:
        objects = objects[start:]
    objects = objects[:length]

    row_data = [
        [
            course_json(course),
            calendar.timegm(course.updated_at.timetuple()),
            course.file_count,
            course.thank_count,
            course.school.name if course.school else course.department.school.name,
        ] for course in objects
    ]

    response_dict = {
        'draw': draw,
        'recordsTotal': Course.objects.count(),
        'recordsFiltered': displayRecords,
        'data': row_data
    }
    return HttpResponse(json.dumps(response_dict), mimetype='application/json')
def education(request):
    '''
    account route for
    '''
    # Clinician whose education records are being viewed/edited, looked up
    # via the e-mail stored in the session.
    c = Clinician.objects.filter(user__email=request.session['email']).first()

    class EducationForm(forms.Form):
        type_choices = (
            ('post-graduate', 'post-graduate'),
            ('high-school', 'high-school'),
            ('undergraduate', 'undergraduate'),
            ('masters', 'masters'),
            ('other', 'other'),
        )
        year = forms.CharField(min_length=4, max_length=4)
        title = forms.CharField(max_length=30)
        description = forms.CharField(max_length=100, widget=forms.widgets.Textarea())
        type = forms.ChoiceField(choices=type_choices)

    edu_form = EducationForm()
    if request.method == "POST":
        if request.POST['btn-type'] == "1":
            # btn-type "1": append a single new education record from the form.
            edu_form = EducationForm(request.POST, request.FILES)
            if edu_form.is_valid():
                e = edu_form.cleaned_data
                c.education.append(e)
        elif request.POST['btn-type'] == "0":
            # btn-type "0": the page posts the fully edited record list as
            # parallel per-field arrays; rebuild the record dicts from them.
            post_params = parser.parse(request.POST.urlencode())
            fields = ['year', 'title', 'type', 'description']
            cur_all = []
            if any([_ in post_params for _ in fields]):
                # NOTE(review): `any` means a partially-posted form raises
                # KeyError below — presumably all four fields always arrive
                # together; verify against the template.
                for _ in fields:
                    if not isinstance(post_params[_], list):
                        post_params[_] = [post_params[_]]
                # Combine the parallel field lists one field at a time:
                # transpose the accumulated row tuples into columns, then
                # zip again with the next field's column appended, yielding
                # rows with one more element each pass.
                cur_all = zip(post_params[fields[0]])
                for field in fields[1:]:
                    cur_all = zip(*cur_all)
                    cur_all = zip(*cur_all, post_params[field])
                cur_all = list(cur_all)
            records = []
            for r in cur_all:
                # Turn each row tuple back into a field-name -> value dict.
                x = {}
                [x.update({j: i}) for i, j in zip(r, fields)]
                records.append(x)
            c.education = records
        c.save()
    edu_records = c.education
    return render(request, 'clinician/dashboard/account/education.html.j2', context={
        'title': "Account - Education & Training",
        'clinician': c,
        'education': edu_form,
        'edu_records': edu_records,
    })
def create_specified_election(request):
    """Create an election (and its nominee(s)) from the admin create-election form."""
    logger.info(f"[administration/views.py create_specified_election()] request.POST={request.POST}")
    groups = list(request.user.groups.values_list('name', flat=True))
    # Template context, including which privileged roles the user holds.
    context = {
        'tab': 'administration',
        'authenticated': request.user.is_authenticated,
        'Exec': ('Exec' in groups),
        'ElectionOfficer': ('ElectionOfficer' in groups),
        'Staff': request.user.is_staff,
        'Username': request.user.username
    }
    # Only election officers, staff or execs may create elections.
    if not ('ElectionOfficer' in groups or request.user.is_staff or 'Exec' in groups):
        return render(request, 'administration/invalid_access.html', context)
    if ELECTION_TYPE_KEY in request.POST and ELECTION_DATE_POST_KEY in request.POST and \
            ELECTION_TIME_POST_KEY in request.POST and ELECTION_WEBSURVEY_LINK_KEY in request.POST:
        logger.info("[administration/views.py create_specified_election()] creating new election")
        nomination_page = get_nomination_page(request.POST)
        # Nested/repeated nominee fields arrive as indexed sub-structures.
        post_dict = parser.parse(request.POST.urlencode())
        logger.info(f"[administration/views.py create_specified_election()] post_dict={post_dict}")
        logger.info(
            "[administration/views.py create_specified_election()] "
            f"full_name={post_dict[NOM_NAME_KEY]} len = {len(post_dict[NOM_NAME_KEY])}"
        )
        position_index = 0
        if (len(post_dict[NOM_NAME_KEY][0]) > 1):
            # NOTE(review): a multi-character first element is taken to mean
            # several nominees were posted — a single one-letter name would
            # also fail this test; confirm the heuristic.
            save_nominees(post_dict, nomination_page, position_index)
            position_index += 1
        else:
            # Single nominee posted as scalar fields.
            full_name = post_dict[NOM_NAME_KEY]
            exec_position = post_dict[NOM_POSITION_KEY]
            speech = post_dict[NOM_SPEECH_KEY]
            facebook_link = post_dict[NOM_FACEBOOK_KEY]
            linkedin_link = post_dict[NOM_LINKEDIN_KEY]
            email_address = post_dict[NOM_EMAIL_KEY]
            discord_username = post_dict[NOM_DISCORD_USERNAME_KEY]
            # 'NONE' is the sentinel for "no nominee entered".
            if full_name != 'NONE':
                logger.info(
                    "[administration/views.py create_specified_election()] "
                    f"saved user full_name={full_name} exec_position={exec_position} "
                    f"speech={speech} facebook_link={facebook_link} linkedin_link="
                    f"{linkedin_link} email_address={email_address} discord_username"
                    f"={discord_username}"
                )
                nom = Nominee(
                    nomination_page=nomination_page,
                    name=full_name,
                    exec_position=exec_position,
                    speech=speech,
                    facebook=facebook_link,
                    linked_in=linkedin_link,
                    email=email_address,
                    discord=discord_username,
                    position=position_index
                )
                nom.save()
        return render(request, 'administration/create_election.html', context)
    return render(request, 'administration/create_election.html', context)
def route_calculator(request):
    """Compute a route from the POSTed points and tolerance, returned as JSON."""
    form_data = parser.parse(request.POST.urlencode())
    route = Routecalc(form_data['points'], form_data['tolerance'])
    return HttpResponse(route, mimetype='application/json')
def demoVqa(request):
    """Run VQA feature extraction on the demo image referenced by 'src'.

    Responds with a JSON status object; on any failure the traceback is
    returned in the 'result' field and logged to the client's terminal.
    """
    post_dict = parser.parse(request.POST.urlencode())
    try:
        # NOTE(review): if this first lookup raises, the outer except below
        # references an unbound `socketid` — confirm clients always post it.
        socketid = post_dict['socketid']
        if 'src' not in post_dict:
            data = {'error': 'NoImageSelected'}
        else:
            data = {'info': 'Processing'}
            result_prefix_url = post_dict['src']
            imgname = basename(urlparse(result_prefix_url).path)
            image_path = os.path.join(conf.LOCAL_DEMO_VQA_PIC_DIR, imgname)
            # folder_name = str(shortuuid.uuid())
            # For now lets use socket id
            folder_name = socketid
            save_dir = os.path.join(conf.PIC_DIR, folder_name)
            feat_folder = os.path.join(save_dir, 'results')
            # Make the new directory based on time
            if not os.path.exists(save_dir):
                os.makedirs(save_dir)
                os.makedirs(feat_folder)
            feat_path = os.path.join(feat_folder, imgname)
            print image_path
            print result_prefix_url
            log_to_terminal('Processing image...', socketid)
            # This is for running it locally ie on Godel
            vqa_wrapper_feat(image_path, socketid, result_prefix_url, feat_path)
            # This is for posting it on Redis - ie to Rosenblatt
            # classify_wrapper_redis(image_path, post_dict['socketid'], result_path)
            data = {'info': 'Completed'}
        # Probe for the client address; failures are only logged.
        try:
            request.META['REMOTE_ADDR']
        except:
            print str(traceback.format_exc())
            log_to_terminal(str(traceback.format_exc()), socketid)
        response = JSONResponse(data, {}, response_mimetype(request))
        response['Content-Disposition'] = 'inline; filename=files.json'
        return response
    except:
        # Return the traceback to the caller rather than a 500 page.
        data = {'result': str(traceback.format_exc())}
        response = JSONResponse(data, {}, response_mimetype(request))
        response['Content-Disposition'] = 'inline; filename=files.json'
        log_to_terminal(str(traceback.format_exc()), socketid)
        return response
def handleQuestion(request):
    """Answer a VQA question against a previously-extracted image feature file.

    Expects POSTed fields: socketid, imageid, src (image URL) and qn (the
    question).  Responds with a JSON status object; on failure the traceback
    is returned in the 'result' field.
    """
    post_dict = parser.parse(request.POST.urlencode())
    try:
        """
        try:
            if post_dict['pass'] != 'Passphrase#123!':
                response = JSONResponse({'passworderror': 'Error'}, {}, response_mimetype(request))
                response['Content-Disposition'] = 'inline; filename=files.json'
                return response
        except Exception as e:
            return 'Error'
        """
        socketid = post_dict['socketid']
        imageid = post_dict['imageid']
        result_url = post_dict['src']
        question = post_dict['qn']
        data = {'info': 'Processing'}
        # data_row = VQA_Question.create(socketid = socketid, questionText = question, imageName = imageid, imagePath = '/home/ubuntu/cloudcv/cloudcv17/cloudcv17/media/pictures/cloudcv')

        imgname = basename(urlparse(result_url).path)
        # folder_name = str(shortuuid.uuid())
        # For now lets use socket id
        # The feature file was stored under <PIC_DIR>/<socketid>/results/.
        feat_folder = os.path.join(conf.PIC_DIR, socketid, 'results')
        feat_path = os.path.join(feat_folder, imgname)
        # Throw exception here if file does not exist
        # if not os.path.exists(feat_path):

        log_to_terminal(feat_path, socketid)
        log_to_terminal(result_url, socketid)
        log_to_terminal('Processing image...', socketid)
        # This is for running it locally ie on Godel
        vqa_wrapper_answer(feat_path, question, socketid, imageid)
        # This is for posting it on Redis - ie to Rosenblatt
        # classify_wrapper_redis(image_path, post_dict['socketid'], result_path)
        data = {'info': 'Completed', 'questionid': "1"}
        response = JSONResponse(data, {}, response_mimetype(request))
        response['Content-Disposition'] = 'inline; filename=files.json'
        return response
    except Exception:
        # BUG FIX: traceback.format_exc() takes an optional `limit`, not the
        # exception instance; the old format_exc(e) misused the API.
        data = {'result': str(traceback.format_exc())}
        response = JSONResponse(data, {}, response_mimetype(request))
        response['Content-Disposition'] = 'inline; filename=files.json'
        return response
def embedded_template_requesting(request):
    """HelloSign embedded-signing demo: on GET list the reusable templates,
    on POST send a signature request for the chosen template and render the
    embedded sign URL."""
    try:
        hsclient = HSClient(api_key=API_KEY)
    except NoAuthMethod:
        return render(request, 'hellosign/embedded_template_requesting.html', {
            'error_message': "Please update your settings to include a " +
            "value for API_KEY.",
        })
    if request.method == 'POST':
        try:
            signers = []
            # Nested role fields arrive as sub-dicts after parsing.
            post_dict = parser.parse(request.POST.urlencode())
            template_id = post_dict["template"]
            # Keep only the signer roles that were actually filled in.
            # (iteritems() — this module is Python 2.)
            for key, value in post_dict["signerRole"].iteritems():
                if value:
                    value['role_name'] = key
                    signers.append(value)
            ccs = []
            for key, value in post_dict["ccRole"].iteritems():
                # if value:
                ccs.append({'role_name': key, 'email_address': value})
            custom_fields = []
            for key, value in post_dict["cf"].iteritems():
                if value:
                    custom_fields.append({key: value})
            sr = hsclient.send_signature_request_embedded_with_rf(
                test_mode = "1",
                client_id = CLIENT_ID,
                reusable_form_id = template_id,
                title = "NDA with Acme Co.",
                subject = "The NDA we talked about",
                message = "Please sign this NDA and then we" +
                " can discuss more. Let me know if you have any questions.",
                signing_redirect_url = "",
                signers = signers,
                ccs = ccs,
                custom_fields = custom_fields)
            embedded = hsclient.get_embeded_object(sr.signatures[0]["signature_id"])
            # TODO: need some more validations here
            # except KeyError:
            #     return render(request, 'hellosign/embedded_template_requesting.html', {
            #         'error_message': "Please enter both your name and email.",
            #     })
        except NoAuthMethod:
            pass
        else:
            return render(request, 'hellosign/embedded_template_requesting.html', {
                'client_id': CLIENT_ID,
                'sign_url': str(embedded.sign_url)
            })
    else:
        rf_list = hsclient.get_reusable_form_list()
        # Hand-built JSON-ish array for the template picker.
        # NOTE(review): the trailing ", " before "]" is not strict JSON —
        # confirm the consuming script tolerates it.
        templates = "[";
        for rf in rf_list:
            # print json.dumps(rf.json_data)
            templates = templates + json.dumps(rf.json_data) + ", "
        templates = templates + "]"
        return render(request, 'hellosign/embedded_template_requesting.html', {
            'templates': templates
        })
def get_bookmarks(request):
    """Sync the client-posted bookmarks into the database, then return the
    user's own plus shared bookmarks as a JSON list (or 304 on any failure)."""
    # sync the client-side bookmarks with the server side bookmarks
    # update the server-side bookmarks and return the new list
    try:
        bookmark_dict = parser.parse(request.POST.urlencode())["bookmarks"]
    except:
        # No (or unparsable) bookmark payload: nothing to sync.
        bookmark_dict = {}
    try:
        # loop through the list from the client
        # if user, bm_name, and bm_state match then skip
        # otherwise, add to the db
        for key, bookmark in bookmark_dict.items():
            try:
                Bookmark.objects.get(user=request.user,
                                     name=bookmark["name"],
                                     url_hash=bookmark["hash"])
            except Bookmark.DoesNotExist:
                # Unknown to the server: persist it.
                new_bookmark = Bookmark(user=request.user,
                                        name=bookmark["name"],
                                        url_hash=bookmark["hash"])
                new_bookmark.save()
            except:
                # NOTE(review): malformed client entries are silently skipped.
                continue
        # grab all bookmarks belonging to this user
        # serialize bookmarks into 'name', 'hash' objects and return simplejson dump
        content = []
        bookmark_list = Bookmark.objects.filter(user=request.user)
        for bookmark in bookmark_list:
            sharing_groups = [group.name for group in bookmark.sharing_groups.all()]
            content.append(
                {
                    "uid": bookmark.uid,
                    "name": bookmark.name,
                    "hash": bookmark.url_hash,
                    "sharing_groups": sharing_groups,
                }
            )
        # Append bookmarks shared with this user that aren't already theirs.
        shared_bookmarks = Bookmark.objects.shared_with_user(request.user)
        for bookmark in shared_bookmarks:
            if bookmark not in bookmark_list:
                username = bookmark.user.username
                actual_name = bookmark.user.first_name + " " + bookmark.user.last_name
                content.append(
                    {
                        "uid": bookmark.uid,
                        "name": bookmark.name,
                        "hash": bookmark.url_hash,
                        "shared": True,
                        "shared_by_username": username,
                        "shared_by_name": actual_name,
                    }
                )
        return HttpResponse(simplejson.dumps(content), mimetype="application/json", status=200)
    except:
        # NOTE(review): every failure above is collapsed into a bare 304 —
        # confirm this is the intended client contract.
        return HttpResponse(status=304)
def handleQuestion(request):
    """Answer a VQA question for a previously-processed image.

    Reads socketid, imageid, src and qn from the POST body, locates the
    feature file written when the image was uploaded, and runs the local
    answering pipeline.  Always responds with JSON; failures carry the
    traceback in 'result'.
    """
    params = parser.parse(request.POST.urlencode())
    try:
        socketid = params['socketid']
        imageid = params['imageid']
        result_url = params['src']
        question = params['qn']
        data = {'info': 'Processing'}

        # The feature file lives under <PIC_DIR>/<socketid>/results/,
        # named after the image referenced by the posted URL.
        imgname = basename(urlparse(result_url).path)
        feat_path = os.path.join(conf.PIC_DIR, socketid, 'results', imgname)

        log_to_terminal(feat_path, socketid)
        log_to_terminal(result_url, socketid)
        log_to_terminal('Processing image...', socketid)

        # Run the answering pipeline locally (not via the Redis worker).
        vqa_wrapper_answer(feat_path, question, socketid, imageid)

        data = {'info': 'Completed'}
        response = JSONResponse(data, {}, response_mimetype(request))
        response['Content-Disposition'] = 'inline; filename=files.json'
        return response
    except Exception:
        failure = {'result': str(traceback.format_exc())}
        response = JSONResponse(failure, {}, response_mimetype(request))
        response['Content-Disposition'] = 'inline; filename=files.json'
        return response
def parse_params(params):
    """Parse a sequence of "key=value" strings into nested dicts and lists.

    querystring_parser represents repeated/indexed keys as dicts keyed by
    integers 0..n-1; listify() recursively converts those back into real
    lists and leaves everything else untouched.

    BUG FIX: the old version used py2-only dict.iteritems() and compared
    ``params.keys() == range(len(params))`` — a list comparison that only
    works on Python 2 and silently depends on dict key order.  The check is
    now order-independent and the values are emitted in key order.
    """
    def listify(node):
        if isinstance(node, dict):
            # A dict whose keys are exactly 0..n-1 is an encoded list.
            if sorted(node.keys()) == list(range(len(node))):
                node = [node[i] for i in range(len(node))]
            else:
                for key, value in node.items():
                    node[key] = listify(value)
        if isinstance(node, list):
            return [listify(item) for item in node]
        return node

    return listify(querystring_parser.parse('&'.join(params)))
def request_callback(request):
    """Mock ZeroPush push endpoint: validate the payload and return an
    HTTP-style (status, headers, body) tuple."""
    payload = querystring_parser.parse(request.body)
    if not validate_zeropush_payload(payload):
        return (400, {}, {})
    token_count = len(payload["device_tokens"][""])
    body = json.dumps({
        "sent_count": token_count,
        "inactive_tokens": [],
        "unregistered_tokens": [],
    })
    return (200, {"Content-Type": "application/json"}, body)
def show_new(request, cluster_id=None):
    """Show the cluster-definition form, either empty (no cluster_id) or
    pre-filled by cloning an existing cluster definition."""
    if cluster_id is None:
        # New context
        return _show_cluster_def(request, {})
    else:
        #
        # Cloning existing context
        #
        # Try to fetch context from the database. Don't filter on current user.
        try:
            cluster = ClusterDefinition.objects.get(
                id=cluster_id
            )
            # Unmarshal data
            try:
                if cluster.is_encrypted:
                    # Encrypted clusters need the password from the POST body.
                    post_dict = parser.parse(
                        unicode(request.POST.urlencode()).encode("utf-8")
                    )
                    try:
                        cluster.decrypt(
                            post_dict['password']
                        )
                    except ClusterDefinition.CryptographyError:
                        messages.error(request, 'Wrong password')
                        return _show_cluster_def(request, {})
                cluster_data_dict = json.loads(
                    cluster.data
                )
            except ValueError:
                messages.error(request, 'Corrupted cluster data: creating a new cluster.')
                return _show_cluster_def(request, {})
            # Mangle the dictionary to suit form structure
            cluster_data_dict['cluster'] = {
                'master_context_id': cluster.master_context_id,
                'worker_context_id': cluster.worker_context_id,
                'id': cluster.id,
                'name': cluster.name,
                'description': cluster.description
            }
            if 'passphrase' in cluster_data_dict:
                cluster_data_dict['cluster']['passphrase'] = cluster_data_dict['passphrase']
                del cluster_data_dict['passphrase']
            #return uncache_response(HttpResponse(json.dumps(cluster_data_dict, indent=2), content_type="text/plain"))
            return _show_cluster_def(request, cluster_data_dict)
        # BUG FIX: `except ClusterDefinition.DoesNotExist, Http404:` was the
        # py2 comma form that BINDS the DoesNotExist instance to the name
        # Http404 instead of catching Http404 — a tuple catches both.
        except (ClusterDefinition.DoesNotExist, Http404):
            messages.error(request, 'The specified cluster does not exist: creating a new cluster instead.')
            return _show_cluster_def(request, {})
def login_view(request):
    """Log a user in with Facebook credentials; create the account from the
    posted Facebook profile data on first login.  Returns the encoded user
    as JSON, or 'login failed' when the access token does not check out."""
    try:
        # Existing user: verify the posted access token and start a session.
        user = User.objects.get(fb_id=request.POST['id'])
        if user.check_access_token(request.POST['access_token']):
            user.backend = 'mongoengine.django.auth.MongoEngineBackend'
            login(request, user)
            request.session.set_expiry(60 * 60 * 24 * 30)  # 1 month timeout
            return HttpResponse(model_encode(user), mimetype="application/json")
        else:
            return HttpResponse('login failed')
    except DoesNotExist:
        # First login: build the account from the Facebook profile fields.
        data = parser.parse(request.POST.urlencode())
        # Fill in defaults for optional profile fields.
        if not data.get('email'):
            data['email'] = str(data['id']) + '@facebook.com'
        if not data.get('username'):
            data['username'] = data['email']
        if not data.get('bio'):
            data['bio'] = ''
        user = User(
            username = data['email'],
            name = data['name'],
            first_name = data['first_name'],
            last_name = data['last_name'],
            fb_id = data['id'],
            fb_access_token = data['access_token'],
            fb_link = data['link'],
            fb_username = data['username'],
            bio = data['bio'],
            email = data['email'],
            timezone = data['timezone'],
            locale = data['locale'],
            gender = data['gender']
        )
        # Fetch the profile picture once to record its dimensions.
        imgdata = urllib2.urlopen('https://graph.facebook.com/'+str(data['id'])+'/picture?type=large')
        image_type,width,height = getImageInfo(imgdata)
        user.picture_width = width
        user.picture_height = height
        user.save()
        # Seed scored profile properties from the optional Facebook data.
        if data.get('hometown'):
            user.add_property('location', data['hometown'], 650)
        if data.get('location'):
            user.add_property('location', data['location'], 750)
        for i in range(len(data['work'])):
            # More recent jobs score higher; min() caps the decay at the 5th.
            work_score = 200 * 3/(min(i,4)+1)
            user.add_property('work', data['work'][i], work_score)
        for i in range(len(data['education'])):
            user.add_property('school', data['education'][i], 680*(4+i)/4)
        for i in range(len(data['inspirational_people'])):
            user.add_property('inspirational_person', data['inspirational_people'][i], 200)
        user.set_default_featured_properties()
        return HttpResponse(model_encode(user), mimetype="application/json")
def pass1(request):
    """Check the POSTed 'pass' field against the demo passphrase and return
    a JSON {'success': 'true'/'false'} response."""
    try:
        data = {'success': 'false'}
        if request.method == 'POST':
            post_dict = parser.parse(request.POST.urlencode())
            print post_dict
            # NOTE(review): passphrase is hard-coded in source (and printed
            # alongside the POST body above) — security concern to confirm.
            if post_dict['pass'] == 'Passphrase#123!':
                data = {'success': 'true'}
        response = JSONResponse(data, {}, response_mimetype(request))
        response['Content-Disposition'] = 'inline; filename=files.json'
        return response
    except:
        # print str(traceback.format_exc())
        data['error'] = 'Error'
        response = JSONResponse(data, {}, response_mimetype(request))
        return response
def view_cart(request):
    """Render the cart page; on POST, validate the cart options and stash a
    checkout package in the session before redirecting to checkout.

    Any validation problem falls through to re-render the cart with `error`.
    """
    # Get cart
    cart = cart_from_request(request)
    error = None

    # If is a submit, pre validate the data
    if request.method == 'POST' and isinstance(cart, list) and len(cart) > 0:
        # Get cart options.  Parsed querystring values are strings and
        # optional fields may be absent, so default/coerce defensively.
        post = parser.parse(request.POST.urlencode())
        # BUG FIX: missing coupon_code was None, crashing len() below.
        coupon_code = post.get('coupon_code') or ''
        coupon = Coupon.objects.get_valid_coupon(coupon_code)
        # BUG FIX: fromtimestamp needs a number, not the parsed string.
        page_datetime = datetime.fromtimestamp(float(post.get('datetime')))
        allow_sub_detail = post.get('allow_sub_detail')
        allow_sub_detail = {} if allow_sub_detail is None else allow_sub_detail

        # Validate delivery choices
        delivery_choices = post.get('delivery_choices')
        delivery_options = validate_delivery_choices(delivery_choices)
        if delivery_options == False:
            error = 'The delivery option you select is no longer valid'
        elif len(coupon_code) > 0 and coupon == False:
            error = 'The coupon you have entered is no longer valid'
        else:
            # Everything checks out: hand the package to the checkout view.
            request.session['checkout_package'] = {
                'cart': cart,
                'delivery_choices': delivery_choices,
                'allow_sub_detail': allow_sub_detail,
                'coupon_code': coupon_code,
                'page_datetime': page_datetime,
            }
            response = HttpResponseRedirect(reverse('order:checkout'))
            return response

    store_items = cart_to_store_items(cart)
    template = loader.get_template('order/cart.html')
    context = RequestContext(request, {
        'store_items': store_items,
        'datetime': datetime.now(),
        'error': error,
    })
    return HttpResponse(template.render(context))