def obj_create(self, bundle, **kwargs):
    """Create an API key, translating duplicate-label DB errors into a 400."""
    try:
        created = super(ApiKeyResource, self).obj_create(bundle, **kwargs)
    except IntegrityError:
        raise BadRequest("Api key label must be unique")
    return created
def obj_delete(self, bundle, **kwargs):
    """Projects cannot be deleted through the API; always reject."""
    raise BadRequest("can not delete project")
def delete_list(self, request=None, **kwargs):
    """Bulk deletion is not supported on this resource; always reject."""
    raise BadRequest('not allowed')
def check_required_params(bundle, required):
    """Ensure every key in *required* is present in ``bundle.data``.

    :param bundle: Tastypie bundle whose ``data`` dict is inspected.
    :param required: iterable of required parameter names.
    :raises BadRequest: naming the first missing parameter.
    """
    for param in required:
        # Membership test instead of try/except KeyError around a discarded
        # lookup: clearer intent, same behavior for dict-like bundle.data.
        if param not in bundle.data:
            raise BadRequest(_(u'Please enter your %s') % param)
def obj_create(self, bundle, **kwargs):
    """Self-registration endpoint: validate input, create the User, log
    them in, record a 'register' Tracker event and attach the user's API
    key to the response bundle.

    :param bundle: Tastypie bundle with the registration fields.
    :returns: the bundle with ``api_key`` added and credentials stripped.
    :raises BadRequest: when self-registration is disabled, validation
        fails, or the username is already taken.
    """
    self_register = SettingProperties \
        .get_bool(constants.OPPIA_ALLOW_SELF_REGISTRATION,
                  settings.OPPIA_ALLOW_SELF_REGISTRATION)
    if not self_register:
        raise BadRequest(_(u'Registration is disabled on this server.'))
    required = ['username', 'password', 'passwordagain',
                'firstname', 'lastname']
    check_required_params(bundle, required)

    email = bundle.data['email'] if 'email' in bundle.data else ''
    data = {
        'username': bundle.data['username'],
        'password': bundle.data['password'],
        'password_again': bundle.data['passwordagain'],
        'email': email,
        'first_name': bundle.data['firstname'],
        'last_name': bundle.data['lastname'],
    }
    # Copy over any custom registration fields the client supplied.
    for custom_field in CustomField.objects.all():
        if custom_field.id in bundle.data:
            data[custom_field.id] = bundle.data[custom_field.id]

    rf = RegisterForm(data)
    if not rf.is_valid():
        error_str = ""
        for key, value in rf.errors.items():
            for error in value:
                error_str += error + "\n"
        raise BadRequest(error_str)

    username = bundle.data['username']
    password = bundle.data['password']
    # BUG FIX: the original line ended with a stray trailing comma
    # (`email = ... else '',`) which silently made `email` a 1-element
    # tuple; reuse the string computed above instead.
    first_name = bundle.data['firstname']
    last_name = bundle.data['lastname']
    try:
        bundle.obj = User.objects.create_user(username=username,
                                              password=password)
        bundle.obj.first_name = first_name
        bundle.obj.email = email
        bundle.obj.last_name = last_name
        bundle.obj.save()
    except IntegrityError:
        raise BadRequest(
            _(u'Username "%s" already in use, please select another'
              % username))
    self.process_register_base_profile(bundle)
    self.process_register_custom_fields(bundle)

    u = authenticate(username=username, password=password)
    if u is not None and u.is_active:
        login(bundle.request, u)
        # Record the registration event for analytics.
        tracker = Tracker()
        tracker.user = u
        tracker.type = 'register'
        tracker.ip = bundle.request.META.get('REMOTE_ADDR',
                                             DEFAULT_IP_ADDRESS)
        tracker.agent = bundle.request.META.get('HTTP_USER_AGENT',
                                                'unknown')
        tracker.save()
        key = ApiKey.objects.get(user=u)
        bundle.data['api_key'] = key.key
    # Strip credentials so they are not echoed back in the response.
    for field in ('passwordagain', 'password', 'firstname', 'lastname'):
        del bundle.data[field]
    return bundle
def obj_create(self, bundle, **kwargs):
    """
    Handles creating Samples through the API.

    :param bundle: Bundle containing the information to create the Sample.
    :type bundle: Tastypie Bundle object.
    :returns: Bundle object.
    :raises BadRequest: If filedata is not provided or creation fails.
    """
    analyst = bundle.request.user.username
    upload_type = bundle.data.get('upload_type', None)
    if not upload_type:
        raise BadRequest('Must provide an upload type.')
    if upload_type not in ('metadata', 'file'):
        raise BadRequest('Not a valid upload type.')

    # Only one of (filename, md5) vs (password, filedata) is populated,
    # depending on the upload type.
    filename = md5 = password = filedata = None
    if upload_type == 'metadata':
        filename = bundle.data.get('filename', None)
        md5 = bundle.data.get('md5', None)
    else:  # upload_type == 'file'
        password = bundle.data.get('password', None)
        filedata = bundle.data.get('filedata', None)
        if not filedata:
            raise BadRequest("Upload type of 'file' but no file uploaded.")

    campaign = bundle.data.get('campaign', None)
    confidence = bundle.data.get('confidence', None)
    source = bundle.data.get('source', None)
    method = bundle.data.get('method', None)
    reference = bundle.data.get('reference', None)
    file_format = bundle.data.get('file_format', None)
    parent_md5 = bundle.data.get('parent_md5', None)
    bucket_list = bundle.data.get('bucket_list', None)
    ticket = bundle.data.get('ticket', None)

    sample_md5 = handle_uploaded_file(filedata, source, method, reference,
                                      file_format, password,
                                      user=analyst,
                                      campaign=campaign,
                                      confidence=confidence,
                                      parent_md5=parent_md5,
                                      filename=filename,
                                      md5=md5,
                                      bucket_list=bucket_list,
                                      ticket=ticket,
                                      is_return_only_md5=False)
    if len(sample_md5) == 0:
        raise BadRequest('Unable to create sample from data.')
    first = sample_md5[0]
    if not first.get('success') and 'message' in first:
        raise BadRequest(first['message'])
    return bundle
def is_authenticated(self, request, **kwargs):
    """HMAC-SHA1 request authentication.

    Accepts either a single ``Authorization`` header carrying comma-separated
    key/signature/nonce/timestamp params, or a legacy key:signature pair plus
    separate X-Snap-* headers. The signature is HMAC-SHA1 over
    key + method + path + nonce + (timestamp or date), and the timestamp must
    be within +/- 5 minutes of server time.

    NOTE(review): the bare ``except:`` fallbacks below deliberately chain
    deprecated param names; in some header combinations ``x_snap_timestamp``
    is never bound and the later ``try`` relies on the resulting NameError —
    order-sensitive, do not reorder.
    """
    # check for the environment variable to skip auth
    if not SNAP_AUTHENTICATION:
        return True
    try:
        # get the Authorization header
        auth = request.META['HTTP_AUTHORIZATION'].strip().split(' ')
        auth_snap = auth[0].lower()
        # get the request verb and path
        request_method = request.META['REQUEST_METHOD']
        request_path = request.path
        if 'signature' in auth[1]:
            # get signature info all in Authorization header
            auth_parts = auth[1].strip().split(',')
            auth_params = dict()
            for part in auth_parts:
                items = part.replace('"', '').split('=')
                auth_params[items[0]] = items[1]
            # add the parts to proper varibles for signature
            try:
                key = auth_params['key']
            except:
                # deprecated(2013-05-10) kept for compatibility
                key = auth_params['snap_key']
            try:
                signature = auth_params['signature']
            except:
                # deprecated(2013-05-10) kept for compatibility
                signature = auth_params['snap_signature']
            try:
                x_snap_nonce = auth_params['nonce']
            except:
                # deprecated(2013-05-10) kept for compatibility
                x_snap_nonce = auth_params['snap_nonce']
            try:
                x_snap_timestamp = auth_params['timestamp']
            except:
                # deprecated(2013-05-10) kept for compatibility
                if 'snap_timestamp' in auth_params:
                    x_snap_timestamp = auth_params['snap_timestamp']
                else:
                    x_snap_date = auth_params['snap_date']
            try:
                secret = settings.APIKEY[key]
            except KeyError:
                return False
        else:
            # deprecated but kept for compatibility:
            # api signature info in multiple headers
            key = auth[1].split(':')[0]
            try:
                secret = settings.APIKEY[key]
            except KeyError:
                return False
            signature = auth[1].split(':')[1]
            x_snap_nonce = request.META['HTTP_X_SNAP_NONCE']
            x_snap_date = request.META['HTTP_X_SNAP_DATE']
        # create the raw string to hash
        try:
            raw = key + request_method + request_path + x_snap_nonce + x_snap_timestamp
        except:
            # x_snap_timestamp undefined -> fall back to the date header
            raw = key + request_method + request_path + x_snap_nonce + x_snap_date
        # calculate the hash
        # NOTE(review): hmac.new needs bytes key/msg on Python 3 — this is
        # Python-2 style str usage; confirm before porting.
        hashed = hmac.new(secret, raw, hashlib.sha1)
        # calculate time differences
        try:
            x_snap_datetime = datetime.fromtimestamp(int(x_snap_timestamp),
                                                     tz=pytz.utc)
        except:
            # parse the date header
            x_snap_datetime = dateutil.parser.parse(x_snap_date)
        now_datetime = datetime.now(pytz.utc)  # current time on server
        pre_now_datetime = now_datetime + timedelta(0, -300)  # 5 minutes in the past
        post_now_datetime = now_datetime + timedelta(0, 300)  # 5 minutes in the future
        # if all conditions pass, return true
        if auth_snap == 'snap' and (x_snap_datetime >= pre_now_datetime
                                    and x_snap_datetime <= post_now_datetime
                                    ) and signature == hashed.hexdigest():
            return True
        else:
            # we failed, return false
            return False
    except KeyError as e:
        raise BadRequest('Missing authentication param: ' + str(e))
def obj_update(self, bundle, **kwargs):
    """Update a Contact's name fields, hidden flag, emails, notes, phone
    numbers, titles and addresses from ``bundle.data``.

    :returns: the bundle with ``bundle.obj`` set to the updated Contact.
    :raises BadRequest: if the requester may not edit the contact, if
        validation fails, or if un-hiding would collide on an email.
    """
    data = bundle.data
    user = bundle.request.user
    contact = bundle.obj = Contact.objects.all_them().get(id=data['id'])
    if not (user.is_superuser or user.is_staff or user == contact.creator):
        raise BadRequest("It appears you cannot edit this contact")
    # FIX: validate once and reuse the result (was computed twice).
    validation_errors = contact_is_valid(bundle)
    if len(validation_errors) > 0:
        raise BadRequest(validation_errors)
    MAX_EMAIL_ADDRESSES = 1
    for field in ['first_name', 'last_name', 'middle_name', 'dob']:
        if field in data:
            setattr(contact, field, data[field])
    if 'hidden' in data:
        hidden = data['hidden']
        contact.hidden = hidden
        if hidden:
            # Deactivate all the emails. Plain loop instead of map() so the
            # side effect is guaranteed to run (map is lazy on Python 3).
            for email in contact.emails.all():
                email.deactive()
        else:
            for email in contact.emails.all():
                if EmailAddress.objects.filter(content=email).count() > 1:
                    # there is more than one email
                    raise BadRequest(
                        "Another contact has been created with this email. To un-delete this contact please change its email address or remove it."
                    )
                else:
                    email.deprecated = None
                    email.save()
    if 'emails' in data:
        if isinstance(data['emails'], list):
            # Just a list of email addresses
            try:
                for i in range(min(MAX_EMAIL_ADDRESSES,
                                   len(data['emails']))):
                    if data['emails'][i].strip() != '':
                        try:
                            email = contact.emails.all()[i]
                            email.content = data['emails'][i]
                            email.save()
                        except Exception:
                            # the email is new and not edited
                            eaddr = EmailAddress(content=data['emails'][i])
                            eaddr.save()
                            contact.emails.add(eaddr)
            except Exception as e:
                logger.exception("Failure to update email %s" % e)
        elif isinstance(data['emails'], dict):
            # By ID: should be of the form { '123' : '*****@*****.**' }
            try:
                contactEmails = contact.emails.all()
                for pk, address in data['emails'].items():
                    email = EmailAddress.objects.get(id=int(pk))
                    assert (email in contactEmails)
                    email.content = address
                    email.save()
            except Exception as e:
                logger.exception("Failure to update email %s" % e)
    if 'notes' in data:
        notes = [Note(content=x) for x in data['notes'] if x]
        for note in notes:
            note.save()
        contact.notes = notes
    if 'phone' in data:
        phone_numbers = [Phone(content=x) for x in data['phone'] if x]
        for number in phone_numbers:
            number.save()
        contact.phone_numbers = phone_numbers
    if 'titles' in data:
        titles = [Title(content=x) for x in data['titles'] if x]
        for title in titles:
            title.save()
        contact.titles = titles
    if 'addresses' in data:
        addresses = [Address(content=x) for x in data['addresses'] if x]
        for address in addresses:
            address.save()
        contact.addresses = addresses
    contact.save()
    return bundle
def obj_create(self, bundle, **kwargs):
    """Reject PUT-based creates; delegate all other methods to the parent."""
    if bundle.request.method.lower() == 'put':
        raise BadRequest("Invalid primary key provided.")
    return super(CampaignResource, self).obj_create(bundle, **kwargs)
def post_validate_files(self, request, **kwargs):
    """Receive a CBHFlowFile ID which points at an uploaded SDF, XLSX or
    CDX file. Perform validation on the file's contents and then send the
    resultant elasticsearch index to the validate mult batch method.
    More about data import information can be found in the wiki.

    Small uploads (< 100 rows) are processed synchronously; larger ones are
    queued as an async task whose id is stashed in the session.
    """
    automapped_structure = False  # NOTE(review): never read — dead variable
    deserialized = self.deserialize(
        request, request.body,
        format=request.META.get('CONTENT_TYPE', 'application/json'))
    deserialized = self.alter_deserialized_detail_data(request, deserialized)
    bundle = self.build_bundle(data=dict_strip_unicode_keys(deserialized),
                               request=request)
    self.authorized_create_detail(self.get_object_list(bundle.request),
                                  bundle)
    file_name = bundle.data['file_name']
    session_key = request.COOKIES[settings.SESSION_COOKIE_NAME]
    # Uploaded files are namespaced per session.
    correct_file = CBHFlowFile.objects.get(identifier="%s-%s"
                                           % (session_key, file_name))
    self.get_file_name_test(correct_file.file)
    multiple_batch = CBHCompoundMultipleBatch.objects.create(
        project=bundle.data["project"], uploaded_file=correct_file)
    if (correct_file.extension in (".xls", ".xlsx")):
        # we need to know which column contains structural info - this needs
        # to be defined on the mapping page and passed here
        # read in the specified structural column
        df = None
        try:
            df = pd.read_excel(correct_file.file)
            multiple_batch.batch_count = df.shape[0]
            multiple_batch.save()
            bundle.data["total_processing"] = multiple_batch.batch_count
        except IndexError:
            raise BadRequest("no_headers")
    bundledata = bundle.data
    creator_user = request.user
    cfr = ChemRegCustomFieldConfigResource()
    jsondata = json.loads(
        cfr.get_detail(
            request,
            pk=bundledata["project"].custom_field_config_id).content)
    schemaform = [
        field["edit_form"]["form"][0]
        for field in jsondata["project_data_fields"]
    ]
    if (correct_file.extension == ".sdf"):
        # read in the file and count the molecules
        self.preprocess_sdf_file(correct_file.file, request)
        multiple_batch.batch_count = get_sdf_count(correct_file)
        multiple_batch.save()
        bundle.data["total_processing"] = multiple_batch.batch_count
    skinconfig = SkinningConfig.objects.all()[0]
    if bundle.data["project"].project_type.show_compounds:
        if correct_file.extension not in (".xls", ".xlsx", ".sdf", ".cdxml"):
            raise BadRequest("file_format_error")
        # NOTE(review): the compounds branch checks max_non_chem_upload_size
        # and the non-compounds branch max_chem_upload_size — looks swapped;
        # confirm against the settings model before changing.
        if multiple_batch.batch_count > skinconfig.max_non_chem_upload_size:
            raise BadRequest("file_too_large")
    else:
        if correct_file.extension not in (".xls", ".xlsx",):
            raise BadRequest("file_format_error")
        if multiple_batch.batch_count > skinconfig.max_chem_upload_size:
            raise BadRequest("file_too_large")
    bundle.data["current_batch"] = multiple_batch.pk
    bundle.data["multiplebatch"] = multiple_batch.pk
    if multiple_batch.batch_count < 100:
        # Small enough to process inline.
        process_file_request(multiple_batch, bundledata, creator_user,
                             schemaform, correct_file, session_key)
        return self.create_response(request, bundle,
                                    response_class=http.HttpAccepted)
    # NOTE(review): `async` is a django-q call here but a reserved keyword
    # from Python 3.7; `id` shadows the builtin — rename when porting.
    id = async ("cbh_chem_api.tasks.process_file_request", multiple_batch,
                bundledata, creator_user, schemaform, correct_file,
                session_key)
    request.session["mb_inprogress_%d" % multiple_batch.id] = id
    bundle.data = bundledata
    return self.create_response(request, bundle,
                                response_class=http.HttpAccepted)
def obj_create(self, bundle, request=None, **kwargs):
    """Create a Face from the submitted form data.

    :returns: result of :meth:`models.Face.submit` on the cleaned data.
    :raises BadRequest: carrying the form's validation errors when invalid.
    """
    form = forms.CreateFace(bundle.data)
    if form.is_valid():
        return models.Face.submit(**form.cleaned_data)
    # FIX: surface the actual validation errors instead of the unhelpful
    # "I just don't know what went wrong" message.
    raise BadRequest("Invalid face data: %s" % form.errors.as_text())
def alter_hydrated_object_list(self, request, object_list):
    """Reject the request if any hydrated object belongs to a project the
    requesting user is not a member of.

    :returns: ``object_list`` unchanged when all objects are permitted.
    :raises BadRequest: naming the offending project id.
    """
    # FIX: evaluate the user's project set once instead of re-running the
    # queryset for every object in the list.
    user_projects = set(request.user.projects.all())
    for obj in object_list:
        if obj not in user_projects:
            raise BadRequest("User is not a member of project with id %s"
                             % obj.project_id)
    return object_list
def do_export(self, request, serializer, export_objects):
    """Export a single object.

    :returns: tuple of (serialized content, None).
    :raises BadRequest: when not exactly one object is supplied.
    """
    # BUG FIX: `len(...) is not 1` compared object identity, which only
    # works by accident of CPython's small-int caching; use != for value
    # comparison.
    if len(export_objects) != 1:
        raise BadRequest('PDF export can only be done on a single item.')
    return serializer.serialize(export_objects[0]), None
def obj_delete(self, bundle, **kwargs):
    """Delete the invoice, converting a non-deletable state into a 400.

    Raises a BadRequest if the :class:`~invoicing.models.InvoiceBase`
    is not in a deletable state.
    """
    try:
        super(InvoiceBaseResource, self).obj_delete(bundle, **kwargs)
    except NotDeletableInvoice as e:
        raise BadRequest(e)
def stream_response_from_statement(self, request, stmt, count_stmt,
                                   output_filename, field_hash={},
                                   param_hash={}, rowproxy_generator=None,
                                   is_for_detail=False, downloadID=None,
                                   title_function=None, use_caching=None,
                                   meta=None):
    '''
    Execute the SQL stmt provided and stream the results to the response:

    Caching (for json responses only): resources will be cached if:
    - self.use_caching is True and use_caching is not False and limit > 0
    - limit == 0 and use_caching is True

    NOTE(review): mutable default arguments (field_hash={}, param_hash={})
    are shared across calls — safe only while never mutated in place here.
    '''
    DEBUG_STREAMING = False or logger.isEnabledFor(logging.DEBUG)
    logger.info('stream_response_from_statement: %r',
                self._meta.resource_name)
    temp_param_hash = param_hash.copy()
    if 'schema' in temp_param_hash:
        del temp_param_hash['schema']
    if DEBUG_STREAMING:
        logger.info('stream_response_from_statement: %r, %r',
                    self._meta.resource_name, temp_param_hash)
    # Validate and apply paging before executing anything.
    limit = param_hash.get('limit', 25)
    try:
        limit = int(limit)
    except Exception:
        raise BadRequest(
            "Invalid limit '%s' provided. Please provide a positive integer."
            % limit)
    if limit > 0:
        stmt = stmt.limit(limit)
    if is_for_detail:
        limit = 1
    offset = param_hash.get('offset', 0)
    try:
        offset = int(offset)
    except Exception:
        raise BadRequest(
            "Invalid offset '%s' provided. Please provide a positive integer."
            % offset)
    if offset < 0:
        offset = -offset  # negative offsets are silently normalized
    stmt = stmt.offset(offset)
    conn = get_engine().connect()
    try:
        logger.debug('offset: %s, limit: %s', offset, limit)
        if DEBUG_STREAMING:
            logger.info('stmt: %s, param_hash: %s ',
                        str(stmt.compile(
                            dialect=postgresql.dialect(),
                            compile_kwargs={"literal_binds": True})),
                        temp_param_hash)
            logger.info(
                'count stmt %s',
                str(count_stmt.compile(
                    dialect=postgresql.dialect(),
                    compile_kwargs={"literal_binds": True})))
        content_type = self.get_accept_content_type(
            request, format=param_hash.get('format', None))
        logger.debug('---- content_type: %r, hash: %r',
                     content_type, temp_param_hash)
        result = None
        if content_type == JSON_MIMETYPE:
            # JSON is the only path that consults the result cache.
            logger.info(
                'streaming json, use_caching: %r, self.use_cache: %r, '
                'limit: %d, %r',
                use_caching, self.use_cache, limit, is_for_detail)
            if ((self.use_cache is True and use_caching is not False)
                    and (use_caching is True or limit > 0)):
                cache_hit = self._cached_resultproxy(
                    conn, stmt, count_stmt, param_hash, limit, offset)
                if cache_hit:
                    logger.info('cache hit: %r', output_filename)
                    result = cache_hit['cached_result']
                    count = cache_hit['count']
                else:
                    # cache routine should always return a cache object
                    logger.error('error, cache not set: execute stmt')
                    count = conn.execute(count_stmt).scalar()
                    result = conn.execute(stmt)
                    # NOTE(review): this debug listcomp consumes the cursor;
                    # on this (error) path the streamed body may be empty.
                    logger.info('result: %r', [x for x in result])
                    logger.info('====count: %d====', count)
            else:
                logger.info('not cached, execute count stmt...')
                count = conn.execute(count_stmt).scalar()
                logger.info('excuted count stmt: %d', count)
                result = conn.execute(stmt)
                logger.info('excuted stmt')
            if not meta:
                meta = {
                    'limit': limit,
                    'offset': offset,
                    'total_count': count
                }
            else:
                # Caller-supplied meta wins over the computed paging values.
                temp = {
                    'limit': limit,
                    'offset': offset,
                    'total_count': count
                }
                temp.update(meta)
                meta = temp
            if rowproxy_generator:
                result = rowproxy_generator(result)
            logger.info('is for detail: %r, count: %r',
                        is_for_detail, count)
            if is_for_detail and count == 0:
                logger.info('detail not found')
                conn.close()
                return HttpResponse(status=404)
            if DEBUG_STREAMING:
                logger.info('json setup done, meta: %r', meta)
        else:
            # not json
            logger.info('excute stmt')
            result = conn.execute(stmt)
            logger.info('excuted stmt')
            if rowproxy_generator:
                result = rowproxy_generator(result)
        # FIXME: test this for generators other than json generator
        result = closing_iterator_wrapper(result, conn.close)
        return self.stream_response_from_cursor(
            request, result, output_filename,
            field_hash=field_hash,
            param_hash=param_hash,
            is_for_detail=is_for_detail,
            downloadID=downloadID,
            title_function=title_function,
            meta=meta)
    except Exception, e:
        logger.exception('on stream response')
        raise e
def obj_get_list(self, request=None, **kwargs):
    """Listing is not supported on this resource; always reject."""
    raise BadRequest('not allowed')
def to_file(self, data, options=None):
    """
    Respond with filedata instead of metadata.

    :param data: The data to be worked on.
    :type data: dict for multiple objects,
                :class:`tastypie.bundle.Bundle` for a single object.
    :param options: Options to alter how this serializer works.
    :type options: dict
    :returns: :class:`django.http.HttpResponse`,
              :class:`tastypie.exceptions.BadRequest`
    """
    get_file = options.get('file', None)
    file_format = options.get('file_format', 'raw')
    response = None
    zipfile = None
    if get_file:
        files = []
        if hasattr(data, 'obj'):
            # Single-object bundle: pull its filedata or screenshot.
            if hasattr(data.obj, 'filedata'):
                filename = data.obj.md5
                filedata = data.obj.filedata.read()
                if filedata:
                    filedata = self._format_data(filedata, file_format)
                    files.append([filename, filedata])
            elif hasattr(data.obj, 'screenshot'):
                filename = "%s.png" % data.obj.md5
                filedata = data.obj.screenshot.read()
                if filedata:
                    files.append([filename, filedata])
        elif 'objects' in data:
            try:
                for obj_ in data['objects']:
                    if hasattr(obj_.obj, 'filedata'):
                        filename = obj_.obj.md5
                        filedata = obj_.obj.filedata.read()
                        if filedata:
                            filedata = self._format_data(filedata,
                                                         file_format)
                            files.append([filename, filedata])
                    elif hasattr(obj_.obj, 'screenshot'):
                        # BUG FIX: this branch read `data.obj` (invalid for
                        # the multi-object dict payload) instead of the
                        # current `obj_` in the loop.
                        filename = "%s.png" % obj_.obj.md5
                        filedata = obj_.obj.screenshot.read()
                        if filedata:
                            files.append([filename, filedata])
            except Exception:
                # Best-effort: skip objects whose file content cannot be
                # read (was a bare except in the original).
                pass
        try:
            if len(files):
                zipfile = create_zip(files)
                response = HttpResponse(
                    zipfile,
                    content_type="application/octet-stream; charset=utf-8")
                response['Content-Disposition'] = \
                    'attachment; filename="results.zip"'
            else:
                response = BadRequest("No files found!")
        except Exception as e:
            response = BadRequest(str(e))
    # BUG FIX: the original built `response` but never returned it.
    return response
def obj_create(self, bundle, **kwargs):
    """
    Handles creating Emails through the API.

    :param bundle: Bundle containing the information to create the Campaign.
    :type bundle: Tastypie Bundle object.
    :returns: Bundle object.
    :raises BadRequest: If a type_ is not provided or creation fails.
    """
    analyst = bundle.request.user.username
    upload_type = bundle.data.get('upload_type', None)
    if not upload_type:
        raise BadRequest('You must specify the upload type.')
    elif upload_type not in ('eml', 'msg', 'raw', 'yaml', 'fields'):
        raise BadRequest('Unknown or unsupported upload type.')

    # Remove this so it doesn't get included with the fields upload
    del bundle.data['upload_type']
    result = None

    # Extract common information
    source = bundle.data.get('source', None)
    reference = bundle.data.get('reference', None)
    campaign = bundle.data.get('campaign', None)
    confidence = bundle.data.get('confidence', None)

    # Dispatch on the upload type (values are mutually exclusive).
    if upload_type == 'eml':
        uploaded = bundle.data.get('filedata', None)
        if not uploaded:
            raise BadRequest('No file uploaded.')
        result = handle_eml(uploaded.read(), source, reference, analyst,
                            'Upload', campaign, confidence)
    elif upload_type == 'msg':
        result = handle_msg(bundle.data.get('filedata', None),
                            source, reference, analyst, 'Upload',
                            bundle.data.get('password', None),
                            campaign, confidence)
    elif upload_type == 'raw':
        result = handle_pasted_eml(bundle.data.get('filedata', None),
                                   source, reference, analyst, 'Upload',
                                   campaign, confidence)
    elif upload_type == 'yaml':
        result = handle_yaml(bundle.data.get('filedata', None),
                             source, reference, analyst, 'Upload',
                             bundle.data.get('email_id', None),
                             bundle.data.get('save_unsupported', False),
                             campaign, confidence)
    elif upload_type == 'fields':
        result = handle_email_fields(bundle.data, analyst, 'Upload')

    if not result:
        raise BadRequest('No upload type found.')
    if not result['status']:
        raise BadRequest(result['reason'])
    return bundle
def obj_get_list(self, bundle, **kwargs):
    """Substructure search: resolve the query (SMILES, InChiKey or ChEMBL
    id) to a canonical SMILES, run the substructure match, then apply any
    remaining standard filters and the ``only`` field restriction.

    NOTE(review): `basestring` and `e.message` are Python-2-only.
    """
    smiles = kwargs.pop('smiles', None)
    std_inchi_key = kwargs.pop('standard_inchi_key', None)
    chembl_id = kwargs.pop('chembl_id', None)
    if not smiles and not std_inchi_key and not chembl_id:
        raise BadRequest("Structure or identifier required.")
    if not smiles:
        # Resolve the identifier to a unique canonical SMILES.
        try:
            if chembl_id:
                mol_filters = {'chembl_id': chembl_id}
            else:
                mol_filters = {
                    'compoundstructures__standard_inchi_key': std_inchi_key
                }
            objects = self.apply_filters(
                bundle.request, mol_filters).values_list(
                    'compoundstructures__canonical_smiles', flat=True)
            stringified_kwargs = ', '.join(
                ["%s=%s" % (k, v) for k, v in mol_filters.items()])
            length = len(objects)
            if length <= 0:
                raise ObjectDoesNotExist(
                    "Couldn't find an instance of '%s' which matched '%s'."
                    % (self._meta.object_class.__name__,
                       stringified_kwargs))
            elif length > 1:
                raise MultipleObjectsReturned(
                    "More than '%s' matched '%s'." %
                    (self._meta.object_class.__name__, stringified_kwargs))
            smiles = objects[0]
            if not smiles:
                raise ObjectDoesNotExist(
                    "No chemical structure defined for identifier {0}".
                    format(chembl_id or std_inchi_key))
        except TypeError as e:
            if e.message.startswith('Related Field has invalid lookup:'):
                raise BadRequest(e.message)
            else:
                raise e
        except ValueError:
            raise BadRequest(
                "Invalid resource lookup data provided (mismatched type).")
    if not isinstance(smiles, basestring):
        raise BadRequest(
            "Substructure can only handle a single chemical query "
            "identified by SMILES, "
            "InChiKey or ChEMBL ID.")
    elif len(smiles) < minimal_substructure_length:
        raise BadRequest(
            "Structure %s is too short. Minimal structure length is %s"
            % (smiles, minimal_substructure_length))
    # Run the actual substructure match; only ids are needed.
    mols = CompoundMols.objects.with_substructure(smiles).defer(
        'molfile').values_list('molecule_id', flat=True)
    filters = {}
    standard_filters, distinct = self.build_filters(filters=kwargs)
    filters.update(standard_filters)
    # 'only' restricts the returned columns and is not a DB filter.
    only = filters.get('only')
    if only:
        del filters['only']
        if isinstance(only, basestring):
            only = only.split(',')
        only = list(set(list_flatten(only)))
    objects = self.get_object_list(
        bundle.request).filter(pk__in=mols).filter(**filters)
    if only:
        objects = objects.only(*[
            self.fields[field].attribute for field in only
            if field in self.fields
        ])
    if distinct:
        objects = objects.distinct()
    return self.authorized_read_list(objects, bundle)
def obj_create(self, bundle, **kwargs):
    """Register a CustomUser from the posted fields, log them in and
    attach their API key to the response bundle.

    :returns: the bundle with ``api_key`` added and the registration
        inputs stripped from the echoed data.
    :raises BadRequest: if the form is invalid or the username is taken.
    """
    data = {
        'username': bundle.data['username'],
        'password': bundle.data['password'],
        'password_again': bundle.data['passwordagain'],
        'email': bundle.data['email'],
        'phoneno': bundle.data['phoneno'],
        'current_working_city': bundle.data['current_working_city'],
        'currently_working_facility':
            bundle.data['currently_working_facility'],
        'current_place_employment':
            bundle.data['current_place_employment'],
        'staff_type': bundle.data['staff_type'],
        'nurhi_sponsor_training': bundle.data['nurhi_sponsor_training'],
        'highest_education_level': bundle.data['highest_education_level'],
        'religion': bundle.data['religion'],
        'sex': bundle.data['sex'],
        'age': bundle.data['age'],
        'first_name': bundle.data['firstname'],
        'last_name': bundle.data['lastname'],
    }
    rf = RegisterForm(data)
    if not rf.is_valid():
        # FIX: do not shadow the builtin `str` while collecting errors.
        error_str = ""
        for key, value in rf.errors.items():
            for error in value:
                error_str += error + "\n"
        raise BadRequest(error_str)

    username = data['username']
    password = data['password']
    try:
        bundle.obj = CustomUser.objects.create_user(
            username, data['email'], password)
        bundle.obj.first_name = data['first_name']
        bundle.obj.last_name = data['last_name']
        bundle.obj.phoneno = data['phoneno']
        bundle.obj.current_working_city = data['current_working_city']
        bundle.obj.currently_working_facility = \
            data['currently_working_facility']
        bundle.obj.current_place_employment = \
            data['current_place_employment']
        bundle.obj.staff_type = data['staff_type']
        bundle.obj.nurhi_sponsor_training = data['nurhi_sponsor_training']
        bundle.obj.highest_education_level = \
            data['highest_education_level']
        bundle.obj.religion = data['religion']
        bundle.obj.sex = data['sex']
        bundle.obj.age = data['age']
        bundle.obj.save()
        u = authenticate(username=username, password=password)
        if u is not None and u.is_active:
            login(bundle.request, u)
            key = ApiKey.objects.get(user=u)
            bundle.data['api_key'] = key.key
    except IntegrityError:
        # TODO translation
        raise BadRequest(
            _(u'Username "%s" already in use, please select another'
              % username))
    # Strip credentials and profile inputs from the echoed response.
    for field in ('passwordagain', 'password', 'firstname', 'lastname',
                  'phoneno', 'current_working_city',
                  'currently_working_facility', 'current_place_employment',
                  'staff_type', 'nurhi_sponsor_training',
                  'highest_education_level', 'religion', 'sex', 'age'):
        del bundle.data[field]
    return bundle
def get_object_list(self, request):
    """A user filter is mandatory for this listing; reject without one."""
    raise BadRequest(_("Please specify a user"))
def obj_create(self, bundle, **kwargs):
    """Creation through this resource is not supported."""
    raise BadRequest("Operation not permitted")
def post_list(self, request, **kwargs):
    """Attempt to redirect to QGIS Server Style management.

    A post method should have the following field:
    name: Slug name of style
    title: Title of style
    style: the style file uploaded

    Also, should have kwargs:
    layername or layer__name: The layer name associated with the style
    or layer__id: The layer id associated with the style
    """
    from geonode.qgis_server.views import qml_style
    # Extract layer name information
    POST = request.POST
    FILES = request.FILES
    layername = POST.get('layername') or POST.get('layer__name')
    if not layername:
        # Fall back to resolving the layer by id.
        layer_id = POST.get('layer__id')
        layer = Layer.objects.get(id=layer_id)
        layername = layer.name
    # move style file under the key the qml_style view expects
    FILES['qml'] = FILES['style']
    response = qml_style(request, layername)
    if isinstance(response, TemplateResponse):
        if response.status_code == 201:
            # Style was created; look it up and build the Location header.
            obj = QGISServerStyle.objects.get(
                layer_styles__layer__name=layername,
                name=POST['name'])
            updated_bundle = self.build_bundle(obj=obj, request=request)
            location = self.get_resource_uri(updated_bundle)
            if not self._meta.always_return_data:
                return http.HttpCreated(location=location)
            else:
                updated_bundle = self.full_dehydrate(updated_bundle)
                updated_bundle = self.alter_detail_data_to_serialize(
                    request, updated_bundle)
                return self.create_response(
                    request,
                    updated_bundle,
                    response_class=http.HttpCreated,
                    location=location)
        else:
            # Non-201 template response: surface the form errors or the
            # view's alert message as a 400.
            context = response.context_data
            # Check form valid
            style_upload_form = context['style_upload_form']
            if not style_upload_form.is_valid():
                raise BadRequest(style_upload_form.errors.as_text())
            alert_message = context['alert_message']
            raise BadRequest(alert_message)
    elif isinstance(response, HttpResponse):
        # Plain HttpResponse: pass the content through, mapping 403.
        response_class = None
        if response.status_code == 403:
            response_class = http.HttpForbidden
        return self.error_response(request,
                                   response.content,
                                   response_class=response_class)
def rollback(self, bundles):
    """Rollback is not supported for this resource."""
    raise BadRequest("Operation not permitted")
def obj_create(self, bundle, **kwargs):
    """Self-registration: validate input, create the User + UserProfile,
    log the user in, record a 'register' Tracker event and attach the
    API key to the response bundle.

    :raises BadRequest: when registration is disabled, a required field
        is missing, validation fails, or the username is taken.
    """
    if not settings.OPPIA_ALLOW_SELF_REGISTRATION:
        raise BadRequest(_(u'Registration is disabled on this server.'))
    required = ['username', 'password', 'passwordagain', 'email',
                'firstname', 'lastname']
    for r in required:
        # Membership test instead of try/except around a discarded lookup.
        if r not in bundle.data:
            raise BadRequest(_(u'Please enter your %s') % r)
    data = {
        'username': bundle.data['username'],
        'password': bundle.data['password'],
        'password_again': bundle.data['passwordagain'],
        'email': bundle.data['email'],
        'first_name': bundle.data['firstname'],
        'last_name': bundle.data['lastname'],
    }
    rf = RegisterForm(data)
    if not rf.is_valid():
        # FIX: do not shadow the builtin `str` while collecting errors.
        error_str = ""
        for key, value in rf.errors.items():
            for error in value:
                error_str += error + "\n"
        raise BadRequest(error_str)

    username = bundle.data['username']
    password = bundle.data['password']
    email = bundle.data['email']
    first_name = bundle.data['firstname']
    last_name = bundle.data['lastname']
    try:
        bundle.obj = User.objects.create_user(username, email, password)
        bundle.obj.first_name = first_name
        bundle.obj.last_name = last_name
        bundle.obj.save()
        user_profile = UserProfile()
        user_profile.user = bundle.obj
        if 'jobtitle' in bundle.data:
            user_profile.job_title = bundle.data['jobtitle']
        if 'organisation' in bundle.data:
            user_profile.organisation = bundle.data['organisation']
        if 'phoneno' in bundle.data:
            user_profile.phone_number = bundle.data['phoneno']
        user_profile.save()
        u = authenticate(username=username, password=password)
        if u is not None and u.is_active:
            login(bundle.request, u)
            # Record the registration event for analytics.
            tracker = Tracker()
            tracker.user = u
            tracker.type = 'register'
            tracker.ip = bundle.request.META.get('REMOTE_ADDR', '0.0.0.0')
            tracker.agent = bundle.request.META.get('HTTP_USER_AGENT',
                                                    'unknown')
            tracker.save()
            key = ApiKey.objects.get(user=u)
            bundle.data['api_key'] = key.key
    except IntegrityError:
        raise BadRequest(
            _(u'Username "%s" already in use, please select another'
              % username))
    # Strip credentials so they are not echoed back in the response.
    for field in ('passwordagain', 'password', 'firstname', 'lastname'):
        del bundle.data[field]
    return bundle
def obj_delete_list_for_update(self, bundle, **kwargs):
    """List deletion during update is not supported for this resource."""
    raise BadRequest("Operation not permitted")
def obj_get(self, bundle, **kwargs):
    """Fetch a single Location, enforcing per-user location access."""
    domain = kwargs['domain']
    location_id = kwargs['pk']
    requesting_user = bundle.request.couch_user
    if not user_can_access_location_id(domain, requesting_user,
                                       location_id):
        raise BadRequest(LOCATION_ACCESS_DENIED)
    return get_object_or_not_exist(Location, location_id, domain)
def stream_response_from_cursor(
        self, request, result, output_filename, field_hash=None,
        param_hash=None, is_for_detail=False, downloadID=None,
        title_function=None, meta=None):
    """Stream *result* (a cursor) as a response in the negotiated format.

    Supports JSON, XLS/XLSX, SDF and CSV content types; raises
    BadRequest for any other negotiated content type.

    Note: field_hash/param_hash were mutable default arguments
    ({} shared across calls); they now default to None and are
    replaced with fresh dicts per call — backward compatible.
    """
    if field_hash is None:
        field_hash = {}
    if param_hash is None:
        param_hash = {}
    try:
        list_brackets = LIST_BRACKETS
        if (param_hash.get(HTTP_PARAM_DATA_INTERCHANGE, False)
                or request.GET.get(HTTP_PARAM_RAW_LISTS, False)):
            list_brackets = None
        content_type = self.get_accept_content_type(
            request, format=param_hash.get('format', None))
        logger.debug('content_type: %s', content_type)
        image_keys = [key for key, field in field_hash.items()
                      if field.get('display_type', None) == 'image']
        # FIX: the original sort default was the variable 'key' leaked
        # from a prior Python 2 list comprehension (NameError on Py3);
        # 0 sorts fields without an 'ordinal' first, deterministically.
        ordered_keys = sorted(
            field_hash.keys(),
            key=lambda x: field_hash[x].get('ordinal', 0))
        list_fields = [
            key for (key, field) in field_hash.items()
            if (field.get('json_field_type', None) == 'fields.ListField'
                or field.get('linked_field_type', None)
                == 'fields.ListField'
                or field.get('data_type', None) == 'list')]
        value_templates = {
            key: field['value_template']
            for key, field in field_hash.items()
            if field.get('value_template', None)}
        data = cursor_generator(
            result, ordered_keys, list_fields=list_fields,
            value_templates=value_templates)
        if content_type == JSON_MIMETYPE:
            response = StreamingHttpResponse(
                ChunkIterWrapper(
                    json_generator(
                        image_generator(data, image_keys, request),
                        meta, is_for_detail=is_for_detail)))
            response['Content-Type'] = content_type
        elif content_type in (XLS_MIMETYPE, XLSX_MIMETYPE):
            response = get_xls_response(
                {'data': data}, output_filename, request=request,
                title_function=title_function, image_keys=image_keys,
                list_brackets=list_brackets)
        elif content_type == SDF_MIMETYPE:
            response = StreamingHttpResponse(
                ChunkIterWrapper(
                    sdf_generator(
                        image_generator(data, image_keys, request),
                        title_function=title_function)),
                content_type=content_type)
            response['Content-Disposition'] = \
                'attachment; filename=%s.sdf' % output_filename
        elif content_type == CSV_MIMETYPE:
            response = StreamingHttpResponse(
                ChunkIterWrapper(
                    csv_generator(
                        image_generator(data, image_keys, request),
                        title_function=title_function,
                        list_brackets=list_brackets)),
                content_type=content_type)
            response['Content-Disposition'] = \
                'attachment; filename=%s.csv' % output_filename
        else:
            raise BadRequest('unknown content_type: %r' % content_type)
        return response
    except Exception:
        # FIX: 'except Exception, e' is Python-2-only syntax and
        # 'raise e' discards the traceback; a bare re-raise keeps it.
        logger.exception('on stream response')
        raise
def patch_list(self, request, **kwargs):
    """PATCH on the list endpoint is not supported."""
    raise BadRequest('not allowed')
def move_document(self, request: HttpRequest, **kwargs):
    """Move a Document node within its binder tree.

    The POST body must contain exactly one of 'before', 'after' or
    'inside' mapped to the id of the target node. The node is first
    unlinked from its current position in the sibling linked list,
    then re-linked relative to the target, all inside one transaction.

    Raises BadRequest when none of the position keys is supplied.
    """
    self.method_check(request, allowed=['post'])
    self.is_authenticated(request)
    self.throttle_check(request)

    def save_modified(modified):
        # FIX: plain loop instead of a side-effect list comprehension.
        for obj in modified:
            obj.save()

    def extract_from_tree(cur_node: Document):
        """Unlink cur_node from its sibling list / parent / binder."""
        if hasattr(cur_node, 'prev_node'):
            # Middle of a chain: bridge prev -> next around us.
            prev_node = cur_node.prev_node
            prev_node.next_node = cur_node.next_node
            cur_node.next_node = None
            return [cur_node, prev_node]
        elif hasattr(cur_node, 'parent_node'):
            # Head of a child list: parent's first_child skips us.
            parent_node = cur_node.parent_node
            parent_node.first_child = cur_node.next_node
            cur_node.next_node = None
            return [cur_node, parent_node]
        elif hasattr(cur_node, 'binder_root'):
            # Root of the tree: binder's first_child skips us.
            binder = cur_node.binder_root
            binder.first_child = cur_node.next_node
            cur_node.next_node = None
            return [cur_node, binder]
        else:
            # Already out of the tree — should not happen.
            return []

    def insert_after(node: Document, target: Document):
        node.next_node = target.next_node
        target.next_node = node
        return [target, node]

    def insert_before(node: Document, target: Document):
        if hasattr(target, 'prev_node'):
            # Target is in the middle of the list.
            prev_node = target.prev_node
            prev_node.next_node = node
            node.next_node = target
            return [prev_node, node]
        elif hasattr(target, 'parent_node'):
            # Target heads a child list.
            parent_node = target.parent_node
            parent_node.first_child = node
            node.next_node = target
            return [parent_node, node]
        else:
            # Target is the root of the tree.
            binder = target.binder_root
            binder.first_child = node
            node.next_node = target
            return [binder, node]

    def insert_inside(node: Document, target: Document):
        node.next_node = target.first_child
        target.first_child = node
        return [target, node]

    with transaction.atomic():
        node_to_move = Document.objects.get(id=kwargs.pop('pk'))
        save_modified(extract_from_tree(node_to_move))
        body = json.loads(request.body.decode('utf-8'))
        if 'before' in body:
            target_node = Document.objects.get(id=body['before'])
            modified = insert_before(node_to_move, target_node)
        elif 'after' in body:
            target_node = Document.objects.get(id=body['after'])
            modified = insert_after(node_to_move, target_node)
        elif 'inside' in body:
            target_node = Document.objects.get(id=body['inside'])
            modified = insert_inside(node_to_move, target_node)
        else:
            # FIX: give the client an actionable error message
            # (was a bare BadRequest()).
            raise BadRequest(
                "body must contain one of 'before', 'after' or 'inside'")
        save_modified(modified)
    return self.create_response(request, {})