) response = client.put( endpoint, data={ "id": response.json()["id"], "first_name": "Test2", "email_address": "*****@*****.**", }, content_type="application/json", ) assert response.status_code == 202 assert response.json() == {"id": "123"} mock_raise_bad_id_error = Mock(put=Mock( side_effect=ObjectDoesNotExist("bad id"))) @patch("polaris.sep10.utils.check_auth", mock_check_auth_success) @patch("polaris.sep12.customer.rci", mock_raise_bad_id_error) def test_bad_existing_id(client): response = client.put( endpoint, data={ "id": "notanid", "first_name": "Test2", "email_address": "*****@*****.**", }, content_type="application/json", ) assert response.status_code == 404
def _get(cls, fname):
    """Wrapper for getting a database field.

    :param fname: name of the field to read from the hosting object.
    :raises ObjectDoesNotExist: if the hosting object was already deleted.
    """
    if _GA(cls, "_is_deleted"):
        raise ObjectDoesNotExist("Cannot access %s: Hosting object was already deleted." % fname)
    # BUG FIX: the original returned _GA(cls, fieldname); `fieldname` is not
    # defined in this scope -- the parameter is `fname`, so every non-deleted
    # access raised NameError.
    return _GA(cls, fname)
def parse_req(self, request, rqdata, **kw):
    """Parse the incoming HttpRequest and translate it into keyword
    arguments to be used by :meth:`setup`.

    The `mt` url param is parsed only when needed. Usually it is not needed
    because the `master_class` is constant and known per actor. But there
    are exceptions:

    - `master` is `ContentType`
    - `master` is some abstract model
    - `master` is not a subclass of Model, e.g.
      :class:`lino_xl.lib.polls.models.AnswersByResponse`, a virtual table
      which defines :meth:`get_row_by_pk
      <lino.core.actors.Actor.get_row_by_pk>`.

    :param request: the incoming HttpRequest.
    :param rqdata: a dict-like of request parameters (GET/POST data).
    :param kw: keyword arguments collected so far; updated and returned.
    :returns: the updated ``kw`` dict.
    :raises ObjectDoesNotExist: when the master pk does not resolve to an
        instance.
    :raises SuspiciousOperation: when limit/offset are not integers.
    """
    # logger.info("20120723 %s.parse_req() %s", self.actor, rqdata)
    #~ rh = self.ah
    # An explicit `master` kwarg wins over the actor's statically
    # declared master.
    master = kw.get('master', self.actor.master)
    if master is not None:
        if not isinstance(master, type):
            raise Exception("20150216 not a type: %r" % master)
        if settings.SITE.is_installed('contenttypes'):
            from django.contrib.contenttypes.models import ContentType
            # Only consult the `mt` url param when the master cannot be
            # deduced statically (ContentType itself or an abstract model).
            if issubclass(master, models.Model) and (
                    master is ContentType or master._meta.abstract):
                mt = rqdata.get(constants.URL_PARAM_MASTER_TYPE)
                try:
                    master = kw['master'] = ContentType.objects.get(
                        pk=mt).model_class()
                except ContentType.DoesNotExist:
                    pass
                    # master is None
        if 'master_instance' not in kw:
            pk = rqdata.get(constants.URL_PARAM_MASTER_PK, None)
            #~ print '20100406a', self.actor,URL_PARAM_MASTER_PK,"=",pk
            #~ if pk in ('', '-99999'):
            if pk == '':
                pk = None
            if pk is None:
                kw['master_instance'] = None
            else:
                mi = self.actor.get_master_instance(self, master, pk)
                if mi is None:
                    raise ObjectDoesNotExist(
                        "Invalid master key {0} for {1}".format(
                            pk, self.actor))
                kw['master_instance'] = mi
        # ~ print '20100212', self #, kw['master_instance']
    #~ print '20100406b', self.actor,kw
    if settings.SITE.use_filterRow:
        # Per-column filter row: each store field may carry a
        # filter[<name>] value and a filter[<name>_filterOption] mode.
        exclude = dict()
        for f in self.ah.store.fields:
            if f.field:
                filterOption = rqdata.get(
                    'filter[%s_filterOption]' % f.field.name)
                if filterOption == 'empty':
                    kw[f.field.name + "__isnull"] = True
                elif filterOption == 'notempty':
                    kw[f.field.name + "__isnull"] = False
                else:
                    filterValue = rqdata.get('filter[%s]' % f.field.name)
                    if filterValue:
                        if not filterOption:
                            filterOption = 'contains'
                        if filterOption == 'contains':
                            kw[f.field.name + "__icontains"] = filterValue
                        elif filterOption == 'doesnotcontain':
                            exclude[f.field.name + "__icontains"] = filterValue
                        else:
                            print("unknown filterOption %r" % filterOption)
        if len(exclude):
            kw.update(exclude=exclude)
    if settings.SITE.use_gridfilters:
        # ExtJS-style grid filters arrive as a JSON-encoded list.
        filter = rqdata.get(constants.URL_PARAM_GRIDFILTER, None)
        if filter is not None:
            filter = json.loads(filter)
            kw['gridfilters'] = [constants.dict2kw(flt) for flt in filter]
    kw = ActionRequest.parse_req(self, request, rqdata, **kw)
    #~ kw.update(self.report.known_values)
    #~ for fieldname, default in self.report.known_values.items():
    #~     v = request.REQUEST.get(fieldname,None)
    #~     if v is not None:
    #~         kw[fieldname] = v
    quick_search = rqdata.get(constants.URL_PARAM_FILTER, None)
    if quick_search:
        kw.update(quick_search=quick_search)
    sort = rqdata.get(constants.URL_PARAM_SORT, None)
    if sort:
        sortfld = self.actor.get_data_elem(sort)
        if isinstance(sortfld, FakeField):
            sort = sortfld.sortable_by
            # sort might be None when user asked to sort a virtual
            # field without sortable_by.
        else:
            sort = [sort]
        if sort is not None:
            def si(k):
                # Toggle the leading '-' (Django descending-order marker).
                if k[0] == '-':
                    return k[1:]
                else:
                    return '-' + k
            sort_dir = rqdata.get(constants.URL_PARAM_SORTDIR, 'ASC')
            if sort_dir == 'DESC':
                sort = [si(k) for k in sort]
                # sort = ['-' + k for k in sort]
            # print("20171123", sort)
            kw.update(order_by=sort)
    try:
        offset = rqdata.get(constants.URL_PARAM_START, None)
        if offset:
            kw.update(offset=int(offset))
        limit = rqdata.get(
            constants.URL_PARAM_LIMIT, self.actor.preview_limit)
        if limit:
            kw.update(limit=int(limit))
    except ValueError:
        # Example: invalid literal for int() with base 10:
        # 'fdpkvcnrfdybhur'
        raise SuspiciousOperation("Invalid value for limit or offset")
    kw = self.actor.parse_req(request, rqdata, **kw)
    # print("20171123 %s.parse_req() --> %s" % (self, kw))
    return kw
def delete_resource_file(pk, filename_or_id, user, delete_logical_file=True):
    """
    Deletes an individual file from a HydroShare resource. If the file does not exist,
    the Exceptions.NotFound exception is raised.

    REST URL: DELETE /resource/{pid}/files/(unknown)

    Parameters:
    :param pk: The unique HydroShare identifier for the resource from which the file
        will be deleted
    :param filename_or_id: Name of the file or id of the file to be deleted from the
        resource
    :param user: requesting user
    :param delete_logical_file: If True then if the ResourceFile object to be deleted
        is part of a LogicalFile object then the LogicalFile object will be deleted
        which deletes all associated ResourceFile objects and file type metadata
        objects.

    :returns: The name or id of the file which was deleted

    Return Type: string or integer

    Raises:
    Exceptions.NotAuthorized - The user is not authorized
    Exceptions.NotFound - The resource identified by pid does not exist or the file
    identified by file does not exist
    Exception.ServiceFailure - The service is unable to process the request

    Note: This does not handle immutability as previously intended.
    """
    resource = utils.get_resource_by_shortkey(pk)
    res_cls = resource.__class__
    # Linear scan over the resource's files; the first one matching
    # filename_or_id is deleted and the function returns.
    for f in ResourceFile.objects.filter(object_id=resource.id):
        if filter_condition(filename_or_id, f):
            if delete_logical_file:
                if f.logical_file is not None:
                    # logical_delete() calls this function (delete_resource_file())
                    # to delete each of its contained ResourceFile objects
                    f.logical_file.logical_delete(user)
                    return filename_or_id
            signals.pre_delete_file_from_resource.send(
                sender=res_cls, file=f, resource=resource, user=user)
            file_name = delete_resource_file_only(resource, f)
            # This presumes that the file is no longer in django
            delete_format_metadata_after_delete_file(resource, file_name)
            signals.post_delete_file_from_resource.send(
                sender=res_cls, resource=resource)
            # set to private if necessary -- AFTER post_delete_file handling
            resource.update_public_and_discoverable()  # set to False if necessary
            # generate bag
            utils.resource_modified(resource, user, overwrite_bag=False)
            return filename_or_id
    # if execution gets here, file was not found
    # NOTE(review): str.format(template, args) is an unidiomatic spelling of
    # template.format(args); behavior is identical.
    raise ObjectDoesNotExist(
        str.format("resource {}, file {} not found",
                   resource.short_id, filename_or_id))
try: award = cls.objects.get(key=key) with transaction.atomic(): award.name = name award.requirement = requirement award.level = level award.organization = organization award.begin_time = begin_time award.end_time = end_time award.is_active = is_active award.is_attached = is_attached award.save() except ObjectDoesNotExist, err: info = 'Award:change award key not exist:' print info, err raise ObjectDoesNotExist('%s %s' % (info, err)) except ValueError, err: info = 'Award:change error in save award:' print info, err raise ValueError('%s %s' % (info, err)) else: return True class ApplicationManager(models.Manager): """申请管理类""" def all_not_deleted(self): """重载all""" return super(models.Manager, self).filter(is_deleted=False)
def project(request, project_id):
    """Show a single project with its aggregate rating scores and let the
    logged-in user submit a rating for it.

    BUG FIX: the original signature was ``def project(request):`` while the
    body read ``project_id``, raising NameError on every request; the id is
    now an explicit URL keyword argument.

    :param request: incoming HttpRequest; ``request.user`` must map to a Profile.
    :param project_id: primary key of the Project being viewed/rated.
    :raises ObjectDoesNotExist: when no Project with ``project_id`` exists.
    """
    current_user = request.user
    profile = Profile.objects.get(user=current_user)
    message = "Thank you for voting"
    try:
        project = Project.objects.get(id=project_id)
    except Project.DoesNotExist:
        raise ObjectDoesNotExist()

    ratings = Rating.objects.filter(project=project_id)
    # Guard against division by zero when nothing has been rated yet.
    users = len(ratings) if len(ratings) > 0 else 1

    design = list(
        Rating.objects.filter(project=project_id).values_list('design', flat=True))
    usability = list(
        Rating.objects.filter(project=project_id).values_list('usability', flat=True))
    creativity = list(
        Rating.objects.filter(project=project_id).values_list('creativity', flat=True))
    content = list(
        Rating.objects.filter(project=project_id).values_list('content', flat=True))

    total_design = sum(design) / users
    total_usability = sum(usability) / users
    total_creativity = sum(creativity) / users
    total_content = sum(content) / users
    overall_score = (total_design + total_content +
                     total_usability + total_creativity) / 4

    # Persist the recomputed aggregates on the project itself.
    project.design = total_design
    project.usability = total_usability
    project.creativity = total_creativity
    project.content = total_content
    project.overall = overall_score
    project.save()

    if request.method == 'POST':
        form = RatingForm(request.POST, request.FILES)
        if form.is_valid():
            rating = form.save(commit=False)
            rating.project = project
            rating.profile = profile
            # Only one rating per (profile, project) pair is accepted.
            if not Rating.objects.filter(profile=profile, project=project).exists():
                rating.overall_score = (rating.design + rating.usability +
                                        rating.creativity + rating.content) / 4
                rating.save()
    else:
        form = RatingForm()

    return render(
        request, "project.html", {
            "project": project,
            "profile": profile,
            "ratings": ratings,
            "form": form,
            "message": message,
            'total_design': total_design,
            'total_usability': total_usability,
            'total_creativity': total_creativity,
            'total_content': total_content
        })
def restore(self, *args, **kwargs):
    """Un-delete a soft-deleted row by clearing its ``deleted`` marker.

    Raises ObjectDoesNotExist when called on an unsaved instance, since
    there is nothing in the database to restore.
    """
    if not self.pk:
        raise ObjectDoesNotExist('Object must be created before it can be restored')
    # Clearing the timestamp marks the row as live again.
    self.deleted = None
    return super(StoreDeleted, self).save(*args, **kwargs)
def get_dataset_file_path(self, user_id, dataset_id):
    """Return the stored file path of the LabelDataset identified by
    *dataset_id*; raise ObjectDoesNotExist when no such dataset exists.
    """
    record = LabelDataset.objects.filter(id=dataset_id).first()
    if record is None:
        raise ObjectDoesNotExist()
    return record.file_path
def related_reference_obj(self) -> BaseUuidModel:
    """Return the reference object of the related ActionItem, raising
    ObjectDoesNotExist when no related ActionItem is linked.
    """
    if self.related_action_item:
        return self.related_action_item.reference_obj
    raise ObjectDoesNotExist(
        f"Related ActionItem does not exist for {self.action_identifier}."
    )
def delete(self, *args, **kwargs):
    """Soft-delete: stamp ``deleted`` with the current time instead of
    removing the row.

    Raises ObjectDoesNotExist when called on an unsaved instance.
    """
    if not self.pk:
        raise ObjectDoesNotExist('Object must be created before it can be deleted')
    # Recording a timestamp (rather than calling delete()) keeps the row.
    self.deleted = timezone.now()
    return super(StoreDeleted, self).save(*args, **kwargs)
def monitor_volumes_for(provider_id, print_logs=False):
    """
    Run the set of tasks related to monitoring sizes for a provider.
    Optionally, provide a list of usernames to monitor
    While debugging, print_logs=True can be very helpful.
    start_date and end_date allow you to search a 'non-standard' window of time.

    :param provider_id: pk of the Provider whose volumes are reconciled.
    :param print_logs: when True, mirror log output to stdout.
    :returns: list of instance_source identifiers for volumes seen in the cloud.
    """
    from service.driver import get_account_driver
    from core.models import Identity
    if print_logs:
        console_handler = _init_stdout_logging()
    provider = Provider.objects.get(id=provider_id)
    account_driver = get_account_driver(provider)
    # Non-End dated volumes on this provider
    db_volumes = Volume.objects.filter(only_current_source(),
                                       instance_source__provider=provider)
    all_volumes = account_driver.admin_driver.list_all_volumes(timeout=30)
    seen_volumes = []
    for cloud_volume in all_volumes:
        try:
            core_volume = convert_esh_volume(cloud_volume,
                                             provider_uuid=provider.uuid)
            seen_volumes.append(core_volume)
        except ObjectDoesNotExist:
            # No local record for this volume's owner -- try to resolve the
            # tenant to a local Identity before converting again.
            tenant_id = cloud_volume.extra['object'][
                'os-vol-tenant-attr:tenant_id']
            tenant = account_driver.get_project_by_id(tenant_id)
            tenant_name = tenant.name if tenant else tenant_id
            try:
                if not tenant:
                    celery_logger.warn(
                        "Warning: tenant_id %s found on volume %s, but did not exist from the account driver perspective.",
                        tenant_id, cloud_volume)
                    raise ObjectDoesNotExist()
                identity = Identity.objects.filter(
                    contains_credential('ex_project_name', tenant_name),
                    provider=provider).first()
                if not identity:
                    raise ObjectDoesNotExist()
                core_volume = convert_esh_volume(cloud_volume,
                                                 provider.uuid,
                                                 identity.uuid,
                                                 identity.created_by)
                # NOTE(review): this successfully converted volume is NOT
                # appended to seen_volumes, so it will be end-dated below --
                # looks unintentional; confirm.
            except ObjectDoesNotExist:
                celery_logger.info(
                    "Skipping Volume %s - No Identity for: Provider:%s + Project Name:%s"
                    % (cloud_volume.id, provider, tenant_name))
                pass
    now_time = timezone.now()
    # Any DB volume the cloud no longer reports is considered inactive.
    needs_end_date = [
        volume for volume in db_volumes if volume not in seen_volumes
    ]
    for volume in needs_end_date:
        celery_logger.debug("End dating inactive volume: %s" % volume)
        volume.end_date = now_time
        volume.save()
    if print_logs:
        _exit_stdout_logging(console_handler)
    # Drop cached driver objects before returning.
    for vol in seen_volumes:
        vol.esh = None
    return [vol.instance_source.identifier for vol in seen_volumes]
def obj_get_list(self, bundle, **kwargs):
    """Similarity search endpoint: resolve the query structure (from SMILES,
    InChI key or ChEMBL id), find molecules above the given similarity
    threshold, then apply standard tastypie filters/sorting.

    :param bundle: tastypie bundle carrying the request.
    :param kwargs: must contain ``similarity`` plus one of ``smiles``,
        ``standard_inchi_key`` or ``chembl_id``.
    :returns: the authorized, sorted object list.
    """
    smiles = kwargs.pop('smiles', None)
    try:
        std_inchi_key = kwargs.pop('standard_inchi_key', None)
        chembl_id = kwargs.pop('chembl_id', None)
        if not smiles and not std_inchi_key and not chembl_id:
            raise BadRequest("Structure or identifier required.")
        similarity = kwargs.pop('similarity')
        molfile = None
        if not smiles:
            # Resolve the identifier to a molfile so the similarity search
            # has a structure to work with.
            if chembl_id:
                mol_filters = {'chembl_id': chembl_id}
            else:
                mol_filters = {'compoundstructures__standard_inchi_key': std_inchi_key}
            try:
                objects = self.apply_filters(bundle.request, mol_filters).values_list(
                    'compoundstructures__molfile', flat=True)
                stringified_kwargs = ', '.join(
                    ["%s=%s" % (k, v) for k, v in list(mol_filters.items())])
                length = len(objects)
                if length <= 0:
                    raise ObjectDoesNotExist(
                        "Couldn't find an instance of '%s' which matched '%s'."
                        % (self._meta.object_class.__name__, stringified_kwargs))
                elif length > 1:
                    raise MultipleObjectsReturned(
                        "More than '%s' matched '%s'."
                        % (self._meta.object_class.__name__, stringified_kwargs))
                molfile = objects[0]
                if not molfile:
                    raise ObjectDoesNotExist(
                        "No chemical structure defined for identifier {0}".format(
                            chembl_id or std_inchi_key))
            except TypeError as e:
                # NOTE(review): e.message is Python-2-only; on Python 3 this
                # handler would raise AttributeError -- confirm intended runtime.
                if e.message.startswith('Related Field has invalid lookup:'):
                    raise BadRequest(e.message)
                else:
                    raise e
            except ValueError:
                raise BadRequest("Invalid resource lookup data provided (mismatched type).")
        if not molfile and not isinstance(smiles, str):
            raise BadRequest("Similarity can only handle a single chemical structure identified by SMILES, "
                             "InChiKey or ChEMBL ID.")
        sim_query_string = smiles or molfile
        similar_molregnos = get_similar_molregnos(sim_query_string, similarity/100.0)
        # Use percentage to present similarity values
        similar_molregnos = [(molregno_i, sim_i.item()*100)
                             for molregno_i, sim_i in similar_molregnos]
        similarity_map = None
        try:
            # Map molregno -> similarity %, ordered ascending by similarity.
            similarity_map = OrderedDict(sorted(similar_molregnos, key=lambda x: x[1]))
        except DatabaseError as e:
            self._handle_database_error(e, bundle.request, {'smiles': smiles})
        filters = {}
        standard_filters, distinct = self.build_filters(filters=kwargs)
        filters.update(standard_filters)
        try:
            # 'only' is a projection hint, not an ORM filter -- pop it out.
            only = filters.get('only')
            if only:
                del filters['only']
                if isinstance(only, str):
                    only = only.split(',')
                only = list(set(list_flatten(only)))
            objects = self.get_object_list(bundle.request).filter(
                pk__in=[sim[0] for sim in similar_molregnos]).filter(**filters)
            if chembl_id:
                # Don't return the query molecule itself.
                objects = objects.exclude(chembl_id=chembl_id)
            if only:
                objects = objects.only(
                    *[self.fields[field].attribute for field in only
                      if field in self.fields and field != 'similarity'])
        except ValueError:
            raise BadRequest("Invalid resource lookup data provided (mismatched type).")
        if distinct:
            objects = objects.distinct()
        objects = self.apply_sorting(objects, similarity_map, options=kwargs)
        return self.authorized_read_list(objects, bundle)
    except:
        # NOTE(review): this bare except swallows every error (including the
        # BadRequest raises above) and makes the method return None -- looks
        # like leftover debugging; confirm before relying on error responses.
        import traceback
        traceback.print_exc()
def get_project_env(self, project_name):
    """Return the ProjectEnv row registered for *project_name*.

    :param project_name: name of the project to look up.
    :raises ObjectDoesNotExist: when no environment exists for the project.
    """
    try:
        project_env = ProjectEnv.objects.get(project_name=project_name)
    except ObjectDoesNotExist:
        # BUG FIX: the original message concatenated without a space,
        # producing "<name>Does not exist".
        raise ObjectDoesNotExist(project_name + " does not exist")
    return project_env
def _get_foreign(cls, fname):
    """Wrapper for returning foreignkey fields.

    :param fname: name of the foreign-key field to read.
    :raises ObjectDoesNotExist: if the hosting object was already deleted.
    """
    if _GA(cls, "_is_deleted"):
        raise ObjectDoesNotExist("Cannot access %s: Hosting object was already deleted." % fname)
    # BUG FIX: the original returned _GA(cls, fieldname); `fieldname` is not
    # defined in this scope -- the parameter is `fname`, so every non-deleted
    # access raised NameError.
    return _GA(cls, fname)
def _check_attributes_exist(attribute_ids, attribute_lookup): """Check whether all required attribute ids are valid""" for attribute_id in attribute_ids: if attribute_id not in attribute_lookup: raise ObjectDoesNotExist('No attribute "{}"'.format(attribute_id))
def update_news(self, tmp_co=None, verbose=False):
    """Synchronize this newsgroup with its NNTP server.

    Fetches the group's article overviews, creates NGNews objects for
    unseen articles and links already-known articles to this group.

    :param tmp_co: optional already-open NNTP connection; when None a new
        one is obtained from ``self.host.get_co()``.
    :param verbose: print per-article progress messages.
    :returns: list of newly created NGNews objects (empty when the group no
        longer exists on the server).
    :raises ConnectionError: when the server cannot be reached.
    """
    try:
        tmp_co = tmp_co if tmp_co else self.host.get_co()
    except Exception as err:
        # NNTP response code 423 means the group is gone on the server.
        if "423" in str(err):
            if verbose:
                print("NGGroup {} does not exist anymore...".format(
                    self.name))
            return []
        raise ConnectionError('Could not connect to the server, please '
                              'check your connection ({}).'.format(err))
    try:
        # Getting infos & data from the given group
        _, _, first, last, _ = tmp_co.group(self.name)
        # Sending a OVER command to get last_nb posts
        _, overviews = tmp_co.over((first, last))
    except Exception as err:
        # 411 = no such newsgroup, 423 = no articles in that range.
        if "411" in str(err) or "423" in str(err):
            if verbose:
                print("NGGroup {} does not exist anymore...".format(
                    self.name))
            return []
        raise ConnectionError('Could not connect to the server, please '
                              'check your connection ({}).'.format(err))
    already_existing_news = []
    new_news_list = []
    for id, over in overviews:
        # Content hash identifies an article independently of message-id.
        hash = hash_over(self.host.host, over)
        try:
            NGNews.objects.get(hash=hash)
            # Already stored with identical content: nothing to do.
            continue
        except ObjectDoesNotExist:
            pass
        try:
            if verbose:
                print_msg('news', properly_decode_header(over['subject']))
            try:
                n = NGNews.objects.get(groups__in=[self],
                                       message_id=over['message-id'])
                if verbose:
                    print_exists()
                if n.hash != hash:
                    # Same message-id but changed content: replace it by
                    # falling through to the creation branch below.
                    n.delete()
                    raise ObjectDoesNotExist()
                # Check if the already existing news is in self group
                if self not in n.groups.all():
                    already_existing_news.append(n)
            except ObjectDoesNotExist:
                date = parse_nntp_date(over['date'])
                _, info = tmp_co.body(over['message-id'])
                contents = ''
                for line in info[2]:
                    contents += get_decoded(line) + '\n'
                try:
                    nn = NGNews()
                    nn.hash = hash
                    nn.subject = properly_decode_header(over['subject'])
                    nn.contents = contents
                    nn.email_from = properly_decode_header(over['from'])
                    nn.message_id = over['message-id']
                    nn.date = date
                    nn.lines = over[':lines']
                    nn.xref = over['xref']
                    nn.references = over['references']
                    nn.father = get_father(over['references'])
                    nn.bytes = over[':bytes']
                    new_news_list.append(nn)
                    if verbose:
                        print_done()
                except Exception as e:
                    print_fail(e)
        except Exception as e:
            print_fail(e)
    for n in already_existing_news:
        n.add_group(self)
    for n in new_news_list:
        # Save first so the m2m add has a pk to attach to.
        n.save()
        n.groups.add(self)
        n.save()
    self.nb_news += len(already_existing_news) + len(new_news_list)
    # Topics are root articles (no father reference).
    self.nb_topics = NGNews.objects.filter(groups__id=self.id,
                                           father='').count()
    self.save()
    return new_news_list
def delete_resource_file(pk, filename_or_id, user, delete_logical_file=True):
    """
    Deletes an individual file from a HydroShare resource. If the file does not exist,
    the Exceptions.NotFound exception is raised.

    REST URL: DELETE /resource/{pid}/files/(unknown)

    Parameters:
    pk - The unique HydroShare identifier for the resource from which the file will be
        deleted
    filename_or_id - Name of the file or id of the file to be deleted from the resource
    user - requesting user
    delete_logical_file - If True then the ResourceFile object to be deleted if it is
        part of a LogicalFile object then the LogicalFile object will be deleted which
        deletes all associated ResourceFile objects and file type metadata objects.

    Returns: The name or id of the file which was deleted

    Return Type: string or integer

    Raises:
    Exceptions.NotAuthorized - The user is not authorized
    Exceptions.NotFound - The resource identified by pid does not exist or the file
    identified by file does not exist
    Exception.ServiceFailure - The service is unable to process the request

    Note: For mutable resources (resources that have not been formally published), this
    method modifies the resource by deleting the file. For immutable resources (formally
    published resources), this method creates a new resource that is a new version of the
    formally published resource. HydroShare will record the update by storing the
    SystemMetadata.obsoletes and SystemMetadata.obsoletedBy fields for the respective
    resources in their system metadata HydroShare MUST check or set the values of
    SystemMetadata.obsoletes and SystemMetadata.obsoletedBy so that they accurately
    represent the relationship between the new and old objects. HydroShare MUST also set
    SystemMetadata.dateSysMetadataModified. The modified system metadata entries must
    then be available in HydroShare.listObjects() to ensure that any cataloging systems
    pick up the changes when filtering on SystmeMetadata.dateSysMetadataModified.
    A formally published resource can only be obsoleted by one newer version. Once a
    resource is obsoleted, no other resources can obsolete it.
    """
    resource = utils.get_resource_by_shortkey(pk)
    res_cls = resource.__class__
    fed_path = resource.resource_federation_path
    # Find the matching file; the for/else raises when nothing matched.
    for f in ResourceFile.objects.filter(object_id=resource.id):
        if filter_condition(filename_or_id, fed_path, f):
            if delete_logical_file:
                if f.logical_file is not None:
                    # logical_delete() calls this function (delete_resource_file())
                    # to delete each of its contained ResourceFile objects
                    f.logical_file.logical_delete(user)
                    return filename_or_id
            # send signal
            signals.pre_delete_file_from_resource.send(
                sender=res_cls, file=f, resource=resource, user=user)
            file_name = delete_resource_file_only(resource, f)
            delete_format_metadata_after_delete_file(resource, file_name)
            break
    else:
        raise ObjectDoesNotExist(filename_or_id)
    # Losing a file may invalidate public/discoverable status.
    if resource.raccess.public or resource.raccess.discoverable:
        if not resource.can_be_public_or_discoverable:
            resource.raccess.public = False
            resource.raccess.discoverable = False
            resource.raccess.save()
    signals.post_delete_file_from_resource.send(sender=res_cls,
                                                resource=resource)
    # generate bag
    utils.resource_modified(resource, user, overwrite_bag=False)
    return filename_or_id
def get_object(self, request, *args, **kwargs):
    """Look up the Category referenced by the ``pk`` URL kwarg, raising
    ObjectDoesNotExist with a user-facing message when it is missing.
    """
    category_pk = kwargs['pk']
    try:
        return Category.objects.get(pk=category_pk)
    except Category.DoesNotExist:
        raise ObjectDoesNotExist('Нет такой категории!')
def object_does_not_exist(doc_type, doc_id):
    """
    Builds a 404 error message with standard, translated, verbiage
    """
    message = _("Could not find %(doc_type)s with id %(id)s") % {
        "doc_type": doc_type,
        "id": doc_id,
    }
    return ObjectDoesNotExist(message)
def site(request, site_id):
    """Show a project ("site") with its aggregate rating scores and accept a
    new rating submission via POST.

    Fixes applied:
    - BUG FIX: a submitted rating's ``overall_score`` averages four category
      scores but was divided by 2; it now divides by 4, consistent with the
      aggregate computation and the sibling ``project`` view.
    - the bare ``except:`` around the project lookup is narrowed to
      ``Project.DoesNotExist``; the aggregation block keeps its original
      best-effort "return None" behaviour but no longer swallows
      KeyboardInterrupt/SystemExit.
    - debug ``print`` calls removed.

    :param request: incoming HttpRequest; ``request.user`` must map to a Profile.
    :param site_id: primary key of the Project being viewed/rated.
    :raises ObjectDoesNotExist: when no Project with ``site_id`` exists.
    """
    current_user = request.user
    profile = Profile.objects.get(username=current_user)
    try:
        project = Project.objects.get(id=site_id)
    except Project.DoesNotExist:
        raise ObjectDoesNotExist()

    try:
        ratings = Rating.objects.filter(project_id=site_id)
        total_design = sum(ratings.values_list('design', flat=True))
        total_usability = sum(ratings.values_list('usability', flat=True))
        total_creativity = sum(ratings.values_list('creativity', flat=True))
        total_content = sum(ratings.values_list('content', flat=True))
        overall_score = (total_design + total_content +
                         total_usability + total_creativity) / 4

        # Persist the recomputed aggregates on the project itself.
        project.design = total_design
        project.usability = total_usability
        project.creativity = total_creativity
        project.content = total_content
        project.overall_score = overall_score
        project.save()
    except Exception:
        # Preserved from the original code: any aggregation failure aborts
        # the view (best-effort behaviour).
        return None

    if request.method == 'POST':
        form = RatingForm(request.POST, request.FILES)
        if form.is_valid():
            rating = form.save(commit=False)
            rating.project = project
            rating.profile = profile
            # BUG FIX: average of four categories -- previously "/ 2".
            rating.overall_score = (rating.design + rating.usability +
                                    rating.creativity + rating.content) / 4
            rating.save()
    else:
        form = RatingForm()

    return render(request, "site.html", {
        "project": project,
        "profile": profile,
        "ratings": ratings,
        "form": form
    })
class Manager(object):
    """
    The :class:`~dockit.schema.manager.Manager` class is assigned to the
    objects attribute of a document. The manager is used for retrieving
    documents.
    """
    def contribute_to_class(self, cls, name):
        # Django-style hook: bind a per-document copy of this manager so
        # each schema class gets its own instance.
        new = copy(self)
        new.schema = cls
        setattr(cls, name, new)

    @property
    def backend(self):
        # Storage backend configured for this schema.
        return self.schema._meta.get_backend()

    @property
    def collection(self):
        # Name of the collection this schema's documents live in.
        return self.schema._meta.collection

    def filter(self, **kwargs):
        """
        An accessor for the filters.
        """
        return self.all().filter(**kwargs)

    def index(self, *args):
        return self.all().index(*args)

    #def values(self):
    #    return self.index_manager.values

    def all(self):
        """
        Return all documents in the collection
        """
        return QueryIndex(self.schema)

    def count(self):
        return self.all().count()

    def get(self, **kwargs):
        """
        Return the document matching the arguments
        """
        return self.all().get(**kwargs)

    def filter_by_natural_key(self, hashval=None, **kwargs):
        """Return a queryset matching a natural key, given either the
        precomputed hash or the natural-key field values as kwargs."""
        if isinstance(hashval, dict):
            # Caller passed the key fields positionally as a dict.
            kwargs = hashval
            hashval = None
        if kwargs:
            if len(kwargs) == 1 and '@natural_key_hash' in kwargs:
                hashval = kwargs['@natural_key_hash']
            else:
                hashval = self.schema._get_natural_key_hash(kwargs)
        assert isinstance(hashval, basestring)
        queryset = self.filter(**{'@natural_key_hash':hashval})
        queryset._hashval = hashval #for debug purposes
        return queryset

    def get_by_natural_key(self, hashval=None, **kwargs):
        """Return the single document with the given natural key, re-raising
        lookup failures with a message that includes the hash."""
        qs = self.filter_by_natural_key(hashval, **kwargs)
        real_hashval = qs._hashval
        try:
            return qs.get()
        except MultipleObjectsReturned, error:
            raise MultipleObjectsReturned('Duplicate natural keys found! Lookup parameters were %s. Natural key hash is: %s' % (hashval or kwargs, real_hashval))
        except ObjectDoesNotExist, error:
            # NOTE(review): "paramets" typo lives in the runtime message
            # below; left unchanged here since this edit is comments-only.
            raise ObjectDoesNotExist('Natural key not found! Lookup paramets were %s. Natural key hash is: %s' % (hashval or kwargs, real_hashval))
def _deleted(self, *args, **kwargs):
    """Scrambling method installed on deleted objects: any call fails
    loudly instead of touching stale state."""
    raise ObjectDoesNotExist("This object was already deleted!")
def get_benchmark_results(data):
    """Collect the most recent results for one benchmark/executable/branch
    combination, optionally rescaled relative to a reference value.

    :param data: dict-like of query params; requires 'env', 'proj', 'exe',
        'branch', 'ben'; optional 'revs' (default 10), 'relative',
        'base_commit' and the 'base_*' overrides.
    :returns: dict with the resolved model objects, the result list and a
        'relative' flag.
    :raises ObjectDoesNotExist: when no results match the query.
    """
    environment = Environment.objects.get(name=data['env'])
    project = Project.objects.get(name=data['proj'])
    executable = Executable.objects.get(name=data['exe'], project=project)
    branch = Branch.objects.get(name=data['branch'], project=project)
    benchmark = Benchmark.objects.get(name=data['ben'])
    number_of_revs = int(data.get('revs', 10))
    baseline_commit_name = (data['base_commit']
                            if 'base_commit' in data else None)
    # Results are reported as percent deltas either when explicitly asked
    # for, or implicitly when a baseline commit is given.
    relative_results = (
        ('relative' in data and data['relative'] in ['1', 'yes'])
        or baseline_commit_name is not None)
    result_query = Result.objects.filter(
        benchmark=benchmark
    ).filter(
        environment=environment
    ).filter(
        executable=executable
    ).filter(
        revision__project=project
    ).filter(
        revision__branch=branch
    ).select_related(
        "revision"
    ).order_by('-date')[:number_of_revs]
    if len(result_query) == 0:
        raise ObjectDoesNotExist("No results were found!")
    # Oldest-first order for plotting.
    result_list = [item for item in result_query]
    result_list.reverse()
    if relative_results:
        # Default reference: the oldest result in the window.
        ref_value = result_list[0].value
    if baseline_commit_name is not None:
        # A baseline commit overrides the default reference; each of its
        # coordinates may be overridden individually via base_* params.
        baseline_env = environment
        baseline_proj = project
        baseline_exe = executable
        baseline_branch = branch
        if 'base_env' in data:
            baseline_env = Environment.objects.get(name=data['base_env'])
        if 'base_proj' in data:
            baseline_proj = Project.objects.get(name=data['base_proj'])
        if 'base_exe' in data:
            baseline_exe = Executable.objects.get(name=data['base_exe'],
                                                  project=baseline_proj)
        if 'base_branch' in data:
            baseline_branch = Branch.objects.get(name=data['base_branch'],
                                                 project=baseline_proj)
        base_data = Result.objects.get(
            benchmark=benchmark,
            environment=baseline_env,
            executable=baseline_exe,
            revision__project=baseline_proj,
            revision__branch=baseline_branch,
            revision__commitid=baseline_commit_name)
        ref_value = base_data.value
    if relative_results:
        # Rewrite each value as a percent change from the reference.
        for element in result_list:
            element.value = (100 * (element.value - ref_value)) / ref_value
    return {
        'environment': environment,
        'project': project,
        'executable': executable,
        'branch': branch,
        'benchmark': benchmark,
        'results': result_list,
        'relative': relative_results,
    }
def link_plan(request, case_ids, plan_ids):
    """
    Description: Link test cases to the given plan.

    Params:      $case_ids - Integer/Array/String: An integer representing the ID
                             in the database, an array of case_ids, or a string of
                             comma separated case_ids.

                 $plan_ids - Integer/Array/String: An integer representing the ID
                             in the database, an array of plan_ids, or a string of
                             comma separated plan_ids.

    Returns:     Array: empty on success or an array of hashes with failure codes
                        if a failure occurs

    Example:
    # Add case 1234 to plan id 54321
    >>> TestCase.link_plan(1234, 54321)
    # Add case ids list [56789, 12345] to plan list [1234, 5678]
    >>> TestCase.link_plan([56789, 12345], [1234, 5678])
    # Add case ids list 56789 and 12345 to plan list 1234 and 5678 with String
    >>> TestCase.link_plan('56789, 12345', '1234, 5678')
    """
    case_ids = pre_process_ids(value=case_ids)
    qs = TestCase.objects.filter(pk__in=case_ids)
    tcs_ids = qs.values_list('pk', flat=True)

    # Check the non-exist case ids.
    ids_diff = set(case_ids) - set(tcs_ids.iterator())
    if ids_diff:
        ids_str = ','.join(imap(str, ids_diff))
        if len(ids_diff) > 1:
            err_msg = 'TestCases %s do not exist.' % ids_str
        else:
            err_msg = 'TestCase %s does not exist.' % ids_str
        raise ObjectDoesNotExist(err_msg)

    plan_ids = pre_process_ids(value=plan_ids)
    qs = TestPlan.objects.filter(pk__in=plan_ids)
    tps_ids = qs.values_list('pk', flat=True)

    # Check the non-exist plan ids.
    ids_diff = set(plan_ids) - set(tps_ids.iterator())
    if ids_diff:
        ids_str = ','.join(imap(str, ids_diff))
        if len(ids_diff) > 1:
            err_msg = 'TestPlans %s do not exist.' % ids_str
        else:
            err_msg = 'TestPlan %s does not exist.' % ids_str
        raise ObjectDoesNotExist(err_msg)

    # (plan_id, case_id) pair might probably exist in test_case_plans table, so
    # skip the ones that do exist and create the rest.
    # note: this query returns a list of tuples!
    existing = TestCasePlan.objects.filter(plan__in=plan_ids,
                                           case__in=case_ids).values_list(
                                               'plan', 'case')

    # Link the plans to cases
    def _generate_link_plan_value():
        # Yield only the (plan, case) pairs that are not already linked.
        for plan_id in plan_ids:
            for case_id in case_ids:
                if (plan_id, case_id) not in existing:
                    yield plan_id, case_id

    TestCasePlan.objects.bulk_create([
        TestCasePlan(plan_id=_plan_id, case_id=_case_id)
        for _plan_id, _case_id in _generate_link_plan_value()
    ])
def __lock(self, worker_id, new_state=TASK_STATES["ASSIGNED"], initial_states=None):
    """Critical section. Ensures that only one worker takes the task.

    Performs a single conditional UPDATE so that the state transition is
    atomic: it succeeds only if the task is still in one of
    *initial_states* and is unowned or owned by *worker_id*.

    :param worker_id: id of the worker requesting the transition.
    :param new_state: target state code.
    :param initial_states: list/tuple of state codes the task may currently
        be in; defaults to FREE.
    :raises ObjectDoesNotExist: when the row was not updated (someone else
        won the race) and the task is not already finished.
    :raises MultipleObjectsReturned: when more than one row was updated.
    """
    if type(initial_states) in (list, tuple):
        # filter out invalid state codes
        initial_states = [
            i for i, j in TASK_STATES.get_mapping() if i in initial_states
        ]
        if not initial_states:
            # initial_states is empty
            initial_states = (TASK_STATES["FREE"], )
    else:
        initial_states = (TASK_STATES["FREE"], )

    # it is safe to pass initial_states directly to query,
    # because these values are checked in the code above
    query = """
    UPDATE hub_task
    SET state=%%s, worker_id=%%s, dt_started=%%s, dt_finished=%%s, waiting=%%s
    WHERE id=%%s and state in (%(initial_states)s) and (worker_id is null or worker_id=%%s)
    """ % {
        "initial_states": ",".join(("'%s'" % i for i in initial_states)),
    }

    dt_started = self.dt_started
    if new_state == TASK_STATES["OPEN"]:
        dt_started = datetime.datetime.now()

    dt_finished = self.dt_finished
    if new_state in FINISHED_STATES:
        dt_finished = datetime.datetime.now()

    new_worker_id = worker_id
    if new_state == TASK_STATES["FREE"]:
        # A freed task belongs to no worker.
        new_worker_id = None

    waiting = False

    with transaction.atomic():
        cursor = connection.cursor()
        cursor.execute(query, (new_state, new_worker_id, dt_started,
                               dt_finished, waiting, self.id, worker_id))
        if cursor.rowcount == 0:
            # Lost the race: either another worker took the task or it is
            # already closed.
            if self.state in FINISHED_STATES:
                logger.debug(
                    "Trying to interrupt closed task %s, ignoring.",
                    self.id)
                return
            else:
                raise ObjectDoesNotExist()
        if cursor.rowcount > 1:
            raise MultipleObjectsReturned()

    # Mirror the database changes onto this in-memory instance.
    self.dt_started = dt_started
    self.dt_finished = dt_finished
    if new_worker_id is not None:
        self.worker = Worker.objects.get(id=new_worker_id)
    self.state = new_state
    self.waiting = waiting