def test_large_version_int(self):
    """A version string that encodes past MySQL's bigint range must
    leave version_int unset rather than raising on save."""
    addon = Addon.objects.get(pk=337141)
    version = Version(addon=addon)
    # 2**63 - 1: the resulting encoded int no longer fits in bigint.
    version.version = '9223372036854775807'
    version.save()
    eq_(version.version_int, None)
def test_status_beta(self, parse_addon):
    # Pretend the parsed upload declares a beta-style version string.
    parse_addon.return_value = {'version': u'0.1beta'}
    current_files = File.objects.filter(version=self.current)
    Version.from_upload(self.upload, self.addon, [self.platform])
    statuses = sorted(f.status for f in current_files.all())
    eq_(statuses, [amo.STATUS_UNREVIEWED, amo.STATUS_PUBLIC])
def transformer(addons):
    """Bulk-attach versions, listed authors, and persona data to ``addons``.

    Used as a queryset transformer so related data is fetched with a few
    grouped queries instead of one query per add-on.
    """
    if not addons:
        return
    addon_dict = dict((a.id, a) for a in addons)
    # Personas get their own handling below; only non-personas need
    # version data attached.
    personas = [a for a in addons if a.type_id == amo.ADDON_PERSONA]
    addons = [a for a in addons if a.type_id != amo.ADDON_PERSONA]
    # TODO(jbalogh): It would be awesome to get the versions in one
    # (or a few) queries, but we'll accept the overhead here to roll up
    # some version queries.
    versions = filter(None, (a.current_version for a in addons))
    Version.transformer(versions)
    # Attach listed authors, ordered per-addon by position.
    # groupby relies on the query's order_by('addon_id', ...) so each
    # addon's users arrive contiguously.
    q = (UserProfile.objects.no_cache()
         .filter(addons__in=addons, addonuser__listed=True)
         .extra(select={'addon_id': 'addons_users.addon_id'})
         .order_by('addon_id', 'addonuser__position'))
    for addon_id, users in itertools.groupby(q, key=lambda u: u.addon_id):
        addon_dict[addon_id].listed_authors = list(users)
    for persona in Persona.objects.no_cache().filter(addon__in=personas):
        addon_dict[persona.addon_id].persona = persona
        addon_dict[persona.addon_id].listed_authors = []
    # Personas need categories for the JSON dump.
    Category.transformer(personas)
def version_list(request, addon, template):
    """Render the paginated list of an add-on's valid versions."""
    valid_versions = (addon.versions
                      .filter(files__status__in=amo.VALID_STATUSES)
                      .distinct().order_by('-created'))
    versions = amo.utils.paginate(request, valid_versions, PER_PAGE)
    # Materialize the page and roll up per-version queries in bulk.
    page = list(versions.object_list)
    Version.transformer(page)
    versions.object_list = page
    return render(request, template,
                  {'addon': addon, 'versions': versions})
def test_version_status():
    addon = Addon()
    version = Version()
    # One public plus one unreviewed file -> both labels, comma-joined.
    version.all_files = [File(status=amo.STATUS_PUBLIC),
                         File(status=amo.STATUS_UNREVIEWED)]
    eq_(u"Fully Reviewed,Awaiting Review",
        helpers.version_status(addon, version))
    # A lone unreviewed file -> a single label.
    version.all_files = [File(status=amo.STATUS_UNREVIEWED)]
    eq_(u"Awaiting Review", helpers.version_status(addon, version))
def create_version(self, license=None):
    """Build, persist, log, and return a Version from cleaned form data."""
    data = self.cleaned_data
    version = Version(addon=self.addon, license=license,
                      version=data["version"],
                      releasenotes=data["release_notes"])
    version.save()
    amo.log(amo.LOG.ADD_VERSION, version.addon, version)
    self._save_apps(version)
    self._save_file(version)
    return version
def manifest_updated(self, manifest, upload): """The manifest has updated, create a version and file.""" # This does most of the heavy work. Version.from_upload(upload, self, []) # Triggering this ensures that the current_version gets updated. self.update_version() amo.log(amo.LOG.MANIFEST_UPDATED, self)
def _extra_version_and_file(self, status):
    """Create a second version ('1.2.3') plus a file in ``status``."""
    base = Version.objects.get(id=81551)
    extra_version = Version(addon=self.addon, license=base.license,
                            version="1.2.3")
    extra_version.save()
    extra_file = File(status=status, version=extra_version)
    extra_file.save()
    return extra_version, extra_file
def test_version_status():
    addon = Addon()
    version = Version()
    # Public + deleted files -> both labels, comma-joined.
    version.all_files = [File(status=amo.STATUS_PUBLIC),
                         File(status=amo.STATUS_DELETED)]
    eq_(u'Fully Reviewed,Deleted', helpers.version_status(addon, version))
    # A single unreviewed file -> the preliminary-review label.
    version.all_files = [File(status=amo.STATUS_UNREVIEWED)]
    eq_(u'Awaiting Preliminary Review',
        helpers.version_status(addon, version))
def version_list(request, addon, template, beta=False):
    """Render an add-on's version list; ``beta`` restricts to beta files."""
    if beta:
        status_list = (amo.STATUS_BETA,)
    else:
        status_list = amo.VALID_STATUSES
    qs = (addon.versions.filter(files__status__in=status_list)
          .distinct().order_by('-created'))
    versions = amo.utils.paginate(request, qs, PER_PAGE)
    # Materialize the page and attach related data in bulk.
    page = list(versions.object_list)
    Version.transformer(page)
    versions.object_list = page
    return render(request, template,
                  {'addon': addon, 'beta': beta, 'versions': versions})
def create_version(self, license=None):
    """Persist and return a new Version built from cleaned form data."""
    data = self.cleaned_data
    version = Version(addon=self.addon, license=license,
                      version=data['version'],
                      releasenotes=data['release_notes'])
    version.save()
    self._save_apps(version)
    self._save_file(version)
    return version
def version_list(request, addon_id):
    """Show the paginated version list for a valid add-on."""
    addon = get_object_or_404(Addon.objects.valid(), pk=addon_id)
    valid_versions = (addon.versions
                      .filter(files__status__in=amo.VALID_STATUSES)
                      .distinct().order_by('-created'))
    versions = amo.utils.paginate(request, valid_versions, PER_PAGE)
    page = list(versions.object_list)
    Version.transformer(page)
    versions.object_list = page
    return jingo.render(request, 'versions/version_list.html',
                        {'addon': addon, 'versions': versions})
def from_upload(cls, upload, platforms):
    """Create an Addon (plus its first Version) from a FileUpload.

    Parses the uploaded file, copies only the recognised model fields
    onto a new Addon in the NULL status, then delegates version/file
    creation to Version.from_upload.
    """
    from files.utils import parse_addon
    data = parse_addon(upload.path)
    fields = cls._meta.get_all_field_names()
    # Drop parsed keys that don't map to actual Addon model fields.
    addon = Addon(**dict((k, v) for k, v in data.items() if k in fields))
    addon.status = amo.STATUS_NULL
    addon.default_locale = to_language(translation.get_language())
    addon.save()
    Version.from_upload(upload, addon, platforms)
    amo.log(amo.LOG.CREATE_ADDON, addon)
    log.debug('New addon %r from %r' % (addon, upload))
    return addon
def create_file(self, **kwargs):
    """Create and return a File attached to a fresh Addon/Version."""
    addon = Addon()
    addon.save()
    version = Version(version='0.1')
    version.addon = addon
    version.save()
    new_file = File(**kwargs)
    new_file.version = version
    new_file.save()
    return new_file
def manifest_updated(self, manifest):
    """The manifest has updated, create a version and file.

    Reads the manifest file from disk and wraps it in a FileUpload
    before delegating to Version.from_upload.
    """
    with open(manifest) as fh:
        chunks = fh.read()
    # We'll only create a file upload when we detect that the manifest
    # has changed, otherwise we'll be creating an awful lot of these.
    upload = FileUpload.from_post(chunks, manifest, len(chunks))
    # This does most of the heavy work.
    Version.from_upload(upload, self,
                        [Platform.objects.get(id=amo.PLATFORM_ALL.id)])
    # Triggering this ensures that the current_version gets updated.
    self.update_version()
    amo.log(amo.LOG.MANIFEST_UPDATED, self)
def test_version_status():
    addon = Addon()
    version = Version()
    # Public + deleted files -> both labels, comma-joined.
    version.all_files = [File(status=amo.STATUS_PUBLIC),
                         File(status=amo.STATUS_DELETED)]
    eq_(u"Fully Reviewed,Deleted", helpers.version_status(addon, version))
    version.all_files = [File(status=amo.STATUS_UNREVIEWED)]
    eq_(u"Awaiting Preliminary Review",
        helpers.version_status(addon, version))
    # Marketplace-specific labels.
    with patch.object(settings, "MARKETPLACE", True):
        version.all_files = [File(status=amo.STATUS_PENDING)]
        eq_(u"Pending approval", helpers.version_status(addon, version))
        # A deleted version trumps any file status.
        version.deleted = True
        eq_(u"Deleted", helpers.version_status(addon, version))
def status(request, addon_id, addon, webapp=False):
    """App status page.

    Handles two POST actions — resubmitting a rejected app and
    uploading a new packaged version — and, for rejected apps, exposes
    the most recent rejection log entry to the template.
    """
    form = forms.AppAppealForm(request.POST, product=addon)
    upload_form = NewWebappForm(request.POST or None, is_packaged=True,
                                addon=addon)
    if request.method == 'POST':
        if 'resubmit-app' in request.POST and form.is_valid():
            form.save()
            messages.success(request, _('App successfully resubmitted.'))
            return redirect(addon.get_dev_url('versions'))
        elif 'upload-version' in request.POST and upload_form.is_valid():
            ver = Version.from_upload(upload_form.cleaned_data['upload'],
                                      addon, [amo.PLATFORM_ALL])
            log.info('[Webapp:%s] New version created id=%s from upload: %s'
                     % (addon, ver.pk, upload_form.cleaned_data['upload']))
            return redirect(addon.get_dev_url('versions.edit',
                                              args=[ver.pk]))
    ctx = {'addon': addon, 'webapp': webapp, 'form': form,
           'upload_form': upload_form}
    if addon.status == amo.STATUS_REJECTED:
        try:
            # Most recent rejection entry, if any.
            entry = (AppLog.objects
                     .filter(addon=addon,
                             activity_log__action=amo.LOG.REJECT_VERSION.id)
                     .order_by('-created'))[0]
        except IndexError:
            entry = None
        # This contains the rejection reason and timestamp.
        ctx['rejection'] = entry and entry.activity_log
    return jingo.render(request, 'developers/apps/status.html', ctx)
def test_version_status():
    addon = Addon()
    version = Version()
    # Published + deleted files -> both labels, comma-joined.
    version.all_files = [File(status=amo.STATUS_PUBLIC),
                         File(status=amo.STATUS_DELETED)]
    eq_(u'Published,Deleted', helpers.version_status(addon, version))
    version.all_files = [File(status=amo.STATUS_UNREVIEWED)]
    eq_(u'Awaiting Preliminary Review',
        helpers.version_status(addon, version))
    # Marketplace-specific labels.
    with patch.object(settings, 'MARKETPLACE', True):
        version.all_files = [File(status=amo.STATUS_PENDING)]
        eq_(u'Pending approval', helpers.version_status(addon, version))
        # A deleted version trumps any file status.
        version.deleted = True
        eq_(u'Deleted', helpers.version_status(addon, version))
def test_app_versions(self):
    version = Version.from_upload(self.upload, self.addon,
                                  [self.platform])
    # Firefox compatibility bounds should come from the upload.
    assert amo.FIREFOX in version.compatible_apps
    compat = version.compatible_apps[amo.FIREFOX]
    eq_(compat.min.version, '3.0')
    eq_(compat.max.version, '3.6.*')
def test_mobile_all_with_mixed_desktop_creates_platform_files(self):
    mobile_all = Platform.objects.get(id=amo.PLATFORM_ALL_MOBILE.id)
    linux = Platform.objects.get(id=amo.PLATFORM_LINUX.id)
    version = Version.from_upload(self.upload, self.addon,
                                  [linux, mobile_all])
    # 'All mobile' expands to each mobile platform; linux stays as-is.
    shortnames = sorted(amo.PLATFORMS[f.platform.id].shortname
                        for f in version.all_files)
    eq_(shortnames, ['android', 'linux', 'maemo'])
def test_desktop_all_with_mixed_mobile_creates_platform_files(self):
    desktop_all = Platform.objects.get(id=amo.PLATFORM_ALL.id)
    android = Platform.objects.get(id=amo.PLATFORM_ANDROID.id)
    version = Version.from_upload(self.upload, self.addon,
                                  [desktop_all, android])
    # 'All desktop' expands to each desktop platform; android stays.
    shortnames = sorted(amo.PLATFORMS[f.platform.id].shortname
                        for f in version.all_files)
    eq_(shortnames, ['android', 'linux', 'mac', 'windows'])
def test_mobile_all_desktop_all_creates_all(self):
    desktop_all = Platform.objects.get(id=amo.PLATFORM_ALL.id)
    mobile_all = Platform.objects.get(id=amo.PLATFORM_ALL_MOBILE.id)
    version = Version.from_upload(self.upload, self.addon,
                                  [desktop_all, mobile_all])
    # Both 'all' pseudo-platforms together collapse to a single file.
    shortnames = sorted(amo.PLATFORMS[f.platform.id].shortname
                        for f in version.all_files)
    eq_(shortnames, ['all'])
def transformer(addons):
    """Bulk-attach version and listed-author data to ``addons``.

    Used as a queryset transformer so related data comes from a few
    grouped queries instead of one per add-on.
    """
    if not addons:
        return
    addon_dict = dict((a.id, a) for a in addons)
    # TODO(jbalogh): It would be awesome to get the versions in one
    # (or a few) queries, but we'll accept the overhead here to roll up
    # some version queries.
    versions = filter(None, (a.current_version for a in addons))
    Version.transformer(versions)
    # Attach listed authors.  groupby relies on the query's
    # order_by('addon_id', ...) so each addon's users are contiguous.
    q = (UserProfile.objects.no_cache()
         .filter(addons__in=addons, addonuser__listed=True)
         .extra(select={'addon_id': 'addons_users.addon_id'})
         .order_by('addon_id', 'addonuser__position'))
    for addon_id, users in itertools.groupby(q, key=lambda u: u.addon_id):
        addon_dict[addon_id].listed_authors = list(users)
def version_add(request, addon_id, addon):
    """Ajax endpoint: create a new version from an uploaded file."""
    form = forms.NewVersionForm(request.POST, addon=addon)
    if not form.is_valid():
        return json_view.error(form.errors)
    version = Version.from_upload(form.cleaned_data['upload'], addon,
                                  form.cleaned_data['platforms'])
    url = reverse('devhub.versions.edit',
                  args=[addon.slug, str(version.id)])
    return dict(url=url)
def test_version_log_transformer(self):
    addon = Addon.objects.get()
    first = addon.latest_version
    amo.log(amo.LOG.REJECT_VERSION, addon, first,
            user=self.request.amo_user)
    second = Version(addon=addon, license=first.license,
                     version='1.2.3')
    second.save()
    amo.log(amo.LOG.REJECT_VERSION, addon, second,
            user=self.request.amo_user)
    versions = (Version.objects.filter(addon=addon)
                .order_by('-created')
                .transform(Version.transformer_activity))
    # Each version should carry exactly its own rejection entry.
    eq_(len(versions[0].all_activity), 1)
    eq_(len(versions[1].all_activity), 1)
def test_status_beta(self):
    """Uploading a beta version must not touch the public files."""
    # Create a version and switch the add-on status to public.
    Version.from_upload(self.upload, self.addon, [self.platform])
    File.objects.all().update(status=amo.STATUS_PUBLIC)
    self.addon.update(status=amo.STATUS_PUBLIC)
    # Create an under review version.
    upload = self.get_upload('extension-0.2.xpi')
    Version.from_upload(upload, self.addon, [self.platform])
    # Create a beta version.
    upload = self.get_upload('extension-0.2b1.xpi')
    version = Version.from_upload(self.upload, self.addon, [self.platform],
                                  is_beta=True) if False else \
        Version.from_upload(upload, self.addon, [self.platform],
                            is_beta=True)
    # Check that it doesn't modify the public status and that the
    # created file is in the beta status.
    eq_(File.objects.filter(version=self.current)[0].status,
        amo.STATUS_PUBLIC)
    eq_(self.addon.status, amo.STATUS_PUBLIC)
    eq_(File.objects.filter(version=version)[0].status, amo.STATUS_BETA)
def test_desktop_all_android_creates_all(self):
    version = Version.from_upload(
        self.upload, self.addon,
        [amo.PLATFORM_ALL.id, amo.PLATFORM_ANDROID.id])
    # Desktop 'all' plus android yields one file per group.
    shortnames = sorted(amo.PLATFORMS[f.platform].shortname
                        for f in version.all_files)
    eq_(shortnames, ['all', 'android'])
def test_mobile_all_desktop_all_creates_all(self):
    version = Version.from_upload(
        self.upload, self.addon,
        [amo.PLATFORM_ALL.id, amo.PLATFORM_ALL_MOBILE.id])
    # Both 'all' pseudo-platforms collapse into a single 'all' file.
    shortnames = sorted(amo.PLATFORMS[f.platform].shortname
                        for f in version.all_files)
    eq_(shortnames, ['all'])
def test_mobile_all_with_mixed_desktop_creates_platform_files(self):
    version = Version.from_upload(
        self.upload, self.addon,
        [amo.PLATFORM_LINUX.id, amo.PLATFORM_ALL_MOBILE.id])
    # Mobile 'all' expands to each mobile platform; linux stays as-is.
    shortnames = sorted(amo.PLATFORMS[f.platform].shortname
                        for f in version.all_files)
    eq_(shortnames, ['android', 'linux', 'maemo'])
def version_add(request, addon_id, addon):
    """Ajax endpoint: create a version for the chosen desktop and
    mobile platforms from an uploaded file."""
    form = forms.NewVersionForm(request.POST, addon=addon)
    if not form.is_valid():
        return json_view.error(form.errors)
    platforms = (list(form.cleaned_data['desktop_platforms']) +
                 list(form.cleaned_data['mobile_platforms']))
    version = Version.from_upload(form.cleaned_data['upload'], addon,
                                  platforms)
    log.info('Version created: %s for: %s' %
             (version.pk, form.cleaned_data['upload']))
    url = reverse('devhub.versions.edit',
                  args=[addon.slug, str(version.id)])
    return dict(url=url)
def repackage_jetpack(builder_data, **kw):
    """Finish a builder.amo jetpack repack.

    Validates the builder callback, downloads the rebuilt XPI, creates
    a new Version/File mirroring the old file's platform and status,
    then emails the add-on owner about the upgrade.
    """
    repack_data = dict(urlparse.parse_qsl(builder_data['request']))
    jp_log.info('[1@None] Repackaging jetpack for %s.' %
                repack_data['file_id'])
    jp_log.info('; '.join('%s: "%s"' % i for i in builder_data.items()))
    all_keys = builder_data.copy()
    all_keys.update(repack_data)
    # Prefix every log message with the file id for traceability.
    msg = lambda s: ('[{file_id}]: ' + s).format(**all_keys)
    upgrader = JetpackUpgrader()
    file_data = upgrader.file(repack_data['file_id'])
    # Guard against stale/mismatched builder callbacks.
    if file_data.get('uuid') != repack_data['uuid']:
        return jp_log.warning(msg('Aborting repack. AMO<=>Builder tracking '
                                  'number does not match.'))
    if builder_data['result'] != 'success':
        return jp_log.warning(msg('Build not successful. {result}: {msg}'))
    try:
        addon = Addon.objects.get(id=repack_data['addon'])
        old_file = File.objects.get(id=repack_data['file_id'])
        old_version = old_file.version
    except Exception:
        jp_log.error(msg('Could not find addon or file.'), exc_info=True)
        raise
    # Fetch the file from builder.amo.
    try:
        filepath, headers = urllib.urlretrieve(builder_data['location'])
    except Exception:
        jp_log.error(msg('Could not retrieve {location}.'), exc_info=True)
        raise
    # Figure out the SHA256 hash of the file.
    try:
        hash_ = hashlib.sha256()
        with open(filepath, 'rb') as fd:
            while True:
                chunk = fd.read(8192)
                if not chunk:
                    break
                hash_.update(chunk)
    except Exception:
        jp_log.error(msg('Error hashing file.'), exc_info=True)
        raise
    upload = FakeUpload(path=filepath,
                        hash='sha256:%s' % hash_.hexdigest(),
                        validation=None)
    # TODO: multi-file: have we already created the new version for a
    # different file?
    try:
        new_version = Version.from_upload(upload, addon,
                                          [old_file.platform],
                                          send_signal=False)
        # Sync the compatible apps of the new version.
        for app in old_version.apps.values():
            app.update(version_id=new_version.id, id=None)
            ApplicationsVersions.objects.create(**app)
        # Sync the status of the new file.
        new_file = new_version.files.using('default')[0]
        new_file.status = old_file.status
        new_file.save()
    except Exception:
        jp_log.error(msg('Error creating new version/file.'),
                     exc_info=True)
        raise
    # Sync out the new version.
    addon.update_version()
    upgrader.finish(repack_data['file_id'])
    try:
        send_upgrade_email(addon, new_version, file_data['version'])
    except Exception:
        jp_log.error(msg('Could not send success email.'), exc_info=True)
        raise
    # TODO: don't send editor notifications about the new file.
    # Return the new file to make testing easier.
    return new_file
def status(request, addon_id, addon, webapp=False):
    """App status page.

    Handles two POST actions — resubmitting a rejected app and
    uploading a new packaged version (including feature-profile
    detection via the validator) — and exposes version strings for the
    delete modal plus the latest rejection entry to the template.
    """
    form = forms.AppAppealForm(request.POST, product=addon)
    upload_form = NewWebappVersionForm(request.POST or None,
                                       is_packaged=True, addon=addon,
                                       request=request)
    if request.method == 'POST':
        if 'resubmit-app' in request.POST and form.is_valid():
            form.save()
            messages.success(request, _('App successfully resubmitted.'))
            return redirect(addon.get_dev_url('versions'))
        elif 'upload-version' in request.POST and upload_form.is_valid():
            # Remember before saving whether the app was mobile-only.
            mobile_only = (addon.latest_version and
                           addon.latest_version.features.has_qhd)
            ver = Version.from_upload(upload_form.cleaned_data['upload'],
                                      addon, [amo.PLATFORM_ALL])
            # Update addon status now that the new version was saved.
            addon.update_status()
            res = run_validator(ver.all_files[0].file_path)
            validation_result = json.loads(res)
            # Set all detected features as True and save them.
            keys = ['has_%s' % feature.lower()
                    for feature in validation_result['feature_profile']]
            data = defaultdict.fromkeys(keys, True)
            # Set "Smartphone-Sized Displays" if it's a mobile-only app.
            qhd_devices = (set((amo.DEVICE_GAIA,)),
                           set((amo.DEVICE_MOBILE,)),
                           set((amo.DEVICE_GAIA, amo.DEVICE_MOBILE,)))
            if set(addon.device_types) in qhd_devices or mobile_only:
                data['has_qhd'] = True
            # Update feature profile for this version.
            ver.features.update(**data)
            messages.success(request, _('New version successfully added.'))
            log.info('[Webapp:%s] New version created id=%s from upload: %s'
                     % (addon, ver.pk, upload_form.cleaned_data['upload']))
            return redirect(addon.get_dev_url('versions.edit',
                                              args=[ver.pk]))
    ctx = {'addon': addon, 'webapp': webapp, 'form': form,
           'upload_form': upload_form}
    # Used in the delete version modal.
    if addon.is_packaged:
        versions = addon.versions.values('id', 'version')
        version_strings = dict((v['id'], v) for v in versions)
        version_strings['num'] = len(versions)
        ctx['version_strings'] = json.dumps(version_strings)
    if addon.status == amo.STATUS_REJECTED:
        try:
            # Most recent rejection entry, if any.
            entry = (AppLog.objects.filter(
                addon=addon,
                activity_log__action=amo.LOG.REJECT_VERSION.id).order_by(
                    '-created'))[0]
        except IndexError:
            entry = None
        # This contains the rejection reason and timestamp.
        ctx['rejection'] = entry and entry.activity_log
    return jingo.render(request, 'developers/apps/status.html', ctx)
continue if addon: if addon.versions.filter(version=data['version']).exists(): log.info('[@None] Version %s of "%s" language pack exists' % (data['version'], xpi)) continue if not (addon.addonuser_set .filter(user__email=settings.LANGPACK_OWNER_EMAIL) .exists()): log.info('[@None] Skipping language pack "%s": ' 'not owned by %s' % (xpi, settings.LANGPACK_OWNER_EMAIL)) continue version = Version.from_upload(upload, addon, PLATFORMS) log.info('[@None] Updating language pack "%s" to version %s' % (xpi, data['version'])) else: if amo.VERSION_BETA.search(data['version']): log.error('[@None] Not creating beta version %s for new "%s" ' 'language pack' % (data['version'], xpi)) continue addon = Addon.from_upload(upload, PLATFORMS) AddonUser(addon=addon, user=owner).save() version = addon.versions.get() addon.status = amo.STATUS_PUBLIC if addon.default_locale.lower() == lang.lower(): addon.target_locale = addon.default_locale
def test_status(self):
    # Demote the current files, then upload a new version: the old
    # unreviewed files must end up disabled.
    self.current.files.all().update(status=amo.STATUS_UNREVIEWED)
    Version.from_upload(self.upload, self.addon, [self.platform])
    old_file = File.objects.filter(version=self.current)[0]
    eq_(old_file.status, amo.STATUS_DISABLED)
def handle(self, *args, **options):
    """Load GO annotations into genesets with versioned history.

    Loads the GO OBO tree (local or remote), parses a gzipped GO
    association file, maps annotation cross-references to Gene objects,
    propagates annotations through the ontology, and finally creates or
    updates one Geneset (plus Version records) per annotated GO term.

    Fixes vs. previous revision:
    - crash when --evcodes was omitted (list(None) raised TypeError);
    - stale/unbound ``description`` when a term has no description;
    - regex compiled once instead of per annotation line;
    - ``is not None`` instead of ``!= None``.
    """
    user_name = options.get('user')
    user = None
    try:
        user = User.objects.get(username=user_name)
    except User.DoesNotExist:
        logger.error('The user %s did not exist.', user_name,
                     extra={'options': options})
        sys.exit()
    org = None
    try:
        org = Organism.objects.get(
            scientific_name=options.get('organism'))
    except Organism.DoesNotExist:
        logger.error('The organism %s did not exist.',
                     options.get('organism'),
                     extra={'options': options})
        sys.exit()
    # Restrict to the given evidence codes, or accept all when absent.
    accepted_evcodes = None
    if options.get('evcodes'):
        accepted_evcodes = set(options.get('evcodes').split(','))
    gene_ontology = go()
    remote = options.get('remote') is not None
    obo_location = GO_OBO_URL if remote else options.get('obo')
    loaded_obo = gene_ontology.load_obo(obo_location,
                                        remote_location=remote,
                                        timeout=5)
    if not loaded_obo:
        logger.error("Couldn't load OBO file %s with remote equal to %s.",
                     obo_location, remote)
        sys.exit()
    # Open the (gzipped) association file, remotely or locally.
    annot_zip_fh = None
    annot_fh = None
    if remote:
        annot_zip_fh = urllib2.urlopen(
            GO_ASSOC_FTP + '.'.join((GO_ASSOC_PREFIX,
                                     GO_NAMES[org.scientific_name],
                                     GO_ASSOC_SUFFIX)),
            timeout=5)
    else:
        annot_zip_fh = open(options.get('annot'))
    annot_fh = gzip.GzipFile(fileobj=io.BytesIO(annot_zip_fh.read()))
    annot_zip_fh.close()
    annots = []
    load_pairs = {}
    pubs = set()
    # Hoisted out of the loop: TAIR locus-id pattern, compiled once.
    tair_regex = re.compile('AT[0-9MC]G[0-9][0-9][0-9][0-9][0-9]')
    for line in annot_fh:
        if line.startswith('!'):  # GAF comment line
            continue
        toks = line.strip().split('\t')
        (xrdb, xrid, details, goid, ref, ev, date) = (
            toks[0], toks[1], toks[3], toks[4], toks[5], toks[6],
            toks[13])
        if options.get('tair'):
            # Prefer a TAIR-style locus id from the object name,
            # synonym, or first alias column.
            first_alias = toks[10].split('|')[0]
            if tair_regex.match(toks[2]):
                xrid = toks[2]
            elif tair_regex.match(toks[9]):
                xrid = toks[9]
            elif tair_regex.match(first_alias):
                xrid = first_alias
        if options.get('only_wb') and (toks[0] != 'WB'):
            continue
        if details == 'NOT':  # negated annotations are skipped
            continue
        if accepted_evcodes is not None and ev not in accepted_evcodes:
            continue
        if options.get('leading') is not None:
            xrid = xrid.split(':')[1]
        load_pairs.setdefault(xrdb, []).append(xrid)
        for ref_item in ref.split('|'):
            if ref_item.startswith('PMID:'):
                pubs.add(ref_item.split(':')[1])
            else:
                logger.info("Unknown publication key %s", ref_item)
        annots.append((xrdb, xrid, goid, ref, date))
    # Map (xrdb, xrid) pairs to Gene objects.
    xref_cache = {}
    if options.get('pseudomonas'):
        logger.info('Pseudomonas entered')
        for (xrdb, xrids) in load_pairs.iteritems():
            gene_objs = Gene.objects.filter(systematic_name__in=xrids)
            logger.info("Mapped %s Pseudomonas genes from the database "
                        "using gene systematic name.", gene_objs.count())
            for gene_obj in gene_objs:
                xref_cache[(xrdb, gene_obj.systematic_name)] = gene_obj
    else:
        for (xrdb, xrids) in load_pairs.iteritems():
            if xrdb in DB_REMAP:
                xrdb = DB_REMAP[xrdb]
            try:
                xrdb_obj = CrossRefDB.objects.get(name=xrdb)
            except CrossRefDB.DoesNotExist:
                logger.warning("Couldn't find the cross reference DB %s.",
                               xrdb)
                continue
            xrid_objs = CrossRef.objects.filter(
                crossrefdb=xrdb_obj).filter(xrid__in=xrids)
            logger.info("Mapped %s cross references from %s",
                        xrid_objs.count(), xrdb)
            for xrid_obj in xrid_objs:
                xref_cache[(xrdb, xrid_obj.xrid)] = xrid_obj.gene
    load_pmids(pubs)
    pub_cache = {}
    pub_values = Publication.objects.filter(pmid__in=pubs).only(
        'id', 'pmid').values()
    for pub in pub_values:
        pub_cache[pub['pmid']] = pub['id']
    # Attach every mapped annotation to the ontology.
    for annot in annots:
        (xrdb, xrid, goid, ref, date) = annot
        if xrdb in DB_REMAP:
            xrdb = DB_REMAP[xrdb]
        try:
            gene = xref_cache[(xrdb, xrid)]
        except KeyError:
            logger.debug("Couldn't find xrid %s in xrdb %s.", xrid, xrdb)
            logger.info("Couldn't find xrid %s in xrdb %s.", xrid, xrdb)
            continue
        pub = None
        for ref_item in ref.split('|'):
            if ref_item.startswith('PMID:'):
                try:
                    pub = pub_cache[int(ref_item.split(':')[1])]
                except KeyError:
                    pub = None
        gene_ontology.add_annotation(go_id=goid, gid=gene.pk, ref=pub,
                                     date=date, direct=True)
    gene_ontology.populated = True  # mark annotated
    gene_ontology.propagate()  # prop annotations
    # Guard: accepted_evcodes is None when --evcodes was not given;
    # the old code crashed on list(None).
    evlist = list(accepted_evcodes) if accepted_evcodes else []
    for (term_id, term) in gene_ontology.go_terms.iteritems():
        if not term.annotations:
            continue
        # make first 50 chars into a slug
        slug = slugify(' '.join((term.go_id, org.scientific_name,
                                 term.full_name)))[:50]
        namespace = GO_NAMESPACE_MAP[term.get_namespace()]
        go_id = term.go_id.split(':')[1]
        # construct title
        title = 'GO' + '-' + namespace + '-' + go_id + ':' + term.full_name
        # construct abstract: write evidence restriction as a sentence
        evclause = ''
        if len(evlist):
            evclause = ' Only annotations with evidence coded as '
            if len(evlist) == 1:
                evclause = evclause + evlist[0]
            else:
                evclause = (evclause + ', '.join(evlist[:-1]) + ' or ' +
                            evlist[-1])
            evclause = evclause + ' are included.'
        if term.description:
            description = (term.description +
                           ' Annotations are propagated through '
                           'transitive closure as recommended by the GO '
                           'Consortium.' + evclause)
        else:
            # Previously ``description`` leaked from the prior loop
            # iteration (or was unbound on the first); use an empty
            # abstract instead.
            description = ''
            logger.info("No description on term %s", term)
        # Get (or create) the geneset for this term.
        changed = False
        try:
            gs_obj = Geneset.objects.get(slug=slug, creator=user)
            changed = False  # flag to know if we need to call save
            # all these genesets should be public
            if not gs_obj.public:
                gs_obj.public = True
                changed = True
            if gs_obj.title != title:
                gs_obj.title = title
                changed = True
            if gs_obj.abstract != description:
                gs_obj.abstract = description
                changed = True
        except Geneset.DoesNotExist:
            gs_obj = Geneset(title=title, slug=slug, creator=user,
                             organism=org, public=True,
                             abstract=description)
            changed = True
        # if anything changed
        if changed:
            gs_obj.save()
        if options.get('initial'):
            # Disable commit_date's auto_now_add so we can backdate
            # versions to the annotation dates.
            commit_date = Version._meta.get_field_by_name(
                'commit_date')[0]
            commit_date.auto_now_add = False
            logger.info('Initial load. Need to construct versions of %s '
                        'from annotation date.', term.go_id)
            # Bucket annotations by (timezone-aware) annotation date.
            date_annots = {}
            for annotation in term.annotations:
                date = timezone.make_aware(
                    datetime.strptime(annotation.date, '%Y%m%d'),
                    timezone.get_default_timezone())
                date_annots.setdefault(date, []).append(annotation)
            annots_as_of_date = set()
            prior_annots = set()
            prior_version = None
            for (date, annots) in sorted(date_annots.iteritems()):
                annots_as_of_date.update(
                    [(annotation.gid, annotation.ref)
                     for annotation in annots])
                # If nothing changed since the prior date, skip.
                if annots_as_of_date == prior_annots:
                    continue
                v_obj = Version(geneset=gs_obj, creator=user,
                                parent=prior_version, commit_date=date)
                v_obj.description = (
                    "Added " + str(len(annots)) +
                    " annotations from GO based on the dates provided "
                    "in the GO annotation file.")
                v_obj.annotations = annots_as_of_date
                v_obj.save()
                prior_version = v_obj
                prior_annots = annots_as_of_date.copy()
            # re-enable auto_now_add
            commit_date.auto_now_add = True
        else:
            # Incremental load: diff against the most recent version.
            most_recent_versions = Version.objects.filter(
                geneset=gs_obj).order_by('-commit_date')[:1]
            annots = set([(annotation.gid, annotation.ref)
                          for annotation in term.annotations])
            description = ''
            most_recent_version = None
            if most_recent_versions:
                most_recent_version = most_recent_versions[0]
                if most_recent_version.commit_date > timezone.now():
                    logger.error('Version from the future: %s.',
                                 most_recent_version)
                new = annots - most_recent_version.annotations
                removed = most_recent_version.annotations - annots
                if new or removed:
                    description = (description + 'Added ' +
                                   str(len(new)) + ' and removed ' +
                                   str(len(removed)) +
                                   ' annotations from GO.')
            else:
                description = ('Created with ' + str(len(annots)) +
                               ' annotations from GO.')
            # Only record a version when something actually changed.
            if description:
                v_obj = Version(geneset=gs_obj, creator=user,
                                parent=most_recent_version,
                                commit_date=timezone.now())
                v_obj.description = description
                v_obj.annotations = annots
                v_obj.save()
def test_file_name_platform_all(self):
    platform_all = Platform.objects.get(pk=amo.PLATFORM_ALL.id)
    version = Version.from_upload(self.upload, self.addon,
                                  [platform_all])
    # The generated filename has no platform suffix for 'all'.
    eq_(version.all_files[0].filename,
        u'delicious_bookmarks-0.1-fx.xpi')
def test_generate_filename_ja(self):
    # A Japanese-only add-on name yields the generic 'addon' stem in
    # the generated filename.
    version = Version(version='0.1.7')
    version.compatible_apps = (amo.FIREFOX,)
    version.addon = Addon(name=u' フォクすけ といっしょ')
    file_ = File()
    file_.version = version
    eq_(file_.generate_filename(), 'addon-0.1.7-fx.xpi')
def repackage_jetpack(builder_data, **kw):
    """Finish a builder.amo jetpack repack (redis-logged variant).

    Validates the builder callback, downloads the rebuilt XPI, bails on
    duplicate versions, creates a new Version/File, syncs compat info
    and file status from the old version, mirrors the file where
    appropriate, and emails the add-on owner.
    """
    repack_data = dict(urlparse.parse_qsl(builder_data['request']))
    jp_log.info('[1@None] Repackaging jetpack for %s.' %
                repack_data['file_id'])
    jp_log.info('; '.join('%s: "%s"' % i for i in builder_data.items()))
    all_keys = builder_data.copy()
    all_keys.update(repack_data)
    # Prefix every log message with the file id for traceability.
    msg = lambda s: ('[{file_id}]: ' + s).format(**all_keys)
    upgrader = JetpackUpgrader()
    file_data = upgrader.file(repack_data['file_id'])
    # Mirror log output into redis so progress can be inspected.
    redis_logger = RedisLogHandler(jp_log, upgrader, file_data)
    jp_log.addHandler(redis_logger)
    if file_data.get('uuid') != repack_data['uuid']:
        _msg = ('Aborting repack. AMO<=>Builder tracking number mismatch '
                '(%s) (%s)' % (file_data.get('uuid'),
                               repack_data['uuid']))
        return jp_log.warning(msg(_msg))
    if builder_data['result'] != 'success':
        return jp_log.warning(msg('Build not successful. {result}: {msg}'))
    try:
        addon = Addon.objects.get(id=repack_data['addon'])
        old_file = File.objects.get(id=repack_data['file_id'])
        old_version = old_file.version
    except Exception:
        jp_log.error(msg('Could not find addon or file.'), exc_info=True)
        raise
    # Fetch the file from builder.amo.
    try:
        filepath, headers = urllib.urlretrieve(builder_data['location'])
    except Exception:
        jp_log.error(msg('Could not retrieve {location}.'), exc_info=True)
        raise
    # Figure out the SHA256 hash of the file.
    try:
        hash_ = hashlib.sha256()
        with storage.open(filepath, 'rb') as fd:
            while True:
                chunk = fd.read(8192)
                if not chunk:
                    break
                hash_.update(chunk)
    except Exception:
        jp_log.error(msg('Error hashing file.'), exc_info=True)
        raise
    upload = FakeUpload(path=filepath,
                        hash='sha256:%s' % hash_.hexdigest(),
                        validation=None)
    # Best-effort duplicate-version check; parse failures fall through
    # to the normal creation path.
    try:
        version = parse_addon(upload, addon)['version']
        if addon.versions.filter(version=version).exists():
            jp_log.warning('Duplicate version [%s] for %r detected. '
                           'Bailing.' % (version, addon))
            return
    except Exception:
        pass
    # TODO: multi-file: have we already created the new version for a
    # different file?
    try:
        new_version = Version.from_upload(upload, addon,
                                          [old_file.platform],
                                          send_signal=False)
    except Exception:
        jp_log.error(msg('Error creating new version.'))
        raise
    try:
        # Sync the compatible apps of the new version with data from the
        # old version if the repack didn't specify compat info.
        for app in old_version.apps.values():
            sync_app = amo.APP_IDS[app['application_id']]
            new_compat = new_version.compatible_apps
            if sync_app not in new_compat:
                app.update(version_id=new_version.id, id=None)
                ApplicationsVersions.objects.create(**app)
            else:
                new_compat[sync_app].min_id = app['min_id']
                new_compat[sync_app].max_id = app['max_id']
                new_compat[sync_app].save()
    except Exception:
        jp_log.error(msg('Error syncing compat info. [%s] => [%s]' %
                         (old_version.id, new_version.id)),
                     exc_info=True)
        # Skip this for now, we can fix up later.
        pass
    try:
        # Sync the status of the new file.
        new_file = new_version.files.using('default')[0]
        new_file.status = old_file.status
        new_file.save()
        if (addon.status in amo.MIRROR_STATUSES and
            new_file.status in amo.MIRROR_STATUSES):
            new_file.copy_to_mirror()
    except Exception:
        jp_log.error(msg('Error syncing old file status.'),
                     exc_info=True)
        raise
    # Sync out the new version.
    addon.update_version()
    upgrader.finish(repack_data['file_id'])
    jp_log.info('Repacked %r from %r for %r.' %
                (new_version, old_version, addon))
    jp_log.removeHandler(redis_logger)
    try:
        send_upgrade_email(addon, new_version, file_data['version'])
    except Exception:
        jp_log.error(msg('Could not send success email.'), exc_info=True)
        raise
    # Return the new file to make testing easier.
    return new_file
def test_developer_name(self):
    expected = u'M€lâ'
    version = Version.objects.latest('id')
    version._developer_name = expected
    eq_(version.developer_name, expected)
    # The property also works on an unsaved instance.
    eq_(Version(_developer_name=expected).developer_name, expected)
def test_file_platform(self):
    version = Version.from_upload(self.upload, self.addon,
                                  [self.platform])
    created = version.all_files
    # Exactly one file, on the requested platform.
    eq_(len(created), 1)
    eq_(created[0].platform, self.platform)
def _get_version(self, status):
    """Return a Version stub whose single file carries ``status``."""
    stub_file = mock.Mock()
    stub_file.status = status
    version = Version()
    version.all_files = [stub_file]
    return version
def test_status(self):
    current_files = File.objects.filter(version=self.current)
    Version.from_upload(self.upload, self.addon, [self.platform])
    # The old public file stays public; the rest get disabled.
    statuses = sorted(f.status for f in current_files.all())
    eq_(statuses, [amo.STATUS_PUBLIC, amo.STATUS_DISABLED])
def test_file_platform_is_always_all(self):
    version = Version.from_upload(self.upload, self.addon,
                                  [self.platform])
    created = version.all_files
    eq_(len(created), 1)
    # Regardless of the requested platform, the file ends up on 'all'.
    eq_(created[0].platform.id, amo.PLATFORM_ALL.id)
def create_fake_app(self, data):
    """Create a fake instance of Webapp and related models from ES data.

    `data` is a raw Elasticsearch document (a dict); the returned Webapp
    is never saved — it only carries the attributes the serializer reads.
    """
    # app_type drives both flags: anything non-hosted is packaged, and
    # only the privileged type marks the (fake) latest version privileged.
    is_packaged = data['app_type'] != amo.ADDON_WEBAPP_HOSTED
    is_privileged = data['app_type'] == amo.ADDON_WEBAPP_PRIVILEGED

    obj = Webapp(id=data['id'], app_slug=data['app_slug'],
                 is_packaged=is_packaged, type=amo.ADDON_WEBAPP,
                 icon_type='image/png')

    # Set relations and attributes we need on those relations.
    # The properties set on latest_version and current_version differ
    # because we are only setting what the serializer is going to need.
    # In particular, latest_version.is_privileged needs to be set because
    # it's used by obj.app_type_id.
    obj._current_version = Version()
    obj._current_version._developer_name = data['author']
    obj._current_version.supported_locales = data['supported_locales']
    obj._current_version.version = data['current_version']
    obj._latest_version = Version()
    obj._latest_version.is_privileged = is_privileged
    obj._geodata = Geodata()
    obj.all_categories = [Category(slug=cat) for cat in data['category']]
    obj.all_previews = [Preview(id=p['id'], modified=p['modified'],
                                filetype=p['filetype'])
                        for p in data['previews']]
    obj._device_types = [DEVICE_TYPES[d] for d in data['device']]

    # Set base attributes on the "fake" app using the data from ES.
    # It doesn't mean they'll get exposed in the serializer output, that
    # depends on what the fields/exclude attributes in Meta.
    # data.get() is used so missing ES fields become None instead of raising.
    for field_name in ('created', 'modified', 'default_locale',
                       'is_escalated', 'is_offline', 'manifest_url',
                       'premium_type', 'regions', 'reviewed', 'status',
                       'weekly_downloads'):
        setattr(obj, field_name, data.get(field_name))

    # Attach translations for all translated attributes.
    for field_name in ('name', 'description', 'homepage', 'support_email',
                       'support_url'):
        ESTranslationSerializerField.attach_translations(
            obj, data, field_name)
    ESTranslationSerializerField.attach_translations(
        obj._geodata, data, 'banner_message')
    # release_notes lives under a different name ('releasenotes') in ES.
    ESTranslationSerializerField.attach_translations(
        obj._current_version, data, 'release_notes',
        target_name='releasenotes')

    # Set attributes that have a different name in ES.
    obj.public_stats = data['has_public_stats']

    # Avoid a query for payment_account if the app is not premium.
    if not obj.is_premium():
        obj.payment_account = None

    # Override obj.get_region() with a static list of regions generated
    # from the region_exclusions stored in ES.
    # NOTE(review): this rebinds obj.get_regions from a bound method to the
    # method's return value — downstream code presumably reads it as a plain
    # attribute rather than calling it; confirm against the serializer.
    obj.get_regions = obj.get_regions(
        obj.get_region_ids(restofworld=True,
                           excluded=data['region_exclusions']))

    # Some methods below will need the raw data from ES, put it on obj.
    obj.es_data = data
    return obj
def test_file_name(self):
    """The generated filename embeds the upload timestamp."""
    new_version = Version.from_upload(self.upload, self.addon,
                                      [self.platform])
    first_file = new_version.all_files[0]
    eq_(first_file.filename, u'delicious_bookmarks-%s.xml' % self.now)
def status(request, addon_id, addon, webapp=False):
    """Developer 'versions/status' page for an app.

    Handles two POST actions — resubmitting a rejected app and uploading a
    new version of a packaged app — and otherwise renders the status page
    with rejection info and (behind a waffle switch) preload test-plan state.
    """
    form = forms.AppAppealForm(request.POST, product=addon)
    # Only packaged-app uploads are accepted from this page.
    upload_form = NewWebappVersionForm(request.POST or None, is_packaged=True,
                                       addon=addon, request=request)
    if request.method == 'POST':
        if 'resubmit-app' in request.POST and form.is_valid():
            if waffle.switch_is_active('iarc') and not addon.is_rated():
                # Cannot resubmit without content ratings.
                return http.HttpResponseForbidden(
                    'This app must obtain content ratings before being '
                    'resubmitted.')
            form.save()
            # Record the resubmission in the communication dashboard.
            create_comm_note(addon, addon.latest_version, request.amo_user,
                             form.data['notes'],
                             note_type=comm.RESUBMISSION)
            if addon.vip_app:
                handle_vip(addon, addon.current_version, request.amo_user)
            messages.success(request, _('App successfully resubmitted.'))
            return redirect(addon.get_dev_url('versions'))
        elif 'upload-version' in request.POST and upload_form.is_valid():
            # Remember whether the previous latest version was qHD-only so
            # the flag can be carried over to the new version below.
            mobile_only = (addon.latest_version and
                           addon.latest_version.features.has_qhd)
            ver = Version.from_upload(upload_form.cleaned_data['upload'],
                                      addon, [amo.PLATFORM_ALL])
            # Update addon status now that the new version was saved.
            addon.update_status()
            res = run_validator(ver.all_files[0].file_path)
            validation_result = json.loads(res)
            # Set all detected features as True and save them.
            keys = ['has_%s' % feature.lower()
                    for feature in validation_result['feature_profile']]
            # NOTE(review): defaultdict.fromkeys builds a defaultdict with no
            # default_factory here, i.e. it behaves like a plain dict — a
            # plain dict.fromkeys would presumably do; confirm before changing.
            data = defaultdict.fromkeys(keys, True)
            # Set "Smartphone-Sized Displays" if it's a mobile-only app.
            qhd_devices = (set((amo.DEVICE_GAIA,)),
                           set((amo.DEVICE_MOBILE,)),
                           set((amo.DEVICE_GAIA, amo.DEVICE_MOBILE,)))
            if set(addon.device_types) in qhd_devices or mobile_only:
                data['has_qhd'] = True
            # Update feature profile for this version.
            ver.features.update(**data)
            messages.success(request, _('New version successfully added.'))
            log.info('[Webapp:%s] New version created id=%s from upload: %s'
                     % (addon, ver.pk, upload_form.cleaned_data['upload']))
            if addon.vip_app:
                handle_vip(addon, ver, request.amo_user)
            return redirect(addon.get_dev_url('versions.edit', args=[ver.pk]))
    ctx = {'addon': addon, 'webapp': webapp, 'form': form,
           'upload_form': upload_form}
    # Used in the delete version modal.
    if addon.is_packaged:
        versions = addon.versions.values('id', 'version')
        version_strings = dict((v['id'], v) for v in versions)
        version_strings['num'] = len(versions)
        ctx['version_strings'] = json.dumps(version_strings)
    if addon.status == amo.STATUS_REJECTED:
        try:
            # Most recent rejection log entry, if any.
            entry = (AppLog.objects
                     .filter(addon=addon,
                             activity_log__action=amo.LOG.REJECT_VERSION.id)
                     .order_by('-created'))[0]
        except IndexError:
            entry = None
        # This contains the rejection reason and timestamp.
        ctx['rejection'] = entry and entry.activity_log
    if waffle.switch_is_active('preload-apps'):
        test_plan = PreloadTestPlan.objects.filter(
            addon=addon, status=amo.STATUS_PUBLIC)
        if test_plan.exists():
            test_plan = test_plan[0]
            # Flag plans submitted before the latest template revision.
            if (test_plan.last_submission <
                    settings.PREINSTALL_TEST_PLAN_LATEST):
                ctx['outdated_test_plan'] = True
            ctx['next_step_suffix'] = 'submit'
        else:
            ctx['next_step_suffix'] = 'home'
        ctx['test_plan'] = test_plan
    return render(request, 'developers/apps/status.html', ctx)
def test_mobile_all_creates_platform_files(self):
    """PLATFORM_ALL_MOBILE expands into one file per mobile platform."""
    all_mobile = Platform.objects.get(id=amo.PLATFORM_ALL_MOBILE.id)
    new_version = Version.from_upload(self.upload, self.addon, [all_mobile])
    shortnames = sorted(amo.PLATFORMS[f.platform.id].shortname
                        for f in new_version.all_files)
    eq_(shortnames, ['android', 'maemo'])
def test_carry_over_license_no_version(self):
    """With no prior versions there is no license to carry over."""
    self.addon.versions.all().delete()
    new_version = Version.from_upload(self.upload, self.addon,
                                      [self.platform])
    eq_(new_version.license_id, None)
def test_carry_over_old_license(self):
    """A new version inherits the current version's license."""
    new_version = Version.from_upload(self.upload, self.addon,
                                      [self.platform])
    eq_(new_version.license_id, self.addon.current_version.license_id)
def test_version_number(self):
    """The version string is parsed out of the uploaded file."""
    new_version = Version.from_upload(self.upload, self.addon,
                                      [self.platform])
    eq_(new_version.version, '0.1')
def test_android_creates_platform_files(self):
    """An Android-only upload yields exactly the android platform file."""
    # NOTE(review): unlike the sibling mobile test, this passes the platform
    # *id* to from_upload and indexes amo.PLATFORMS by f.platform directly —
    # presumably File.platform is a raw id here; confirm consistency.
    new_version = Version.from_upload(self.upload, self.addon,
                                      [amo.PLATFORM_ANDROID.id])
    shortnames = sorted(amo.PLATFORMS[f.platform].shortname
                        for f in new_version.all_files)
    eq_(shortnames, ['android'])