def determine_versions():
    """Scrape the Swift download page and return all available versions.

    Fetches ``download_url``, collects every anchor href, and parses each
    resolved URL into a (version, platform) pair via ``parse_url``. URLs for
    the same version are merged into one ``Version`` whose ``binaries`` dict
    maps platform -> download URL.

    Returns:
        list: ``Version`` objects, one per distinct version, e.g.
        ``[Version(3.0.1), Version(3.0), ...]``.
    """
    request = requests.get(download_url)
    soup = BeautifulSoup(request.text, 'html.parser')
    releases = soup.find_all('a')
    versions = {}
    for a in releases:
        # hrefs may be relative; resolve against the page URL first.
        url = urljoin(download_url, a['href'])
        version, platform = parse_url(url)
        if version and platform:
            if version in versions:
                versions[version].binaries[platform] = url
            else:
                versions[version] = Version(version, {platform: url})
    # The keys are unused; the values are the accumulated Version objects.
    return list(versions.values())
def import_manifest(arch,version):
    """Import the ``.entity`` files of *version* (for *arch*) into the datastore.

    No-op when the Version entity already exists and is flagged imported.
    If an earlier version of the same arch was imported, only the diff
    against its manifest is processed (stale entity groups deleted, new or
    changed entities imported); otherwise every matching entity file from
    the manifest is imported.  Finally this version is flagged imported and
    the previous one un-flagged, so at most one version per arch is current.
    """
    v = Version.get_by_key_name(version)
    if v is None or not v.imported:
        m = fetcher.fetch(arch,version,'manifest.xml')
        if m is not None:
            m = Manifest(m)
            #xg_on = db.create_transaction_options(xg=True)
            # presumably ensures/creates the Version entity — TODO confirm
            # against version_ok's definition.
            v = version_ok(arch,version)
            # NOTE(review): arch is interpolated straight into the GQL string;
            # fine for trusted ints, but parameter binding would be safer.
            prev = db.GqlQuery('select * from Version where imported = True and arch = {0}'.format(arch)).fetch(1)
            if prev is not None and len(prev) > 0:
                # A previous imported version exists: work from the manifest diff.
                prev = prev[0]
                from htmldiff import Changeset
                pmanifest = Manifest(fetcher.fetch(arch,prev.value,'manifest.xml'))
                changes = Changeset(pmanifest,m)
                # NOTE(review): endswith('entity') also matches paths that do
                # not end in '.entity'; the import loop below re-checks with
                # the dotted suffix — confirm this looseness is intentional.
                to_delete = [ pmanifest.files[x] for x in changes.dels | changes.changes if pmanifest.files[x]['path'].endswith('entity') and pmanifest.files[x]['path'].startswith('game/resources0.s2z')]
                to_import = [ m.files[x] for x in changes.adds | changes.changes if m.files[x]['path'].endswith('entity') and m.files[x]['path'].startswith('game/resources0.s2z')]
                total = len(to_delete)
                current = 1
                # Drop the large diff/manifest objects before the long loops
                # to keep memory down (App Engine request limits).
                del(changes)
                del(m)
                del(pmanifest)
                for file in to_delete:
                    # Entity key is "<file version>|<file path>".
                    e = Node.get_by_key_name('|'.join([file['version'],file['path']]))
                    if e is not None:
                        logging.info('[{1}/{2}] Deleting {0} entity group'.format('|'.join([file['version'],file['path']]),current,total))
                        db.run_in_transaction(delete_group,e)
                    current += 1
                del(to_delete)
            else:
                # First import for this arch: take every matching entity file.
                prev = None
                to_import = [x for x in m.files.values() if x['path'].endswith('entity') and x['path'].startswith('game/resources0.s2z')]
            total = len(to_import)
            current = 1
            for file in to_import:
                if file['path'].endswith('.entity'):
                    e = Node.get_by_key_name('|'.join([file['version'],file['path']]))
                    if e is None:
                        data = fetcher.fetch(arch,file['version'],file['path'])
                        #if data is None:
                        #continue
                        logging.info('[%d/%d] importing %s %s into db' % (current,total,file['version'],file['path']))
                        db.run_in_transaction(parse_entity,data,file['version'],file['path'],[version])
                        #db.run_in_transaction_options(xg_on,parse_entity,file['version'],file['path'],[version])
                    #elif version not in e.versions:
                    #db.run_in_transaction(set_version,e,version)
                current += 1
            # Flip the "current imported version" marker atomically-ish:
            # mark this version imported, then clear the previous one.
            v.imported = True
            v.put()
            if prev is not None:
                prev.imported = False
                prev.put()
def _get_repo_versions(repo):
    """Build Version objects for every tagged release of *repo*.

    Tags are ordered by commit date; each tag (except the oldest) is paired
    with its immediate predecessor so ``_version_files`` can compute the
    files that changed between the two releases.

    Args:
        repo: a GitPython ``Repo``.

    Returns:
        list: ``Version`` objects sorted by their commit date.
    """
    repo_tags = sorted(repo.tags, key=lambda t: t.commit.committed_date)
    # zip(tags[1:], tags) pairs each tag with the one released just before it.
    versions = [
        Version(tag, DataExtractor._version_files(tag, prev_tag))
        for tag, prev_tag in zip(repo_tags[1:], repo_tags)
    ]
    return sorted(versions, key=lambda version: version._commit._commit_date)
def get_repo_versions(project, repo):
    """Build Version objects carrying the files touched between releases.

    For each tag commit (from ``get_commits_between_versions``) the file
    lists of all commits in that range are flattened into one list and
    attached to a ``Version``.

    Args:
        project: project identifier passed through to ``_get_commits_files``.
        repo: a GitPython ``Repo``.

    Returns:
        list: ``Version`` objects sorted by their commit date.
    """
    commits_files = DataExtractor._get_commits_files(project, repo)
    commits_versions = DataExtractor.get_commits_between_versions(repo)
    tags_commits = {t.commit: t for t in repo.tags}
    versions = []
    for tag_commit, commits in commits_versions.items():
        # Flatten the per-commit file lists in one linear pass;
        # reduce(list.__add__, ...) here would be quadratic.
        files = [f for c in commits for f in commits_files.get(c.hexsha, [])]
        versions.append(Version(tags_commits[tag_commit], files))
    return sorted(versions, key=lambda version: version._commit._commit_date)
def save_version(version, commit=False):
    """Persist *version* to disk unless a copy already exists at its path.

    When the path is new, the version is saved (and optionally committed to
    git). When a file already exists, it is compared against *version* and a
    mismatch is reported; nothing is written.

    Args:
        version: object exposing ``path``, ``version``, ``save()`` and
            supporting ``!=`` against a loaded ``Version``.
        commit: when True, ``git add`` + ``git commit`` the saved file.

    Returns:
        bool: True if a new version file was written, False otherwise.
    """
    if not os.path.exists(version.path):
        print('Add {}'.format(version))
        version.save()
        if commit:
            subprocess.check_call(['git', 'add', version.path])
            message = 'chore: Add {}'.format(version.version)
            subprocess.check_call(['git', 'commit', '-m', message])
        return True
    # A file already exists: only warn when its contents disagree.
    if version != Version.fromfile(version.path):
        print('Mismatched Data: {}'.format(version))
    return False
def save_version(version, commit=False):
    """Write *version* to its path if absent; report mismatches otherwise.

    Args:
        version: object exposing ``path``, ``version``, ``save()`` and
            comparable against ``Version.fromfile`` results.
        commit: when True, stage and commit the new file with git.

    Returns:
        bool: True when the version was newly saved, False otherwise.
    """
    already_on_disk = os.path.exists(version.path)
    if already_on_disk:
        previous = Version.fromfile(version.path)
        if version != previous:
            print('Mismatched Data: {}'.format(version))
        return False
    print('Add {}'.format(version))
    version.save()
    if commit:
        # Stage then commit in two separate git invocations.
        for args in (['git', 'add', version.path],
                     ['git', 'commit', '-m',
                      'chore: Add {}'.format(version.version)]):
            subprocess.check_call(args)
    return True
def get_archive_contents_entry(tfile):
    """Build one ELPA archive-contents sexp entry for a package tarball.

    The tarball filename is expected to look like ``<name>-<version>.tar``;
    its top-level directory must contain ``docstring.txt`` and
    ``requirements.txt``.

    Args:
        tfile: path to the ``.tar`` file.

    Returns:
        str: an entry of the form ``(<name> . [(maj min patch) nil "doc" tar])``.
    """
    _, tf = os.path.split(tfile)
    base, _ext = os.path.splitext(tf)
    splits = base.split('-')
    # Everything before the last dash is the package name; after it, the version.
    pkgname = '-'.join(splits[0:-1])
    ver = Version.parse(splits[-1])
    version = '({0} {1} {2})'.format(ver.major, ver.minor, ver.patch)
    with closing(tarfile.open(tfile)) as t:
        # Close each extracted member handle; extractfile() objects hold
        # archive resources and were previously leaked.
        with closing(t.extractfile(base + '/docstring.txt')) as f:
            DOCSTRING = f.read().strip()
        with closing(t.extractfile(base + '/requirements.txt')) as f:
            REQUIREMENTS = f.read().strip()
    # REQUIREMENTS is intentionally kept in scope for the format(**locals()).
    return '''({pkgname} . [{version} nil "{DOCSTRING}" tar])'''.format(**locals())
def get_page(self,arch,version):
    """Render the ad-hoc query page: run user GQL fragments against Node.

    The query text comes either from a StoredQuery (``stored_query`` param)
    or directly from the request. Each line is turned into a
    ``Select * from Node where <line>`` query; a leading ``&`` intersects the
    line's results with the accumulated set, a leading ``!`` subtracts them.
    Inequality (``!=``) filters are emulated client-side because the
    datastore cannot express them directly here.
    """
    error = ''
    query = ''
    keywords = ''
    stored_key = self.request.get('stored_query')
    if stored_key:
        try:
            stored = StoredQuery.get(stored_key)
            if not stored:
                error = 'Sorry, there was no such stored query'
            else:
                query = stored.query
                keywords = stored.keywords
        # NOTE(review): bare except — also hides programming errors.
        except:
            error = 'Sorry, there was no such stored query'
    else:
        query = self.request.get('query')
        query = urllib.unquote(query)
        keywords = self.request.get('keywords')
    template_values = {
        'query' : query,
        'version' : version,
    }
    query = query.strip()
    data = []
    v = Version.get_by_key_name(version)
    if arch != ARCHS.LINUX_RETAIL:
        return '<pre>Sorry, DB is disabled for RCT/SBT</pre>'
    if v is None or not v.imported:
        # Version not imported yet: queue an import for the latest version,
        # otherwise bounce the user to the latest-version query page.
        versions = get_versions(arch)
        versions.sort(key = lambda x: [int(y) for y in x.split('.')])
        if version == versions[-1]:
            error = "Sorry, this version is not imported into db yet, importing was put into queue"
            taskqueue.add(url='/import',params={'version' : version,'arch' : arch},queue_name='importer')
        else:
            self.redirect('/query/latest/?' + self.request.query_string)
    else:
        if len(query) > 0:
            for qline in query.splitlines():
                # Leading '&' / '!' select set intersection / subtraction.
                operation = ''
                if qline[0] == '&':
                    operation = '&'
                    qline = qline[1:]
                elif qline[0] == '!':
                    operation = '!'
                    qline = qline[1:]
                try:
                    # NOTE(review): user text is interpolated into GQL; GQL is
                    # read-only but parameter binding would still be safer.
                    qline = "Select * from Node where {0}".format(qline)
                    logging.info(qline)
                    q = db.GqlQuery(qline)
                except:
                    error = 'Sorry this query was malformed'
                    q = None
                if q is not None:
                    # Introspect the parsed query to pull out '!=' filters,
                    # which are applied manually after fetching.
                    pb = q._proto_query
                    inequalities = {}
                    # NOTE(review): this loop shadows the outer 'v' (Version
                    # entity) and the comprehension shadows it again.
                    for k,v in pb.filters().iteritems():
                        #keywords.append(k[0])
                        if k[1] == '!=':
                            inequalities[k[0]] = set([v[1][0]._Literal__value for v in v])
                    #logging.info(inequalities)
                    #for k in pb.orderings():
                        #keywords.append(k[0])
                    if len(pb.orderings()) > 0:
                        error = 'Sorry order by is not allowed'
                    else:
                        try:
                            result = q.fetch(1000)
                            #logging.info('results: {0}'.format(len(result)))
                            _result = []
                            for r in result:
                                ok = True
                                for prop,keys in inequalities.iteritems():
                                    l = getattr(r,prop)
                                    if isinstance(l,list):
                                        l = set(l)
                                    else:
                                        l = set([l])
                                    # Any overlap with an excluded value set
                                    # disqualifies this entity.
                                    if len(l & keys) > 0:
                                        ok = False
                                        #logging.info('filtered')
                                        #logging.info(l)
                                        break
                                # NOTE(review): this break abandons ALL
                                # remaining results after the first filtered
                                # entity; 'continue' looks like the intent.
                                if not ok:
                                    break
                                if ok:
                                    _result.append(r)
                            result = _result
                            _data = []
                            for node in result:
                                # Walk up to the entity-group root; results are
                                # keyed by root name for the set operations.
                                root = node
                                while root.parent() is not None:
                                    root = root.parent()
                                _data.append((root.key().name(),root,node))
                            if operation == '&':
                                roots = set([x[0] for x in _data])
                                _data = [x for x in data if x[0] in roots]
                                data = _data
                            elif operation == '!':
                                roots = [x[0] for x in _data]
                                _data = [x for x in data if x[0] not in roots]
                                data = _data
                            else:
                                data.extend(_data)
                        except datastore_errors.NeedIndexError, exc:
                            x = str(exc)
                            error = 'Sorry, this query is not possible without additional indices'
def test_version_not_snapshot(self):
    """A plain release version must not be flagged as a snapshot."""
    self.assertFalse(Version('2.2.0', {}).is_snapshot)
def test_version_doesnt_support_unknown_platform(self):
    """A version with no binaries supports no platform."""
    self.assertFalse(Version('2.2.0', {}).supports_platform('test'))
def test_version_supports_known_platform(self):
    """A platform present in the binaries map is reported as supported."""
    binaries = {'test': 'http://example.com/test.pkg'}
    self.assertTrue(Version('2.2.0', binaries).supports_platform('test'))
def test_version_is_snapshot(self):
    """A SNAPSHOT-suffixed version string is detected as a snapshot."""
    self.assertTrue(Version('2.2.1-SNAPSHOT-2016-04-23-a', {}).is_snapshot)
def get_page(self,arch,version,hero):
    """Render the heroes listing (hero is None) or a single hero page.

    Both paths resolve each entity's ``.tga`` icon to a versioned URL via
    the manifest, then render a template. Falls back to queueing an import
    (or redirecting to /heroes/latest/) when this version is not in the db.
    """
    v = Version.get_by_key_name(version)
    if arch != fetcher.ARCHS.LINUX_RETAIL:
        return '<pre>Sorry, DB is disabled for RCT/SBT</pre>'
    elif v is None or not v.imported:
        versions = get_versions()
        versions.sort(key = lambda x: [int(y) for y in x.split('.')])
        if version == versions[-1]:
            self.response.out.write("Sorry, this version is not imported into db yet, importing was put into queue")
            taskqueue.add(url='/import',params={'version' : version,'arch' : arch},queue_name='importer')
        else:
            self.redirect('/heroes/latest/?' + self.request.query_string)
        return None
    else:
        if hero is None:
            # Listing page: fetch every hero Node plus its projectile speed.
            manifest = fetcher.fetch(arch,version,'manifest.xml')
            manifest = Manifest(manifest)
            # NOTE(review): .format(version) is a no-op here — the string has
            # no placeholder; confirm whether a version filter was intended.
            query = "Select * from Node where tag='hero'".format(version)
            q = db.GqlQuery(query)
            result = q.fetch(1000)
            result = [_ for _ in result if _.name not in ['wl_Warlock']]
            for hero in result:
                if hasattr(hero,'attackprojectile') and hero.attackprojectile != '':
                    projectile = db.GqlQuery("Select * from Node where name='{0}'".format(hero.attackprojectile)).fetch(1)[0]
                    if hasattr(projectile,'speed'):
                        hero.projectilespeed = projectile.speed
                    else:
                        hero.projectilespeed = '""'
                else:
                    hero.projectilespeed = '""'
                #get url for icon
                # Key is "<version>|<path>"; take the directory part of the
                # path and join it with the .dds variant of the icon name.
                icon = hero.icon.replace('.tga','.dds')
                path = '/'.join(hero.key().name().split('|')[1].split('/')[:-1])
                path = '/'.join([path,icon])
                path = path.replace('game/resources0.s2z','game/textures.s2z/00000000')
                if path in manifest.files:
                    path = '/'.join([manifest.files[path]['version'],path])
                else:
                    logging.info("Failed to create url for hero icon :( :")
                    logging.info(icon)
                    logging.info(path)
                hero.iconurl = path
            template_values = {}
            template_values['data'] = result
            template_values['stringtables'] = get_stringtables_entities(arch,version)
            # 'template=csv' switches the listing to CSV output.
            template_name = self.request.get('template')
            if template_name and template_name == 'csv':
                template = templates.get_template('heroes.csv')
            else:
                template = templates.get_template('heroes.html')
            #self.response.out.write(template.render(template_values))
            #return None
            return template.render(template_values)
        else:
            # Single-hero page: look up by name, then resolve icon URLs for
            # the hero and its four inventory abilities.
            hero = db.GqlQuery("Select * from Node where tag='hero' and name = :1",hero).fetch(1)
            if len(hero) == 0:
                return 'Sorry, such hero is not found'
            hero = hero[0]
            #get url for icon
            manifest = fetcher.fetch(arch,version,'manifest.xml')
            manifest = Manifest(manifest)
            icon = hero.icon.replace('.tga','.dds')
            path = '/'.join(hero.key().name().split('|')[1].split('/')[:-1])
            path = '/'.join([path,icon])
            path = path.replace('game/resources0.s2z','game/textures.s2z/00000000')
            # NOTE(review): unlike the listing path above, no membership check
            # — a missing manifest entry raises KeyError here.
            path = '/'.join([manifest.files[path]['version'],path])
            hero.iconurl = path
            abilities = db.GqlQuery("Select * from Node where tag='ability' and name in :1",[hero.inventory0,hero.inventory1,hero.inventory2,hero.inventory3]).fetch(10)
            for a in abilities:
                icon = a.icon.replace('.tga','.dds')
                path = '/'.join(a.key().name().split('|')[1].split('/')[:-1])
                path = '/'.join([path,icon])
                path = path.replace('game/resources0.s2z','game/textures.s2z/00000000')
                path = '/'.join([manifest.files[path]['version'],path])
                a.iconurl = path
            #abilities = dict([(a.name,a) for a in abilities])
            template_values = {}
            template_values['entity'] = hero
            template_values['version'] = version
            template_values['abilities'] = abilities
            template_values['stringtables'] = get_stringtables_entities(arch,version)
            template = templates.get_template('hero.html')
            return template.render(template_values)
        return

    # deprecated
    @abc.abstractmethod
    def get_devo_old(self, delta=0):
        """Save the data object to the output."""
        return


# Register the concrete source as a virtual subclass of the ABC.
AbstractDevoSource.register(UtmostDevoSource)

# Module-level singleton and convenience aliases used by the handlers.
devo_source = UtmostDevoSource()
get_devo = devo_source.get_devo
get_devo_old = devo_source.get_devo_old

V = Version()

# Secrets are kept out of the repo in shadow.py.
from shadow import BOT_TOKEN, CREATOR_ID, BOT_ID

# Telegram Bot API endpoints, pre-joined with this bot's token.
TELEGRAM_URL = 'https://api.telegram.org/bot' + BOT_TOKEN
TELEGRAM_URL_SEND = TELEGRAM_URL + '/sendMessage'
TELEGRAM_URL_SEND_PHOTO = TELEGRAM_URL + '/sendPhoto'
TELEGRAM_URL_CHAT_ACTION = TELEGRAM_URL + '/sendChatAction'
JSON_HEADER = {'Content-Type': 'application/json;charset=utf-8'}

# Log-message templates for delivery bookkeeping.
LOG_SENT = '{} {} sent to uid {} ({})'
LOG_ENQUEUED = 'Enqueued {} to uid {} ({})'
LOG_DID_NOT_SEND = 'Did not send {} to uid {} ({}): {}'
LOG_ERROR_SENDING = 'Error sending {} to uid {} ({}):\n{}'
LOG_ERROR_DAILY = 'Error enqueueing dailies:\n'
LOG_ERROR_QUERY = 'Error querying uid {} ({}): {}'
def test_version_snapshot_is_pre_release(self):
    """Snapshot builds count as pre-releases."""
    snapshot = Version('2.2.1-SNAPSHOT-2016-04-23-a', {})
    self.assertTrue(snapshot.is_pre_release)
        DOCSTRING = f.read().strip()
        f = t.extractfile(base + '/requirements.txt')
        REQUIREMENTS = f.read().strip()
    # REQUIREMENTS stays in locals() even though the entry only embeds
    # pkgname/version/DOCSTRING.
    return '''({pkgname} . [{version} nil "{DOCSTRING}" tar])'''.format(**locals())


# build up a dictionary mapping package name -> newest Version seen among
# the tarballs in 'tfs' (filenames are expected as <name>-<version>.tar).
mostrecent = {}
for tf in tfs:
    basedir, tf = os.path.split(tf)
    base, ext = os.path.splitext(tf)
    splits = base.split('-')
    # Everything before the last dash is the name; after it, the version.
    pkgname = '-'.join(splits[0:-1])
    version = Version.parse(splits[-1])
    if pkgname in mostrecent:
        if mostrecent[pkgname] < version:
            mostrecent[pkgname] = version
    else:
        mostrecent[pkgname] = version

# Assemble the ELPA archive-contents sexp: "(1" header, one entry per
# package's newest tarball, closing paren.
ARCHIVE_CONTENTS = '(1\n'
for pkg in mostrecent:
    tfile = '{0}{1}-{2}.tar'.format(root, pkg, mostrecent[pkg])
    ARCHIVE_CONTENTS += ' ' + get_archive_contents_entry(tfile) + '\n'
ARCHIVE_CONTENTS += ')'