def POST(self):
    """Handle a build request posted as URL-encoded JSON in POST['data'].

    Expected decoded payload:
    {
        u'category': u'7',
        u'version': u'5.7',
        u'appName': u'haozu',
        u'projectPath': u'/var/www/here',
        u'mailContent': u'hello world',
        u'projectId': u'4',
        u'isDebug': True,
        u'versionForPackage': u'5.0.1',
        u'dependencyArray': [
            {u'sha1': u'90123', u'repoId': u'1', u'repoName': u'RTNetwork'},
            {u'sha1': u'90123', u'repoId': u'2', u'repoName': u'RTApiProxy'}
        ]
    }
    """
    postData = web.input()
    data = json.loads(urllib.unquote(postData['data']))
    print(data)
    # Write the initial progress marker directly.  The original used
    # os.system("echo '0%' > " + projectPath + ...), which is a shell
    # injection risk because projectPath comes from the request body.
    with open(data['projectPath'] + "/progress.log", "w") as progressLog:
        progressLog.write("0%\n")
    PackageModel.buildPackage(data)
def POST(self):
    """Fetch the dependency history for one dependency and return it as JSON."""
    posted = web.input()
    data = json.loads(urllib.unquote(posted['data']))
    print("the posted data is")
    print(data)
    # An empty SHA1 means "no starting point" for the model layer.
    if data['initSHA1'] == "":
        data['initSHA1'] = None
    params = {
        'projectId': data['projectId'],
        'dependencyId': data['dependencyId'],
        'dependencyType': data['dependencyType'],
        'initSHA1': data['initSHA1'],
        'numBeforeInitSHA1': 3,
    }
    print("params for getDependecyInfoArray()")
    print(params)
    dependencyArray = PackageModel.getDependecyInfoArray(params)
    # Model returns newest-first; callers expect oldest-first.
    dependencyArray.reverse()
    print("the fetched dependency Array")
    print(dependencyArray)
    jsonDependencyArray = json.dumps(dependencyArray)
    print("json encoded dependency array is")
    print(jsonDependencyArray)
    return jsonDependencyArray
def POST(self):
    """Return the first line of init_progress.log for the given app/version."""
    postData = web.input()
    print(postData)
    data = json.loads(urllib.unquote(postData['data']))
    print(data)
    projectPath = PackageModel.getProjectPath(data['appId'], data['version'])
    filePath = projectPath + "/init_progress.log"
    # Create the file if it does not exist, without shelling out.  The
    # original os.system("touch " + filePath) was a shell-injection risk
    # since appId/version come from the request.
    open(filePath, "a").close()
    # Read with an explicit close instead of leaking the file handle.
    with open(filePath) as progressFile:
        progress = progressFile.readline()
    return progress
def update_aur(db):
    """Refresh cached AUR packages from the AUR RSS feed.

    Compares each feed entry's publish time against the cached row's
    last_update and re-fetches unknown or stale packages from the AUR
    RPC endpoint, applying the returned info to the ORM objects in *db*.
    """
    from datetime import timedelta

    feed = feedparser.parse(AUR_FEED)
    latest_packages = []
    for item in feed['items']:
        (year, mon, mday, hour, minute, sec) = item['published_parsed'][:6]
        # Time is one hour off on the server.  Add the hour with a
        # timedelta: the original datetime(..., hour + 1, ...) raises
        # ValueError whenever hour == 23.
        published = datetime(year, mon, mday, hour, minute, sec) \
            + timedelta(hours=1)
        latest_packages.append((item['title'], published))

    conditions = [Package.name == name for (name, _) in latest_packages]
    cached_packages = db.query(Package).\
        filter(or_(*conditions)).\
        filter(Package.repo == "aur").all()

    pkgs_to_fetch = {}
    for (name, last_update) in latest_packages:
        pkg = None
        for p in cached_packages:
            if p.name == name:
                cached_packages.remove(p)
                pkg = p
                break
        if pkg is None:
            pkg = Package()
            db.add(pkg)
            pkgs_to_fetch[name] = pkg
        # FIXME relying on last_update doesn't work for all packages
        elif pkg.last_update != last_update:
            pkgs_to_fetch[name] = pkg

    if not pkgs_to_fetch:
        return

    queries = "&".join("arg[]=%s" % quote(pkg, safe="+")
                       for pkg in pkgs_to_fetch.keys())
    try:
        results = requests.get(AUR_RPC_URL + "&" + queries).json()["results"]
        # `info`, not `json`: the original loop variable shadowed the
        # stdlib json module.
        for info in results:
            pkg = pkgs_to_fetch[info["Name"]]
            pkg.apply_aur_package_info(info)
    except requests.exceptions.RequestException as e:
        # Report the whole batch; the original printed a stale `name`
        # left over from the earlier loop.
        print("failed to get information for package(s) %s: %s"
              % (", ".join(pkgs_to_fetch), e))
def POST(self):
    """Git push-hook endpoint: start a build when a develop branch changes."""
    postData = web.input()
    print("here is post, %s" % postData)
    print("user is %s" % postData["user"])
    print("repo is %s" % postData["repo"])
    print("ref is %s" % postData["ref"])
    # Example payload:
    #   user is mobile
    #   repo is mobile/ios_AnjukeHD
    #   ref is master
    branchName = postData["ref"]
    repoName = postData["repo"].split("/")[1]
    user = postData["user"]
    # Guard clauses: skip pushes from wadecong and non-develop branches.
    if user == "wadecong":
        print("user is wadecong, do nothing")
        return
    if "develop" not in branchName:
        print("this is not develop, do nothing")
        return
    configHelper = ConfigHelper().initWithBranchName(branchName)
    packageInfo = configHelper.getConfigData()
    # Category '0' means no build; anything other than '7'/'8' is
    # normalised to '7'.
    if packageInfo["category"] == '0':
        return
    if packageInfo["category"] not in ('7', '8'):
        packageInfo["category"] = '7'
    packageInfo["mailContent"] = configHelper.getMailContent(
        packageInfo["projectPath"], repoName, branchName)
    print(packageInfo)
    PackageModel.buildPackage(packageInfo)
    print("notification building package")
def GET(self):
    """Render the version-selection page.

    The page needs the dependency names, SHA1 code arrays, the project
    id and the category.
    """
    # Read the query string once instead of calling web.input() twice.
    query = web.input()
    projectId = query['projectId']
    category = query['category']
    # Bug fix: `data` was referenced without ever being initialised,
    # which raises NameError on every request.  (If a module-level
    # `data` dict was intended instead, confirm and drop this line.)
    data = {}
    data['packageInfoForBuild'] = PackageModel.getPackageInfoForBuild(
        projectId, category)
    return render.selectVersions(data=data)
def update_arch_repositories(db):
    """Refresh cached official-repository packages from the Arch package feed.

    New packages are bulk-inserted; known packages whose version changed
    are re-fetched from the package API and updated in place.
    """
    feed = feedparser.parse(PACKAGE_FEED)
    latest_packages = []
    for item in feed['items']:
        (name, version, arch) = item['title'].split()
        repo = item['category'].lower()
        latest_packages.append((name, version, arch, repo))

    conditions = []
    for (name, _, arch, repo) in latest_packages:
        condition = (Package.name == name) & \
                    (Package.repo == repo) & \
                    (Package.arch == ARCH.index(arch))
        conditions.append(condition)
    cached_packages = db.query(Package).filter(or_(*conditions)).all()

    new_rows = []
    for (name, version, arch, repo) in latest_packages:
        pkg = None
        for p in cached_packages:
            # Package.arch is stored as an ARCH index (see the query
            # above), so compare against the index — the original
            # compared against the raw feed string, which never matched.
            if p.name == name and p.arch == ARCH.index(arch) \
                    and p.repo == repo:
                cached_packages.remove(p)
                pkg = p
                break  # first match wins (mirrors update_aur)
        if pkg is None:
            url = PACKAGE_API_URL.format(repo=repo, arch=arch, pkgname=name)
            try:
                # `info`, not `json`: avoid shadowing the stdlib module.
                info = requests.get(url).json()
                pkg = Package()
                pkg.apply_arch_package_info(info)
                pkg.version = version
                new_rows.append(dict(pkg))
            except requests.exceptions.RequestException as e:
                print("failed to get information for package '%s': %s"
                      % (name, e))
                continue
        elif pkg.version != version:
            url = PACKAGE_API_URL.format(repo=repo, arch=arch, pkgname=name)
            try:
                info = requests.get(url).json()
                pkg.apply_arch_package_info(info)
                pkg.version = version
            except requests.exceptions.RequestException as e:
                print("failed to get information for package '%s': %s"
                      % (name, e))
                continue

    if new_rows:
        db.execute(Package.__table__.insert(), new_rows)
def getAllPoolVersions(self, package_name):
    """Find all available versions of the given package available in the
    pool.  Looks in the target and all the sources."""
    versions = []
    # Target distro first.
    target_pkg = Package(self.distro, self.dist, self.component,
                         package_name)
    versions.extend(target_pkg.getPoolVersions())
    # Then every component of every configured source distro.
    for source_list in self.getAllSourceLists():
        for src in source_list:
            for comp in src.distro.components():
                src_pkg = Package(src.distro, src.dist, comp, package_name)
                versions.extend(src_pkg.getPoolVersions())
    return versions
# Build a sample package description and round-trip it JSON -> YAML.
# NOTE(review): `tag`, `sources`, Action/ActionValues/Package and the
# yaml Loader/Dumper are presumably defined or imported elsewhere in
# this file — confirm before moving this block.

# Post-install action values: create a symlink and a file under /opt/bin.
symlink = { "key" : "symlink", "value" : "/opt/bin/frell"}
flhndle = { "key" : "file", "value" : "/opt/bin/frell.test"}
actionvalues = [ ActionValues(**symlink), ActionValues(**flhndle) ]
actiontype = { "action" : "create", "tag" : tag, "values" : actionvalues }
postinstall = [ Action(**actiontype) ]

# Package metadata used as keyword arguments for Package(...).
package = { 'name': 'frell',
            'description': 'Frelling Test Example',
            'lang': 'en',
            'platform': 'linux',
            'postinstall': postinstall,
            'release': '1.EL.test',
            'requires': 'test-requires',
            'sources': sources,
            'version': '0.0.1',
          }
test = Package(**package)
# Serialize to JSON, parse back via yaml (YAML is a JSON superset),
# and print as block-style YAML for inspection.
blob = yaml.load(test.asJSON(), Loader=Loader)
print yaml.dump(blob, default_flow_style=False, Dumper=Dumper)
def createPackage(packagename, bookids, price, description):
    """Persist a new Package row; always returns True on completion."""
    # bookids is flattened to a string representation for storage.
    newPackage = Package(packagename, PackageManager.list2str(bookids),
                         price, description)
    db.session.add(newPackage)
    db.session.commit()
    return True
def generator_package():
    """Randomly generate data packages for each timeslot.

    Translated from the original (Chinese) docstring:
      * the number of packages per timeslot follows a Poisson distribution;
      * each package has 1-4 source nodes (NOTE(review): the code draws
        np.random.randint(1, 3), i.e. 1-2 — confirm which is intended);
      * each package's influence range is chosen in [100, 1000]
        (NOTE(review): the code draws randint(10, 500) — confirm);
      * each package's lifetime is chosen in [10 ms, 3 min];
      * each package's length is in [10, 100];
      * each destination's deadline is in [50 ms, 200 ms], i.e. 5-20
        timeslots.

    :return: None; the per-timeslot package lists are stored to disk.
    """
    dags = load_data("data/1/dag_data1.json")
    packages = {}
    # Number of new packages created in each timeslot (Poisson, mean 10).
    packages_number = np.random.poisson(10, T)
    pkg_id = 0  # renamed from `id` to avoid shadowing the builtin
    for t in range(T):
        print("timeslot", t)
        packages[t] = []  # duplicate initialisation removed
        t_nodes = dags[t].nodes
        # Need at least two nodes to have a source and a destination.
        if len(t_nodes) <= 1:
            continue
        for pid in range(packages_number[t]):
            d_index = np.random.randint(data_num)
            d_length = data_length[d_index]
            start_time = t
            expire_time = np.random.randint(start_time, start_time + 20)
            source_node_num = np.random.randint(1, 3)
            source_nodes = random.sample(t_nodes, source_node_num)
            already_nodeIds = [node.id for node in source_nodes]
            destination_nodes = []
            destination_nodeIds = []
            for node in source_nodes:
                x, y = node.x, node.y
                inf_range = random.randint(10, 500)
                for v in t_nodes:
                    # Skip nodes already claimed (sources or earlier
                    # destinations) and the source itself.
                    if v.id in already_nodeIds or v.id == node.id:
                        continue
                    x1, y1 = v.x, v.y
                    # Inside the source node's influence circle?
                    if pow((x1 - x), 2) + pow((y1 - y), 2) \
                            <= pow(inf_range, 2):
                        deadline = np.random.randint(start_time,
                                                     start_time + 15)
                        # Clamp the deadline to the package expiry time.
                        deadline = expire_time if deadline > expire_time \
                            else deadline
                        destination_node = DestinationNode(v, deadline)
                        destination_nodes.append(destination_node)
                        destination_nodeIds.append(v.id)
                        already_nodeIds.append(v.id)
            print("destination_nodeIds", destination_nodeIds)
            # Discard packages that reach no destination at all.
            if len(destination_nodes) <= 0:
                continue
            package = Package(pkg_id, d_index, d_length, start_time,
                              expire_time, source_nodes, destination_nodes,
                              destination_nodeIds)
            pkg_id += 1
            packages[t].append(package)
    store_data(packages, "data/1/package_data1.json")