def get_user_input():
    """Prompt the user for package fields and return a populated Package.

    Prompts for id, address, city, zip, state, deadline and weight, then
    validates the deadline ('HH:MM', or 'EOD' meaning 23:00:00) before
    attaching it to the package.

    Returns:
        Package: the package built from the user's answers.
    """
    prompts = [
        "Enter a package ID: ",
        "Enter delivery address: ",
        "Enter delivery city: ",
        "Enter delivery zip code: ",
        "Enter delivery state: ",
        "Enter delivery deadline: ",
        "Enter package weight: ",
    ]
    print("Enter the following information for the package:")
    # (Removed two dead hard-coded parameter lists that were immediately
    # overwritten by the empty list before this loop ran.)
    input_parameters = [input(prompt) for prompt in prompts]

    input_address = Address(input_parameters[1], input_parameters[2],
                            input_parameters[3], input_parameters[4])
    input_package = Package(int(input_parameters[0]), input_address,
                            float(input_parameters[6]))

    str_time = input_parameters[5]
    valid_selection = False
    while not valid_selection:
        if ':' in str_time:
            # Split the *latest* input: the original re-split the stale
            # input_parameters[5], so re-entered times were never validated.
            split_time = str_time.split(':')
            hour = split_time[0]
            minute = split_time[1]
            if not hour.isnumeric() or not minute.isnumeric():
                str_time = input("Invalid time. Please, re-enter time (HH:MM): ")
            elif int(hour) < 0 or int(hour) > 23 or int(minute) < 0 or int(minute) > 59:
                # Was `hour <= 0` (rejected midnight) and `minute > 60`
                # (accepted the invalid minute value 60).
                str_time = input("Invalid time. Please, re-enter time (HH:MM): ")
            else:
                # Was time(int(hour), int(minute, 0)): the 0 was passed to
                # int() as a conversion *base*, not to time() as seconds.
                deadline = time(int(hour), int(minute), 0)
                valid_selection = True
        elif str_time == 'EOD':
            deadline = time(23, 0, 0)
            valid_selection = True
        else:
            # Was missing: any input without ':' other than 'EOD' spun the
            # loop forever without re-prompting.
            str_time = input("Invalid time. Please, re-enter time (HH:MM): ")
    input_package.deadline = deadline
    return input_package
def build_tests(self):
    """Build this package's Tests target as a CLI, together with all of
    its test dependencies and regular dependencies."""
    package = self.package
    package_root = os.getcwd()
    self.build_package(package, package_root)

    download_dependencies(package.test_dependencies)
    dependencies = []
    # Build every test dependency before linking the test executable.
    for dependency in package.test_dependencies:
        dep_path = path_for_dependency(dependency)
        os.chdir(dep_path)
        sub_package = Package.open()
        os.chdir(os.path.join('..', '..'))
        self.build_package(sub_package, dep_path)
        dependencies.append(sub_package)
    dependencies.append(package)

    # Regular dependencies were built by build_package above; just record them.
    for dependency in package.dependencies:
        dep_path = path_for_dependency(dependency)
        os.chdir(dep_path)
        sub_package = Package.open()
        os.chdir(os.path.join('..', '..'))
        dependencies.append(sub_package)

    sources = collect_sources(os.path.join(package_root, 'Tests'))
    if any(source.endswith('/main.swift') for source in sources):
        self.build_cli(package.name + 'Tests', sources, dependencies)
    else:
        raise Exception('Tests is missing a main.swift')
def read_directory(self, filename, package=None):
    """Parse every module in the specified directory."""
    directory = File(filename)
    if not directory.is_folder:
        raise ValueError("%s is NOT a directory." % filename)

    # Accumulate into the package's own contents when one was given.
    contents = package.contents if package is not None else []

    for child in directory.children:
        if child.ext == '.py':
            # Plain Python module: parse it directly.
            contents.append(self.read_file(child.path, package))
        elif child.is_package:
            # Sub-package: qualify its name with the parent's, then recurse.
            if package is None:
                sub_package = Package(filename=child.path, name=child.name)
            else:
                qualified = '%s.%s' % (package.name, child.name)
                sub_package = Package(filename=child.path, name=qualified,
                                      parent=package)
            self.read_directory(child.path, sub_package)
            contents.append(sub_package)
    return contents
def _do_newest_filtering(filelist):
    ''' Only return the newest package for each name.arch

    :param filelist: iterable of package file names
    :return: list of file names, one per (name, arch), keeping the newest
    '''
    newest = {}
    for f in filelist:
        pkg = Package(f)
        key = (pkg.name, pkg.arch)
        # Keep the stored package unless the current one is strictly newer.
        # (The original walked older/equal/newer with two `continue`s and a
        # redundant `del` before reassignment; plain assignment replaces.)
        if key not in newest or pkg.verCMP(newest[key]) > 0:
            newest[key] = pkg
    # get back the file list
    return [pkg.filename for pkg in newest.values()]
def receive(self):
    """Poll the socket for ACK packages until the connection closes.

    Every received ACK is removed from the in-flight dictionary and the
    congestion window is updated; a package with the FIN flag set ends
    the connection.
    """
    r_data = None
    while True:
        try:
            r_data, address = self.sock.recvfrom(self.widgets.pack_size.get())
        except BlockingIOError:
            # the socket buffer is empty; keep polling
            pass
        except ConnectionResetError:
            """
            If the ACK for the FIN package has not been received within
            at most 2 seconds of sending it, then we close the
            connection automatically.
            """
            if (self.timer != 0 and int(round(time.time() * 1000)) - self.timer > 2000):
                self.handle_end_of_connection()
                break
        time.sleep(0.5)
        # if an ACK package was received, it is removed from the dictionary
        # and the window is updated
        if r_data:
            r_pack = Package()
            r_pack.set_data(r_data)
            self.handler.remove_package(r_pack)
            self.update_cwnd()
            self.dec_pack_in_transit()
            # the last package was received -- the one with the FIN flag
            # set -- so the connection is finished
            if r_pack.get_flag(1):
                self.handle_end_of_connection()
                break
            r_data = None
def testParseCorruptedStreamToPackage(self):
    """A corrupted byte stream must decode into an error package."""
    # encoded is a bunch of hex chars
    # b'\xfffff23fafafa'
    package = Package(encoded=CORRUPTED_STREAM)
    package_test_function(self, package, b"", "E", "B", 0, 0)
    self.assertFalse(package.is_handshake())
    self.assertTrue(package.is_error())
def testParseBytesToPackage(self):
    """A well-formed encoded byte string must round-trip into a package."""
    # Package's payload argument is given an encoded package of bytes
    # b'DB\x04\x00\x00\x00\x00\x00\x00\x00ABCD1997'
    decoded = Package(encoded=BYTES_ENCODED)
    package_test_function(self, decoded, b"ABCD", "D", "B", 4, 0)
    self.assertFalse(decoded.is_error())
    self.assertFalse(decoded.is_handshake())
def find_a_way(self, truck: Truck):
    """
    finds the shortest route along the stops from truck in O(N^2) time
    :param truck: truck with stops to make
    :return: the list of locations and the total distance traveled by the truck
    """
    stops = list(truck.cargo.keys())
    current = self.center  # start at the Hub
    the_way = [current]
    distance = 0.0
    while stops:
        # nearest_neighbor returns (location, distance); presumably it also
        # moves the chosen stop to the front of `stops`, otherwise the
        # pop(0) below would not correspond to result[0].
        # NOTE(review): confirm nearest_neighbor reorders `stops` in place.
        result = self.nearest_neighbor(stops, current)
        distance += result[1]
        the_way.append(result[0])
        current = stops.pop(0)
        # Mark every package dropped at this stop as delivered, stamped
        # with the time computed from the distance travelled so far.
        packages = truck.unload_package(current)
        delivery_time = Package.get_delivery_time(distance, truck.speed, truck.depart_at)
        for pkg_id in packages:
            pkg = self.database.look_up(pkg_id)
            pkg.delivery_status = "Delivered"
            pkg.delivery_time = delivery_time
    distance += self.destinations.get_distance(
        the_way[-1], self.center)  # return to the Hub
    # The truck becomes available again once it is back at the Hub.
    truck.depart_at = Package.get_delivery_time(distance, truck.speed, truck.depart_at)
    return the_way, distance
def from_archive(cls, file, uploader):
    """Load a package version from a .tar.gz archive.

    If the package specified in the archive already exists, it will be
    loaded and assigned as the package version's package. If it doesn't,
    a new package will be created.

    Arguments:
      file: An open file object containing a .tar.gz archive.
      uploader: The user who uploaded this package archive.

    Returns: Both the Package object and the PackageVersion object.

    Raises:
      db.BadValueError: if the archive is malformed or missing required
        metadata.
    """
    try:
        # `with` closes the archive on every path; the original leaked the
        # TarFile handle.
        with tarfile.open(mode="r:gz", fileobj=file) as tar:
            changelog = Readme.from_archive(tar, name='CHANGELOG')
            readme = Readme.from_archive(tar)
            pubspec = Pubspec.from_archive(tar)
            name = pubspec.required('name')
            package = Package.get_by_key_name(name)
            if not package:
                assert uploader is not None
                package = Package.new(name=name, uploaders=[uploader])

            # Public libraries: everything under lib/ except lib/src/,
            # with the 'lib/' prefix stripped.
            libraries = sorted(name[4:] for name in tar.getnames()
                               if name.startswith('lib/')
                               and not name.startswith('lib/src/')
                               and name.endswith('.dart'))

            return PackageVersion.new(
                package=package, changelog=changelog, readme=readme,
                pubspec=pubspec, libraries=libraries, uploader=uploader)
    except (tarfile.TarError, KeyError) as err:
        raise db.BadValueError(
            "Error parsing package archive: %s" % err)
def test(func): import time from package import Package fd = open('testdata.txt') vorher = 0 nachher = 0 start = time.time() counter = 0 bin = Package("370x270x250") for line in fd: counter += 1 if counter > 450: break packages = [Package(pack) for pack in line.strip().split()] if not packages: continue bins, rest, bxyz = func(packages, bin=bin) if rest: print "invalid data", rest, line else: vorher += len(packages) nachher += len(bins) print bin print bins print len(bins) print bins print bxyz print rest break
def item(id, item):
    """Resolve an item id inside a repository and redirect to its view."""
    repository = open_repository(id)
    pkg = Package(repository.root)
    found = pkg.item_by_id(item)
    if not found:
        abort(404)
    return redirect(url_for("view", id=id, relpath=found.relpath))
def process(self, incoming_production_line):
    """Pack each stack of bottles on the incoming line into its own Package.

    :param incoming_production_line: mapping whose values are bottle stacks
    :return: deque of filled Package objects, in iteration order
    """
    packed = deque()
    for bottle_stack in incoming_production_line.values():
        pkg = Package()
        pkg.add_bottles(bottle_stack)
        packed.append(pkg)
    return packed
def testIncoherentHeadToPackage(self):
    """A length field that disagrees with the payload must yield an error."""
    # head says payload's length is 3, but it's 4
    # b'DB\x04\x00\x00\x00\x00\x00\x00\x00ABCD1997'
    bad_header = b'DB\x03\x00\x00\x00\x00\x00\x00\x00ABCD1997'
    package = Package(encoded=bad_header)
    package_test_function(self, package, b"", "E", "B", 0, 0)
    self.assertTrue(package.is_error())
    self.assertFalse(package.is_handshake())
def package_list(): """Print the packages list in env. Read the.config file in the BSP directory, and list the version number of the selected package. Args: none Returns: none Raises: none """ fn = '.config' env_root = Import('env_root') bsp_root = Import('bsp_root') target_pkgs_path = os.path.join(bsp_root, 'packages') pkgs_fn = os.path.join(target_pkgs_path, 'pkgs.json') if not os.path.isfile(fn): print('no system configuration file : .config.') print('you should use < menuconfig > command to config bsp first.') return #if not os.path.exists(target_pkgs_path): # try: # os.mkdir(target_pkgs_path) # except: # print 'mkdir packages directory failed' # return pkgs = kconfig.parse(fn) #if not os.path.isfile(pkgs_fn): # pkgs_file = file(pkgs_fn, 'w') # pkgs_file.write(json.dumps(pkgs, indent=1)) # pkgs_file.close() for pkg in pkgs: package = Package() pkg_path = pkg['path'] if pkg_path[0] == '/' or pkg_path[0] == '\\': pkg_path = pkg_path[1:] #pkg_path = pkg_path.replace('/', '\\') pkg_path = os.path.join(env_root, 'packages', pkg_path, 'package.json') package.parse(pkg_path) pkgs_name_in_json = package.get_name() print pkgs_name_in_json, pkg['ver'] #print "package path:", pkg['path'] if not pkgs: print("Packages list is empty.") print('You can use < menuconfig > command to select online packages.') print('Then use < pkgs --update > command to install them.') return
def unpack_data_buffer(self, package: Package):
    """Push the package's payload onto the resend buffer, highest seq first.

    Walking the payload backwards means the stack later pops bytes in
    ascending sequence order.
    """
    payload: List[int] = package.get_payload(parse=True)
    base_seq: int = package.get_header().get_package_seq(parse=True)
    for offset in reversed(range(len(payload))):
        self.resent_buffer.put(SeqData(seq=base_seq + offset, data=payload[offset]))
    log.debug(f"resent buffer: {self.resent_buffer.queue}")
def info_file(package_fn):
    """Read a package file from disk and return its metadata and file list."""
    from package import Package
    if not os.path.exists(package_fn):
        raise Error(_('File %s not found') % package_fn)
    pkg = Package(package_fn)
    pkg.read()
    return pkg.metadata, pkg.files
def getPackage(name):
    """Build the root Package for *name* with its full dependency tree.

    :param name: npm package name
    :return: the root Package, or None when the version cannot be resolved
    """
    package_version = NpmParser.getPackageVersion(name)
    if package_version is None:
        return None
    # Reuse the version we already fetched: the original queried npm a
    # second time, which was redundant and could even disagree with the
    # first answer.
    root_package = Package(name, package_version)
    root_package.set_dependencies(NpmParser.getDependenciesList(name))
    return root_package
def info_file(package_fn):
    """Read a package file from disk and return its metadata and file list.

    Raises:
        Error: if *package_fn* does not exist on disk.
    """
    from package import Package
    if not os.path.exists(package_fn):
        # No space before the call parentheses (PEP 8); the original wrote
        # `raise Error (...)`.
        raise Error(_('File %s not found') % package_fn)
    package = Package(package_fn)
    package.read()
    return package.metadata, package.files
def getDependenciesList(package):
    """Recursively resolve the dependency tree of *package*.

    :param package: npm package name to inspect
    :return: list of Package objects, each with its own dependencies set
    """
    dep_list = []
    json_object = NpmParser.getDependenciesJson(package)
    if json_object is not None:
        for dep_name, dep_version in json_object.items():
            # Use a distinct name for each dependency: the original rebound
            # the `package` parameter inside the loop, shadowing the input.
            dependency = Package(dep_name, dep_version)
            dependency.set_dependencies(NpmParser.getDependenciesList(dep_name))
            dep_list.append(dependency)
    return dep_list
def set_marshalling_data(self, data):
    """Restore this installed package from its marshalled representation.

    Raises MarshalledVersionUnknownError when the serialized format
    version is not the one this code understands.
    """
    version = data[Marshallable.VERSIONS]['core.InstalledPackage']
    if version != 1:
        raise MarshalledVersionUnknownError(
            klass=self.__class__,
            marshalled_version=version,
            current_version=1)
    attributes = data[Marshallable.ATTRIBUTES]
    self.__name = attributes['name']
    self.__version = attributes['version']
    self.__nodes = attributes['nodes']
    # Let the base class restore its own share of the state.
    Package.set_marshalling_data(self, data)
def get_newest_build(self, branch, pkgname):
    """
    Returns the newest 'Package' for a given branch and package name
    """
    builds = self.session.getLatestRPMS(branch, package=pkgname, arch='src')
    if not builds[0]:
        # Nothing has ever been built for this package on this branch.
        return None
    latest = builds[0][0]
    newest = Package()
    newest.name = latest['name']
    newest.version = latest['version']
    newest.release = latest['release']
    return newest
def testPackage(self):
    """Package attributes are set by the constructor and stay assignable."""
    pkg = Package(10, "Test", "Small Box")
    # Constructor arguments land on the expected attributes.
    self.assertEqual(pkg.weight, 10)
    self.assertEqual(pkg.contents, "Test")
    self.assertEqual(pkg.containerType, "Small Box")
    # Each attribute can be reassigned independently afterwards.
    pkg.weight = 15
    pkg.contents = "Motherboard"
    pkg.containerType = "Large Box"
    self.assertEqual(pkg.weight, 15)
    self.assertEqual(pkg.contents, "Motherboard")
    self.assertEqual(pkg.containerType, "Large Box")
def __init__(self, db_dict=None): self.packages = [] #list of package objects self.delivered = [] #list of packages that have been delivered if db_dict: print("Stuff in database...reading in") for itm in db_dict: pkg = Package(itm['tracking_num'], itm['sender']) pkg.status = itm['status'] pkg.est_delivery = itm['est_delivery'] self.need_to_print = False self.packages.append(pkg)
def install(self, name: PackageName, dependency_of: List[Package] = None,
            force: bool = False, ignore_dependencies: bool = False):
    """Install *name*, resolving and installing its dependencies.

    :param name: the package to install
    :param dependency_of: chain of packages that requested this install
        (omit / None for a direct install)
    :param force: reinstall even when already present
    :param ignore_dependencies: skip dependency resolution
    :return: whatever Package.install returns
    """
    # Fresh list per call: the original used a mutable default argument,
    # which is shared across every call that omits dependency_of.
    if dependency_of is None:
        dependency_of = []
    definitions_dir = path.join(PACKAGE_DEFINITIONS_REPOS, name.repo,
                                'package_definitions')
    package = Package(
        self._os_flavor,
        name,
        self._package_data(name),
        definitions_dir,
        self._event_stream,
    )
    # pass self.install so Package can trigger an install for each of its
    # dependencies
    return package.install(self.install, dependency_of, force,
                           ignore_dependencies)
def export(self, name: PackageName, dependency_of: List[Package] = None):
    """Export *name* and, recursively, each of its dependencies.

    :param name: the package to export
    :param dependency_of: chain of packages that requested this export
        (omit / None for a direct export)
    :return: whatever Package.export returns
    """
    # Fresh list per call: the original used a mutable default argument,
    # which is shared across every call that omits dependency_of.
    if dependency_of is None:
        dependency_of = []
    definitions_dir = path.join(PACKAGE_DEFINITIONS_REPOS, name.repo,
                                'package_definitions')
    package = Package(
        self._os_flavor,
        name,
        self._package_data(name),
        definitions_dir,
        self._event_stream,
    )
    # pass self.export so Package can trigger an export for each of its
    # dependencies
    return package.export(self.export, dependency_of)
class UDP_Client:
    """Client that receives multi-package messages from the server and
    reassembles them via a Package handler."""

    def __init__(self, port: int = 10923, buff: int = 2400):
        # port is stored but the socket below connects to a hard-coded port.
        self.PORT = port
        self.buff = buff                      # per-recv buffer size
        self.skt = self.__init_sckt()
        self.pkg_handler = Package(self.buff)  # parses/merges raw packages

    def __init_sckt(self):
        # Create the socket instance.
        # NOTE(review): despite the class name this is a TCP (SOCK_STREAM)
        # socket, and it connects to port 1243 rather than self.PORT --
        # confirm both are intentional.
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.connect((socket.gethostname(), 1243))
        return s

    def __close(self, x):
        # Kill the socket when the user presses 'q' followed by enter.
        enter = input("Para sair a qualquer momento, aperte 'q'\n")
        if 'q' in enter:
            self.skt.close()
            os._exit(1)

    def hear(self):
        """
        Listen to and interpret what the server is sending to the client.
        """
        # Background thread watches for the quit keypress.
        x = threading.Thread(target=self.__close, args=(1, ))
        x.start()
        while True:
            full_msg = []      # packages collected for the current message
            total_msgs = -1    # expected package count, from each header
            new_msg = True
            while True:
                msg = self.skt.recv(self.buff)
                if len(msg) <= 0:
                    continue
                print()
                # Every package carries the total count, its own sequence
                # number and the sender id.
                total_msgs = self.pkg_handler.parse_total(msg)
                curr_msg = self.pkg_handler.parse_sequence(msg)
                owner = self.pkg_handler.parse_owner(msg)
                print(
                    f"\33[32m Novo pacote recebido: {curr_msg} do total de {total_msgs} que veio do {owner}"
                )
                print(f"\33[34m Conteudo do pkg: {msg}")
                print(f"\33[34m Já foi recebido {len(full_msg)} pacotes")
                full_msg.append(msg)
                # Once every expected package arrived, merge and reset.
                if len(full_msg) == int(total_msgs):
                    print("\33[31m Mensagem totalmente recebida:")
                    information = self.pkg_handler.decode_and_merge(full_msg)
                    print(information, "\n\n\33[37m")
                    new_msg = True
                    full_msg = []
                    total_msgs = -1
def wait_for_syn_pack(self):
    """Block until the server's SYN package arrives, then start receiving.

    Opens the file to transfer and spawns the background receive thread.
    """
    data = None
    # Poll the socket until something arrives.
    while not data:
        data, address = self.sock.recvfrom(self.widgets.pack_size.get())
        time.sleep(0.01)
    server_syn_pack = Package()
    server_syn_pack.set_data(data)
    if not server_syn_pack.get_flag(0):
        # Not a SYN: nothing to do.
        return
    self.message_box.insert_message("SYN package has been received. Successful connection.")
    self.rf = open(file_path, "rb")
    self.handler.remove_package(server_syn_pack)
    receiver = threading.Thread(target=self.receive)
    receiver.start()
def package_list():
    """Print the packages list in env.

    Read the .config file in the BSP directory, and list the version number
    of the selected package.
    """
    fn = '.config'
    env_root = Import('env_root')
    pkgs_root = Import('pkgs_root')
    if not os.path.isfile(fn):
        # Windows consoles need UTF-8 (code page 65001) to show the
        # localized hint below; the code page is restored afterwards.
        if platform.system() == "Windows":
            os.system('chcp 65001 > nul')
        print("\n\033[1;31;40m当前路径下没有发现 .config 文件,请确保当前目录为 BSP 根目录。\033[0m")
        print(
            "\033[1;31;40m如果确定当前目录为 BSP 根目录,请先使用 <menuconfig> 命令来生成 .config 文件。\033[0m\n"
        )
        print('\033[1;31;40mNo system configuration file : .config.\033[0m')
        print(
            '\033[1;31;40mYou should use < menuconfig > command to config bsp first.\033[0m'
        )
        if platform.system() == "Windows":
            os.system('chcp 437 > nul')
        return
    pkgs = kconfig.parse(fn)
    for pkg in pkgs:
        package = Package()
        pkg_path = pkg['path']
        # Strip a leading separator so os.path.join keeps pkgs_root.
        if pkg_path[0] == '/' or pkg_path[0] == '\\':
            pkg_path = pkg_path[1:]
        pkg_path = os.path.join(pkgs_root, pkg_path, 'package.json')
        package.parse(pkg_path)
        pkgs_name_in_json = package.get_name()
        # NOTE(review): on Python 3 the .encode() calls make print() show
        # b'...' byte literals -- confirm whether this file targets
        # Python 2 or the encode calls should be dropped.
        print("package name : %s, ver : %s " % (pkgs_name_in_json.encode("utf-8"), pkg['ver'].encode("utf-8")))
    if not pkgs:
        print("Packages list is empty.")
        print('You can use < menuconfig > command to select online packages.')
        print('Then use < pkgs --update > command to install them.')
    return
def create_pack(self, data):
    """Build a numbered data package and advance the package counter.

    :param data: payload for the new package
    :return: the freshly built Package
    """
    package = Package()
    package.add_data(data)
    package.set_dim(len(data))
    package.set_num(self.pack_cnt)
    # Consume this sequence number so the next package gets a fresh one.
    self.inc_cnt()
    return package
def send(self): package = Package() package.type = self._type_package package.user = self._user package.command = self._command PACKAGE = package.getPackage() s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.connect((TCP_IP, TCP_PORT)) s.send(PACKAGE) data = s.recv(BUFFER_SIZE) s.close() package = Package() package.setPackage(data) if package.type == 0: pass if package.type == 1: message = str(package.data) #message = message.split('$$NEWLINE$$ $$NEWLINE$$') #message = '\n'.join(message) #message = message.replace('$$NEWLINE$$','',1) print message if package.type == 2: print "Status: {}".format(package.data) if package.type == 3: print "Data sending"
def get_ack_for_syn_pack(self, dim):
    """Build the ACK package answering a SYN.

    :param dim: value to acknowledge (forwarded to set_ack)
    :return: the numbered ACK Package
    """
    ack = Package()
    ack.set_flag(0)
    ack.set_num(self.pack_cnt)
    # Consume this sequence number before acknowledging.
    self.inc_cnt()
    ack.set_ack(dim)
    return ack
def _build_package(package):
    """ Build a package file from the local files structure """
    logger.info("Building package from local content")
    # New package object rooted at the configured working directory.
    pkg = Package(package, config)
    # Extract the actions from each js src file into its xml counterpart.
    for module in list_modules(pkg.wd):
        extract_actions_from_module_file(
            module[0], "".join(module[1][1:]), pkg.expand_target
        )
    pkg.rebuild()
def view(id, relpath=None):
    """Render a document from the package, defaulting to the first spine item."""
    repository = open_repository(id)
    pkg = Package(repository.root)
    if "cmd" in request.args:
        return repo(id, relpath or "/")
    if relpath is None:
        # No path given: jump to the first document in the reading order.
        first = pkg.spine[0].relpath
        return redirect(url_for("view", id=id, relpath=first))
    doc = pkg.item_by_relpath(relpath)
    if not doc:
        abort(404)
    # Content documents get the full page chrome; everything else is raw.
    if doc.is_content:
        return render_page(id, pkg, doc)
    return doc.fh().read()
def load_packages():
    """Load every package from packagelist.txt into the global package_list.

    Each line holds '|'-separated fields. The trailing newline is
    deliberately left on the last field to preserve the original parsing
    behaviour.
    """
    # `with` + line iteration replaces the manual readline/close loop and
    # closes the file on every exit path (the original leaked the handle
    # if an exception occurred mid-read).
    with open("packagelist.txt") as fo:
        for line in fo:
            package_info = line.split("|")
            the_package = Package(package_info[0], package_info[1],
                                  package_info[2], package_info[3],
                                  package_info[4], package_info[5],
                                  package_info[6], "At HUB")
            the_package.location_id = get_loc_id(the_package)
            package_list[the_package.ID] = the_package
    for x in package_list:
        print(str(package_list.get(x)))
def sendFile(self, filename):
    """Send *filename* to the server over a fresh TCP connection.

    NOTE(review): only the first 2048 bytes are read and sent -- there is
    no loop over the file, so larger files would be truncated; confirm
    whether a chunked send loop was intended.  The final
    command.variables assignment (append mode 'a') is set after the
    single send and is not transmitted here.
    """
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect((TCP_IP, TCP_PORT))
    package = Package(3)
    # 'wb' tells the receiver to (re)create the file.
    package.command.variables = ' '.join([filename, 'wb'])
    print "Starting sending file"
    with open(filename,'rb') as file:
        print 'Sending..'
        data = file.read(2048)
        package.command.name = 'send_file'
        package.data = data.encode()
        s.send(package.getPackage())
        package.command.variables = '{} {}'.format(filename, 'a')
def __init__(self, name, version, nodes):
    """Create an installed package and attach each node back to it."""
    Package.__init__(self)
    self.__name = name
    self.__version = version
    self.__nodes = nodes
    # Every node keeps a back-reference to its owning package.
    for node in self.__nodes:
        node.set_package(self)
    # <paranoia>
    assert type(name) is types.StringType
    assert type(version) is types.StringType
    # </paranoia>
def install_pkg(env_root, bsp_root, pkg): """Install the required packages.""" # default true ret = True local_pkgs_path = os.path.join(env_root, 'local_pkgs') bsp_pkgs_path = os.path.join(bsp_root, 'packages') # get the .config file from env env_kconfig_path = os.path.join(env_root, 'tools\scripts\cmds') env_config_file = os.path.join(env_kconfig_path, '.config') package = Package() pkg_path = pkg['path'] if pkg_path[0] == '/' or pkg_path[0] == '\\': pkg_path = pkg_path[1:] pkg_path = os.path.join(env_root, 'packages', pkg_path, 'package.json') package.parse(pkg_path) url_from_json = package.get_url(pkg['ver']) package_url = package.get_url(pkg['ver']) pkgs_name_in_json = package.get_name() if package_url[-4:] == '.git': ver_sha = package.get_versha(pkg['ver']) # print("==================================================>") # print("packages name :"%pkgs_name_in_json.encode("utf-8")) # print("ver :"%pkg['ver']) # print("url :"%package_url.encode("utf-8")) # print("url_from_json : "%url_from_json.encode("utf-8")) # print("==================================================>") get_package_url = None get_ver_sha = None upstream_change_flag = False try: if (not os.path.isfile(env_config_file)) or ( os.path.isfile(env_config_file) and find_macro_in_config( env_config_file, 'SYS_PKGS_DOWNLOAD_ACCELERATE')): get_package_url, get_ver_sha = get_url_from_mirror_server( pkgs_name_in_json, pkg['ver']) # determine whether the package package url is valid if get_package_url != None and determine_url_valid( get_package_url): package_url = get_package_url if get_ver_sha != None: ver_sha = get_ver_sha upstream_change_flag = True except Exception, e: # print('e.message:%s\t' % e.message) print( "Failed to connect to the mirror server, package will be downloaded from non-mirror server.\n" )
def upgrade(self, config, force = False, packages = None, exclude = None, dryrun = False):
    """Upgrade the given packages (default: all of this server's packages).

    :param config: configuration object providing package data and versions
    :param force: force the upgrade even when already at the target version
    :param packages: comma-separated string or list of package names;
        falsy or non-list/str means self.packages
    :param exclude: comma-separated string or list of names to skip
    :param dryrun: compute but do not persist new versions
    :return: the (possibly updated) config, or None when a package is invalid
    """
    if packages:
        if isinstance(packages, str):
            packages = packages.split(",")
        elif not isinstance(packages, list):
            packages = self.packages
    else:
        packages = self.packages
    # Work on a copy: removing excluded names must not mutate
    # self.packages (the original removed entries from the shared list
    # in place).
    packages = list(packages)
    if exclude:
        if isinstance(exclude, str):
            exclude = exclude.split(",")
        elif not isinstance(exclude, list):
            exclude = list()
        for e in exclude:
            try:
                packages.remove(e)
            except ValueError:
                # Narrowed from a bare `except: pass`: only "not in list"
                # is expected and ignorable here.
                pass
    for package in packages:
        if not config.package_exists(package):
            log.error("%s: package upgrade failed: invalid package", package)
            return
        cpkg = config.package_get(package)
        pkg = Package(package, cpkg, self.path, dryrun)
        version = config.versions.get(self.server, package)
        version = pkg.upgrade(version, force)
        config.versions.set(self.server, package, version)
    if not dryrun:
        config.versions.save()
    return config
def addPackage(self, name, dependencies=None):
    """Register a new package, wiring up its dependency links.

    :param name: package name; must not already exist
    :param dependencies: optional list of existing package names
    :return: True on success; False if the name exists, a dependency is
        missing, or the Package rejects the dependency list
    """
    # Fresh list per call: the original used a mutable default argument.
    if dependencies is None:
        dependencies = []
    # check if package already there
    if name in self.packages:
        return False
    # make sure dependencies already exist
    for dependency in dependencies:
        if dependency not in self.packages:
            return False
    # Build the package first so a rejected dependency list leaves the
    # graph untouched (the original wrote used_by edges before this
    # check could fail).
    newPackage = Package(name)
    if not newPackage.addDependencies(dependencies):
        return False
    # Record the reverse edge: each dependency is used by *name*.
    # (The original appended `dependency` to its own used_by list.)
    for dependency in dependencies:
        self.packages[dependency].used_by.append(name)
    self.packages[name] = newPackage
    return True
def build_package(self, package, package_root):
    """Build *package* and, recursively, every one of its dependencies.

    Produces a CLI when Sources contains a main.swift, a library otherwise.
    """
    download_dependencies(package.dependencies)
    dependencies = []
    for dependency in package.dependencies:
        dep_path = path_for_dependency(dependency)
        os.chdir(dep_path)
        sub_package = Package.open()
        os.chdir(os.path.join('..', '..'))
        # Depth-first: a dependency must be built before its dependents.
        self.build_package(sub_package, dep_path)
        dependencies.append(sub_package)
    sources = collect_sources(os.path.join(package_root, 'Sources'))
    if any(source.endswith('/main.swift') for source in sources):
        self.build_cli(package.name, sources, dependencies)
    else:
        self.build_library(package.name, sources, dependencies)
def Run(self):
    """Run the application ...

    We first need to read in all of our package.* files.  Then we simply
    write them out with the correct method.  We pass a flag to check for
    installed packages.
    """
    files_list = [
        "/etc/portage/package.use",
        "/etc/portage/package.unmask",
        "/etc/portage/package.mask",
        "/etc/portage/package.keywords",
        "/etc/portage/package.license",
    ]
    for name in files_list:
        # Skip package files that do not exist on this system.
        if not os.access(name, os.F_OK):
            continue
        pkg_file = Package(name, self._destructive, self._sort, self._debug)
        pkg_file.open()
        if self._pretend:
            # Dry run: show what would be written instead of writing it.
            output.verbose("%s:", name)
            output.verbose(pkg_file.__unicode__())
        else:
            pkg_file.write(self._reorganize)
        pkg_file.close()
########################################## # Argv Parasing # ########################################## for v in sys.argv: if v == "-f" or v == "-force": forceUpdate = True if v == "-d" or v == "-debug": debug = True ########################################## # Main Loop # ########################################## with open(CONFIG_FILE, "r") as f: for name in f: pack = Package() # create new Package Object pack.name = name.strip() respString = getHttpResponseString(pack.name) pack.updateTime = parseDateString(getUpdateTime(respString)) pack.gitPath = AUR_CLONE_URL + pack.name + ".git" pack.localPath = CLONE_PATH + pack.name + "/PKGBUILD" if os.path.isfile(pack.localPath): pack.installTime = os.path.getmtime(pack.localPath) timeDif = pack.updateTime - pack.installTime else: clonePackage(pack) exit() if debug == True:
def install_pkg_files(package_URIs):
    """install a number of pisi package files

    Reads each package's metadata, installs any extra repository packages
    needed to satisfy unsatisfied runtime dependencies, then installs the
    given files in topological dependency order.

    :param package_URIs: iterable of package file names/URIs
    :return: True on success, False when the argument list is rejected
    """
    from package import Package

    ctx.ui.debug('A = %s' % str(package_URIs))

    # Only real package files are supported here.
    for x in package_URIs:
        if not x.endswith(ctx.const.package_prefix):
            ctx.ui.error('Mixing file names and package names not supported YET.\n')
            return False

    # read the package information into memory first
    # regardless of which distribution they come from
    d_t = {}   # package name -> package metadata
    dfn = {}   # package name -> package file name (URI)
    for x in package_URIs:
        package = Package(x)
        package.read()
        name = str(package.metadata.package.name)
        d_t[name] = package.metadata.package
        dfn[name] = x

    def satisfiesDep(dep):
        # A dependency is satisfied either by an installed package or by
        # one of the packages being installed right now.
        return dependency.installed_satisfies_dep(dep) \
               or dependency.dict_satisfies_dep(d_t, dep)

    # for this case, we have to determine the dependencies
    # that aren't already satisfied and try to install them
    # from the repository
    dep_unsatis = []
    for name in d_t.keys():
        pkg = d_t[name]
        deps = pkg.runtimeDeps
        for dep in deps:
            if not satisfiesDep(dep):
                dep_unsatis.append(dep)

    # now determine if these unsatisfied dependencies could
    # be satisfied by installing packages from the repo
    # if so, then invoke install_pkg_names
    extra_packages = [x.package for x in dep_unsatis]
    if (extra_packages and install_pkg_names(extra_packages)) or \
       (not extra_packages):
        class PackageDB:
            # Minimal in-memory package database over the packages at hand.
            def __init__(self):
                self.d = d_t

            def get_package(self, key):
                return d_t[str(key)]

        packagedb = PackageDB()

        A = d_t.keys()
        if len(A) == 0:
            ctx.ui.info('No packages to install.')
            return True

        # try to construct a pisi graph of packages to
        # install / reinstall
        G_f = pgraph.PGraph(packagedb)  # construct G_f

        # find the "install closure" graph of G_f by package
        # set A using packagedb
        print A
        for x in A:
            G_f.add_package(x)
        B = A
        while len(B) > 0:
            Bp = set()
            for x in B:
                pkg = packagedb.get_package(x)
                print pkg
                for dep in pkg.runtimeDeps:
                    print 'checking ', dep
                    # Only dependencies satisfied by this file set become
                    # graph edges; the rest were installed above.
                    if dependency.dict_satisfies_dep(d_t, dep):
                        if not dep.package in G_f.vertices():
                            Bp.add(str(dep.package))
                        G_f.add_dep(x, dep)
            B = Bp
        G_f.write_graphviz(sys.stdout)
        order = G_f.topological_sort()
        order.reverse()
        print order
        # Install in reverse topological order: dependencies first.
        for x in order:
            operations.install_single_file(dfn[x])
    else:
        raise Error('External dependencies not satisfied')

    return True  # everything went OK.
def main():
    """Drive the COPR rebuild: depsolve modules.xml, then (in dependency
    order) build every listed package that is missing or previously
    failed in the COPR."""
    parser = argparse.ArgumentParser(description='Build a list of packages')
    parser.add_argument('--branch-source', default="f21", help='The branch to use as a source')
    parser.add_argument('--copr-id', default="el7-gnome-3-14", help='The COPR to use')
    parser.add_argument('--packages', default="./data/el7-gnome-3-14.txt", help='the list if packages to build')
    args = parser.parse_args()
    copr = CoprHelper(args.copr_id)
    koji = KojiHelper()
    data = ModulesXml('modules.xml')

    # add the copr id (e.g. el7) to any items in modules.xml file
    f = open(args.packages, 'r')
    for l in f.readlines():
        # Skip comments and blank lines.
        if l.startswith('#'):
            continue
        if l.startswith('\n'):
            continue
        linedata = l.strip().split(',')
        pkgname = linedata[0]
        item = data._get_item_by_pkgname(pkgname)
        if not item:
            print("%s not found" % pkgname)
            continue
        item.releases.append(copr.release)
        # An optional second column overrides where the SRPM comes from.
        item.custom_package_url = None
        if len(linedata) > 1:
            item.custom_package_url = linedata[1]
    f.close()

    # disable any modules without the copr-specific release
    for item in data.items:
        if copr.release not in item.releases:
            item.disabled = True
            continue

    # depsolve
    print_debug("Depsolving moduleset...")
    if not data.depsolve():
        print_fail("Failed to depsolve")
        return

    # process all packages
    current_depsolve_level = 0
    for item in data.items:
        if item.disabled:
            continue;
        # wait for builds before moving to the next depsolve level
        if current_depsolve_level != item.depsolve_level:
            rc = copr.wait_for_builds()
            if not rc:
                print_fail("A build failed, so aborting")
                break
            current_depsolve_level = item.depsolve_level
            print_debug("Now running depsolve level %i" % current_depsolve_level)
        # find the koji package
        pkg = None
        if not item.custom_package_url:
            pkg = koji.get_newest_build(args.branch_source, item.pkgname)
            if not pkg:
                print_fail("package %s does not exists in koji" % item.pkgname)
                continue
            pkg2 = koji.get_newest_build(args.branch_source + '-updates-candidate', item.pkgname)
            if not pkg2:
                print_fail("package %s does not exists in koji" % item.pkgname)
                continue
            # use the newest package of the stable branch vs. its
            # updates-candidate counterpart
            if pkg.get_nvr() != pkg2.get_nvr():
                if rpm.labelCompare(pkg.get_evr(), pkg2.get_evr()) < 0:
                    pkg = pkg2;
        else:
            # Custom URL: synthesize the Package from the SRPM file name.
            pkg = Package()
            nvr = os.path.basename(item.custom_package_url).rsplit('-', 2)
            pkg.name = nvr[0]
            pkg.version = nvr[1]
            pkg.release = nvr[2].replace('.src.rpm', '')
            pkg.url = item.custom_package_url
        print_debug("Latest version of %s: %s" % (item.pkgname, pkg.get_nvr()))
        # find if the package has been built in the copr
        try:
            status = copr.get_pkg_status(pkg)
        except CoprException, e:
            print_fail(str(e))
            continue
        if status == CoprBuildStatus.ALREADY_BUILT:
            print_debug("Already built")
            continue
        elif status == CoprBuildStatus.FAILED_TO_BUILD:
            print_debug("Failed, so retrying build")
        elif status == CoprBuildStatus.NOT_FOUND:
            print_debug("Not found, so building")
        elif status == CoprBuildStatus.IN_PROGRESS:
            print_debug("Already in progress")
            continue
        else:
            print_fail("copr status unknown: %s" % status)
            continue
        # submit build and wait for it to complete
        if not copr.build(pkg):
            print_fail("Failed to submit build")
            break
# Entry point: build the project's Package object and run its setup routine.
from package import Package

package = Package()
package.setup()
def install_pkg_files(package_URIs):
    """install a number of pisi package files

    Every URI must end with the pisi package suffix; mixing file names
    and repository package names is rejected.  Unless dependency checking
    is disabled, the packages' metadata is read up front, missing runtime
    dependencies are installed from the repository first (after user
    confirmation), and the given files are then installed in reverse
    topological order of their inter-dependencies.
    """
    from package import Package

    ctx.ui.debug('A = %s' % str(package_URIs))

    # refuse anything that is not a package file
    for x in package_URIs:
        if not x.endswith(ctx.const.package_suffix):
            raise Error(_('Mixing file names and package names not supported yet.'))

    if ctx.config.get_option('ignore_dependency'):
        # simple code path then
        for x in package_URIs:
            atomicoperations.install_single_file(x)
        return # short circuit

    # read the package information into memory first
    # regardless of which distribution they come from
    # d_t: package name -> metadata; dfn: package name -> file URI
    d_t = {}
    dfn = {}
    for x in package_URIs:
        package = Package(x)
        package.read()
        name = str(package.metadata.package.name)
        d_t[name] = package.metadata.package
        dfn[name] = x

    def satisfiesDep(dep):
        # is dependency satisfied among available packages
        # or packages to be installed?
        return dependency.installed_satisfies_dep(dep) \
               or dependency.dict_satisfies_dep(d_t, dep)

    # for this case, we have to determine the dependencies
    # that aren't already satisfied and try to install them
    # from the repository
    dep_unsatis = []
    for name in d_t.keys():
        pkg = d_t[name]
        deps = pkg.runtimeDependencies()
        for dep in deps:
            if not satisfiesDep(dep):
                dep_unsatis.append(dep)

    # now determine if these unsatisfied dependencies could
    # be satisfied by installing packages from the repo
    # if so, then invoke install_pkg_names
    extra_packages = [x.package for x in dep_unsatis]
    if extra_packages:
        ctx.ui.info(_("""The following packages will be installed
in the respective order to satisfy extra dependencies:
""") + util.strlist(extra_packages))
        if not ctx.ui.confirm(_('Do you want to continue?')):
            raise Error(_('External dependencies not satisfied'))
        install_pkg_names(extra_packages)

    # minimal package database façade over d_t so the graph code can
    # look packages up by name
    class PackageDB:
        def get_package(self, key, repo = None):
            return d_t[str(key)]

    packagedb = PackageDB()

    A = d_t.keys()
    if len(A)==0:
        ctx.ui.info(_('No packages to install.'))
        return

    # try to construct a pisi graph of packages to
    # install / reinstall
    G_f = pgraph.PGraph(packagedb)               # construct G_f

    # find the "install closure" graph of G_f by package
    # set A using packagedb
    for x in A:
        G_f.add_package(x)
    B = A
    # breadth-first expansion: keep adding edges for dependencies that
    # are satisfied by the packages being installed, until no new
    # vertices appear
    while len(B) > 0:
        Bp = set()
        for x in B:
            pkg = packagedb.get_package(x)
            for dep in pkg.runtimeDependencies():
                if dependency.dict_satisfies_dep(d_t, dep):
                    if not dep.package in G_f.vertices():
                        Bp.add(str(dep.package))
                    G_f.add_dep(x, dep)
        B = Bp
    if ctx.config.get_option('debug'):
        G_f.write_graphviz(sys.stdout)
    order = G_f.topological_sort()
    if not ctx.get_option('ignore_file_conflicts'):
        check_conflicts(order, packagedb)
    # install dependencies before dependents
    order.reverse()
    ctx.ui.info(_('Installation order: ') + util.strlist(order) )

    if ctx.get_option('dry_run'):
        return

    ctx.ui.notify(ui.packagestogo, order = order)

    for x in order:
        atomicoperations.install_single_file(dfn[x])

    # if pisi itself was among the installed files, finish its upgrade
    pisi_installed = ctx.installdb.is_installed('pisi')

    if 'pisi' in order and pisi_installed:
        upgrade_pisi()
def main():
    """Automatically update and build Fedora packages for a GNOME release.

    For every enabled item in modules.xml: check out the dist-git package,
    read the current version from its spec file, compare against the newest
    matching upstream tarball on ftp.gnome.org, and if an update (or a
    soname bump / forced build) is needed: download the tarball, bump the
    spec, commit, and either build in COPR or via fedpkg/koji.

    NOTE(review): Python 2 code (``urllib.urlretrieve``,
    ``except Exception, e``).
    """
    # use the main mirror
    gnome_ftp = 'http://ftp.gnome.org/pub/GNOME/sources'

    # read defaults from command line arguments
    parser = argparse.ArgumentParser(description='Automatically build Fedora packages for a GNOME release')
    parser.add_argument('--fedora-branch', default="rawhide", help='The fedora release to target (default: rawhide)')
    parser.add_argument('--simulate', action='store_true', help='Do not commit any changes')
    parser.add_argument('--check-installed', action='store_true', help='Check installed version against built version')
    parser.add_argument('--force-build', action='store_true', help='Always build even when not newer')
    parser.add_argument('--relax-version-checks', action='store_true', help='Relax checks on the version numbering')
    parser.add_argument('--cache', default="cache", help='The cache of checked out packages')
    parser.add_argument('--buildone', default=None, help='Only build one specific package')
    parser.add_argument('--buildroot', default=None, help='Use a custom buildroot, e.g. f18-gnome')
    parser.add_argument('--bump-soname', default=None, help='Build any package that deps on this')
    parser.add_argument('--copr-id', default=None, help='The COPR to optionally use')
    args = parser.parse_args()
    if args.copr_id:
        copr = CoprHelper(args.copr_id)

    # create the cache directory if it's not already existing
    if not os.path.isdir(args.cache):
        os.mkdir(args.cache)

    # use rpm to check the installed version
    installed_pkgs = {}
    if args.check_installed:
        print_info("Loading rpmdb")
        ts = rpm.TransactionSet()
        mi = ts.dbMatch()
        for h in mi:
            installed_pkgs[h['name']] = h['version']
        print_debug("Loaded rpmdb with %i items" % len(installed_pkgs))

    # parse the configuration file
    modules = []
    data = ModulesXml('modules.xml')
    # depsolve the whole set unless only one package was requested
    if not args.buildone:
        print_debug("Depsolving moduleset...")
        if not data.depsolve():
            print_fail("Failed to depsolve")
            return
    for item in data.items:

        # ignore just this one module
        if item.disabled:
            continue

        # build just one module
        if args.buildone:
            if args.buildone != item.name:
                continue

        # just things that have this as a dep
        if args.bump_soname:
            if args.bump_soname not in item.deps:
                continue

        # things we can't autobuild as we don't have upstream data files
        if not item.ftpadmin:
            continue

        # things that are obsolete in later versions
        # NOTE(review): args.copr_id[10:] assumes a fixed-length COPR id
        # prefix -- confirm against the CoprHelper naming convention
        if args.copr_id:
            if not args.copr_id[10:] in item.branches:
                continue

        # get started
        print_info("Loading %s" % item.name)
        if item.pkgname != item.name:
            print_debug("Package name: %s" % item.pkgname)
        print_debug("Version glob: %s" % item.release_glob[args.fedora_branch])

        # ensure package is checked out
        if not item.setup_pkgdir(args.cache, args.fedora_branch):
            continue

        # get the current version from the spec file
        if not item.parse_spec():
            continue

        print_debug("Current version is %s" % item.version)

        # check for newer version on GNOME.org
        # retry the download up to 19 times before giving up on this item
        success = False
        for i in range (1, 20):
            try:
                urllib.urlretrieve ("%s/%s/cache.json" % (gnome_ftp, item.name), "%s/%s/cache.json" % (args.cache, item.pkgname))
                success = True
                break
            except IOError as e:
                print_fail("Failed to get JSON on try %i: %s" % (i, e))
        if not success:
            continue
        new_version = None
        gnome_branch = item.release_glob[args.fedora_branch]
        local_json_file = "%s/%s/cache.json" % (args.cache, item.pkgname)
        with open(local_json_file, 'r') as f:

            # the format of the json file is as follows:
            # j[0] = some kind of version number?
            # j[1] = the files keyed for each release, e.g.
            # { 'pkgname' : {'2.91.1' : {u'tar.gz': u'2.91/gpm-2.91.1.tar.gz'} } }
            # j[2] = array of remote versions, e.g.
            # { 'pkgname' : { '3.3.92', '3.4.0' }
            # j[3] = the LATEST-IS files
            try:
                j = json.loads(f.read())
            except Exception, e:
                print_fail("Failed to read JSON at %s: %s" % (local_json_file, str(e)))
                continue

            # find the newest version
            # only versions matching one of the comma-separated globs in
            # gnome_branch count, unless --relax-version-checks was given
            newest_remote_version = '0'
            for remote_ver in j[2][item.name]:
                version_valid = False
                for b in gnome_branch.split(','):
                    if fnmatch.fnmatch(remote_ver, b):
                        version_valid = True
                        break
                if not args.relax_version_checks and not version_valid:
                    continue
                rc = rpm.labelCompare((None, remote_ver, None), (None, newest_remote_version, None))
                if rc > 0:
                    newest_remote_version = remote_ver
        if newest_remote_version == '0':
            print_fail("No remote versions matching the gnome branch %s" % gnome_branch)
            print_fail("Check modules.xml is looking at the correct branch")
            continue

        print_debug("Newest remote version is: %s" % newest_remote_version)

        # is this newer than the rpm spec file version
        rc = rpm.labelCompare((None, newest_remote_version, None), (None, item.version, None))
        new_version = None
        if rc > 0:
            new_version = newest_remote_version

        # check the installed version
        if args.check_installed:
            if item.pkgname in installed_pkgs:
                installed_ver = installed_pkgs[item.pkgname]
                if installed_ver == newest_remote_version:
                    print_debug("installed version is up to date")
                else:
                    print_debug("installed version is", installed_ver)
                    rc = rpm.labelCompare((None, installed_ver, None), (None, newest_remote_version, None))
                    if rc > 0:
                        print_fail("installed version is newer than gnome branch version")
                        print_fail("check modules.xml is looking at the correct branch")

        # nothing to do
        if new_version == None and not args.bump_soname and not args.force_build:
            print_debug("No updates available")
            continue

        # never update a major version number
        if new_version:
            if args.relax_version_checks:
                print_debug("Updating major version number, but ignoring")
            elif new_version.split('.')[0] != item.version.split('.')[0]:
                print_fail("Cannot update major version numbers")
                continue

        # we need to update the package
        if new_version:
            print_debug("Need to update from %s to %s" %(item.version, new_version))

        # download the tarball if it doesn't exist
        if new_version:
            tarball = j[1][item.name][new_version]['tar.xz']
            dest_tarball = tarball.split('/')[1]
            if os.path.exists(item.pkgname + "/" + dest_tarball):
                print_debug("Source %s already exists" % dest_tarball)
            else:
                tarball_url = gnome_ftp + "/" + item.name + "/" + tarball
                print_debug("Download %s" % tarball_url)
                if not args.simulate:
                    try:
                        urllib.urlretrieve (tarball_url, args.cache + "/" + item.pkgname + "/" + dest_tarball)
                    except IOError as e:
                        print_fail("Failed to get tarball: %s" % e)
                        continue

                    # add the new source
                    item.new_tarball(dest_tarball)

        # prep the spec file for rpmdev-bumpspec
        # rewrite Version/Release and the major.minor component of the
        # Source URL in a temp file, then swap it in
        if new_version:
            with open(item.spec_filename, 'r') as f:
                with open(item.spec_filename+".tmp", "w") as tmp_spec:
                    for line in f:
                        if line.startswith('Version:'):
                            line = replace_spec_value(line, new_version + '\n')
                        elif line.startswith('Release:'):
                            line = replace_spec_value(line, '0%{?dist}\n')
                        elif line.startswith(('Source:', 'Source0:')):
                            line = re.sub("/" + majorminor(item.version) + "/", "/" + majorminor(new_version) + "/", line)
                        tmp_spec.write(line)
            os.rename(item.spec_filename + ".tmp", item.spec_filename)

        # bump the spec file
        comment = None
        if args.bump_soname:
            comment = "Rebuilt for %s soname bump" % args.bump_soname
        elif new_version:
            comment = "Update to " + new_version
        if comment:
            cmd = ['rpmdev-bumpspec', "--comment=%s" % comment, "%s.spec" % item.pkgname]
            item.run_command(cmd)

        # run prep, and make sure patches still apply
        if not args.simulate:
            if not item.check_patches():
                print_fail("to build %s as patches did not apply" % item.pkgname)
                continue

        # push the changes
        if args.simulate:
            print_debug("Not pushing as simulating")
            continue

        # commit the changes
        if comment and not item.commit_and_push(comment):
            print_fail("push")
            continue

        # COPR, so build srpm, upload and build
        if item.is_copr:
            if not item.run_command(['fedpkg', "--dist=%s" % item.dist, 'srpm']):
                print_fail("to build srpm")
                continue

            # extract the nevr from the package
            new_srpm = glob.glob(args.cache + "/" + item.pkgname + '/*.src.rpm')[0]
            pkg = Package(new_srpm)

            # check if it already exists
            status = copr.get_pkg_status(pkg)
            if status == CoprBuildStatus.ALREADY_BUILT:
                print_debug ("Already built in COPR")
                continue
            elif status == CoprBuildStatus.IN_PROGRESS:
                print_debug ("Already building in COPR")
                continue

            # upload the package somewhere shared
            if os.getenv('USERNAME') == 'hughsie':
                upload_dir = '[email protected]:/home/fedora/rhughes/public_html/copr/'
                upload_url = 'http://rhughes.fedorapeople.org/copr/'
            elif os.getenv('USERNAME') == 'kalev':
                upload_dir = '[email protected]:/home/fedora/kalev/public_html/copr/'
                upload_url = 'http://kalev.fedorapeople.org/copr/'
            else:
                print_fail ("USERNAME not valid, ping hughsie on irc")
                continue
            print_debug("Uploading local package to " + upload_dir)
            p = subprocess.Popen(['scp', '-q', new_srpm, upload_dir])
            p.wait()
            pkg.url = upload_url + os.path.basename(new_srpm)
            if not copr.build(pkg):
                print_fail("COPR build")
                break
            rc = copr.wait_for_builds()
            if not rc:
                print_fail("waiting")
                continue

        # work out release tag
        if args.fedora_branch == "f18":
            pkg_release_tag = 'fc18'
        elif args.fedora_branch == "f19":
            pkg_release_tag = 'fc19'
        elif args.fedora_branch == "f20":
            pkg_release_tag = 'fc20'
        elif args.fedora_branch == "f21":
            pkg_release_tag = 'fc21'
        elif args.fedora_branch == "f22":
            pkg_release_tag = 'fc22'
        elif args.fedora_branch == "rawhide":
            pkg_release_tag = 'fc23'
        else:
            print_fail("Failed to get release tag for %s" % args.fedora_branch)
            continue

        # build package
        if new_version:
            print_info("Building %s-%s-1.%s" % (item.pkgname, new_version, pkg_release_tag))
        else:
            print_info("Building %s-%s-1.%s" % (item.pkgname, item.version, pkg_release_tag))
        if args.buildroot:
            rc = item.run_command(['fedpkg', 'build', '--target', args.buildroot])
        else:
            rc = item.run_command(['fedpkg', 'build'])
        if not rc:
            print_fail("Build")
            continue

        # work out repo branch
        if args.fedora_branch == "f18":
            pkg_branch_name = 'f18-build'
        elif args.fedora_branch == "f19":
            pkg_branch_name = 'f19-build'
        elif args.fedora_branch == "f20":
            pkg_branch_name = 'f20-build'
        elif args.fedora_branch == "f21":
            pkg_branch_name = 'f21-build'
        elif args.fedora_branch == "f22":
            pkg_branch_name = 'f22-build'
        elif args.fedora_branch == "rawhide":
            pkg_branch_name = 'f23-build'
        else:
            print_fail("Failed to get repo branch tag for" + args.fedora_branch)
            continue

        # wait for repo to sync
        if item.wait_repo and args.fedora_branch == "rawhide":
            rc = item.run_command(['koji', 'wait-repo', pkg_branch_name, '--build', "%s-%s-1.%s" % (item.pkgname, new_version, pkg_release_tag)])
            if not rc:
                print_fail("Wait for repo")
                continue
bcast_connection = pika.BlockingConnection(pika.ConnectionParameters( 'localhost', 5672, '/', credentials)) bcast_channel = bcast_connection.channel() ''' # Database Service creation: # Get existing packages from database: # TODO: Actually use db service to get package information packages = [] # In a loop, get each package and spin off it's bridge. pckg = Package('cf412') pckg.id = 1 #pckg.id = db_service.### pckg.pid = 1000 # pckg.id = return pid from starting process. packages.append(pckg) running = True # Starting package bridges: bridge_process = Popen(["./bridge.py", "demo"]) print ' [*] Waiting for messages. To exit press CTRL+C' def package_callback(ch, method, properties, body): print " [x] Received %r" % (body,) def web_callback(ch, method, properties, body):
def up(self): while True: conn, addr = self.sock.accept() print 'Connection address:', addr while 1: data = conn.recv(BUFFER_SIZE) if not data: break package = Package(11, 320) package.setPackage(data) package.decode() if package.type == 0 or package.type == 1: reply_package = Package(1) messages_data = self.call(package) reply_package.data = ''.join(messages_data) if DEVELOP: print package.__str__() conn.send(reply_package.getPackage()) # echo elif package.type == 2: reply_package = Package(2) reply_package.data = 'OK' if DEVELOP: print package.__str__() conn.send(reply_package.getPackage()) if package.command.name == 'send_file': var = package.command.variables.split() filename = var[0] mode = var[1] print "Start receiving" file = open(filename, mode) file.write(package.data.decode()) file.close() conn.close()