def test(func):
    # Python 2 benchmark driver: feeds lines of 'testdata.txt' (each line a
    # whitespace-separated list of "WxHxD" package specs) through the packing
    # function `func` and reports before/after counts.
    # NOTE(review): `bin` shadows the builtin; `fd` is never closed; the
    # trailing `break` means only the first non-empty line is fully reported.
    import time
    from package import Package
    fd = open('testdata.txt')
    vorher = 0    # total packages before packing ("before", German)
    nachher = 0   # total bins after packing ("after", German)
    start = time.time()
    counter = 0
    bin = Package("370x270x250")
    for line in fd:
        counter += 1
        if counter > 450:
            break
        packages = [Package(pack) for pack in line.strip().split()]
        if not packages:
            continue
        bins, rest, bxyz = func(packages, bin=bin)
        if rest:
            print "invalid data", rest, line
        else:
            vorher += len(packages)
            nachher += len(bins)
        # NOTE(review): placement of the following prints/break reconstructed
        # from a collapsed source line — they must sit inside the loop for the
        # final `break` to be legal; confirm against the original layout.
        print bin
        print bins
        print len(bins)
        print bins
        print bxyz
        print rest
        break
def read_directory(self, filename, package=None):
    """Parse every Python module beneath *filename*, recursing into sub-packages.

    Returns the list of parsed contents; when *package* is given, new entries
    are appended to (and returned as) that package's own ``contents`` list.
    """
    folder = File(filename)
    if not folder.is_folder:
        raise ValueError("%s is NOT a directory." % filename)

    # Accumulate into the parent package's contents when one was supplied.
    contents = [] if package is None else package.contents

    for entry in folder.children:
        if entry.ext == '.py':
            # Plain module: parse it and record the result.
            contents.append(self.read_file(entry.path, package))
        elif entry.is_package:
            # Sub-package: build its dotted name, recurse, then record it.
            if package is None:
                child_pkg = Package(filename=entry.path, name=entry.name)
            else:
                child_pkg = Package(filename=entry.path,
                                    name='%s.%s' % (package.name, entry.name),
                                    parent=package)
            self.read_directory(entry.path, child_pkg)
            contents.append(child_pkg)

    return contents
def parse_pkgs(pkgs):
    """Resolve each requested name in *pkgs* against the YAML package database.

    Each name is substring-matched against the database keys; on an ambiguous
    match the user is asked to pick one interactively. Exits the process with
    status 1 on an unknown name or an invalid selection.

    Returns a list of populated Package objects.

    Fixes vs. original:
    - ``int(input(...))`` raises ValueError on non-numeric input, not
      TypeError — the old handler never fired and the traceback leaked.
    - The interactive inner loop shadowed the outer loop variable ``pkg``,
      corrupting subsequent error messages.
    """
    output = []
    with open(db_path, 'r') as file:
        data = yaml.load(file, Loader=yaml.FullLoader)
    package_names = data['packages'].keys()

    for pkg in pkgs:
        matches = [x for x in package_names if pkg in x]
        if len(matches) == 0:
            print(f'Package "{pkg}" not found in database. Exiting...', file=sys.stderr)
            sys.exit(1)
        elif len(matches) == 1:
            name = matches[0]
        else:
            print(f'\nPackage "{pkg}" has multiple matches')
            # BUG FIX: loop variable renamed — it used to shadow `pkg`.
            for i, candidate in enumerate(matches):
                print(f'{i + 1}) {candidate}')
            try:
                index = int(input('Package # to select: '))
            except ValueError:  # BUG FIX: int() raises ValueError, not TypeError
                print('Error: Invalid selection', file=sys.stderr)
                sys.exit(1)
            if 1 <= index <= len(matches):
                name = matches[index - 1]
            else:
                print('Error: Invalid selection', file=sys.stderr)
                sys.exit(1)

        package = Package(name)
        package.installed = is_installed(package.entry)
        package.req_deps = data['packages'][package.entry].get('req', [])
        package.rec_deps = data['packages'][package.entry].get('rec', [])
        package.opt_deps = data['packages'][package.entry].get('opt', [])
        output.append(package)

    return output
def create_pack(self, data):
    """Build a data packet carrying *data*, numbered with the current
    sequence counter, and advance the counter."""
    packet = Package()
    packet.add_data(data)
    packet.set_dim(len(data))
    packet.set_num(self.pack_cnt)
    # Advance the shared sequence counter for the next packet.
    self.inc_cnt()
    return packet
def process_update_package(package=None, queue_manager=None):
    # Publish *package* to GMN: walk the predecessor chain back to the nearest
    # PUBLIC predecessor; if one exists, the metadata/ORE resources become
    # updates of it, otherwise everything is created fresh.
    # NOTE(review): assumes queue_manager.get_predecessor returns an object
    # with .package and .doi attributes (or None) — confirm against caller.
    predecessor = queue_manager.get_predecessor(package.package)
    while predecessor is not None:
        p = Package(predecessor)
        if p.public:
            # Found the nearest public predecessor — stop walking.
            break
        else:
            predecessor = queue_manager.get_predecessor(predecessor.package)
    r = package.resources
    rm = r[properties.METADATA]
    if predecessor is not None:
        # Chain metadata to the public predecessor and update in place.
        rm.predecessor = predecessor.package
        gmn_update(rm)
    else:
        gmn_create(rm)
    gmn_create(r[properties.REPORT])
    # Data resources are always newly created, never updated.
    data_resources = r[properties.DATA]
    for rd in data_resources:
        gmn_create(rd)
    ro = r[properties.ORE]
    if predecessor is not None:
        # ORE chains via the predecessor's DOI rather than its package id.
        ro.predecessor = predecessor.doi
        gmn_update(ro)
    else:
        gmn_create(ro)
def send(self): self.handler.run() # are loc starea de initializare(este transmis pachetul SYN si se asteapta ack-ul pentru acesta). self.send_syn_pack() self.wait_for_syn_pack() while True: # un nou pachet este creat doar daca nr. de pachete aflate in tranzit este mai mic decat dim. ferestrei if self.pack_in_transit < self.cwnd: data = self.rf.read(self.widgets.pack_size.get()-16) # nu s-a ajuns la finalul fisierului, asadar un nou pachet este creat si transmis clasei handler if len(data) != 0: pack = self.create_pack(data) self.handler.add_package(pack) self.inc_pack_in_transit() # s-a ajuns la finalul fisierului prin urmare trebuie transmis pachetul FIN pentru incheierea conexiunii. else: pack = Package() pack.set_flag(1) pack.set_num(self.pack_cnt) self.message_box.insert_message("FIN pack has been sent.") self.handler.add_package(pack) # salvam momentul trimiterii pachetului FIN self.timer = int(round(time.time() * 1000)) self.rf.close() break time.sleep(0.001)
def run(self):
    # Typosquatting analysis: for each popular PyPI project, collect names
    # within a small Levenshtein distance as candidate typos.
    arr = [None] * (250421)
    arr = self.create_arr(arr)
    wrapper = self.lru_wrapper(arr)
    striped_arr = self.create_arr_without_spec_chars(arr)
    self.compare(striped_arr)
    idx = 0
    with open(self.current_dir + "/../top-pypi-packages-30-days.json", "r") as file:
        data = json.load(file)
        for p in data["rows"]:
            obj = Package(p["project"])
            # NOTE(review): config.samplesize is compared as a string —
            # presumably it is read from CLI/config as text; confirm.
            if str(idx) == config.samplesize:
                config.limit = True
                logging.warning(
                    "analizer is done with typo creation for given samplesize \n sum of typos: "
                    + str(len(config.package_list)))
                break
            # NOTE(review): range(len(arr) - 1) skips the last entry — TODO
            # confirm whether that is intentional.
            for i in range(len(arr) - 1):
                lev_distance = Algos.levenshtein(obj.project, wrapper(i))
                # Distance threshold scales with the project-name length.
                if len(obj.project) <= 7:
                    THRESHOLD = 1
                if len(obj.project) > 7:
                    THRESHOLD = 2
                if (lev_distance <= THRESHOLD and wrapper(i) != obj.project):
                    obj.typos.append(wrapper(i))
                    # NOTE(review): rebinding `data` shadows the loaded JSON;
                    # harmless here because the iterator was already obtained.
                    data = json.dumps({
                        "real_project": obj.project,
                        "p_typo": wrapper(i)
                    })
                    config.package_list.append(data)
            #for lines
            idx = idx + 1
    # Redundant: the `with` block already closed the file.
    file.close()
def package_create(jusp_id, institution_id, package_type):
    # Python 2. Idempotently create an Elsevier n8 package + scenario for a
    # JUSP institution, copy holdings from the JISC package, and seed the
    # scenario settings (including the big-deal cost pulled via SQL).
    jisc_package_id = u"package-jiscels{}".format(jusp_id)
    package_id = u"package-n8els_{}_{}".format(jusp_id, package_type.replace(" ", ""))
    package_name = u"Elsevier n8 ({})".format(package_type)
    scenario_id = u"scenario-n8els_{}_{}".format(jusp_id, package_type.replace(" ", ""))
    scenario_name = u"n8 ({})".format(package_type)
    # Create the package only if it does not already exist.
    my_package = Package.query.get(package_id)
    if not my_package:
        print u"package {} doesn't exist, making".format(package_id)
        my_package = Package(package_id=package_id,
                             publisher="Elsevier",
                             package_name=package_name,
                             created=datetime.datetime.utcnow().isoformat(),
                             institution_id=institution_id,
                             is_demo=False,
                             currency="GBP")
        db.session.add(my_package)
        print my_package
        safe_commit(db)
    # Perpetual access is only copied for the institution's own PTA.
    if package_type == "own pta":
        copy_into_n8_package(old_package_id=jisc_package_id, new_package_id=package_id, copy_perpetual_access=True)
    elif package_type == "group pta":
        copy_into_n8_package(old_package_id=jisc_package_id, new_package_id=package_id, copy_perpetual_access=False)
    elif package_type == "uk pta":
        copy_into_n8_package(old_package_id=jisc_package_id, new_package_id=package_id, copy_perpetual_access=False)
    # Create the scenario only if it does not already exist.
    my_scenario = SavedScenario.query.get(scenario_id)
    if not my_scenario:
        print u"scenario {} doesn't exist, making".format(scenario_id)
        my_scenario = SavedScenario(False, scenario_id, None)
        my_scenario.package_id = package_id
        my_scenario.created = datetime.datetime.utcnow().isoformat()
        db.session.add(my_scenario)
        safe_commit(db)
    print "updating settings, including big deal cost from jisc package"
    big_deal_price = get_sql_answer(
        db,
        "select big_deal_cost from jump_account_package where package_id = '{}';"
        .format(jisc_package_id))
    dict_to_save = my_scenario.to_dict_saved_from_db()
    dict_to_save["name"] = scenario_name
    dict_to_save["configs"]["cost_bigdeal"] = big_deal_price
    dict_to_save["configs"]["cost_bigdeal_increase"] = 2
    dict_to_save["configs"]["include_social_networks"] = True  # set to true
    dict_to_save["configs"]["weight_authorship"] = 0  # 100
    dict_to_save["configs"]["weight_citation"] = 0  # 10
    save_raw_scenario_to_db(scenario_id, dict_to_save, None)
def get_packages(locations):
    """
    Complexity: Big O(N)
    Get all the package data from the CSV and create Package class instances.
    Append additional data (city/state/zip) to the matching location object,
    matching the package's address against an existing location instance to
    make mapping simpler.

    :param locations: iterable of location objects with an ``address`` attribute
    :return: list of Package instances
    :raises ValueError: if a CSV row's address matches no known location
            (FIX: previously this surfaced as an opaque IndexError from
            ``[...][0]`` on an empty match list)
    """
    packages = []
    with open("./data/packages.csv") as csv_file:
        reader = csv.DictReader(csv_file)
        for row in reader:
            # Find the location whose address matches this package's row.
            package_location = next(
                (location for location in locations
                 if location.address == row["address"]),
                None,
            )
            if package_location is None:
                raise ValueError(
                    "No location found for address %r" % row["address"])
            package_location.set_city(row["city"])
            package_location.set_state(row["state"])
            package_location.set_zip_code(row["zip"])
            new_package = Package(
                int(row["id"]),
                package_location,
                convert_deadline_to_datetime(row["deadline"]),
                int(row["mass"]),
                parse_notes(row["notes"]),
            )
            packages.append(new_package)
    return packages
def get_user_input():
    """Prompt the operator for a package's fields and return the Package.

    Reads ID, address, city, zip, state, deadline and weight from stdin,
    validates the deadline (HH:MM or 'EOD'), and returns the populated
    Package with its ``deadline`` set.

    Fixes vs. original:
    - ``time(int(hour), int(minute, 0))`` passed base 0 to ``int()`` (crashes
      on zero-padded minutes such as "09") and never supplied the seconds
      argument; now ``time(int(hour), int(minute), 0)``.
    - The retry loop re-split the stale ``input_parameters[5]`` instead of the
      freshly re-entered value.
    - Hour 0 (midnight) was rejected (``<= 0``) and minute 60 was accepted.
    - An input that is neither HH:MM nor 'EOD' looped forever without
      re-prompting; now it re-prompts.
    - Removed dead sample-data assignments that were immediately discarded.
    """
    prompts = ["Enter a package ID: ", "Enter delivery address: ",
               "Enter delivery city: ", "Enter delivery zip code: ",
               "Enter delivery state: ", "Enter delivery deadline: ",
               "Enter package weight: "]
    input_parameters = []
    print("Enter the following information for the package:")
    for prompt in prompts:
        input_parameters.append(input(prompt))

    input_address = Address(input_parameters[1], input_parameters[2],
                            input_parameters[3], input_parameters[4])
    input_package = Package(int(input_parameters[0]), input_address,
                            float(input_parameters[6]))

    str_time = input_parameters[5]
    valid_selection = False
    while not valid_selection:
        if ':' in str_time:
            hour, _, minute = str_time.partition(':')
            if not hour.isnumeric() or not minute.isnumeric():
                str_time = input("Invalid time. Please, re-enter time (HH:MM): ")
            elif int(hour) < 0 or int(hour) > 23 or int(minute) < 0 or int(minute) > 59:
                str_time = input("Invalid time. Please, re-enter time (HH:MM): ")
            else:
                deadline = time(int(hour), int(minute), 0)
                valid_selection = True
        elif str_time == 'EOD':
            # End of day sentinel.
            deadline = time(23, 0, 0)
            valid_selection = True
        else:
            str_time = input("Invalid time. Please, re-enter time (HH:MM): ")

    input_package.deadline = deadline
    return input_package
def process(self, incoming_production_line):
    """Wrap every bottle stack on the production line into its own Package.

    Returns a deque of Package objects, one per stack, in iteration order.
    """
    wrapped = deque()
    for bottles in incoming_production_line.values():
        box = Package()
        box.add_bottles(bottles)
        wrapped.append(box)
    return wrapped
def testParseCorruptedStreamToPackage(self):
    """A corrupted byte stream must decode to an empty error ('E') package."""
    # CORRUPTED_STREAM is a bunch of hex chars, e.g. b'\xfffff23fafafa'
    pkg = Package(encoded=CORRUPTED_STREAM)
    package_test_function(self, pkg, b"", "E", "B", 0, 0)
    self.assertTrue(pkg.is_error())
    self.assertFalse(pkg.is_handshake())
def finish_job(self):
    """
    Close every client socket, clear the client list, then combine the
    ordered sequence data into a single package and send it to the server.
    :return:
    """
    # Guard the client-list teardown against concurrent access.
    self.job_finished_flag_lock.acquire()
    log.info("The ordered packages has been full-filled, job is done.")
    for client in self.client_list:
        client.socket.close()
        log.info(f"Close connection of {client.uuid}")
    self.client_list.clear()
    self.job_finished_flag_lock.release()
    # Open a fresh connection to the result server on the local host.
    s = socket.socket()
    server_host = socket.gethostname()
    server_port = 23457
    s.connect((server_host, server_port))
    """ Add all seq data, combine to one package, send to server """
    temp_list = []
    for seq_data in self.ordered_seq_data:
        temp_list.append(seq_data.data)
        # result += seq_data.data
    # temp_list = [result]
    package = Package(payload=int_list_to_bytes(temp_list), data_type=PackageDataType.INT)
    package.generate_default_header()
    package.get_header().set_message("Ordered min value group")
    send_package(package, s)
def generate_repo(repo_json):
    """Takes a JSON repository description and generates a dictionary of
    corresponding Package objects.

    The repo is a dictionary organised via package name, then version:
    ``repo[name][version] -> Package``. Exits the process with status 1 when
    a package is missing any of the required fields (name, version, size).

    Fixes vs. original: the final if/else assigned ``repo[name][version] = p``
    in both branches — collapsed via ``setdefault``; ``not name in`` idiom
    replaced with ``name not in`` semantics implicitly.
    """
    repo = {}
    for package in repo_json:
        name = package.get("name", "")
        version = package.get("version", "")
        size = package.get("size", "")
        depends = package.get("depends", [])
        conflicts = package.get("conflicts", [])

        # Required fields for a package; if missing, the repository is invalid.
        if name == "" or version == "" or size == "":
            print("Exiting, invalid repo description.")
            sys.exit(1)

        # Create the Package object and add it to the repo under name/version.
        p = Package(name=name, version=version, size=size, depends=depends,
                    conflicts=conflicts)
        repo.setdefault(name, {})[version] = p

    return _expand_constraints(repo)
def receive(self):
    # Receiver loop: drain ACK packets from the (non-blocking) socket,
    # update the congestion window, and detect connection teardown.
    r_data = None
    while True:
        try:
            r_data, address = self.sock.recvfrom(self.widgets.pack_size.get())
        except BlockingIOError:
            # The socket buffer is empty — nothing to read right now.
            pass
        except ConnectionResetError:
            """ If the ACK for the FIN packet has not been received within at
            most 2 seconds of sending it, close the connection automatically. """
            if (self.timer != 0 and int(round(time.time() * 1000)) - self.timer > 2000):
                self.handle_end_of_connection()
                break
            time.sleep(0.5)
        # If an ACK packet was received, remove it from the in-transit
        # dictionary and update the congestion window.
        if r_data:
            r_pack = Package()
            r_pack.set_data(r_data)
            self.handler.remove_package(r_pack)
            self.update_cwnd()
            self.dec_pack_in_transit()
            # The last packet (the one with the FIN flag set) has been
            # received, so the connection is closed.
            if r_pack.get_flag(1):
                self.handle_end_of_connection()
                break
        r_data = None
def get_package_list():
    """Fetch every PackageType name from the graph and wrap each one
    in a Package object."""
    query = "MATCH (p:PackageType) RETURN p.Name AS package"
    rows = get_graph().cypher.execute(query)
    return [Package(row.package) for row in rows]
def get_ack_for_syn_pack(self, dim):
    """Build the ACK packet answering a SYN, acknowledging *dim*."""
    reply = Package()
    reply.set_flag(0)
    reply.set_num(self.pack_cnt)
    # Advance the shared sequence counter before attaching the ACK value.
    self.inc_cnt()
    reply.set_ack(dim)
    return reply
def testParseBytesToPackage(self):
    """A well-formed encoded byte string decodes into a data ('D') package."""
    # BYTES_ENCODED is an encoded package of bytes, e.g.
    # b'DB\x04\x00\x00\x00\x00\x00\x00\x00ABCD1997'
    pkg = Package(encoded=BYTES_ENCODED)
    package_test_function(self, pkg, b"ABCD", "D", "B", 4, 0)
    self.assertFalse(pkg.is_handshake())
    self.assertFalse(pkg.is_error())
def test_1():
    # Manual round-trip exercise: an outgoing Package and an incoming Package
    # exchange frames until the outgoing side emits its last frame; prints
    # every raw frame and the elapsed time.
    time_from = time.time()
    for i in range(1):
        k = 100
        simple_data = 'Hello Casin, iam simple data' * k
        simple_data += '***'
        simple_data = '***' + simple_data
        pack_out = Package(simple_data, callback_out)
        frame_out = pack_out.next_frame()
        raw = frame_out.raw()
        print(raw)
        # Feed the first outgoing frame into the receiving side.
        pack_in = Package(raw, callback_in)
        flag = True
        while flag:
            # Receiver's answer frame goes back to the sender...
            frame_in = pack_in.next_frame()
            raw = frame_in.raw()
            print(raw)
            pack_out.extend_bytes(raw)
            # ...and the sender's next frame (unless internal) goes forward.
            frame_out = pack_out.next_frame()
            if frame_out.is_last_frame():
                flag = False
            raw = frame_out.raw()
            print(raw)
            if not frame_out.is_internal():
                pack_in.extend_bytes(raw)
    time_to = time.time()
    print(time_to - time_from)
    print(sys.getsizeof(simple_data))
def push(package, server, build):
    """(Optionnaly build and) Push a package to a vRO server"""
    if build:
        _build_package(package)
    pkg = Package(package, config)
    target = VroServer(server, config)
    target.push(pkg.name, pkg.build)
def __init__(self, table_size=41):
    """Create the hash table with a small initial capacity.

    Every bucket starts as a reference to one shared sentinel package
    (``dummy_pkg``) marking an empty slot.
    """
    # Sentinel object used to mark empty buckets.
    self.dummy_pkg = Package(0, "", 0, "EOD", 0, "")
    self.table_size = table_size
    self.element_count = 0
    self.table = [self.dummy_pkg for _ in range(self.table_size)]
def setUpClass(cls):
    """Build the shared fixture: a Package wrapping a createDataPackage event."""
    ev = Event()
    ev.package = 'knb-lter-nin.1.1'
    ev.datetime = '2017-02-23T13:09:29.166'
    ev.method = 'createDataPackage'
    ev.owner = 'uid=LNO,o=LTER,dc=ecoinformatics,dc=org'
    ev.doi = 'doi:10.6073/pasta/3bcc89b2d1a410b7a2c678e3c55055e1'
    TestPackage.package = Package(event=ev)
def load_table(graph):
    """Read package_data.csv and insert one Package per row into *graph*."""
    with open('package_data.csv') as source:
        for record in csv.reader(source, delimiter=','):
            # Columns 0-7 map positionally onto the Package constructor.
            entry = Package(record[0], record[1], record[2], record[3],
                            record[4], record[5], record[6], record[7])
            graph.insert(entry)
def process_package(path, catagory, package):
    # Register *package* if the file at *path* is named "<package>-<version>".
    # NOTE(review): the body references `self`, which is NOT a parameter —
    # this only works if the function is nested inside a method and closes
    # over `self`; confirm the enclosing scope. The `self['name']` subscript
    # implies the owner supports item access.
    filename = os.path.splitext(os.path.basename(path))[0]
    # Split "<name>-<version>" on the first dash only.
    (file, version) = filename.split('-', 1)
    if (package == file):
        package = Package(self.parent, package, catagory, version, self['name'], path)
        self.packages.append(package)
    else:
        print('WARNING: ' + PACKAGE_INVALID_FILENAME % path)
def testIncoherentHeadToPackage(self):
    """A header/payload length mismatch must decode to an error package."""
    # Head says the payload's length is 3, but it is actually 4:
    # b'DB\x04\x00\x00\x00\x00\x00\x00\x00ABCD1997'
    encoded = b'DB\x03\x00\x00\x00\x00\x00\x00\x00ABCD1997'
    pkg = Package(encoded=encoded)
    package_test_function(self, pkg, b"", "E", "B", 0, 0)
    self.assertFalse(pkg.is_handshake())
    self.assertTrue(pkg.is_error())
def binpack(packages, bin=None, iterlimit=5000):
    """Packs a list of Package() objects into a number of equal-sized bins.

    Returns a list of bins listing the packages within the bins and a list
    of packages which can't be packed because they are too big.
    """
    # Fall back to the default bin size when no (truthy) bin was supplied.
    target_bin = bin if bin else Package("600x400x400")
    return allpermutations(packages, target_bin, iterlimit)
def pull(package, server, expand):
    """Get (and optionnaly expand) a package from a vRO server"""
    pkg = Package(package, config)
    source = VroServer(server, config)
    source.pull(pkg.name, pkg.src_package)
    if expand:
        _expand_package(package)
def create_packages():
    """Load the WGUPS package CSV into a linear-probing hash table."""
    table = LinearProbingHashTable()
    with open("WGUPS Package File.csv", encoding='utf-8-sig') as source:
        for record in csv.DictReader(source):
            table.insert(Package(record))
    return table
def install_pkg(env_root, bsp_root, pkg): """Install the required packages.""" # default true ret = True local_pkgs_path = os.path.join(env_root, 'local_pkgs') bsp_pkgs_path = os.path.join(bsp_root, 'packages') # get the .config file from env env_kconfig_path = os.path.join(env_root, 'tools\scripts\cmds') env_config_file = os.path.join(env_kconfig_path, '.config') package = Package() pkg_path = pkg['path'] if pkg_path[0] == '/' or pkg_path[0] == '\\': pkg_path = pkg_path[1:] pkg_path = os.path.join(env_root, 'packages', pkg_path, 'package.json') package.parse(pkg_path) url_from_json = package.get_url(pkg['ver']) package_url = package.get_url(pkg['ver']) pkgs_name_in_json = package.get_name() if package_url[-4:] == '.git': ver_sha = package.get_versha(pkg['ver']) # print("==================================================>") # print("packages name :"%pkgs_name_in_json.encode("utf-8")) # print("ver :"%pkg['ver']) # print("url :"%package_url.encode("utf-8")) # print("url_from_json : "%url_from_json.encode("utf-8")) # print("==================================================>") get_package_url = None get_ver_sha = None upstream_change_flag = False try: if (not os.path.isfile(env_config_file)) or ( os.path.isfile(env_config_file) and find_macro_in_config( env_config_file, 'SYS_PKGS_DOWNLOAD_ACCELERATE')): get_package_url, get_ver_sha = get_url_from_mirror_server( pkgs_name_in_json, pkg['ver']) # determine whether the package package url is valid if get_package_url != None and determine_url_valid( get_package_url): package_url = get_package_url if get_ver_sha != None: ver_sha = get_ver_sha upstream_change_flag = True except Exception, e: # print('e.message:%s\t' % e.message) print( "Failed to connect to the mirror server, package will be downloaded from non-mirror server.\n" )
def package_list():
    """Print the packages list in env.

    Python 2 code. Read the .config file in the BSP directory, and list the
    version number of the selected packages.

    Args: none
    Returns: none
    Raises: none
    """
    fn = '.config'
    env_root = Import('env_root')
    bsp_root = Import('bsp_root')
    target_pkgs_path = os.path.join(bsp_root, 'packages')
    pkgs_fn = os.path.join(target_pkgs_path, 'pkgs.json')
    # Without a .config there is nothing to list.
    if not os.path.isfile(fn):
        print('no system configuration file : .config.')
        print('you should use < menuconfig > command to config bsp first.')
        return
    #if not os.path.exists(target_pkgs_path):
    #    try:
    #        os.mkdir(target_pkgs_path)
    #    except:
    #        print 'mkdir packages directory failed'
    #        return
    pkgs = kconfig.parse(fn)
    #if not os.path.isfile(pkgs_fn):
    #    pkgs_file = file(pkgs_fn, 'w')
    #    pkgs_file.write(json.dumps(pkgs, indent=1))
    #    pkgs_file.close()
    for pkg in pkgs:
        package = Package()
        pkg_path = pkg['path']
        # Strip a leading separator so the join below stays relative.
        if pkg_path[0] == '/' or pkg_path[0] == '\\':
            pkg_path = pkg_path[1:]
        #pkg_path = pkg_path.replace('/', '\\')
        pkg_path = os.path.join(env_root, 'packages', pkg_path, 'package.json')
        package.parse(pkg_path)
        pkgs_name_in_json = package.get_name()
        print pkgs_name_in_json, pkg['ver']
        #print "package path:", pkg['path']
    # Hint the user at the workflow when no packages are selected.
    if not pkgs:
        print("Packages list is empty.")
        print('You can use < menuconfig > command to select online packages.')
        print('Then use < pkgs --update > command to install them.')
        return