def table_from_csv(self, file_name='CSV_Data/packages.csv'):
    '''
    Loads data from csv file into package_table.
    Creates package based on CSV data and inserts that package into hash table
    Space-time complexity = O(N)

    file_name -- path of the CSV file to load (default: CSV_Data/packages.csv).
                 The default previously used a backslash ('CSV_Data\\packages.csv'),
                 which only worked because '\\p' happens not to be an escape
                 sequence; a forward slash is safe on all platforms.
    '''
    # newline='' is required by the csv module so quoted fields containing
    # newlines are parsed correctly.
    with open(file_name, newline='') as csv_file:
        reader = csv.reader(csv_file, delimiter=',')
        # One package per row; column order is fixed by the data file:
        # id, address_id, address, city, state, zip, deadline, weight, notes.
        for row in reader:
            package = Package(int(row[0]))
            package.address_id = row[1]
            package.address = row[2]
            package.city = row[3]
            package.state = row[4]
            package.zip_code = row[5]
            package.deadline = row[6]
            package.weight = row[7]
            package.instructions = row[8]
            # Inserts package
            self.insert_package(package)
def create_packages_list(file):
    """Build and return a HashTable of Package objects read from a CSV file.

    Each row supplies id, street, deadline and weight; the package's
    distance_from_hub is looked up on the location graph.
    """
    table = HashTable()
    locations = create_locations_table('./files/locations.csv')
    hub = 0
    graph = create_location_graph()
    with open(file, newline='', encoding='utf-8-sig') as packages_file:
        for row in csv.reader(packages_file):
            pkg_id = int(row[0].strip())
            street = row[1].strip()
            weight = row[6].strip()
            raw_deadline = row[5].strip()
            # 'EOD' means no hard deadline; otherwise keep only the
            # "HH:MM" part and append seconds.
            if raw_deadline == 'EOD':
                deadline = ''
            else:
                deadline = raw_deadline.split(' ')[0] + ':00'
            destination = locations[street]
            pkg = Package(pkg_id, destination, deadline, weight)
            # Shortest path entry 0 is the distance back to the hub.
            pkg.distance_from_hub = graph.shortest_paths(
                destination.graph_index)[0]
            table.insert(pkg_id, pkg)
    return table
def update_address(location_id, package: Package):
    """Re-point *package* at location_id, refresh its address text and persist.

    Always returns 0 (legacy success code).
    """
    package.location_id = int(location_id)
    package.address = get_address(location_id)
    package_table.hash_update(package)
    return 0
def execute_maint_script(self, script_name):
    '''
    execute a user-defined function

    script_name -- name of the function to run

    Locates <maint_dir>/<script_name>.py, temporarily chdirs into the
    maintenance directory and appends it to sys.path, then runs the
    script's execute(config, Logger) entry point via a Job object.
    Returns OK, FAIL, or the integer status returned by the script.
    Raises BadPackage (Python 2 raise syntax) if the script is missing.
    '''
    Package.execute_maint_script(self)
    script_path = "%s/%s.py" % (self.maint_dir, script_name)
    start_dir = get_slash_cwd()
    os.chdir(self.maint_dir)
    if not os.path.isfile(script_path):
        msg = "%s does not exist" % script_path
        # NOTE(review): raising here leaves the cwd changed (no chdir back
        # to start_dir) — confirm this is intended.
        raise BadPackage, (self.name, msg)
    sys.path.append(self.maint_dir )
    # The Job compiles/executes these source strings; the script is expected
    # to define execute(config, Logger) and its return value lands in `status`.
    import_string = 'import %s' % script_name
    cmd = 'status = %s.execute(self.config, Logger)' % script_name
    job = Job(import_string, cmd, self.config)
    status = job.execute()
    sys.path.remove(self.maint_dir)
    os.chdir(start_dir)
    # A script that returns nothing is treated as success; any non-integer
    # return is logged and coerced to FAIL.
    if status == None:
        status = OK
    if type(status) != type(1):
        msg = "Invalid status type (%s: '%s')"
        Logger.warning(msg % (type(status), status))
        status = FAIL
    return status
def initialize(self):
    '''Evaluate the package meta-data for consistency and initialize parameters.
    Verify that data on the disk for this package is consistent.

    Reads release (int), class_name (str) and the optional injectors/libs
    dictionaries from self.meta_data.data.  self.status becomes OK only
    when a valid class_name is found.  Raises BadPackage (Python 2 raise
    syntax) when metadata, release or class_name is missing/invalid.
    '''
    Package.initialize(self)
    self.status = FAIL
    dat = self.meta_data.data
    if dat:
        release = dat.get('release')
        if type(release) == type(1):  # release must be an integer
            self.release = release
            class_name = dat.get('class_name')
            if type(class_name) == type('string'):  # class_name must be a string
                self.status = OK
                self.class_name = class_name
                # injectors / libs sections are optional dictionaries
                injectors_info = dat.get('injectors')
                if type(injectors_info) == type({}):
                    self.injectors_info = injectors_info
                libs_info = dat.get('libs')
                if type(libs_info) == type({}):
                    self.libs_info = libs_info
            else:
                msg = "No script infomation for this package"
                raise BadPackage, (self.name, msg)
        else:
            msg = "No release infomation for this package"
            raise BadPackage, (self.name, msg)
    else:
        msg = "No metadata found for this package"
        raise BadPackage, (self.name, msg)
    # e.g. "mypackage-3"
    self.full_name = "%s-%d" % (self.name, self.release)
def get_jobs(self, name, version=None, dist=None, arch=None):
    """Dump a job status

    Return every Job attached to the package(s) named *name* (optionally
    restricted to one *version*), further filtered by *dist* and/or
    *arch* when given.  Returns [] when no matching package exists.
    """
    if version:
        pkgs = Package.selectBy(name=name, version=version)
    else:
        pkgs = Package.selectBy(name=name)
    if not pkgs.count():
        return []
    # Build the optional filters once instead of duplicating the four
    # dist/arch branch combinations of the original implementation.
    filters = {}
    if dist:
        filters['dist'] = dist
    if arch:
        filters['arch'] = arch
    retjobs = []
    for pkg in pkgs:
        retjobs.extend(Job.selectBy(package=pkg, **filters))
    return retjobs
def create_new_package():
    '''
    Prompts user for new package attributes and creates a package based on them.
    The method will also dynamically assign package & address ID to the package.
    If there is room in the hash table, the package is then inserted into it.
    If there is no room in the hash table, a bucket is created for the package
    to be inserted.
    Space-time complexity = O(N)
    '''
    global receiveing
    package_id = -1
    address_id = -1
    # Find an empty bucket in the hash table and use its index as the
    # package id.  This is valid because we are using a direct hash table.
    for i in range(len(receiveing.package_table)):
        if type(receiveing.package_table[i]) != Package:
            package_id = i
            break
    # If no empty bucket was found, the hash table is full: append an
    # empty bucket and use its index as the package id.
    if package_id == -1:
        receiveing.package_table.append(None)
        package_id = len(receiveing.package_table) - 1
    # Prompt the user for package details.
    print("Please enter package details below.\n")
    address = input("Address: ")
    city = input("City: ")
    state = input("State: ")
    zip_code = input("Zip: ")
    weight = input("Weight: ")
    deadline = input("Deadline: ")
    instructions = input("Instructions: ")
    # Reuse the address id if the address already exists, otherwise
    # allocate a new, unused one.  The lookup is performed once here —
    # the original code ran the same O(N) search twice.
    matches = receiveing.lookup_packages('address', address)
    if matches:
        address_id = matches[0].address_id
    else:
        address_id = receiveing.num_addresses
    # Create the package with the attributes entered by the user.
    package = Package(package_id)
    package.address_id = address_id
    package.address = address
    package.city = city
    package.state = state
    package.zip_code = zip_code
    package.weight = weight
    package.deadline = deadline
    package.instructions = instructions
    # Insert the package into the hash table.
    receiveing.insert_package(package)
def __init__(self, name, repository, config, instance_name):
    """Initialise a version-5 package with empty script metadata.

    name -- package name; repository -- package data tracker;
    config -- machine configuration; instance_name -- machine name.
    """
    Package.__init__(self, name, repository, config, instance_name)
    # Format/schema version of this package type.
    self.package_version = 5
    # Populated later from the package meta-data.
    self.class_name = None
    self.release = None
    self.injectors_info = {}
    self.libs_info = {}
def __init__(self, name, repository, config, instance_name):
    '''
    name -- the name of this package
    repository -- object that keeps track of package data
    config -- configuration for this machine
    instance_name -- name of this machine
    '''
    Package.__init__(self, name, repository, config, instance_name)
    # This subclass implements the version-4 package format.
    self.package_version = 4
def execute_maint_script(self, script_name, arguments):
    '''
    execute a user-defined function

    script_name -- name of the function to run
    arguments -- arguments to the script to be run

    Delegates to the base class, runs the command via _find_cmd, logs
    the outcome and returns the resulting status.
    '''
    Package.execute_maint_script(self, script_name)
    self.status = self._find_cmd(script_name, arguments=arguments)
    Logger.info("%s result for %s : %s"
                % (script_name, self.full_name, self.status))
    return self.status
def update_package_status(self, package: Package, status: PackageStatus, time: datetime) -> None:
    """Record a status transition for *package* at *time*.

    ON_TRUCK stamps package.time_on_truck; DELIVERED stamps
    package.delivery_time.  Any other status is ignored, matching the
    original behaviour.  The two original branches were identical except
    for the timestamp field, so the field is looked up instead.
    """
    timestamp_field = {
        PackageStatus.ON_TRUCK: 'time_on_truck',
        PackageStatus.DELIVERED: 'delivery_time',
    }.get(status)
    if timestamp_field is None:
        return
    package = self.packages.search(package)
    package.status = status
    setattr(package, timestamp_field, time)
    self.packages.update(package)
def convertToDisplay(self, byte_string_package):
    """Decode a received packet, update client/UI state and log it.

    On message types 2 and 5 the client's CIADDR is updated from YADDR
    and the view button is revealed; on type 5 the acknowledged IP is
    also written to the IP_History file.
    """
    received = Package()
    received.setData(byte_string_package)
    msg_type = received.OPTIONS[2]
    # 2 and 5 presumably correspond to DHCP OFFER/ACK — confirm.
    if msg_type in (2, 5):
        self.client.package.CIADDR = received.YADDR
        self.uiManager.viewButton.setHidden(False)
        # save in the file the latest ip address
        numeric_ip = []
        if msg_type == 5:
            numeric_ip = [int(byte) for byte in received.YADDR]
            with open("IP_History", 'w') as file:
                ip_addr = f"{numeric_ip[0]}.{numeric_ip[1]}.{numeric_ip[2]}.{numeric_ip[3]}"
                file.write(ip_addr)
    print(f"Packet manager:\n{received}")
def add_job(self, name, version, priority, dist, mailto=None, arch=None):
    """Add a job

    Creates (or reuses) the Package row for name/version and attaches a
    new WAIT job for dist/arch, unless an identical WAIT job already
    exists.  Returns True on success, False otherwise.

    Log wording fixed from "don't adding it" to "not adding it" for
    consistency with the other add_job implementation in this file.
    """
    if not arch:
        arch = self.cfg.arch[0]
    if not Dists().get_dist(dist, arch):
        RebuilddLog.error("Couldn't find dist/arch in the config file for %s_%s on %s/%s, not adding it" \
                % (name, version, dist, arch))
        return False
    pkgs = Package.selectBy(name=name, version=version)
    if pkgs.count():
        # If several packages exists, just take the first
        pkg = pkgs[0]
    else:
        # Maybe we found no packages, so create a brand new one!
        pkg = Package(name=name, version=version, priority=priority)
    jobs_count = Job.selectBy(package=pkg, dist=dist, arch=arch,
                              mailto=mailto, status=JobStatus.WAIT).count()
    if jobs_count:
        RebuilddLog.error("Job already existing for %s_%s on %s/%s, not adding it" \
                % (pkg.name, pkg.version, dist, arch))
        return False
    job = Job(package=pkg, dist=dist, arch=arch)
    job.status = JobStatus.WAIT
    job.arch = arch
    job.mailto = mailto
    log = Log(job=job)
    RebuilddLog.info("Added job for %s_%s on %s/%s for %s" \
            % (name, version, dist, arch, mailto))
    return True
def __init__(self):
    """Create the client thread: state machine, flags and UDP broadcast socket."""
    Thread.__init__(self)
    print("Client created")
    # Ordered state machine; current_state indexes into this list.
    self.state = [
        WaitForConfigState(self),
        SendConfigState(self),
        WaitForReplyState(self),
        DisplayReplyState(self)
    ]
    self.current_state = 0
    self.config_ready = False  # this flags the packet as being ready to send and is set from the UI callback
    self.reply_received = False  # this flags the receiving of a packet and is set from the WaitForReplyState
    self.package = Package()
    self.received_bytes = bytes(
        [0x00])  # this is the variable that holds the server reply
    self.keep_running = True
    # Creating the socket: UDP broadcast, client port 68 -> server port 67.
    self.CLIENT_PORT = 68
    self.SERVER_PORT = 67
    self.MAX_BYTES = 1024
    self.DESTINATION = ('<broadcast>', self.SERVER_PORT)
    self.socket_cl = socket.socket(socket.AF_INET, socket.SOCK_DGRAM,
                                   socket.IPPROTO_UDP)
    self.socket_cl.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
    self.socket_cl.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    # NOTE(review): binds to a hard-coded LAN address — confirm this should
    # not be configurable (e.g. '' to listen on all interfaces).
    self.socket_cl.bind(('192.168.1.101', self.CLIENT_PORT))
    # 5-second receive timeout.
    self.socket_cl.settimeout(5)
def __init_playground(self):
    """Switch this configuration to the playground (sandbox) release settings.

    Clears the production fields first, then points everything at the
    releaseapp-playground1 sandbox artifacts.  (Python 2 module.)
    """
    print "INIT PLAYGROUND"
    # first release all production settings, just to get sure
    self.urlDocWebRoot = None
    self.artifacts = None
    self.reactor = None
    self.package = None
    self.urlDocWebRoot = "http://josceleton.sourceforge.net/documentation/delme"
    commonGroup = "net.sf.josceleton.playground.releaseapp.playground1"
    commonSvnBase = "playground/releaseapp-playground1"
    # Artifact arguments appear to be (name, group, svnPath, releaseVersion,
    # nextDevVersion, packaging) — confirm against the Artifact class.
    self.artifacts = [
        Artifact("java-pom", commonGroup, commonSvnBase + "/java-pom", "0.4", "0.5-SNAPSHOT", "pom"),
        Artifact("model", commonGroup, commonSvnBase + "/model", "0.3", "0.4-SNAPSHOT", "jar"),
        Artifact("logic", commonGroup, commonSvnBase + "/logic", "0.4", "0.5-SNAPSHOT", "jar"),
        Artifact("app", commonGroup, commonSvnBase + "/app", "0.4", "0.5-SNAPSHOT", "jar"),
    ]
    self.reactor = Reactor("reactor-pom", commonGroup, commonSvnBase + "/reactor-pom", "0.4")
    # remember when using Package(self.artifacts..), and processing artifacts
    # failed, then outcommented and resumed, packager will only package half
    # of it :-/
    self.package = Package(self.artifacts, "some_zip", "zip readme content.")
def load_package(self, package: Package):
    """Put *package* on this truck if there is room (capacity 16).

    Returns 'Loaded\n' on success, 'Truck full\n' otherwise; on success
    the package's status string records the truck number.
    """
    # Guard clause: reject when the truck is already at capacity.
    if len(self.truck_package_list) >= 16:
        return 'Truck full\n'
    package.status = 'Loaded on Truck #' + str(self.truck_num)
    self.truck_package_list.append(package)
    return 'Loaded\n'
def uniform_com_func(net):
    """Each active node sends one Package with probability node.prob.

    Always returns True.  The random draw happens before the is_active
    test (as in the original) so the RNG stream is unchanged.
    """
    for sensor in net.node:
        if random.random() <= sensor.prob and sensor.is_active:
            packet = Package()
            sensor.send(net, packet)
    return True
def create_package_list(self, filename):
    """Read the package CSV and insert one Package per row, keyed by int ID."""
    with open(filename) as p_file:
        for fields in csv.reader(p_file):
            # The first eight columns are the Package constructor arguments.
            pkg = Package(*fields[:8])
            self.insert_into_table(int(pkg.ID), pkg)
def count_package_function(net):
    """Count nodes whose energy-info package's path ends at -1.

    (Per the docstring of the sibling implementation, a terminal -1
    marks a package that reached the base.)
    """
    total = 0
    for sensor in net.node:
        probe = Package(is_energy_info=True)
        sensor.send(net, probe)
        total += probe.path[-1] == -1
    return total
def uniform_com_func(net):
    """Target nodes send one sized Package with probability node.prob.

    Always returns True.  Condition order (target test, random draw,
    is_active) is preserved from the original so RNG use is identical.
    """
    for sensor in net.node:
        if sensor.id in net.target and random.random() <= sensor.prob and sensor.is_active:
            sensor.send(net, Package(package_size=net.package_size))
    return True
def count_package_function(net):
    """Count target nodes whose energy-info package's path ends at -1."""
    total = 0
    for tid in net.target:
        probe = Package(is_energy_info=True)
        net.node[tid].send(net, probe)
        if probe.path[-1] == -1:
            total += 1
    return total
def get_new_jobs(self):
    """Feed jobs list with waiting jobs and lock them

    Pulls up to max_jobs WAIT jobs per dist/arch from the database.
    When build_more_recent is set, each job may be swapped for the WAIT
    job of the highest package version (older WAIT jobs are marked
    GIVEUP).  Chosen jobs are marked WAIT_LOCKED with this host's name
    and appended to self.jobs.  Returns the number of jobs taken.
    (Python 2 module: uses list.sort(cmp=...).)
    """
    max_new = self.cfg.getint('build', 'max_jobs')
    count_current = len(self.jobs)
    with self.jobs_locker:
        if count_current >= max_new:
            return 0
        # Gather up to max_new WAIT jobs for every configured dist/arch.
        jobs = []
        for dist in Dists().dists:
            jobs.extend(Job.selectBy(status=JobStatus.WAIT,
                                     dist=dist.name,
                                     arch=dist.arch)[:max_new])
        count_new = 0
        for job in jobs:
            # Look for higher versions ?
            if self.cfg.getboolean('build', 'build_more_recent'):
                packages = Package.selectBy(name=job.package.name)
                candidate_packages = []
                candidate_packages.extend(packages)
                # Highest version first (Python 2 cmp-style sort).
                candidate_packages.sort(cmp=Package.version_compare)
                candidate_packages.reverse()
                newjob = None
                # so there are packages with higher version number
                # try to see if there's a job for us
                for cpackage in candidate_packages:
                    candidate_jobs = []
                    candidate_jobs.extend(Job.selectBy(package=cpackage,
                                                       dist=job.dist,
                                                       arch=job.arch))
                    for cjob in candidate_jobs:
                        # Keep the first WAIT job found (highest version);
                        # give up any other WAIT jobs for older versions.
                        if newjob and newjob != cjob and cjob.status == JobStatus.WAIT:
                            cjob.status = JobStatus.GIVEUP
                        elif cjob.status == JobStatus.WAIT:
                            newjob = cjob
                job = newjob
            # We have to check because it might have changed
            # between our first select and the build_more_recent stuffs
            if not job or job.status != JobStatus.WAIT:
                continue
            # Check dependencies
            if not job.is_allowed_to_build():
                continue
            job.status = JobStatus.WAIT_LOCKED
            job.host = socket.gethostname()
            self.jobs.append(job)
            count_new += 1
            count_current += 1
            if count_current >= max_new:
                break
        return count_new
def parse_file():
    """
    Parses the contents of dpkg/status into a list of packages.
    We need to traverse the file twice so we can find all the packages that
    aren't listed in the file themselves (only as dependencies) and know we
    shouldn't link to them.
    """
    # Prefer the real dpkg database; fall back to a bundled example file.
    if os.access("/var/lib/dpkg/status", os.R_OK):
        filepath = "/var/lib/dpkg/status"
    else:
        filepath = "example_dpkg_status"
    with open(filepath) as f:
        lines = f.readlines()
    # Traverse file once to initialise all packages
    for line in lines:
        if re.match("Package: ", line):
            #re.match only searches the beginning of the line
            name = line[line.find(" ") + 1:-1]
            packages[name] = Package(name)
    # Traverse file again to find add all the other parsed data
    strict_deps = []
    sub_deps = []
    in_description = False
    description = ""
    for line in lines:
        if re.match("Package: ", line):
            #re.match only searches the beginning of the line
            name = line[line.find(" ") + 1:-1]
            in_description = False
        elif re.match("Version: ", line):
            version = line[line.find(" ") + 1:-1]
        elif re.match("(Pre-)?Depends: ", line):
            parse_dependencies(line, strict_deps, sub_deps)
        elif re.match("Description: ", line):
            #TODO most descriptions contain multiple lines...
            description_summary = line[line.find(" ") + 1:-1]
            in_description = True
        elif re.match(r"((Homepage|Original-Maintainer)|\n)", line):
            # A Homepage/Original-Maintainer field or blank line ends the
            # description: flush accumulated data into the package entry.
            if in_description:
                strict_deps.sort()
                # Alternative dependency groups are appended as sorted lists.
                for dep in sub_deps:
                    strict_deps.append(sorted(dep))
                packages[name].add_data(
                    version=version,
                    description_summary=description_summary,
                    description=description,
                    deps=strict_deps)
                sub_deps, strict_deps = [], []
                description = ""
                in_description = False
        elif in_description:
            # NOTE(review): the pattern " .\n" matches ANY single character
            # after the space (the '.' is unescaped) — confirm only the
            # " ." paragraph separator is intended.
            if re.match(r" .\n", line):
                description += "\n"
            else:
                # Continuation lines are indented by one space; strip it.
                description += line[1:]
def GET(self, name=None, version=None):
    """Render the job table page for a package name (optionally one version)."""
    if version:
        pkg = Package.selectBy(name=name, version=version)[0]
        title = "%s %s" % (name, version)
        package = "%s/%s" % (name, version)
    else:
        pkg = Package.selectBy(name=name)[0]
        title = package = name
    jobs = list(Job.selectBy(package=pkg))
    return render.base(page=render.tab(jobs=jobs),
                       hostname=socket.gethostname(),
                       title=title,
                       package=package,
                       archs=RebuilddConfig().arch,
                       dists=RebuilddConfig().get('build', 'dists').split(' '))
def __init__(self, packages, locations) -> None:
    """Build the package hash table and all derived delivery bookkeeping.

    packages  -- iterable of raw CSV rows (indexed via Package.*Col constants)
    locations -- iterable of location objects with a .street attribute
    """
    super().__init__()
    self.packages = HashTable()
    self.csvData = CSVReader()
    now = datetime.now()
    # Deliveries must finish by 17:00 today.
    self.end_of_day = datetime(now.year, now.month, now.day, 17)
    # Match each CSV row to its location by street and insert the Package.
    for row in packages:
        for location in locations:
            if location.street == row[Package.addressCol]:
                package = Package(int(row[Package.idCol]), location,
                                  row[Package.DeadlineCol],
                                  row[Package.specialNotesCol],
                                  row[Package.mass])
                self.packages.insert(package)
    # Group packages that share a destination (O(n^2) pass).
    # NOTE(review): this appends second_package.destination (not the
    # package itself), so the dict maps destination -> list of identical
    # destinations — confirm that is intended.
    self.duplicate_address_dict = {}
    for package in self.packages:
        if package is not None:
            list_of_duplicate_street_packages = []
            for second_package in self.packages:
                if second_package is not None:
                    if package.destination == second_package.destination:
                        list_of_duplicate_street_packages.append(
                            second_package.destination)
            self.duplicate_address_dict[
                package.
                destination] = list_of_duplicate_street_packages
    self.linked_package_graph = Graph(
        len(self.packages) + 1,
        len(self.packages) + 1, self.packages)
    self.__link_packages()
    # Packages flagged with a wrong address, and the distinct times at
    # which their addresses will be corrected.
    self.packages_with_wrong_address = [package for package in self.packages\
        if package is not None\
        and package.has_wrong_address == True]
    self.wrong_address_update_times = [
        package.will_be_address_updated_at
        for package in self.packages_with_wrong_address
    ]
    self.wrong_address_update_times = self.__remove_duplicates(
        self.wrong_address_update_times)
    # Distinct, sorted deadlines and delay times.
    self.deadlines = self.__get_deadlines()
    self.deadlines = self.__remove_duplicates(self.deadlines)
    self.deadlines = sorted(self.deadlines)
    self.delay_times = self.__get_delay_times()
    self.delay_times = self.__remove_duplicates(self.delay_times)
    self.delay_times = sorted(self.delay_times)
def __init__(self, capacity=10):
    """Initialise the table structure, then bulk-load csv/packages.csv."""
    self._table = []
    self.keys = []
    self._struct(capacity)
    # import the data
    with open('csv/packages.csv') as file:
        for row in csv.reader(file):
            pkg = Package(row)
            self.insert(pkg.id, pkg)
def loadPackageList(self):
    """Read 'WGUPS Package File.csv' and insert one Package per row.

    Time Complexity: O(n)  Space Complexity: O(n)
    """
    # Create a file reader with ',' as the delimiter.
    with open('WGUPS Package File.csv') as file:
        for row in csv.reader(file, delimiter=","):
            pkg = Package(int(row[0]), row[1], row[2], row[3],
                          row[4], row[5], row[6], row[7])
            self._packageList.insert(pkg)
def __init__(self):
    """Production release configuration: repository URLs, per-user
    credentials/paths, the full artifact list, reactor and zip package."""
    ## GLOBAL #################################################################
    self.urlMavenReleaseRepo = "http://josceleton.sourceforge.net/maven/release"
    self.urlSvnWebRoot = "http://josceleton.svn.sourceforge.net/svnroot/josceleton"
    self.urlDocWebRoot = "http://josceleton.sourceforge.net/documentation"
    self.urlRootRoot = "http://josceleton.sourceforge.net"
    ## PER USER ###############################################################
    self.workspace = "/path/to/releaseapp/tmp_workspace"
    self.localSvnRoot = "/path/to/checkedout/svn/root/ARTIFACT"
    self.username = "******"
    self.password = "******"
    ## MAIN CONFIG ############################################################
    group = "net.sf.josceleton"
    guiceDepsGroupId = "net.sf.josceleton.thirdparty.com.google.code.guice"
    # (name, groupId, svn path, release version, next dev version, packaging)
    artifact_specs = [
        ("corporate-pom", group, "pom/corporate-pom", "0.4", "0.5-SNAPSHOT", "pom"),
        ("guice-dependencies", guiceDepsGroupId, "pom/guice-dependencies", "2.3", "2.4-SNAPSHOT", "pom"),
        ("checkstyle-config", group, "pom/checkstyle-config", "0.4", "0.5-SNAPSHOT", "jar"),
        ("java-abstract-pom", group, "pom/java-abstract-pom", "0.4", "0.5-SNAPSHOT", "pom"),
        ("java-pom", group, "pom/java-pom", "0.4", "0.5-SNAPSHOT", "pom"),
        ("commons", group, "josceleton/commons", "0.4", "0.5-SNAPSHOT", "jar"),
        ("core-api", group, "josceleton/core-api", "0.4", "0.5-SNAPSHOT", "jar"),
        ("core-impl", group, "josceleton/core-impl", "0.4", "0.5-SNAPSHOT", "jar"),
        ("connection-api", group, "josceleton/connection-api", "0.4", "0.5-SNAPSHOT", "jar"),
        ("connection-impl", group, "josceleton/connection-impl", "0.4", "0.5-SNAPSHOT", "jar"),
        ("motion-api", group, "josceleton/motion-api", "0.1", "0.2-SNAPSHOT", "jar"),
        ("motion-impl", group, "josceleton/motion-impl", "0.1", "0.2-SNAPSHOT", "jar"),
        ("josceleton", group, "josceleton/josceleton", "0.4", "0.5-SNAPSHOT", "jar"),
    ]
    self.artifacts = [Artifact(*spec) for spec in artifact_specs]
    self.reactor = Reactor("josceleton-reactor", "net.sf.josceleton",
                           "josceleton/josceleton-reactor", "0.4")
    self.package = Package(self.artifacts, "josceleton-0.4",
                           "This is the content of my readme file.")
def add_job(self, name, version, priority, dist, mailto=None, arch=None):
    """Add a job

    Create or reuse the Package row for name/version and attach a new
    WAIT job for dist/arch unless an identical WAIT job already exists.
    Returns True on success, False otherwise.
    """
    if not arch:
        arch = self.cfg.arch[0]
    if not Dists().get_dist(dist, arch):
        RebuilddLog.error("Couldn't find dist/arch in the config file for %s_%s on %s/%s, not adding it"
                          % (name, version, dist, arch))
        return False
    existing = Package.selectBy(name=name, version=version)
    if existing.count():
        # Several packages may exist; just take the first.
        pkg = existing[0]
    else:
        # No package found, so create a brand new one.
        pkg = Package(name=name, version=version, priority=priority)
    duplicates = Job.selectBy(package=pkg, dist=dist, arch=arch,
                              mailto=mailto, status=JobStatus.WAIT).count()
    if duplicates:
        RebuilddLog.error("Job already existing for %s_%s on %s/%s, not adding it"
                          % (pkg.name, pkg.version, dist, arch))
        return False
    job = Job(package=pkg, dist=dist, arch=arch)
    job.status = JobStatus.WAIT
    job.arch = arch
    job.mailto = mailto
    log = Log(job=job)
    RebuilddLog.info("Added job for %s_%s on %s/%s for %s"
                     % (name, version, dist, arch, mailto))
    return True
def uniform_com_func(net):
    """
    communicate function
    :param net:
    :return: True (always)

    Target nodes transmit one Package with probability node.prob.
    Condition order is preserved so RNG consumption is unchanged.
    """
    for sensor in net.node:
        if sensor.id in net.target and random.random() <= sensor.prob and sensor.is_active:
            packet = Package()
            sensor.send(net, packet)
    return True
def count_package_function(net):
    """
    count the number of package which can go to base
    :param net:
    :return: number of target nodes whose probe path ends at -1
    """
    reached = 0
    for tid in net.target:
        probe = Package(is_energy_info=True)
        net.node[tid].send(net, probe)
        reached += 1 if probe.path[-1] == -1 else 0
    return reached
def initialize(self):
    '''Evaluate the package meta-data for consistency and initialize parameters.
    Verify that data on the disk for this package is consistent.

    Reads the 'install' section of the server meta-data (console/reboot
    flags, md5sum, full name); self.status becomes OK only when a full
    name is found.  Raises BadPackage (Python 2 raise syntax) on corrupt
    or missing records.
    '''
    Package.initialize(self)
    self.status = FAIL
    if self.meta_data.data:
        if type(self.meta_data.data) == type(dict()):
            install_data = self.meta_data.data.get("install")
            # NOTE(review): install_data is dereferenced here before the
            # `if install_data:` check below — a missing 'install' section
            # raises AttributeError instead of BadPackage.  Confirm.
            chk = install_data.get('console')
            self.console = eval_boolean(chk)
            chk = install_data.get('reboot')
            self.reboot = eval_boolean(chk)
            self.checksum = install_data.get('md5sum')
            if install_data:
                if type(install_data) == type({}):
                    # Accept both camelCase and snake_case spellings.
                    self.full_name = install_data.get('fullName')
                    if not self.full_name:
                        self.full_name = install_data.get('full_name')
                    if self.full_name:
                        self.status = OK
                        return
                    else:
                        msg = "Package does not exist on the server"
                        raise BadPackage, (self.name, msg)
                else:
                    msg = "The server's record is completely corrupt"
                    raise BadPackage, (self.name, msg)
            else:
                msg = "server record is corrupt: no 'install' section"
                raise BadPackage, (self.name, msg)
        else:
            msg = "server record is corrupt: not a dictionary"
            raise BadPackage, (self.name, msg)
    else:
        msg = "No metadata found for this package"
        raise BadPackage, (self.name, msg)
def import_packages():
    """
    A function to import csv data into the program

    Reads src/data/PackageFile.csv, creates a Package per row and inserts
    it into DataStorage.packages under each of its searchable attributes
    (id, address, city, state, zip, deadline, mass, delivery status).
    18N+6 Time complexity of O(N)
    """
    with open((pathlib.Path.cwd() / "src/data/PackageFile.csv")) as csvfile:
        rows = list(csv.reader(csvfile, delimiter=','))
    # Reserve roughly 7 slots per package so that there is limited collisions.
    num_of_package_data_points = 7
    DataStorage.packages = HashTable(len(rows) * num_of_package_data_points)
    count = 0
    for row in rows:
        # Columns: id, address, city, state, zip, deadline, mass, notes.
        pkg = Package(row[0], row[1], row[2], row[3],
                      row[4], row[5], row[6], row[7])
        # Index the same package under each of its searchable attributes,
        # in the same order as the original implementation.
        for key in (pkg.id, pkg.address, pkg.city, pkg.state, pkg.zip,
                    pkg.delivery_deadline, pkg.mass_kilo,
                    pkg.delivery_status):
            DataStorage.packages.insert(key, pkg)
        count += 1
    DataStorage.number_of_packages_in_data = count
def main():
    """
    Program main function.

    Parses command-line options; for each argument builds a Package
    (from a PKGBUILD path, or a package name with -p; -r queries the
    remote pacman database), optionally draws its dependency tree, and
    prints which package already includes each optional dependency.
    Returns 1 when no arguments are given.  (Python 2 module.)
    """
    parser = OptionParser()
    parser.add_option("-g", "--draw", action="store_true", default=False,
                      help="Draw the dependence's tree")
    parser.add_option("-p", "--package-name", action="store_true", default=False,
                      dest="package_name",
                      help="Calcul optional dependences for a package name and not " \
                      + "a PKGBUILD")
    parser.add_option("-r", "--request", action="store_true", default=False,
                      help="Question the remote database (pacman -Si)," \
                      + " default is local (pacman -Qi)")
    parser.add_option("-d", "--debug", action="store_true", default=False,
                      help="Add debug messages")
    (options, args) = parser.parse_args()
    if(len(args) == 0):
        parser.print_help()
        return 1
    if(options.debug):
        logging.basicConfig(level=logging.DEBUG,
                            format='%(asctime)s %(levelname)s - %(message)s',
                            datefmt='%a, %d %b %Y %H:%M:%S')
    for arg in args:
        # Package(arg, isPkgbuild, isLocalQuery) — flags are inverted
        # because the options express the opposite defaults.
        package = Package(arg, not options.package_name, not options.request)
        if(options.draw):
            package.draw()
        for dep in package.optional_dependencies:
            print "%s is already include by %s" % \
                (dep, package.optional_dependencies[dep])
def load_package_data():
    """Load PackageData.csv into package_hashtable and queue each package on a truck.

    Columns used: 0 = id, 1 = location id, 6 = deadline, 7 = weight,
    8 = special notes.  Every package starts with status "AT HUB".
    """
    with open('PackageData.csv', 'r') as csv_file:
        for row in csv.reader(csv_file, delimiter=','):
            pkg = Package(row[0], row[1], row[6], row[7], row[8], "AT HUB")
            package_hashtable.put(row[0], pkg)
            add_to_truck(row[0])
def GET_package(self, package=None):
    """Render a build-status graph image.

    package -- "/" for the global status, or "/<name>/<version>" for a
    single package.  Returns the raw image bytes.

    Fix: the TemporaryFile was never closed; a context manager now
    guarantees it is released after reading.
    """
    graph = self.graph_init()
    if package == "/":
        graph.title = "Build status"
        jobs = Job.selectBy()
    else:
        dindex = package.rindex("/")
        graph.title = "Build status for %s" % package[1:]
        pkg = Package.selectBy(version=package[dindex+1:],
                               name=package[1:dindex])[0]
        jobs = Job.selectBy(package=pkg)
    graph.setData(self.compute_stats(jobs))
    with tempfile.TemporaryFile() as tmp:
        graph.draw(tmp)
        tmp.seek(0)
        return tmp.read()
def GET_package(self, package=None):
    """Render a build-status graph image.

    package -- "/" for the global status, or "/<name>/<version>" for a
    single package.  Returns the raw image bytes.

    Fix: the TemporaryFile was never closed; a context manager now
    guarantees it is released after reading.
    """
    graph = self.graph_init()
    if package == "/":
        graph.title = "Build status"
        jobs = Job.selectBy()
    else:
        dindex = package.rindex("/")
        graph.title = "Build status for %s" % package[1:]
        pkg = Package.selectBy(version=package[dindex + 1:],
                               name=package[1:dindex])[0]
        jobs = Job.selectBy(package=pkg)
    graph.setData(self.compute_stats(jobs))
    with tempfile.TemporaryFile() as tmp:
        graph.draw(tmp)
        tmp.seek(0)
        return tmp.read()
def reset(self):
    """Rebuild the construction-site scene and return the hook's state index.

    Recreates the site, its three areas, the crane, road, truck and the
    single package, wires them together, then maps the hook position and
    angle through changement_repere into the external state list L1.
    """
    self.construction_site = ConstructionSite(
        name=self.construction_site_name,
        location=self.construction_site_location)
    # Three functional areas: delivery, building (the demand point), storage.
    self.delivery_area = Area(name=self.delivery_area_name,
                              pos=self.delivery_area_pos,
                              width=self.delivery_area_width,
                              length=self.delivery_area_length)
    self.building_area = Area(name=self.building_area_name,
                              pos=self.building_area_pos,
                              width=self.building_area_width,
                              length=self.building_area_length)
    self.storage_area = Area(name=self.storage_area_name,
                             pos=self.storage_area_pos,
                             angle=self.storage_angle,
                             width=self.storage_area_width,
                             length=self.storage_area_length)
    self.demand_points = [self.building_area]
    self.construction_site.add_area(
        [self.delivery_area, self.building_area, self.storage_area])
    self.crane = Crane(pos=self.crane_pos,
                       sweeping_range=self.crane_sweeping_range,
                       demand_points=self.demand_points)
    self.crane.init_parts()
    self.construction_site.add_crane([self.crane])
    self.access_road = Road(pos=self.road_pos)
    self.access_road.calculate_length()
    self.truck = Truck(road=self.access_road)
    # One package, shared between crane and truck; hooked onto the crane.
    self.package1 = Package(name=self.package_name,
                            width=self.package_width,
                            length=self.package_length,
                            crane=self.crane,
                            truck=self.truck)
    self.crane.hook.package = self.package1
    self.packages = [self.package1]
    self.construction_site.add_package(self.packages)
    # NOTE(review): changement_repere is called twice with identical
    # arguments; if it is pure the result could be hoisted into a local.
    return L1.index((int(
        changement_repere(self.crane.hook.pos[0], self.crane.hook.pos[1],
                          self.crane.hook.angle)[0]), (int(
                              changement_repere(self.crane.hook.pos[0],
                                                self.crane.hook.pos[1],
                                                self.crane.hook.angle)[1]))))
def __init__(self, csv_package_list1):
    """Build a chaining hash table from pre-parsed CSV package rows.

    Columns: 0 = id, 1 = address, 2 = city, 3 = state, 4 = zip,
    5 = deadline (converted to a time), 6 = weight, 7 = note.  O(n).
    """
    self.hash_table = ChainingHashTable()
    self.delivery_deadline = datetime
    # Create one Package per row and insert it into the hash table.
    for row in csv_package_list1:
        self.delivery_deadline = convert_to_time(row[5])
        self.package = Package(row[0], row[1], row[2], row[3], row[4],
                               self.delivery_deadline, row[6], row[7])
        self.hash_table.insert(self.package)
    # NOTE: self.package / self.delivery_deadline end up holding only the
    # last row's values, matching the original behaviour.
def populate_packages():
    """Read ModifiedPackageFile.csv and fill package_table with Package objects.

    Time O(n) and space O(n) in the number of rows: iteration does only
    constant work per row, and one Package is created per row.
    """
    with open('ModifiedPackageFile.csv', 'r', encoding='utf-8-sig') as file:
        for row in csv.reader(file):
            # Columns: id, address, city, state, zip, deadline, mass, notes.
            # The id passed to Package is stripped; the hash key keeps the
            # raw value, as in the original.
            new_package = Package(row[0].strip(), row[1], row[2], row[3],
                                  row[4], row[5], row[6], row[7])
            package_table.insert(row[0], new_package)
def createPackage(self, order_bin, address):
    """
    Creates a new Package object containing the contents of order_bin to be
    shipped to an address

    Args:
        order_bin: A Bin object containing all of needed Item objects to
            complete an Order
        address: A string representing the address this new package will be
            sent to

    Returns:
        package: A Package object containing all of the Item objects
    """
    shipment = Package()
    shipment.setDestination(address)
    for item in order_bin.getContents():
        shipment.addToPackage(item)
    return shipment
def _eval_priority(self):
    """Determine the priority of this package from its meta-data,
    then let the base class finish the evaluation."""
    self.priority = self.meta_data.data.get('priority')
    Package._eval_priority(self)
# Wire up the application-wide shared objects on the Global module.
import Global
from Log import Log
# Shared logger (instance() suggests a singleton — confirm).
Global.Logger = Log.instance()
from DB.Database import Database
# Shared database handle.
Global.DB = Database.instance()
from Package import Package
# Shared packager object.
Global.Packager = Package.instance()