def __handle_prepared(junk_data):
   """
   <Purpose>
      Called when the nest has finished copying files.

   <Arguments>
      junk_data:
         Unused.

   <Exceptions>
      IOError if there was problem ending the current session.

   <Side Effects>
      Sets glo_prepared to True.

   <Returns>
      None.
   """
   global glo_prepared
   arizonacomm.end_session()
   glo_prepared = True

   # test hook: pretend the nest failed even though the copy succeeded
   if arizonaconfig.get_option("simulate_nest_fail"):
      arizonareport.send_out(0, "simulating nest failure")
      glo_prepared = False
def verify():
   """
   <Purpose>
      Re-check every file recorded in the permanent-usage file against the
      hash recorded for it.

   <Returns>
      True if every recorded hash still matches (or no entries exist),
      False if any file's current hash differs.
   """
   result = True
   permUsageFileName = arizonaconfig.get_option("permUsageFileName")

   # open and read the file (close it too -- the old code leaked the handle)
   f = open(permUsageFileName, "r")
   try:
      lines = f.readlines()
   finally:
      f.close()

   # there could be duplicate lines because multiple copies of stork may have
   # been invoked from one run of pacman
   lines = uniq(lines)

   for line in lines:
      # strip the trailing newline; otherwise the recorded hash keeps its
      # "\n" and can never compare equal to the freshly computed hash
      splitline = line.strip().split(" ")
      (filename, last_hash) = (splitline[0], splitline[1])

      # get the hash of the file
      if os.path.exists(filename):
         cur_hash = arizonacrypt.get_fn_hash(filename)
      else:
         cur_hash = "doesnotexist"

      if cur_hash != last_hash:
         # BUG FIX: send_out takes a verbosity level as its first argument
         # (every other call site in this file passes one)
         arizonareport.send_out(0, "hash mismatch file " + filename +
                                " last=" + last_hash + " cur =" + cur_hash)
         result = False

   return result
def verify_exportdir(nestname):
   """
   <Purpose>
      Checks to see if /.exportdir exists. Creates it if it does not.

      TODO: This should be integrated into planetlab_share. Right now,
      planetlab_share asks the client ot create .exportdir files in a
      subdirectory for identification purposes rather than in the root
      directory (for sharing purposes)

      TODO: The initscript creates /.exportdir, so in theory this code can
      be removed once all stork clients have been upgraded (legacy stork
      clients remove /.exportdir after using the nest)

   <Arguments>
      nestname:
         Name of the nest; written as the contents of /.exportdir.

   <Exceptions>
      None.

   <Side Effects>
      May create /.exportdir.

   <Returns>
      None
   """
   if not os.path.exists("/.exportdir"):
      arizonareport.send_out(1, "/.exportdir is missing; recreating")
      try:
         f = open("/.exportdir", "w+")
         f.write(nestname + "\n")
         f.close()
      # BUG FIX: open() raises IOError on failure; the old handler caught
      # only OSError, so a failed open crashed instead of being reported
      except (IOError, OSError):
         arizonareport.send_error(0, "nest transfer: error creating /.exportdir containing: " + str(nestname))
def fetch_old_info(infofilename):
   """Return a list describing the current package data files.

   Each entry is [directory, hash, [retry, ...]], parsed from lines of the
   form "dir <name> <hash>" optionally followed by "retry <item>" lines.
   Returns [] (forcing a rebuild from scratch) when the file is missing or
   malformed.
   """
   try:
      infofile = open(infofilename, "r")
   except IOError:
      arizonareport.send_error(1, "Cannot open previous information '"+infofilename+"'. Rebuilding package databases from scratch")
      return []

   line = ""  # ensure the error message below never hits an unbound name
   try:
      try:
         ret_list = []
         for line in infofile:
            linelist = line.split()
            if line.startswith('dir '):
               if len(linelist) != 3:
                  raise IOError
               ret_list.append([linelist[1], linelist[2], []])
            elif line.startswith('retry '):
               # BUG FIX: a "retry" line appearing before any "dir" line used
               # to raise an uncaught IndexError; treat it as a parse error
               if len(linelist) != 2 or not ret_list:
                  raise IOError
               ret_list[-1][2].append(linelist[1])
            else:
               raise IOError
      except IOError:
         arizonareport.send_out(1, "Error parsing line:"+line+"\nRebuilding database from scratch")
         return []
   finally:
      infofile.close()
   return ret_list
def get_file(relative_path, outputfile):
   """Download relative_path from the repository into outputfile.

   Downloads into a temp file first and moves it into place only when the
   server answered HTTP 200.  Returns True on success, False otherwise.
   """
   (tmpfd, tmp) = tempfile.mkstemp("curldownload")
   # BUG FIX: mkstemp returns an OS-level file descriptor (an int); the old
   # code called .close() on it, which always raised and was silently
   # swallowed, leaking the descriptor.  Use os.close().
   try:
      os.close(tmpfd)
   except OSError:
      pass

   command = arizonaconfig.get_option("curlpath")+" -L -w '%{http_code}' -k -o "+tmp+" "+repository+":8081/"+relative_path
   arizonareport.send_out(2, "About to run: "+command)
   (sin, sout, serr) = os.popen3( command )
   outline = sout.read()
   sin.close()
   sout.close()
   serr.close()

   # ("" != "200" already, so the old extra outline == "" test was redundant)
   if outline != "200":
      # download failed; discard the temp file (best effort)
      try:
         os.unlink(tmp)
      except OSError:
         pass
      return False
   else:
      try:
         shutil.move(tmp, outputfile)
      except (IOError, OSError, shutil.Error):
         pass
      return True
def get_package_metadata(filename, original_filename=None):
   """
   <Purpose>
      Given a package filename, returns a dict of the package metadata.

   <Arguments>
      filename:
         This is the package file we wish to extract metadata from
      original_filename:
         This is the original name, to be used when getting the hash on a
         file that has been temporarily renamed.

   <Exceptions>
      TypeError:
         If a type mismatch or parameter error is detected (not a package
         file).
      IOError:
         If an error occurred opening the file, etc.

   <Side Effects>
      None.

   <Returns>
      Dict which contains the metadata for the package file
   """
   # check params
   arizonageneral.check_type_simple(filename, "filename", str, "storkpackage.get_package_metadata")
   arizonareport.send_out(4, "[DEBUG] get_package_metadata: filename = " + filename)

   if not os.path.exists(filename):
      raise TypeError("File does not exist: " + str(filename))

   package_dict = {}
   if original_filename is not None:
      package_dict['filename'] = original_filename
   else:
      package_dict['filename'] = os.path.basename(filename)

   # fetch the name, version, release, and size of the package
   info = get_package_info(filename)
   if info is None:
      raise TypeError("Unrecognized package format: " + str(filename))
   for key, value in zip(("name", "version", "release", "size"), info):
      package_dict[key] = value

   # may throw IOError or TypeError
   package_dict['hash'] = arizonacrypt.get_fn_hash(filename)

   package_dict['provides'] = get_packages_provide([filename])
   package_dict['requires'] = get_packages_require([filename])
   package_dict['files'] = get_packages_files([filename])
   return package_dict
def TrustedPackagesFileParse(dtd, filename, repo_dict, publickey_string, keyname, parents):
   """
   <Purpose>
      Creates the parser for the trustedpackages file and parses it.
      Returns a dictionary of group members.

   <Arguments>
      dtd: XML dtd to use when parsing
      filename: filename of tpfile, including path and extension.
      repo_dict: repository whre filename came from; used to print message
         to client
      publickey_string: public key used to verify signature of tpfile
         named by filename.
      keyname: the name of the key where that this should be added under
         in the returned dict.
      parents: a list build up of the keynames of the tpfiles that are
         parents of this tpfile. used for cycle detection.

   <Returns>
      dictionary containing trusted packages entries.
   """
   if repo_dict:
      # if we know which repo the file came from, then tell the user.
      arizonareport.send_out(3, "Parsing Trusted Packages File: [" + repo_dict['name'] + "] " + filename)
   else:
      arizonareport.send_out(3, "Parsing Trusted Packages File: " + filename)

   # materialize the public key as a temporary file for the signature check
   publickey_sl = arizonacrypt.fnstring_to_publickey_sl(publickey_string)
   temp_publickey_fn = arizonacrypt.sl_to_fn(publickey_sl)
   try:
      try:
         if arizonacrypt.XML_timestamp_signedfile_with_publickey_fn(filename, temp_publickey_fn):
            temp_contents = arizonacrypt.XML_retrieve_originalfile_from_signedfile(filename)
         # NOTE(review): if the signature check returns False rather than
         # raising, temp_contents is never bound and the parse below would
         # raise NameError -- verify that the checker always raises on
         # failure.
      except TypeError, e:
         arizonareport.send_error(0, str(e))
         raise TPFileError, "verification error"
   finally:
      # always remove the temporary key file, even on failure
      os.remove(temp_publickey_fn)

   try:
      app = __TrustedPackagesApplication(keyname, parents)
      app = arizonaxml.XMLParse(app, dtd, temp_contents)
      return app.tp_dict
   finally:
      # temp_contents is a temp file path; clean it up after parsing
      os.remove(temp_contents)
def __build_index(field, path):
   """
   <Purpose>
      Builds an index for the metadata in a particular directory.

   <Arguments>
      field:
         The metadata field to build the index for (ex: "provides")
      path:
         The directory where the metadata files are located. Also, the
         directory where the index file will be written. It will be named
         "path/<field>.index"

   <Exceptions>
      TODO

   <Side Effects>
      None

   <Returns>
      None.
   """
   outputfile = os.path.join(path, field + ".index")

   # start from a clean slate: drop any stale index
   if os.path.isfile(outputfile):
      os.remove(outputfile)

   # building the index takes a while, so set up a progress indicator
   if arizonareport.get_verbosity() > 1:
      width = arizonareport.console_size()[1]
      if width == None:
         width = 70
   else:
      width = 0

   import download_indicator
   prog_indicator_module = download_indicator
   prog_indicator_module.set_width(width)

   filenames = dircache.listdir(path)
   total = len(filenames)
   for position, metahash in enumerate(filenames):
      metafile = os.path.join(path, metahash)

      # listdir returns subdirectories too; skip anything that isn't a file
      if not os.path.isfile(metafile):
         continue

      # fold the items from this metahash into the index
      __append_index(field, metafile, outputfile)

      # update progress indicator
      prog_indicator_module.download_indicator(position + 1, 1, total)

   arizonareport.send_out(2, "")
def dump():
   """Print the key database, then the fallback default key, to the user."""
   arizonareport.send_out(0, "key database: ")
   for entry in build_key_database():
      arizonareport.send_out(0, " " + entry[0] + ": " + entry[3])

   arizonareport.send_out(0, "default key (used when none of the above exist):")
   fallback = build_default_keytuple()
   if not fallback:
      arizonareport.send_out(0, " None")
   else:
      arizonareport.send_out(0, " " + fallback[0] + ": " + fallback[3])
def determine_remote_files(name, destdir, hashfuncs=[default_hashfunc], maskList=[]):
   """
   <Purpose>
      Cracks open a metafile, determines the names of the files referenced
      from that metafile, and checks to make sure they are signed
      correctly. An unsigned metafile is assumed to exist at
      destdir/METAFILE_FN

   <Arguments>
      name:
         'name' of the remote thing we're synchronizing. The only purpose
         of this parameter is as text info for the user; It's suggested to
         use the same name as the 'host' parameter that is supplied to
         sync_remote_files(), but not absoletely necessary.
      destdir:
         'destdir' is a destination directory which will be synchronized.
      hashfuncs:
         hash functions used for verification. NOTE(review): unused in the
         portion of the body visible here, and a mutable default argument
         -- verify against the full source.
      maskList:
         NOTE(review): unused in the visible portion of the body.

   <Exceptions>
      None.

   <Side Effects>
      None

   <Returns>
      A tuple (result, file_list): result is True or False to indicate
      success, file_list is the list of files. (The original description
      listed three items for a two-element tuple; the error paths below
      return two elements.)
   """
   metafile_path = os.path.join(destdir, METAFILE_FN)
   fetch_list = []

   if not os.path.exists(metafile_path):
      arizonareport.send_error(arizonareport.ERR, "determine_remote_files(): file " + str(metafile_path) + " does not exist")
      arizonareport.send_syslog(arizonareport.ERR, "determine_remote_files(): file " + str(metafile_path) + " does not exist")
      return (False, fetch_list)

   mtime = os.stat(metafile_path)[ST_MTIME]
   arizonareport.send_out(1, "Using metadata " + name + ", timestamp " + time.ctime(mtime))

   # Open the file we just retrieved
   arizonareport.send_out(4, "[DEBUG] opening " + metafile_path)
   try:
      dir_file = open(metafile_path)
   # if a file cannot be opened
   except IOError, (errno, strerror):
      arizonareport.send_error(arizonareport.ERR, "determine_remote_files(): I/O error(" + str(errno) + "): " + str(strerror))
      arizonareport.send_syslog(arizonareport.ERR, "determine_remote_files(): I/O error(" + str(errno) + "): " + str(strerror))
      return (False, fetch_list)
   # NOTE(review): the success path (reading dir_file and building
   # fetch_list) is not visible in this view -- the function appears
   # truncated here; verify against the full source.
def init():
   """
   <Purpose>
      Initialize and update repository package list(s).
   """
   global glo_initialized
   global glo_repo_sections

   glo_repo_sections = arizonaconfig.get_option("repositories")
   if not glo_repo_sections:
      # compatibility mode -- None causes arizonaconfig.get_option_section
      # to default to the same behaviour as arizonaconfig.get_option
      glo_repo_sections = [None]

   # check repinfo: every configured repository section must say where its
   # package information lives
   for repo_section in glo_repo_sections:
      if arizonaconfig.get_option_section("repinfo", repo_section) == None:
         if repo_section:
            arizonareport.send_error(0, "repository " + repo_section + ":")
         arizonareport.send_error(0, "Repository package information locations must be" + \
                                     " specified either on the command line" + \
                                     " or in the configuration file. See the" + \
                                     " `--repositorypackageinfo' option.")
         sys.exit(1)

      # honor the per-section updatedb switch
      if not arizonaconfig.get_option_section("updatedb", repo_section):
         arizonareport.send_out(1, "Skipping update of package information...")
         update_ok = False
      else:
         arizonareport.send_out(1, "Updating package information...")
         update_ok = True

      # check to see if the user tried to override the localinfo variable for
      # a specific repository. If so, then complain because this is not yet
      # implemented
      # TODO: finish this (see __find_package_fnlist_on_one_criteria)
      if arizonaconfig.get_option_section("localinfo", repo_section) != arizonaconfig.get_option("localinfo"):
         arizonareport.send_error(0, "cannot use localinfo variable inside repository section")
         sys.exit(1)

      download_repositories(repo_section,
                            arizonaconfig.get_option_section("repinfo", repo_section),
                            arizonaconfig.get_option_section("localinfo", repo_section),
                            update_ok)

   # build the list of packageinfo directories
   build_localpdir()

   # uncomment to dump info about repositories to stdout
   # dump_repositories()

   glo_initialized = True
def write_new_data(infofilename, datalist):
   """Write the new information about the current packages to infofilename.

   Each datalist entry is [directory, hash, [retry, ...]]; the output format
   ("dir <name> <hash>" lines followed by "retry <item>" lines) matches what
   fetch_old_info() parses.
   """
   try:
      infofile = open(infofilename, "w")
   except IOError:
      arizonareport.send_out(1, "Cannot clobber information '"+infofilename+"'. Attempting to clobber")
      # BUG FIX: the old code removed the obstruction but never reopened the
      # file, leaving infofile unbound and crashing with NameError below
      os.remove(infofilename)
      infofile = open(infofilename, "w")
   try:
      for item in datalist:
         infofile.write("dir " + item[0] + " " + item[1] + "\n")
         for subitem in item[2]:
            infofile.write("retry " + subitem + "\n")
   finally:
      infofile.close()
def parse_doctype(self):
   """
   <Purpose>
      Skips DOCTYPE declaration
      TODO finish comment
   """
   if self.seen_doctype != 1:
      # first DOCTYPE encountered: skip past it rather than parsing it
      arizonareport.send_out(4, str("Ignoring DOCTYPE (%s,%d)" % (self.get_current_sysid(), self.get_line())))
      self.scan_to("]>")
      self.seen_doctype = 1
   else:
      # already skipped one; defer to the normal processor
      xmlproc.XMLProcessor.parse_doctype(self)
def destroy():
   """Delete every entry in the S3 bucket, then delete the bucket itself."""
   global glo_aws_conn
   global glo_aws_bucket

   for entry in readdir():
      status = glo_aws_conn.delete(glo_aws_bucket, entry.key)
      arizonareport.send_out(1, "delete: " + entry.key + " result=" +
                                str(status.http_response.status))

   status = glo_aws_conn.delete_bucket(glo_aws_bucket)
   arizonareport.send_out(1, "delete bucket result=" +
                             str(status.http_response.status))
def find_file_list(dir, filename_list, publickey_fn=None, publickey_string=None):
   # TODO: comment
   # TODO: args checking
   lookup = find_file_ts(dir, filename_list, publickey_fn, publickey_string)
   (found, found_repo, found_timestamp, found_count) = lookup
   if found:
      arizonareport.send_out(4, "[DEBUG] " + os.path.basename(found) + " found on " + found_repo['name'] + "(" + str(found_count) + " candidates)")
   return (found, found_repo)
def upload_file(username, password, file, type, slice=None):
   """Upload a file to the repository as the given type.

   type must be one of: package, tp, pacman, pk, conf.  For "pk" and "conf"
   uploads a slice name may also be supplied.  Returns None when the upload
   is skipped or login fails.
   """
   arizonareport.send_out(3, "upload_file initiated for file: "+file)
   ok_types = ["package","tp","pacman","pk","conf"]
   if type not in ok_types:
      arizonareport.send_out(2, type+" not in "+str(ok_types)+" , skipping upload")
      return None

   if not login(username,password):
      arizonareport.send_error(2, "ERROR: Not logged in and unable to login. Aborting file upload.")
      return None

   if not os.path.isfile(file):
      # BUG FIX: was send_out(2, file, " is not a file, ..."), which passed
      # the message as a stray third positional argument instead of
      # concatenating it onto the filename
      arizonareport.send_out(2, file+" is not a file, skipping upload.")
      return None

   command = arizonaconfig.get_option("curlpath")+" -L -k -b GetCookie %SLICE% -F \"type=%TYPE%\" -F \"numfiles=1\" -F \"uploadbutton=Upload File\" -F \"file_0=@%FILE%\" "+repository+":8081/stork/upload_handler.php"
   command = command.replace("%FILE%", file).replace("%TYPE%", type)
   if type in ["pk", "conf"] and slice!=None:
      command = command.replace("%SLICE%", "-F \"slice="+slice+"\"")
   else:
      command = command.replace("%SLICE%", "")

   #DEBUG -remove when we go live
   #command = command.replace("https://stork-repository.cs.arizona.edu", "http://jplichta.ipupdater.com:8080")
   arizonareport.send_out(2, "About to run: "+command)
   (sin, sout, serr) = os.popen3( command )
   outstring = sout.read()
   errstring = serr.read()
   # NOTE(review): this view of the function ends after reading curl's
   # output without returning a success indicator -- verify against the
   # full source.
def build_key_database(reset = False, ignore_username = False):
   """
   <Purpose>
      Builds a database of public keys. Keys are downloaded from PLC and
      converted from ssh to openssl format (as long as --noplckeys is not
      used). In addition, the --publickey file is read if it is present.

   <Side Effects>
      glo_keylist is filled with a list of usable publickey tuples

   <Returns>
      glo_keylist
   """
   global glo_keylist

   if reset:
      reset_key_database()

   # see if we are already done (the key list is cached across calls)
   if glo_keylist:
      return glo_keylist

   slicename = arizonageneral.getslicename()
   if slicename:
      on_planetlab = True
   else:
      on_planetlab = False
      # SMB: set slicename to "noslice" if no slice name is available. This
      # will allow a non-planetlab user to have a default filename for his
      # files.
      slicename = "noslice"

   arizonareport.send_out(4, "[DEBUG] slicename = " + str(slicename))

   if on_planetlab and (not arizonaconfig.get_option("noplckeys")):
      try:
         keylist = get_planetlab_publickeys(slicename)
      except Exception, e:
         arizonareport.send_error(0, "Failed to get public keys from planetlab (exception)")
         # TODO: log the type of exception
         storkwarning.log_warning("warning.noplckeys", str(e))
         sys.exit(-1)

      # If keylist == [], then PLC didn't return any keys. Treat this as
      # an error
      if not keylist:
         arizonareport.send_error(0, "Failed to get public keys from planetlab (no keys)")
         storkwarning.log_warning("warning.noplckeys", "nokeys")
         sys.exit(-1)
   # NOTE(review): the remainder of the function (assembling and returning
   # glo_keylist, and any use of ignore_username) is not visible in this
   # view -- verify against the full source.
def url_exists(relative_path):
   """Return True if a HEAD request for relative_path on the repository
   answers HTTP 200, False otherwise.
   """
   command = arizonaconfig.get_option("curlpath")+" -L -w '%{http_code}' -k --head "+repository+":8081/"+relative_path
   arizonareport.send_out(2, "About to run: "+command)
   (sin, sout, serr) = os.popen3( command )
   outline = sout.read()
   sout.close()
   serr.close()
   # the old `outline == "" or outline != "200"` was redundant: an empty
   # string is already != "200"
   return outline == "200"
def copyFile(fullfile, targetdir):
   """Copy fullfile into targetdir under its link name.

   Metadata/metalink files are skipped, as are files for which no link name
   can be determined.  Copy errors are reported but not raised.
   """
   # renamed from `file`, which shadowed the builtin
   fname = os.path.basename(fullfile)

   if ismetadatafile(fname) or ismetalinkfile(fname):
      arizonareport.send_out(1, "Skipping metadata/metalink file " + fname)
      return

   linkname = getLinkName(fullfile)
   if not linkname:
      return

   try:
      shutil.copy(fullfile, os.path.join(targetdir, linkname))
   # narrowed from a bare except: shutil.copy fails with IOError/OSError
   except (IOError, OSError):
      arizonareport.send_error(0, "error copying: "+linkname+ str( sys.exc_info()[0]) )
def addLink(fullfile, targetdir):
   """Create a symlink to fullfile inside targetdir under its link name,
   skipping metadata/metalink files and reporting duplicates.
   """
   basename = os.path.basename(fullfile)
   if ismetadatafile(basename) or ismetalinkfile(basename):
      arizonareport.send_out(1, "Skipping metadata/metalink file " + basename)
      return

   linkname = getLinkName(fullfile)
   if not linkname:
      return

   try:
      os.symlink(fullfile, os.path.join(targetdir, linkname))
   except OSError:
      # symlink already exists
      arizonareport.send_error(0, "Duplicate: "+linkname)
def find_satisfying_packages(name, ver):
   """
   <Purpose>
      Searches for packages meeting the name and version requirements.

   <Arguments>
      name:
         The single dependency needing to be satisfied.
      ver:
         The required version of the dependency, in the format:
         "OPERATOR VERSION", or "". Where, OPERATOR is a relational
         operator (=, >, etc), and version is a plain version, without
         release number (example: 1.2.3, not 1.2.3-1)

   <Exceptions>
      None.

   <Side Effects>
      None.

   <Returns>
      Returns a list of package information dictionaries.
   """
   # check params
   arizonageneral.check_type_simple(name, "name", str, "stork.find_satisfying_packages")
   arizonageneral.check_type_simple(ver, "ver", str, "stork.find_satisfying_packages")

   # fixed: "statisfying" typo and missing separator between name and ver
   arizonareport.send_out(4, "[DEBUG] storkdependency.find_satisfying_packages " +
                             "name = " + name + " ver = " + ver)

   # find packages that have the desired package name in their provides
   # field (this means they satisfy the dependency)
   criteria_dict = {}
   criteria_dict['provides'] = name
   mylist = storkpackagelist.find_packages(criteria_dict)
   criteria_dict['provides'] = name + " =*"
   mylist += storkpackagelist.find_packages(criteria_dict)

   retlist = []

   arizonareport.send_out(4, "[DEBUG] storkdependency.find_satisfying_packages mylist = " +
                             str([(pack['name'], pack['version'], pack['release']) for pack in mylist]))

   # now check that the version is adequate
   # (note: a package providing several satisfying entries is appended once
   # per entry, as in the original)
   for package in mylist:
      for provided in package['provides']:
         if this_satisfies(name, ver, provided):
            retlist.append(package)
   return retlist
def find_file(dir, filename, publickey_fn=None, publickey_string=None):
   # Note: this function should probably not be used. find_file_list() or
   # find_file_kind() should probably be used instead, because we now have
   # multiple types of keys (and thus multiple filenames to arbitrate)
   # TODO: args checking
   lookup = find_file_ts(dir, [filename], publickey_fn, publickey_string)
   (found, found_repo, found_timestamp, found_count) = lookup
   if found:
      arizonareport.send_out(4, "[DEBUG] " + filename + " found on " + found_repo['name'] + "(" + str(found_count) + " candidates)")
   return (found, found_repo)
def is_latest_version(version_string):
   """
   Returns a tuple (Boolean, String, String). The first part of the tuple
   is true if this version of the gui is the latest one as reported by the
   website (or newer), the second part of the tuple will always be None if
   the first part is True, if the first part is False it will be a string
   indicating the most recent version from the repository. The third part
   of the tuple will be a string, either storkslicemanager or
   storkcurlfuncs to indicate which file is out of date. If both are out
   of date, only storkslicemanager will be indicated.
   """
   version = "gui-version"
   command = arizonaconfig.get_option("curlpath")+" -L -w '%{http_code}' -k -o "+version+" "+storksite+"gui-version"
   arizonareport.send_out(2, "About to run: "+command)
   (sin, sout, serr) = os.popen3( command )
   outline = sout.read()
   sout.close()
   serr.close()

   if outline != "200":
      # version page did not exist, or could not connect
      return (False, "unknown", "unknown")

   # try to open the downloaded version page to check the version number
   try:
      f = open(version, "r")
      # Id cvs tag of storkslicemanager should be on first line
      line1 = f.readline().rstrip("\n").rstrip(" ")
      line2 = f.readline().rstrip("\n").rstrip(" ")
      f.close()
      current_gui_version = float(version_string.split(" ")[1])
      current_sc_version = float(scversion.split(" ")[1])
      available_gui_version = float(line1.split(" ")[1])
      available_sc_version = float(line2.split(" ")[1])
      if current_gui_version >= available_gui_version and current_sc_version >= available_sc_version:
         return (True, None, None)
      else:
         return (False, str(available_gui_version), "storkslicemanager")
   except (IOError, OSError):
      return (False, "unknown", "unknown")
   # BUG FIX: a malformed version page (missing fields or a non-numeric
   # version) used to raise IndexError/ValueError out of this function
   # instead of reporting "unknown"
   except (IndexError, ValueError):
      return (False, "unknown", "unknown")
def get_metahash(file):
   """
   <Purpose>
      Given a filename, find the metahash and the filename of the metadata
      file, if a metadata file exists.

   <Returns>
      (metahash, metafilename)
   """
   global glo_known_metahash

   # if we already know the answer, return it
   if file in glo_known_metahash:
      return glo_known_metahash[file]

   metalink = file + ".metalink"
   metahash = None
   metafile = None

   # first check and see if a metalink exists. If it does, then extracting
   # the metahash can be done by looking at the link.
   if os.path.exists(metalink):
      try:
         metafile = os.readlink(metalink)
         metahash = os.path.basename(metafile)
         arizonareport.send_out(0, "loaded metahash from metalink to " + str(metafile))
      except:
         pass

   if not metahash:
      # no usable metalink; compute the metahash from the package itself
      try:
         arizonareport.send_out(0, "Extracting metahash: "+os.path.basename(file))
         metadata = storkpackage.get_package_metadata(file)
      except:
         return (None, None)
      temp_meta_fn = storkpackage.package_metadata_dict_to_fn(metadata, '/tmp')
      metahash = os.path.basename(temp_meta_fn)
      os.unlink(temp_meta_fn)

   # remember this in case this function is called again.
   glo_known_metahash[file] = (metahash, metafile)
   return (metahash, metafile)
def TrustedPackagesOrder():
   """
   <Purpose>
      Parse the trusted packages file, flatten them from a DAG to a list,
      and finally sort them.

   <Arguments>
      None

   <Side Effects>
      The globals trustedpackages_parsing_dict and trustedpackages_list
      are filled in.

   <Returns>
      None.
   """
   global trustedpackages_parsing_dict
   global trustedpackages_list

   arizonareport.send_out(3, "Initializing trustedpackages files...")

   # compute tag preference rules. They'll be used when sorting later on
   compute_prefer_rules()

   (tpfilename, tp_repo, tp_keytuple) = storkpackagelist.find_file_kind("tpfiles", "tpfile")
   if not tpfilename:
      arizonareport.send_error(0, "Failed to locate trusted package file")
      sys.exit(1)

   # build up a dictionary of the entries of the tpfiles
   trustedpackages_parsing_dict = \
       TrustedPackagesFileParse(arizonaconfig.get_option("tpdtd"),
                                tpfilename,
                                tp_repo,
                                arizonacrypt.PublicKey(sl=tp_keytuple[2]).string,  # publickeystring
                                tp_keytuple[4],  # config_prefix
                                [])

   arizonareport.send_out(3, "Flattening trustedpackages files...")

   # flatten the DAG into a list
   # NOTE(review): flatten_line_number is assigned here but never read in
   # this function; presumably flatten() maintains its own counter --
   # verify against the full source.
   flatten_line_number = 0
   trustedpackages_list = flatten(tp_keytuple[4], 0, "default", "any", [], [], [], [])
def get_package_info(filename):
   """
   <Purpose>
      Given a package filename, returns a string list of package
      information of the form: [NAME, VERSION, RELEASE, SIZE]

   <Arguments>
      filename:
         Package filename.

   <Exceptions>
      TypeError:
         If a type mismatch or parameter error is detected.

   <Side Effects>
      None.

   <Returns>
      String list containing package information, or None on error.
   """
   # check params
   arizonageneral.check_type_simple(filename, "filename", str, "storkpackage.get_package_info")
   arizonareport.send_out(4, "[DEBUG] get_package_info: filename = " + filename)

   # initialize package managers
   initialize()

   # go through every possible package manager
   # (fixed copy/paste: this debug line used to be labeled
   # "get_package_metadata")
   arizonareport.send_out(4, "[DEBUG] get_package_info: inited_packmanager_list = " + str(inited_packmanager_list))
   for packagemanager in inited_packmanager_list:
      arizonareport.send_out(4, "[DEBUG] get_package_info: packagemanager = " + str(packagemanager))
      if packagemanager[1].is_package_understood(filename):
         arizonareport.send_out(4, "[DEBUG] get_package_info: is_package_understood = True")
         info = packagemanager[1].get_package_info(filename)
         arizonareport.send_out(4, "[DEBUG] get_package_info: info = " + str(info))
         if info != None:
            return info
   return None
def fetch_configuration(slicename, defaultconf=False):
   """Download the stork.conf for slicename (or the sample default when
   defaultconf is True).

   Returns the name of the downloaded (unsigned) conf file on success,
   False on failure.
   """
   if defaultconf:
      confurl = "http://www.cs.arizona.edu/stork/downloads/sample-stork.conf"
   else:
      confurl = repository+":8081/user-upload/conf/"+slicename+".stork.conf"
   destinationfile = slicename+".stork.conf.unsigned"

   command = arizonaconfig.get_option("curlpath")+" -L -w '%{http_code}' -k -o "+destinationfile+" "+confurl
   arizonareport.send_out(2, "About to run: "+command)
   (sin, sout, serr) = os.popen3( command )
   outline = sout.read()
   sout.close()
   serr.close()

   # ("" != "200" already, so the old extra outline == "" test was redundant)
   if outline != "200":
      # curl may not have created the output file at all (e.g. connection
      # failure), so guard the removal instead of letting os.remove raise
      if os.path.exists(destinationfile):
         os.remove(destinationfile)
      return False
   else:
      return destinationfile
def get_installed_by_name(pack):
   """
   <Purpose>
      Given a package and optional relop and version (nameOPver), see what
      packages satisfy the version requirements. Packages are checked based
      on name and version number, not provided dependencies.

   <Returns>
      A tuple (sat_vers, unsat_vers), where sat_vers is a list of packages
      that satisfy the version requirement, and unsat_vers is a list of
      packags (of the same name) that do not satisfy the version
      requirement.
   """
   # check params
   arizonageneral.check_type_simple(pack, "pack", str, "storkdependency.get_installed_by_name")

   (name, ver, tags) = split_pack_name(pack)
   cur_versions = storkpackage.get_installed_versions([name])

   unsatisfying_versions = []
   satisfying_versions = []
   for cur in cur_versions:
      if cur != None:
         # cur will be of the format "name = ver". we want "name-ver"
         # BUG FIX: the old test was `if packname.find(" = "):`, which is
         # true for every index except 0 because str.find returns -1
         # (truthy) when the substring is absent
         packname = cur
         if " = " in packname:
            packname = packname.replace(" = ", "-")

         if this_satisfies(name, ver, cur):
            satisfying_versions.append(packname)
         else:
            unsatisfying_versions.append(packname)

   if satisfying_versions or unsatisfying_versions:
      arizonareport.send_out(3, "[INFO] pack " + str(pack) +
                             " has installed satisfying versions " + str(satisfying_versions) +
                             " and unsatisfying versions " + str(unsatisfying_versions))

   return (satisfying_versions, unsatisfying_versions)
def copyFile_s3(fullfile): import storks3 storks3.init() file = os.path.basename(fullfile) linkname="" if ismetadatafile(file) or ismetalinkfile(file): arizonareport.send_out(1, "Skipping metadata/metalink file " + file); return (metahash, metafile) = get_metahash(fullfile) linkname = getLinkName(fullfile) if not linkname: return # upload the metafile, assuming we know what it is if metafile: metalinkname = linkname + ".metadata" # prep some s3 metadata headers to go with the metafile. They will # provide enough information to link metafile to the package. s3MetafileMetadata = {} s3MetafileMetadata['kind'] = "metafile" s3MetafileMetadata['packagename'] = linkname s3MetafileMetadata['package_localname'] = fullfile try: arizonareport.send_out(1, "upload metafile to s3: " + metafile + " as " + metalinkname) storks3.put_file(metafile, metalinkname) arizonareport.send_out(1, " success") except IOError, e: arizonareport.send_error(0, "error s3-copying: "+fullfile+": "+ str( e ) ) except:
def protect_directory(target_slice, target_dir):
   """ protect all subdirectories and files of the given directory """
   arizonageneral.check_type_simple(target_slice, "target_slice", str, "planetlab_share.protect_directory")
   arizonageneral.check_type_simple(target_dir, "target_dir", str, "planetlab_share.protect_directory")

   arizonareport.send_syslog(arizonareport.INFO, 'Protect dir "' + target_dir + '" on slice "' + str(target_slice) + '"')

   # Make sure the slice has bound
   if not __verify_shared(target_slice):
      return False

   # Uncomment the following to switch to fallback method
   """
   for root, dirs, files in os.walk(target_dir):
      for name in files:
         if not protect_file(target_slice, os.path.join(root, name)):
            return False
   return True
   """

   # build a wildcard mask deep enough to cover every nesting level of the
   # directory tree
   path = target_dir
   # note: add 1 to count_subdirectory_depth, so we include a mask for the
   # filenames themselves
   for x in xrange(0, __count_subdirectory_depth(target_dir) + 1):
      path += "/*"
   targetfinal = __path(target_slice, path)

   # TODO FIXME Should catch FNF, etc. errors
   if not stork_proper.set_flags(targetfinal, "1"):
      arizonareport.send_out(1, 'Protect dir: failed to protect ' + targetfinal)
      arizonareport.send_syslog(arizonareport.INFO, 'Protect dir: failed to protect ' + targetfinal)
      return False

   # verification: optionally re-check every file under the tree
   if arizonaconfig.get_option("verifyprotect"):
      for root, dirs, files in os.walk(target_dir):
         for name in files:
            __verify_protected_file(target_slice, os.path.join(root, name))

   return True