def build_file_tar_path(bundle, file, parent_dir):
    """Return the in-archive path for *file* under its bundle's folder.

    Layout: <FOLDER_BUNDLES>/<bundle name>/<bundle version>/<parent_dir><file name>,
    with every user-supplied component stripped of URI-forbidden characters.
    """
    # Make sure a non-empty parent directory ends with exactly one separator.
    if parent_dir and not parent_dir.endswith(os.sep):
        parent_dir += os.sep
    bundle_name = generics_utils.remove_URI_forbidden_char(bundle["name"])
    bundle_version = generics_utils.remove_URI_forbidden_char(bundle["version"])
    leaf = generics_utils.remove_URI_forbidden_char(ntpath.basename(file["name"]))
    return (constants.FOLDER_BUNDLES + os.sep + bundle_name
            + os.sep + bundle_version
            + os.sep + parent_dir + leaf)
def recursivelyAppendToArchive(bundle, files, parentDir, checkList, archive_files):
    """Register a bundle "files" entry (and, recursively, its children) in
    the archive manifest.

    Side effects: rewrites ``files["source"]`` in place to the in-archive
    path, appends ``[tar_path, original_path]`` pairs to ``archive_files``,
    and records each original source path in ``checkList`` to detect
    duplicates.

    Returns:
        (checkList, archive_files) — the same (mutated) lists, for chaining.

    Raises:
        ValueError: if the same source path appears twice in the bundle.
    """
    # must save the filepath before changing it after archive
    filePathBeforeTar = files["source"]
    # Entries tagged "ospkg" are not copied into the archive's bundles folder.
    if not "tag" in files or ("tag" in files and files["tag"] != "ospkg"):
        if files["source"] not in checkList:
            # add the source path to the check list
            checkList.append(files["source"])
            # if parentDir is a non-empty path, terminate it with os.sep; else keep ""
            # NOTE(review): this rebinding of parentDir is also what the two
            # recursive calls below see — confirm before refactoring.
            if parentDir:
                parentDir = parentDir + os.sep
            # build the in-archive destination path for this file
            file_tar_path=constants.FOLDER_BUNDLES + os.sep + generics_utils.remove_URI_forbidden_char(bundle["name"]) + os.sep + generics_utils.remove_URI_forbidden_char(bundle["version"]) + os.sep + parentDir + generics_utils.remove_URI_forbidden_char(ntpath.basename(files["source"]))
            archive_files.append([file_tar_path,files["source"]])
            # changing source path to archive related source path
            files["source"]=file_tar_path
        else:
            raise ValueError("Cannot have identical files in the bundles section: " + filePathBeforeTar)
    # Recurse into explicitly listed children. Note files["source"] may have
    # been rewritten above, so the child prefix is based on the archive path.
    if "files" in files:
        for subFiles in files["files"]:
            checkList,archive_files = recursivelyAppendToArchive(bundle, subFiles, parentDir + ntpath.basename(files["source"]), checkList, archive_files)
    # If the entry is a directory on disk (and not an "ospkg"), also pick up
    # its on-disk contents, using the path as it was before rewriting.
    if (not "tag" in files or files["tag"] != "ospkg") and os.path.isdir(filePathBeforeTar):
        checkList,archive_files = processFilesFromFolder(bundle, files, filePathBeforeTar, parentDir + ntpath.basename(filePathBeforeTar), checkList, archive_files)
    return checkList, archive_files
def processFilesFromFolder(bundle, files, filePath, parentDir, checkList, archive_files):
    """Recursively register every entry of the on-disk directory *filePath*
    in the archive manifest, attaching each entry to ``files["files"]``.

    Side effects: appends ``[tar_path, original_path]`` pairs to
    ``archive_files``, records original source paths in ``checkList``, and
    appends a child dict (with its "source" rewritten to the in-archive
    path) to ``files["files"]``.

    Returns:
        (checkList, archive_files) — the same (mutated) lists, for chaining.
    """
    # BUG FIX: the original rebound parentDir ("parentDir = parentDir + os.sep")
    # INSIDE the loop, so every entry after the first accumulated an extra
    # separator (dir//file, dir///file, ...). Compute the prefix once instead.
    prefix = parentDir + os.sep if parentDir and not parentDir.endswith(os.sep) else parentDir
    for subFiles in os.listdir(filePath):
        subFilesDict = {
            "name": ntpath.basename(subFiles),
            "source": filePath + os.sep + ntpath.basename(subFiles),
            "files": [],
        }
        # must save the filepath before changing it after archive
        subFilePathBeforeTar = subFilesDict["source"]
        if subFilesDict["source"] not in checkList:
            # add the source path to the check list
            checkList.append(subFilesDict["source"])
            # build the in-archive destination path for this entry
            file_tar_path = (constants.FOLDER_BUNDLES + os.sep
                             + generics_utils.remove_URI_forbidden_char(bundle["name"]) + os.sep
                             + generics_utils.remove_URI_forbidden_char(bundle["version"]) + os.sep
                             + prefix
                             + generics_utils.remove_URI_forbidden_char(ntpath.basename(subFilesDict["source"])))
            archive_files.append([file_tar_path, subFilesDict["source"]])
            # changing source path to archive related source path and add it
            # to the files section of the parent folder
            subFilesDict["source"] = file_tar_path
            files["files"].append(subFilesDict)
        # descend into subdirectories using the pre-rewrite on-disk path
        if os.path.isdir(subFilePathBeforeTar):
            checkList, archive_files = processFilesFromFolder(
                bundle, subFilesDict, subFilePathBeforeTar,
                prefix + ntpath.basename(subFilePathBeforeTar),
                checkList, archive_files)
    return checkList, archive_files
def processFilesFromFolder(bundle, files, filePath, parentDir, checkList, archive_files):
    """Recursively register every entry of the on-disk directory *filePath*
    in the archive manifest, attaching each entry to ``files["files"]``.

    NOTE(review): this is a re-definition of an identical function earlier in
    the file; at import time this later definition wins.

    Returns:
        (checkList, archive_files) — the same (mutated) lists, for chaining.
    """
    # BUG FIX: the original appended os.sep to parentDir inside the loop,
    # accumulating "//" separators from the second directory entry onward.
    # Build the separator-terminated prefix once, without mutating the param.
    prefix = parentDir + os.sep if parentDir and not parentDir.endswith(os.sep) else parentDir
    for subFiles in os.listdir(filePath):
        subFilesDict = {
            "name": ntpath.basename(subFiles),
            "source": filePath + os.sep + ntpath.basename(subFiles),
            "files": [],
        }
        # must save the filepath before changing it after archive
        subFilePathBeforeTar = subFilesDict["source"]
        if subFilesDict["source"] not in checkList:
            # add the source path to the check list
            checkList.append(subFilesDict["source"])
            # build the in-archive destination path for this entry
            file_tar_path = (constants.FOLDER_BUNDLES + os.sep
                             + generics_utils.remove_URI_forbidden_char(bundle["name"]) + os.sep
                             + generics_utils.remove_URI_forbidden_char(bundle["version"]) + os.sep
                             + prefix
                             + generics_utils.remove_URI_forbidden_char(ntpath.basename(subFilesDict["source"])))
            archive_files.append([file_tar_path, subFilesDict["source"]])
            # changing source path to archive related source path and add it
            # to the files section of the parent folder
            subFilesDict["source"] = file_tar_path
            files["files"].append(subFilesDict)
        # descend into subdirectories using the pre-rewrite on-disk path
        if os.path.isdir(subFilePathBeforeTar):
            checkList, archive_files = processFilesFromFolder(
                bundle, subFilesDict, subFilePathBeforeTar,
                prefix + ntpath.basename(subFilePathBeforeTar),
                checkList, archive_files)
    return checkList, archive_files
def do_create(self, args):
    """Create a stack-template archive from a JSON/YAML file and import it.

    Reads the (local or remote) template given on the command line, rewrites
    every referenced source path to its in-archive location, packs directory
    bundles into their own tar.gz, writes everything into a tar.gz archive
    and hands it to ``import_stack``.

    Returns:
        0 on success, 2 on a user/validation error, the non-zero code
        propagated from ``import_stack``, or None when a caught OSError /
        IOError / ArgumentParserError is only reported.
    """
    try:
        # add arguments
        doParser = self.arg_create()
        doArgs = doParser.parse_args(shlex.split(args))
        # if the help command is called, parse_args returns None object
        if not doArgs:
            return 2
        # --
        # get file (remote or local)
        file = generics_utils.get_file(doArgs.file)
        if file is None:
            return 2
        # validate parsing and mandatory fields
        template = validate(file)
        if template is None:
            return 2
        isJsonFile = check_extension_is_json(file)
        # builders are not shipped inside the archive
        if "builders" in template:
            template["builders"] = None
        archive_files = []
        if "config" in template["stack"]:
            for config in template["stack"]["config"]:
                # add to list of file to tar
                if "source" in config:
                    file_tar_path = (
                        constants.FOLDER_CONFIGS + os.sep
                        + generics_utils.remove_URI_forbidden_char(ntpath.basename(config["source"]))
                    )
                    archive_files.append([file_tar_path, config["source"]])
                    # changing source path to archive related source path
                    config["source"] = file_tar_path
                else:
                    printer.out("No source file found in config", printer.ERROR)
                    return 2
        try:
            checkList = []
            if "bundles" in template["stack"]:
                for bundle in template["stack"]["bundles"]:
                    if "files" in bundle:
                        for files in bundle["files"]:
                            # if it's a directory: pack it into its own tar.gz first
                            if os.path.isdir(files["source"]) and ntpath.basename(files["source"]) not in checkList:
                                # add the source path to the check list
                                checkList.append(ntpath.basename(files["source"]))
                                # creating an archive and add it to the file_tar_path
                                output_filename = files["name"] + ".tar.gz"
                                file_tar_path = (
                                    constants.FOLDER_BUNDLES + os.sep
                                    + generics_utils.remove_URI_forbidden_char(bundle["name"]) + os.sep
                                    + generics_utils.remove_URI_forbidden_char(bundle["version"]) + os.sep
                                    + generics_utils.remove_URI_forbidden_char(output_filename)
                                )
                                source_dir = files["source"]
                                # BUG FIX: the original had a bare "tar.close"
                                # (attribute access, no call) here — a no-op;
                                # the context manager already closes the archive.
                                with tarfile.open(output_filename, "w:gz") as tar:
                                    tar.add(source_dir, arcname=os.path.basename(source_dir))
                                archive_files.append([file_tar_path, output_filename])
                                # changing the name of the file
                                files["name"] = output_filename
                                # changing source path to archive related source path
                                files["source"] = file_tar_path
                            elif (
                                not os.path.isdir(files["source"])
                                and ntpath.basename(files["source"]) not in checkList
                            ):
                                # add the source path to the check list
                                checkList.append(ntpath.basename(files["source"]))
                                # add to list of file to tar
                                file_tar_path = (
                                    constants.FOLDER_BUNDLES + os.sep
                                    + generics_utils.remove_URI_forbidden_char(bundle["name"]) + os.sep
                                    + generics_utils.remove_URI_forbidden_char(bundle["version"]) + os.sep
                                    + generics_utils.remove_URI_forbidden_char(ntpath.basename(files["source"]))
                                )
                                archive_files.append([file_tar_path, files["source"]])
                                # changing source path to archive related source path
                                files["source"] = file_tar_path
                            else:
                                printer.out(
                                    "found two files with the same source path in the bundles section",
                                    printer.ERROR,
                                )
                                return 2
                    else:
                        printer.out("No files section found for bundle", printer.ERROR)
                        return 2
                    if "license" in bundle and "source" in bundle["license"]:
                        # add to list of file to tar
                        file_tar_path = (
                            constants.FOLDER_BUNDLES + os.sep
                            + generics_utils.remove_URI_forbidden_char(bundle["name"]) + os.sep
                            + generics_utils.remove_URI_forbidden_char(ntpath.basename(bundle["license"]["source"]))
                        )
                        archive_files.append([file_tar_path, bundle["license"]["source"]])
                        # changing source path to archive related source path
                        bundle["license"]["source"] = file_tar_path
        except KeyError as e:
            printer.out("Error in bundle", printer.ERROR)
            return 2
        if "source_logo" in template["stack"]:
            # add to list of file to tar
            file_tar_path = (
                constants.FOLDER_LOGO + os.sep
                + generics_utils.remove_URI_forbidden_char(ntpath.basename(template["stack"]["source_logo"]))
            )
            archive_files.append([file_tar_path, template["stack"]["source_logo"]])
            # changing source path to archive related source path
            template["stack"]["source_logo"] = file_tar_path
        # start from a clean temporary working directory
        if os.path.isdir(constants.TMP_WORKING_DIR):
            # delete tmp dir
            shutil.rmtree(constants.TMP_WORKING_DIR)
        os.mkdir(constants.TMP_WORKING_DIR)
        # dump the rewritten template next to the archive members
        # (with-blocks ensure the handle is closed even on a dump error)
        if isJsonFile:
            with open(constants.TMP_WORKING_DIR + os.sep + constants.TEMPLATE_JSON_NEW_FILE_NAME, "w") as file:
                json.dump(template, file, indent=4, separators=(",", ": "))
            archive_files.append(
                [
                    constants.TEMPLATE_JSON_FILE_NAME,
                    constants.TMP_WORKING_DIR + os.sep + constants.TEMPLATE_JSON_NEW_FILE_NAME,
                ]
            )
        else:
            with open(constants.TMP_WORKING_DIR + os.sep + constants.TEMPLATE_YAML_NEW_FILE_NAME, "w") as file:
                yaml.safe_dump(template, file, default_flow_style=False, indent=2, explicit_start="---")
            archive_files.append(
                [
                    constants.TEMPLATE_YAML_FILE_NAME,
                    constants.TMP_WORKING_DIR + os.sep + constants.TEMPLATE_YAML_NEW_FILE_NAME,
                ]
            )
        if doArgs.archive_path is not None:
            tar_path = doArgs.archive_path
        else:
            tar_path = constants.TMP_WORKING_DIR + os.sep + "archive.tar.gz"
        tar = tarfile.open(tar_path, "w|gz")
        for file_tar_path, file_global_path in archive_files:
            # fetch remote members into the tmp dir when needed
            file = generics_utils.get_file(
                file_global_path, constants.TMP_WORKING_DIR + os.sep + os.path.basename(file_global_path)
            )
            if file is None:
                # BUG FIX: typo "bunlde" corrected in the user-facing message
                printer.out("Downloaded bundle file not found", printer.ERROR)
                return 2
            tar.add(file, arcname=file_tar_path)
        tar.close()
        # archive is created, doing import
        r = self.import_stack(tar_path, False, doArgs.force, doArgs.rbundles, doArgs.use_major)
        if r != 0:
            return r
        # delete tmp dir
        shutil.rmtree(constants.TMP_WORKING_DIR)
        return 0
    except OSError as e:
        printer.out("OSError: " + str(e), printer.ERROR)
    except IOError as e:
        printer.out("File error: " + str(e), printer.ERROR)
    except ArgumentParserError as e:
        printer.out("In Arguments: " + str(e), printer.ERROR)
        self.help_create()
    except Exception as e:
        return handle_uforge_exception(e)
def do_create(self, args):
    """Create a bundle archive from a JSON/YAML bundle file and import it.

    Reads the (local or remote) bundle description, registers every referenced
    file in the archive manifest (recursing into directories), packs the
    manifest into a tar.gz and hands it to ``import_bundle``.

    Returns:
        0 on success, 2 on a user/validation error, the non-zero code
        propagated from ``import_bundle``, or None when a caught OSError /
        IOError / ArgumentParserError is only reported.
    """
    try:
        # add arguments
        doParser = self.arg_create()
        doArgs = doParser.parse_args(shlex.split(args))
        # if the help command is called, parse_args returns None object
        if not doArgs:
            return 2
        # --
        # get file (remote or local)
        file = generics_utils.get_file(doArgs.file)
        if file is None:
            return 2
        bundle = validate_bundle(file)
        if bundle is None:
            return 2
        isJsonFile = check_extension_is_json(file)
        archive_files = []
        try:
            checkList = []
            if "files" in bundle:
                for files in bundle["files"]:
                    # walks the entry (and any children) and fills the manifest
                    checkList, archive_files = recursivelyAppendToArchive(bundle, files, "", checkList, archive_files)
            else:
                printer.out("No files section found for bundle", printer.ERROR)
                return 2
            if "license" in bundle and "source" in bundle["license"]:
                # add to list of file to tar
                file_tar_path = (constants.FOLDER_BUNDLES + os.sep
                                 + generics_utils.remove_URI_forbidden_char(bundle["name"]) + os.sep
                                 + generics_utils.remove_URI_forbidden_char(ntpath.basename(bundle["license"]["source"])))
                archive_files.append([file_tar_path, bundle["license"]["source"]])
                # changing source path to archive related source path
                bundle["license"]["source"] = file_tar_path
            if "sourceLogo" in bundle:
                # add to list of file to tar
                file_tar_path = (constants.FOLDER_BUNDLES + os.sep
                                 + generics_utils.remove_URI_forbidden_char(bundle["name"]) + os.sep
                                 + generics_utils.remove_URI_forbidden_char(ntpath.basename(bundle["sourceLogo"])))
                archive_files.append([file_tar_path, bundle["sourceLogo"]])
                # changing source path to archive related source path
                bundle["sourceLogo"] = file_tar_path
        except ValueError as ve:
            # duplicate source path detected by recursivelyAppendToArchive
            printer.out(str(ve), printer.ERROR)
            return 2
        except KeyError as e:
            printer.out("Error in bundle", printer.ERROR)
            return 2
        # start from a clean temporary working directory
        if os.path.isdir(constants.TMP_WORKING_DIR):
            # delete tmp dir
            shutil.rmtree(constants.TMP_WORKING_DIR)
        os.mkdir(constants.TMP_WORKING_DIR)
        if isJsonFile:
            fileName = constants.BUNDLE_JSON_FILE_NAME
            newFileName = constants.BUNDLE_JSON_NEW_FILE_NAME
        else:
            fileName = constants.BUNDLE_YAML_FILE_NAME
            newFileName = constants.BUNDLE_YAML_NEW_FILE_NAME
        archive_files = dump_data_in_file(bundle, archive_files, isJsonFile, fileName, newFileName)
        if doArgs.archive_path is not None:
            tar_path = doArgs.archive_path
        else:
            tar_path = constants.TMP_WORKING_DIR + os.sep + "archive.tar.gz"
        tar = tarfile.open(tar_path, "w|gz")
        for file_tar_path, file_global_path in archive_files:
            if not os.path.isdir(file_global_path):
                # fetch remote members into the tmp dir when needed
                file = generics_utils.get_file(file_global_path, constants.TMP_WORKING_DIR + os.sep + os.path.basename(file_global_path))
                if file is None:
                    # BUG FIX: typo "bunlde" corrected in the user-facing message
                    printer.out("Downloaded bundle file not found", printer.ERROR)
                    return 2
                tar.add(file, arcname=file_tar_path)
            else:
                # directories are added straight from disk
                tar.add(file_global_path, arcname=file_tar_path)
        tar.close()
        # archive is created, doing import
        r = self.import_bundle(tar_path, False)
        if r != 0:
            return r
        # delete tmp dir
        shutil.rmtree(constants.TMP_WORKING_DIR)
        return 0
    except OSError as e:
        printer.out("OSError: " + str(e), printer.ERROR)
    except IOError as e:
        printer.out("File error: " + str(e), printer.ERROR)
    except ArgumentParserError as e:
        printer.out("In Arguments: " + str(e), printer.ERROR)
        self.help_create()
    except Exception as e:
        return handle_uforge_exception(e)
def do_create(self, args):
    """Create a stack-template archive from a JSON file and import it
    (legacy JSON-only variant).

    Returns:
        0 on success, 2 on a user/validation error, the non-zero code
        propagated from ``import_stack``, or None when argparse exits or a
        caught OSError / IOError / ArgumentParserError is only reported.
    """
    try:
        # add arguments
        doParser = self.arg_create()
        try:
            doArgs = doParser.parse_args(args.split())
        except SystemExit as e:
            # argparse calls sys.exit() on --help or bad arguments.
            # NOTE(review): returns None (not an error code) — legacy behavior kept.
            return
        # --
        # get json file (remote or local)
        file = generics_utils.get_file(doArgs.file)
        if file is None:
            return 2
        template = validate_json_file(file)
        if template is None:
            return 2
        # builders are not shipped inside the archive
        if "builders" in template:
            template["builders"] = None
        archive_files = []
        if "config" in template["stack"]:
            for config in template["stack"]["config"]:
                # add to list of file to tar
                if "source" in config:
                    file_tar_path = (constants.FOLDER_CONFIGS + os.sep
                                     + generics_utils.remove_URI_forbidden_char(ntpath.basename(config["source"])))
                    archive_files.append([file_tar_path, config["source"]])
                    # changing source path to archive related source path
                    config["source"] = file_tar_path
                else:
                    printer.out("No source file found in config", printer.ERROR)
                    return 2
        try:
            if "bundles" in template["stack"]:
                for bundle in template["stack"]["bundles"]:
                    if "files" in bundle:
                        for files in bundle["files"]:
                            # add to list of file to tar
                            file_tar_path = (constants.FOLDER_BUNDLES + os.sep
                                             + generics_utils.remove_URI_forbidden_char(bundle["name"]) + os.sep
                                             + generics_utils.remove_URI_forbidden_char(bundle["version"]) + os.sep
                                             + generics_utils.remove_URI_forbidden_char(ntpath.basename(files["source"])))
                            archive_files.append([file_tar_path, files["source"]])
                            # changing source path to archive related source path
                            files["source"] = file_tar_path
                    else:
                        printer.out("No files section found for bundle", printer.ERROR)
                        return 2
                    if "license" in bundle and "source" in bundle["license"]:
                        # add to list of file to tar
                        file_tar_path = (constants.FOLDER_BUNDLES + os.sep
                                         + generics_utils.remove_URI_forbidden_char(bundle["name"]) + os.sep
                                         + generics_utils.remove_URI_forbidden_char(ntpath.basename(bundle["license"]["source"])))
                        archive_files.append([file_tar_path, bundle["license"]["source"]])
                        # changing source path to archive related source path
                        bundle["license"]["source"] = file_tar_path
        except KeyError as e:
            printer.out("Error in bundle", printer.ERROR)
            return 2
        if "source_logo" in template["stack"]:
            # add to list of file to tar
            file_tar_path = (constants.FOLDER_LOGO + os.sep
                             + generics_utils.remove_URI_forbidden_char(ntpath.basename(template["stack"]["source_logo"])))
            archive_files.append([file_tar_path, template["stack"]["source_logo"]])
            # changing source path to archive related source path
            template["stack"]["source_logo"] = file_tar_path
        # start from a clean temporary working directory
        if os.path.isdir(constants.TMP_WORKING_DIR):
            # delete tmp dir
            shutil.rmtree(constants.TMP_WORKING_DIR)
        os.mkdir(constants.TMP_WORKING_DIR)
        # dump the rewritten template (with-block closes the handle on error too)
        with open(constants.TMP_WORKING_DIR + os.sep + constants.TEMPLATE_JSON_NEW_FILE_NAME, "w") as file:
            json.dump(template, file, indent=4, separators=(',', ': '))
        archive_files.append([constants.TEMPLATE_JSON_FILE_NAME,
                              constants.TMP_WORKING_DIR + os.sep + constants.TEMPLATE_JSON_NEW_FILE_NAME])
        if doArgs.archive_path is not None:
            tar_path = doArgs.archive_path
        else:
            tar_path = constants.TMP_WORKING_DIR + os.sep + "archive.tar.gz"
        tar = tarfile.open(tar_path, "w|gz")
        for file_tar_path, file_global_path in archive_files:
            # fetch remote members into the tmp dir when needed
            file = generics_utils.get_file(file_global_path, constants.TMP_WORKING_DIR + os.sep + os.path.basename(file_global_path))
            if file is None:
                # BUG FIX: typo "bunlde" corrected in the user-facing message
                printer.out("Downloaded bundle file not found", printer.ERROR)
                return 2
            tar.add(file, arcname=file_tar_path)
        tar.close()
        # archive is created, doing import
        r = self.import_stack(tar_path, False, doArgs.force, doArgs.rbundles, doArgs.use_major)
        if r != 0:
            return r
        # delete tmp dir
        shutil.rmtree(constants.TMP_WORKING_DIR)
        return 0
    except OSError as e:
        printer.out("OSError: " + str(e), printer.ERROR)
    except IOError as e:
        printer.out("File error: " + str(e), printer.ERROR)
    except ArgumentParserError as e:
        printer.out("In Arguments: " + str(e), printer.ERROR)
        self.help_create()
    except Exception as e:
        return handle_uforge_exception(e)
def do_create(self, args):
    """Create a stack-template archive from a JSON/YAML file and import it
    (variant delegating bundle-file traversal to ``recursivelyAppendToArchive``).

    Returns:
        0 on success, 2 on a user/validation error, the non-zero code
        propagated from ``import_stack``, or None when a caught OSError /
        IOError / ArgumentParserError is only reported.
    """
    try:
        # add arguments
        doParser = self.arg_create()
        doArgs = doParser.parse_args(shlex.split(args))
        # if the help command is called, parse_args returns None object
        if not doArgs:
            return 2
        # --
        # get file (remote or local)
        file = generics_utils.get_file(doArgs.file)
        if file is None:
            return 2
        # validate parsing and mandatory fields
        template = validate(file)
        if template is None:
            return 2
        isJsonFile = check_extension_is_json(file)
        # builders are not shipped inside the archive
        if "builders" in template:
            template["builders"] = None
        archive_files = []
        if "config" in template["stack"]:
            for config in template["stack"]["config"]:
                # add to list of file to tar
                if "source" in config:
                    file_tar_path = (constants.FOLDER_CONFIGS + os.sep
                                     + generics_utils.remove_URI_forbidden_char(ntpath.basename(config["source"])))
                    archive_files.append([file_tar_path, config["source"]])
                    # changing source path to archive related source path
                    config["source"] = file_tar_path
                else:
                    printer.out("No source file found in config", printer.ERROR)
                    return 2
        try:
            checkList = []
            if "bundles" in template["stack"]:
                for bundle in template["stack"]["bundles"]:
                    if "files" in bundle:
                        for files in bundle["files"]:
                            # walks the entry (and any children) and fills the manifest
                            checkList, archive_files = recursivelyAppendToArchive(
                                bundle, files, "", checkList, archive_files)
                    else:
                        printer.out("No files section found for bundle", printer.ERROR)
                        return 2
                    if "license" in bundle and "source" in bundle["license"]:
                        # add to list of file to tar
                        file_tar_path = (constants.FOLDER_BUNDLES + os.sep
                                         + generics_utils.remove_URI_forbidden_char(bundle["name"]) + os.sep
                                         + generics_utils.remove_URI_forbidden_char(ntpath.basename(bundle["license"]["source"])))
                        archive_files.append([file_tar_path, bundle["license"]["source"]])
                        # changing source path to archive related source path
                        bundle["license"]["source"] = file_tar_path
                    if "sourceLogo" in bundle:
                        # add to list of file to tar
                        file_tar_path = (constants.FOLDER_BUNDLES + os.sep
                                         + generics_utils.remove_URI_forbidden_char(bundle["name"]) + os.sep
                                         + generics_utils.remove_URI_forbidden_char(ntpath.basename(bundle["sourceLogo"])))
                        archive_files.append([file_tar_path, bundle["sourceLogo"]])
                        # changing source path to archive related source path
                        bundle["sourceLogo"] = file_tar_path
        except ValueError as ve:
            # duplicate source path detected by recursivelyAppendToArchive
            printer.out(str(ve), printer.ERROR)
            return 2
        except KeyError as e:
            printer.out("Error in bundle", printer.ERROR)
            return 2
        if "source_logo" in template["stack"]:
            # add to list of file to tar
            file_tar_path = (constants.FOLDER_LOGO + os.sep
                             + generics_utils.remove_URI_forbidden_char(ntpath.basename(template["stack"]["source_logo"])))
            archive_files.append([file_tar_path, template["stack"]["source_logo"]])
            # changing source path to archive related source path
            template["stack"]["source_logo"] = file_tar_path
        # start from a clean temporary working directory
        if os.path.isdir(constants.TMP_WORKING_DIR):
            # delete tmp dir
            shutil.rmtree(constants.TMP_WORKING_DIR)
        os.mkdir(constants.TMP_WORKING_DIR)
        if isJsonFile:
            fileName = constants.TEMPLATE_JSON_FILE_NAME
            newFileName = constants.TEMPLATE_JSON_NEW_FILE_NAME
        else:
            fileName = constants.TEMPLATE_YAML_FILE_NAME
            newFileName = constants.TEMPLATE_YAML_NEW_FILE_NAME
        archive_files = dump_data_in_file(template, archive_files, isJsonFile, fileName, newFileName)
        if doArgs.archive_path is not None:
            tar_path = doArgs.archive_path
        else:
            tar_path = constants.TMP_WORKING_DIR + os.sep + "archive.tar.gz"
        tar = tarfile.open(tar_path, "w|gz")
        for file_tar_path, file_global_path in archive_files:
            if not os.path.isdir(file_global_path):
                # fetch remote members into the tmp dir when needed
                file = generics_utils.get_file(
                    file_global_path,
                    constants.TMP_WORKING_DIR + os.sep + os.path.basename(file_global_path))
                if file is None:
                    # BUG FIX: typo "bunlde" corrected in the user-facing message
                    printer.out("Downloaded bundle file not found", printer.ERROR)
                    return 2
                tar.add(file, arcname=file_tar_path)
            else:
                # directories are added straight from disk
                tar.add(file_global_path, arcname=file_tar_path)
        tar.close()
        # archive is created, doing import
        r = self.import_stack(tar_path, False, doArgs.force, doArgs.rbundles, doArgs.use_major)
        if r != 0:
            return r
        # delete tmp dir
        shutil.rmtree(constants.TMP_WORKING_DIR)
        return 0
    except OSError as e:
        printer.out("OSError: " + str(e), printer.ERROR)
    except IOError as e:
        printer.out("File error: " + str(e), printer.ERROR)
    except ArgumentParserError as e:
        printer.out("In Arguments: " + str(e), printer.ERROR)
        self.help_create()
    except Exception as e:
        return handle_uforge_exception(e)