            sys.exit(1)
        else:
            logger.error("Extracting metadata and provenance failed")
            raise

    shock_id = None

    # Step 3: Package data files into a single compressed file and send to shock
    try:
        workspace_id = object_details["metadata"][0][4]
        object_version = object_details["metadata"][0][2]

        name = "KBase_{0}_{1}_{2}".format(workspace_name, object_name, object_version)

        # gather a list of all files downloaded
        files = list(handler_utils.gen_recursive_filelist(transform_directory))

        # gather total size of all files
        total = 0
        for x in files:
            total += os.path.getsize(x)

        # TODO
        # Workaround for Python 2.7.3 bug 9720, http://bugs.python.org/issue9720
        # The awe workers and KBase V26 are at Python 2.7.3 and we should migrate
        # to the same version of Python that Narrative uses, which is currently
        # Python 2.7.6, after which this workaround can be removed
        if total < 2**31:
            archive_name = os.path.join(working_directory, name) + ".zip"
            with zipfile.ZipFile(archive_name, 'w', zipfile.ZIP_DEFLATED) as archive:
                for n in files:
                    archive.write(n)
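    # The branch above only covers archives under 2 GiB; on Python 2.7.3 the
    # zipfile module mishandles larger archives (bug 9720). A minimal sketch of
    # one possible fallback for the large case is to shell out to the system
    # "zip" binary, which handles Zip64 itself. This helper and its name are
    # hypothetical, not part of the original handler:
    #
    # import os
    # import subprocess
    #
    # def make_large_archive(working_directory, name, files):
    #     # Build the archive with the external zip tool instead of zipfile.
    #     archive_name = os.path.join(working_directory, name) + ".zip"
    #     subprocess.check_call(["zip", "-q", archive_name] + files)
    #     return archive_name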
    # fill in any optional arguments provided by the user
    if "optional_fields" in job_details["validate"]["handler_options"]:
        for k in optional_arguments["validate"]:
            if k in job_details["validate"]["handler_options"]["optional_fields"]:
                validation_args[k] = optional_arguments["validate"][k]

    # fill in any custom options needed to run the script
    if "custom_options" in job_details["validate"]["handler_options"]:
        for c in job_details["validate"]["handler_options"]["custom_options"]:
            if c["type"] != "boolean":
                validation_args[c["name"]] = c["value"]
            else:
                validation_args[c["name"]] = c["value"]  # TODO: Fix later with example

    # gather a list of all files downloaded
    files = list(handler_utils.gen_recursive_filelist(download_directory))

    # get the directories common to those files
    directories = list()
    for x in files:
        path = os.path.dirname(x)
        if path not in directories:
            directories.append(path)

    # TODO: 1) The following logic assumes all files are of the same type
    #          and will not work properly if there are multiple file types
    #       2) input_directory assumes the script can handle files without
    #          using input_mapping
    #
    # validate everything in each directory
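    # handler_utils.gen_recursive_filelist is called here but not defined in
    # this file. A plausible implementation, assuming it simply walks the
    # directory tree and yields every file path (a sketch, not the
    # authoritative source):
    #
    # import os
    #
    # def gen_recursive_filelist(d):
    #     """Generate all file paths found under directory d."""
    #     for root, directories, files in os.walk(d):
    #         for f in files:
    #             yield os.path.join(root, f)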
"validate"][k] # fill in any custom options needed to run the script if "custom_options" in job_details["validate"][ "handler_options"]: for c in job_details["validate"]["handler_options"][ "custom_options"]: if c["type"] != "boolean": validation_args[c["name"]] = c["value"] else: validation_args[c["name"]] = c[ "value"] # TODO: Fix later with example # gather a list of all files downloaded files = list( handler_utils.gen_recursive_filelist(download_directory)) # get the directories common to those files directories = list() for x in files: path = os.path.dirname(x) if path not in directories: directories.append(path) # TODO: 1) The following logic assumes all files are of the same type # and will not work properly if there are multiple file types # 2) input_directory assumes the script can handle files without # using input_mapping # # validate everything in each directory
"Download from {0} failed.".format(workspace_name), traceback.format_exc(), None) sys.exit(1) else: logger.error("Extracting metadata and provenance failed") raise shock_id = None # Step 3: Package data files into a single compressed file and send to shock try: name = "KBase_{0}_{1}_to_{2}_{3}".format(object_name, kbase_type, external_type, datetime.datetime.utcnow().isoformat()) # gather a list of all files downloaded files = list(handler_utils.gen_recursive_filelist(transform_directory)) archive_name = os.path.join(working_directory,name) + ".zip" archive = zipfile.ZipFile(archive_name, 'w', zipfile.ZIP_DEFLATED, allowZip64=True) for n in files: archive.write(n) archive.close() shock_info = script_utils.upload_file_to_shock(logger = logger, shock_service_url = shock_service_url, filePath = archive_name, token= kb_token) shock_id = shock_info["id"] except Exception, e: logger.debug("Caught exception while creating archive and sending to SHOCK!")