def publish_demo_component(self, demo_file_url):
    """Download a zipped demo component, repackage it as tar.gz and upload to S3.

    :param demo_file_url: URL of the zipped demo component to download.
    :return: public S3 URL of the uploaded component tarball.
    """
    # Download the zip into a unique temp directory to avoid collisions
    # between concurrent publishes.
    root_directory = os.path.join(tempfile.gettempdir(), str(uuid.uuid4()))
    os.makedirs(root_directory, exist_ok=True)
    component_name = download_file_from_url(demo_file_url, root_directory)

    # Hoisted: this fragile parse was computed twice in the original.
    # Assumes the downloaded name has the form "<prefix>_<name>.<ext>",
    # e.g. "1234_component.zip" -> "component" — TODO confirm with the
    # producer of demo_file_url.
    base_name = component_name.split(".")[0].split("_")[1]

    # Extract the zip, then re-pack the extracted tree as a tar.gz.
    extracted_file = os.path.join(root_directory, base_name)
    extract_zip_file(os.path.join(root_directory, component_name), extracted_file)
    output_path = os.path.join(root_directory, base_name + '.tar.gz')
    make_tarfile(source_dir=extracted_file, output_filename=output_path)

    # Upload the tarball to the assets bucket and return its public URL.
    key = f"assets/{self.org_id}/{self.service_id}/component.tar.gz"
    boto_utils.s3_upload_file(filename=output_path, bucket=ASSETS_COMPONENT_BUCKET_NAME, key=key)
    new_demo_url = f"https://{ASSETS_COMPONENT_BUCKET_NAME}.s3.amazonaws.com/{key}"
    return new_demo_url
def _extract_zip_and_and_tar(self, org_id, service_id, s3_url):
    """Download a zip from *s3_url*, extract it and repackage it as a tar.gz.

    :param org_id: organisation id, used to build the temp directory layout.
    :param service_id: service id, used to build the temp directory layout.
    :param s3_url: URL of the zip file to download.
    :return: path of the created tar.gz file.
    """
    root_directory = ASSET_TEMP_EXTRACT_DIRECTORY
    zip_directory = root_directory + org_id + "/" + service_id
    extracted_zip_directory = root_directory + "extracted/" + org_id + "/" + service_id
    zip_file_name = download_file_from_url(s3_url, zip_directory)
    zip_file_path = zip_directory + "/" + zip_file_name
    # Assumes the zip file name has the form "<prefix>_<name>.<ext>" —
    # same convention as the other extraction helpers in this file.
    extracted_file_path = extracted_zip_directory + "/" + zip_file_name.split(".")[0].split("_")[1]
    extract_zip_file(zip_file_path, extracted_file_path)
    tar_file_path = extracted_file_path + ".tar.gz"
    # FIX: the original passed (tar_file_path, extracted_file_path)
    # positionally, but every other call site in this file uses
    # make_tarfile(source_dir=..., output_filename=...) — the positional
    # order swapped source and destination. Keywords make it unambiguous.
    make_tarfile(source_dir=extracted_file_path, output_filename=tar_file_path)
    return tar_file_path
def download_extract_and_upload_proto_files(filename, input_file_extension, bucket_name, download_file_path, upload_file_path):
    """Download a proto archive from S3, extract it and upload its contents.

    :param filename: base name of the proto archive (without extension).
    :param input_file_extension: archive extension including the dot, e.g. ".zip".
    :param bucket_name: S3 bucket to download from and upload to.
    :param download_file_path: S3 key of the archive to download.
    :param upload_file_path: S3 key prefix to upload the extracted files under.
    """
    # Unique temp base so concurrent invocations cannot clash.
    base = os.path.join(TEMP_FILE_DIR, uuid.uuid4().hex)
    # FIX: the `filename` parameter was never used — the paths contained a
    # hard-coded "(unknown)" placeholder where the file name belongs.
    download = f"{base}_{filename}{input_file_extension}"
    extracted = f"{base}_{filename}"
    boto_utils.s3_download_file(bucket=bucket_name, key=download_file_path, filename=download)
    utils.extract_zip_file(zip_file_path=download, extracted_path=extracted)
    # handle_extraction_path may adjust the root when the archive contains a
    # single wrapping directory — behaviour defined by GenerateStubService.
    extracted = GenerateStubService.handle_extraction_path(
        filename=f"{filename}{input_file_extension}", extracted=extracted)
    boto_utils.upload_folder_contents_to_s3(folder_path=extracted, bucket=bucket_name, key=upload_file_path)
def upload_proto_file_from_hash_to_bucket(self, org_id, service_id, asset_hash):
    """Fetch a proto archive from IPFS, repackage it as tar.gz and push it to S3.

    :param org_id: organisation id used in the destination S3 key.
    :param service_id: service id used in the destination S3 key.
    :param asset_hash: IPFS hash of the archive to fetch.
    """
    # Work inside a fresh uuid1-named directory under the system temp dir.
    work_dir = os.path.join(tempfile.gettempdir(), str(uuid.uuid1()))
    if not os.path.exists(work_dir):
        os.makedirs(work_dir)

    archive_path = os.path.join(work_dir, 'proto.tar')
    extract_dir = os.path.join(work_dir, 'proto')
    tarball_path = os.path.join(work_dir, 'proto.tar.gz')

    # Pull the raw archive bytes out of IPFS and persist them locally.
    ipfs_payload = self._ipfs_util.read_bytesio_from_ipfs(asset_hash)
    with open(archive_path, 'wb') as archive_file:
        archive_file.write(ipfs_payload.getbuffer())

    # Unpack, re-compress as tar.gz, and ship the result to the assets bucket.
    extract_zip_file(zip_file_path=archive_path, extracted_path=extract_dir)
    make_tarfile(source_dir=extract_dir, output_filename=tarball_path)
    self._s3_util.push_file_to_s3(tarball_path, ASSETS_COMPONENT_BUCKET_NAME,
                                  f"assets/{org_id}/{service_id}/proto.tar.gz")
def generate_service_proto_stubs(self, proto_s3_url, stub_s3_url):
    """Regenerate client stubs for a service's proto archive(s).

    Deletes any previously generated stubs under *stub_s3_url*, then for
    every matching proto object under *proto_s3_url*: downloads and extracts
    the zip, generates stubs for each configured target language, zips each
    per-language stub folder and uploads it back to the stub bucket.

    The original wrapped everything in ``try/except Exception as error:
    raise error`` — a no-op that only appended a re-raise frame to the
    traceback; it has been removed (callers see identical exceptions).

    :param proto_s3_url: S3 URL of the folder holding the proto archive(s).
    :param stub_s3_url: S3 URL of the folder receiving generated stub zips.
    :raises Exception: if no proto objects or no .proto file is found.
    """
    proto_bucket, proto_bucket_key = boto_utils.get_bucket_and_key_from_url(
        url=proto_s3_url)
    stub_bucket, stub_bucket_key = boto_utils.get_bucket_and_key_from_url(
        url=stub_s3_url)

    # Remove stale stubs so the bucket only holds freshly generated ones.
    to_delete_objects = boto_utils.get_objects_from_s3(
        bucket=stub_bucket, key=stub_bucket_key)
    for delete_obj in to_delete_objects:
        boto_utils.delete_objects_from_s3(bucket=stub_bucket, key=delete_obj["Key"],
                                          key_pattern=stub_bucket_key)

    proto_objects = boto_utils.get_objects_from_s3(
        bucket=proto_bucket, key=proto_bucket_key)
    if len(proto_objects) == 0:
        raise Exception("Proto file is not found")

    s3_key_pattern = re.compile(PROTO_DIRECTORY_REGEX_PATTERN)
    for obj in proto_objects:
        if re.match(s3_key_pattern, obj['Key']):
            # File details and temp locations (the original also bound the
            # key-without-extension, but never used it).
            _, file_extension = os.path.splitext(obj['Key'])
            temp_path = os.path.join(TEMP_FILE_DIR, uuid.uuid4().hex + '_proto_')
            temp_downloaded_path = temp_path + file_extension
            temp_extracted_path = temp_path + 'extracted'
            temp_generated_stub_location = os.path.join(
                temp_extracted_path, 'stubs')

            # Download and unzip. NOTE(review): when the object is not a
            # .zip, nothing is extracted and the listdir below will fail —
            # presumably only zips ever match the key pattern; confirm.
            if file_extension == '.zip':
                boto_utils.s3_download_file(
                    bucket=proto_bucket, key=obj['Key'],
                    filename=temp_downloaded_path)
                utils.extract_zip_file(
                    zip_file_path=temp_downloaded_path,
                    extracted_path=temp_extracted_path)

            # Locate a .proto at the archive root (if several exist, the
            # last one scanned wins — same as the original).
            proto_location = None
            filename_without_extn = None
            for filename in os.listdir(temp_extracted_path):
                if filename.endswith(".proto"):
                    proto_location = os.path.join(temp_extracted_path, filename)
                    filename_without_extn = os.path.splitext(filename)[0]
            if proto_location:
                # Generate stubs for every configured target language.
                # (The original bound the returned (result, file_location)
                # tuple but never used it.)
                for language in PROTO_STUB_TARGET_LANGUAGES:
                    self.generate_stubs(
                        entry_path=Path(temp_extracted_path),
                        codegen_dir=os.path.join(
                            temp_generated_stub_location, language),
                        target_language=language,
                        proto_file_path=Path(proto_location),
                        proto_file_name=filename_without_extn)
            else:
                raise Exception("Proto file is not found")

            # Zip and upload generated files: one archive per language folder.
            for folder in os.listdir(temp_generated_stub_location):
                file_to_be_uploaded = os.path.join(
                    temp_generated_stub_location, folder)
                upload_file_name = f"{folder}{file_extension}"
                utils.zip_file(
                    source_path=Path(file_to_be_uploaded),
                    zipped_path=os.path.join(
                        temp_generated_stub_location, upload_file_name))
                # file_to_be_uploaded + file_extension equals the zipped_path
                # built above — the freshly zipped archive is what is uploaded.
                boto_utils.s3_upload_file(
                    filename=file_to_be_uploaded + file_extension,
                    bucket=stub_bucket,
                    key=stub_bucket_key + '/' + upload_file_name)