def read_config(root: Path, mf_file: Optional[Union[Path, bytes, str, dict]] = None) -> Optional[Project]:
    """Load a :class:`Project` from raw JSON, a parsed dict, a path, or the default file.

    :param root: project root directory, passed through to :class:`Project`.
    :param mf_file: raw JSON content (``bytes``/``str``), an already-parsed
        ``dict``, a path to the config file, or ``None`` to look for
        ``DEFAULT_CONFIG_FILE_NAME`` under *root*.
    :return: the loaded project, or ``None`` when the config file is missing.
    """
    def _load_json(data):
        # Accept both file-like objects and raw JSON strings/bytes.
        json_ = json.load(data) if hasattr(data, 'read') else json.loads(data)
        validate(instance=json_, schema=_SCHEMA)
        assert len(json_) > 0, 'json is an empty object'
        return json_

    if mf_file is not None:
        # Raw content short-circuits: no filesystem access needed.
        if isinstance(mf_file, (bytes, str)):
            return Project(_load_json(mf_file), root)
        if isinstance(mf_file, dict):
            return Project(mf_file, root)
    conf_file_path = Path(mf_file) if mf_file else root / DEFAULT_CONFIG_FILE_NAME
    if not conf_file_path.exists():
        LOGGER.warning("config file not exists [%s]", conf_file_path)
        return None
    with open(conf_file_path, 'r') as f:
        cfg = _load_json(f)
    # Build the Project outside the `with` — the file is only needed for parsing.
    return Project(cfg, root)
def ref(component_name, asset: AssetBase):
    """Record *asset* in the shared ``assets`` map (first writer wins) and return its gs:// URL."""
    path = asset.path
    LOGGER.debug("[%s] discovering asset %s", component_name, path)
    key = f'{mf_file.repository}/{build.git_branch}/{build.git_sha}/{component_name}/{asset.filename}'
    if key not in assets:
        assets[key] = path
    return f'gs://{mf_file.bucket}/{key}'
def cli(ctx, format, config, debug):
    """Root command: configure logging and stash shared state on the click context."""
    ctx.ensure_object(dict)
    # Verbose logging only when --debug was requested.
    LOGGER.setLevel(logging.DEBUG if debug else logging.INFO)
    project_root = __current_dir()
    conf_path = Path(config) if config else None
    ctx.obj['is_debug'] = debug
    ctx.obj['root_dir'] = project_root
    ctx.obj[PROJECT_OPT] = read_config(project_root, mf_file=conf_path)
    ctx.obj[FORMAT_OPT] = format
def update(self, build: BuildInfo, project_obj: Project, upload: bool = True):
    """
    Compare and update blob by generation. Trying until success.

    Retries optimistically: on a generation conflict the manifest is
    re-fetched and re-merged, but assets are uploaded at most once.

    :param build: build info
    :param upload: to do uploading of a content, (for debug)
    :param project_obj: project config providing the target bucket
    """
    refs_upload_done = False
    while True:
        # Re-merge on every attempt so a conflict retry picks up the
        # freshly fetched manifest content.
        current_manifest, assets = _merge_new_manifest(
            self._original_content, build, project_obj)
        if not upload:
            # Dry-run: return the merged manifest without touching storage.
            return current_manifest
        # Upload assets first and update manifest only after it.
        if not refs_upload_done:
            # Guard: assets are content-addressed by key, so uploading them
            # once is enough even if the manifest CAS below needs retries.
            refs_upload_done = True
            for key, file in assets.items():
                LOGGER.info("Uploading %s [%s]", file, key)
                self._storage.upload(project_obj.bucket, key, file.absolute())
            LOGGER.info("Uploading done for %d objects", len(assets))
        manifest_json = json.dumps(current_manifest).encode('utf-8')
        # Compare-and-swap on the remembered generation: succeeds only if
        # nobody else modified the manifest since we fetched it.
        ok, err_resp = self._storage.cas_blob(data=manifest_json,
                                              generation=self._version,
                                              bucket_name=self._bucket,
                                              blob_name=self._blob_key)
        if ok:
            LOGGER.debug("new updated manifest.json \n%s", manifest_json)
            return current_manifest
        elif err_resp is None:
            # err_resp None signals a generation conflict (concurrent writer):
            # refresh our view and retry the merge + CAS.
            # TODO any logic to resolve conflict in the content ?
            LOGGER.warning("manifest have already been modified, retry...")
            self.__fetch_manifest()
        else:
            # Hard failure from storage — do not retry.
            LOGGER.error("update failed [%s] %s", err_resp.status_code,
                         err_resp.text)
            raise Exception('GoogleStorage update failed')
def fetch_manifest(self) -> Tuple[str, int, dict]:
    """
    Fetch manifest from GS bucket. Remember blob's generation for concurrency control.

    Creates an empty manifest if none exists yet.

    :return: (blob name, blob generation, parsed manifest JSON)
    """
    bucket = self._gs_bucket
    key = f'{self._semantic_name}/{MANIFEST_NAME}'
    manifest_blob: storage.bucket.Blob = bucket.get_blob(key)
    if not manifest_blob:
        LOGGER.warning(
            f'{MANIFEST_NAME} not exists by gs://{bucket.name}/{key}, create empty'
        )
        # Bootstrap an empty manifest.  generation=0 makes the CAS write
        # succeed only if the object does not exist yet.
        empty_manifest: str = json.dumps({"@spec": 1, "@ns": {}})
        ok, err = self.cas_blob(empty_manifest.encode('utf-8'),
                                generation=0,
                                bucket_name=bucket.name,
                                blob_name=key)
        if ok or err is None:
            # manifest has just created
            # (err is None => another client won the race — either way the
            # blob exists now, so re-fetch it)
            manifest_blob = bucket.get_blob(key)
        else:
            LOGGER.error("Could not create manifest %s", err.content)
            raise Exception("creating %s failed" % key)
    str_ = manifest_blob.download_as_string()
    json_ = json.loads(str_)
    LOGGER.debug('Fetching manifest -- gs://%s/%s#%d',
                 manifest_blob.bucket.name, manifest_blob.name,
                 manifest_blob.generation)
    return manifest_blob.name, manifest_blob.generation, json_
def __init__(self, bucket, semantic_name):
    """Bind this storage client to a GCS *bucket*.

    :param bucket: name of the GCS bucket holding manifests and assets.
    :param semantic_name: repository name used as the blob key prefix.
    :raises RuntimeError: if the bucket does not exist, or if Object
        Versioning is disabled on it.
    """
    with warnings.catch_warnings():
        # Suppress google.auth warnings emitted during default-credentials
        # discovery (e.g. end-user credential quota warnings).
        warnings.simplefilter("ignore")
        credentials, _ = google.auth.default()
        self._storage_client = storage.Client(credentials=credentials)
    self._credentials = credentials
    self._semantic_name = semantic_name
    self._gs_bucket: storage.Bucket = self._storage_client.lookup_bucket(
        bucket)
    if self._gs_bucket is None:
        LOGGER.error("bucket %s not exists", bucket)
        raise RuntimeError('not_found')
    # Versioning is mandatory: generation-based compare-and-swap updates
    # rely on it to detect concurrent writers.
    if not self._gs_bucket.versioning_enabled:
        msg = f"Object Versioning for bucket [ {self._gs_bucket.name} ] is not enabled. " \
              "This can lead to a potential loss of updates while being published by multiple clients. " \
              "Please enable it for further usage. \n" \
              f"Simplest way is to fix it -- gsutil versioning set on gs://{self._gs_bucket.name} \n" \
              "More information - https://cloud.google.com/storage/docs/gsutil/addlhelp/ObjectVersioningandConcurrencyControl"
        raise RuntimeError(msg)
def get(ctx, bucket, repo, app, branch, destination):
    """ Download all found binaries. """
    ctx.ensure_object(dict)
    project = ctx.obj[PROJECT_OPT]
    # Without a config file BOTH --bucket and --repo are required.  The old
    # check used `and`, so supplying only one of them slipped past the guard
    # and crashed below on `project.bucket` (project is None).
    if project is None and (bucket is None or repo is None):
        click.echo(
            f'Config file not found in [{ctx.obj["root_dir"]}] and --bucket not specifies.\n'
            'Please specify --bucket and --repo parameters or --config file path',
            err=True)
        return 1
    # Fall back to the CLI options when no project config was loaded —
    # previously the options were accepted but never used.
    bucket_name = project.bucket if project is not None else bucket
    repo_name = project.repository if project is not None else repo
    manifest = Manifest(bucket_name, repo_name)
    binaries_list = manifest.search(branch_name=branch, app_name=app)
    for binary in binaries_list:
        # Log before the (potentially slow) download so progress is visible.
        LOGGER.info("Downloading... %s", binary['url'])
        manifest.download(binary, dest=destination)
def put(ctx, git_branch, git_commit, build_id, no_upload):
    """ Scan current folder for .mf.json file that contains description of current repository. Based on configuration upload all found binaries into gcs and update manifest.json with information about success build. """
    ctx.ensure_object(dict)
    root_dir = ctx.obj['root_dir']
    project = ctx.obj[PROJECT_OPT]
    # Validate the required options in one pass.
    for opt, value in (('--git_branch', git_branch),
                       ('--git_commit', git_commit),
                       ('--build_id', build_id)):
        assert value and len(str(value)) > 0, f'{opt} have to be non empty string'
    if project is None:
        click.echo(f'config file not found in {root_dir}', err=True)
        return 1
    LOGGER.debug("Current project %s", project)
    if no_upload:
        click.echo('Content wont be uploaded...')
    build_info = BuildInfo(git_branch=git_branch,
                           git_sha=git_commit,
                           build_id=build_id,
                           date=datetime.datetime.utcnow())
    manifest = Manifest(project.bucket, project.repository)
    updated_content = manifest.update(build_info, project, upload=not no_upload)
    if no_upload:
        # Dry-run: show the merged manifest instead of publishing it.
        click.echo(json.dumps(updated_content, indent=4))
def main():
    """Entry point: parse args, load config, and publish the manifest.

    :return: process exit code — 0 on success, 1 on any failure.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--git_sha",
                        help="A git revision checksum",
                        required=True)
    parser.add_argument("--git_branch",
                        help="Current git branch",
                        required=True)
    parser.add_argument("--build_id",
                        help="Current GCB id",
                        required=True)
    # Fixed help text: the flag ENABLES uploading; the old text ("Do not
    # upload file") described the opposite of what the flag does.
    parser.add_argument("--upload",
                        help="Upload binaries and update the remote manifest",
                        default=False,
                        action='store_true')
    parser.add_argument("--mf_file",
                        help="Path to config file, that describes a repository. " \
                             "By default search for {} in the root directory for current script"
                        .format(DEFAULT_CONFIG_FILE_NAME),
                        required=False)
    args = parser.parse_args()
    try:
        root_dir = Path('.').absolute()
        LOGGER.info(f'Set current project root dir ({root_dir})')
        conf_file = config(root_dir, args.mf_file)
        build_info = BuildInfo(git_branch=args.git_branch,
                               git_sha=args.git_sha,
                               date=datetime.datetime.utcnow(),
                               build_id=args.build_id)
        actual_manifest = Manifest(conf_file.bucket, conf_file.repository)
        new_content = actual_manifest.update(build_info,
                                             conf_file,
                                             upload=args.upload)
        if not args.upload:
            LOGGER.info("skipping upload...")
            print(json.dumps(new_content, indent=4))
        # Return 0 on the upload path too — previously a successful upload
        # fell off the end of the function and returned None.
        return 0
    except Exception as ex:  # top-level boundary: log and map to exit code
        LOGGER.error("error: %s", ex)
        return 1
def config(root: Path, mf_file: Optional[Union[Path, bytes, str]] = None) -> Project:
    """Load and validate the project config.

    :param root: project root directory, passed through to :class:`Project`.
    :param mf_file: raw JSON content (``bytes``/``str``), a path to the
        config file, or ``None`` to use ``DEFAULT_CONFIG_FILE_NAME`` in *root*.
    :return: the parsed :class:`Project`.
    :raises Exception: when the config file does not exist.

    BUG FIX: the signature previously read ``mf_file=Optional[Union[...]]``
    (``=`` where ``: ... = None`` was intended), which made the *typing
    object* the default value — calling ``config(root)`` then crashed in
    ``Path(mf_file)`` below.  The default is now ``None``.
    """
    def _load(data):
        # Accept both file-like objects and raw JSON strings/bytes.
        json_ = json.load(data) if hasattr(data, 'read') else json.loads(data)
        validate(instance=json_, schema=_SCHEMA)
        assert len(json_) > 0, 'json is an empty object'
        return json_

    if mf_file is not None and isinstance(mf_file, (bytes, str)):
        return Project(_load(mf_file), root)
    conf_file_path = Path(mf_file) if mf_file else root / DEFAULT_CONFIG_FILE_NAME
    if not conf_file_path.exists():
        LOGGER.error("config file not exists [%s]", conf_file_path)
        raise Exception('config file not exists %s' % conf_file_path)
    LOGGER.info("reading config file [%s]", conf_file_path)
    with open(conf_file_path, 'r') as f:
        cfg = _load(f)
    project = Project(cfg, root)
    LOGGER.info("loaded config: %s", project)
    return project
def __current_dir() -> Path:
    """Resolve the current working directory as the project root."""
    project_root = Path('.').absolute()
    LOGGER.debug(f'Set current project root dir ({project_root})')
    return project_root