def update_badge(status):
    """Post the current run status to the configured badge server.

    Best-effort operation: warns and returns early when the repository
    has no commits or no remote URL, and warns (rather than raising)
    when the badge server cannot be reached or rejects the record.

    Args:
        status (str): Status string to record for the current commit.
    """
    if pu.is_repo_empty():
        pu.warn('No commit log found. Skipping badge server update.')
        return

    remote_url = pu.get_remote_url()
    if not remote_url:
        pu.warn('No remote url found. Skipping badge server update.')
        return

    baseurl = pu.read_config().get('badge-server-url',
                                   'http://badges.falsifiable.us')
    org, repo = remote_url.split('/')[-2:]
    badge_server_url = '{}/{}/{}'.format(baseurl, org, repo)

    # check_output returns bytes on Python 3; decode and strip the
    # trailing newline instead of slicing off only the last byte.
    branch_name = check_output(
        ['git', 'rev-parse', '--abbrev-ref', 'HEAD']
    ).decode('utf-8').strip()

    data = {
        'timestamp': int(time.time()),
        'commit_id': pu.get_head_commit(),
        'status': status,
        'branch': branch_name,
    }

    try:
        r = requests.post(badge_server_url, data=data)
        if r.status_code != 201 and r.status_code != 200:
            pu.warn("Could not create a record on the badge server.")
        else:
            pu.info(r.json()['message'], fg="green")
    except requests.exceptions.RequestException:
        pu.warn("Could not communicate with the badge server.")
def __init__(self, access_token):
    """Locate the Figshare deposition linked to this repository.

    Fetches the account's article list and scans each article's
    references for the repo's remote URL; the first match is stored in
    ``self.deposition``. Fails hard on an invalid token (403) or any
    other non-200 response.
    """
    super(Figshare, self).__init__()
    self.baseurl = 'https://api.figshare.com/v2/account/articles'
    self.params = {'access_token': access_token}

    r = requests.get(self.baseurl, params=self.params)

    if r.status_code == 403:
        pu.fail(
            "The access token provided was invalid. "
            "Please provide a valid access_token."
        )
    elif r.status_code != 200:
        pu.fail(
            "Status {}: Could not fetch the depositions."
            "Try again later.".format(r.status_code)
        )
    else:
        remote_url = pu.get_remote_url()
        for item in r.json():
            # Each list entry is shallow; fetch the full record to
            # inspect its references.
            detail_url = '{}/{}'.format(self.baseurl, item['id'])
            detail = requests.get(detail_url, params=self.params).json()
            if remote_url in detail['references']:
                self.deposition = detail
                break
def __init__(self, access_token):
    """Locate the Zenodo deposition linked to this repository.

    Scans the account's depositions for one whose first related
    identifier equals the repo's remote URL and stores it in
    ``self.deposition``. An error payload from the API (a dict rather
    than a list) surfaces as a TypeError during the scan, at which
    point the HTTP status is inspected to report a useful message.
    """
    super(Zenodo, self).__init__()
    self.baseurl = 'https://zenodo.org/api/deposit/depositions'
    self.params = {'access_token': access_token}

    r = requests.get(self.baseurl, params=self.params)
    try:
        remote_url = pu.get_remote_url()
        for entry in r.json():
            meta = entry['metadata']
            try:
                # Entries without related identifiers are skipped.
                if meta['related_identifiers'][0]['identifier'] == remote_url:
                    self.deposition = entry
            except KeyError:
                pass
    except TypeError:
        if r.status_code == 401:
            pu.fail(
                "The access token provided was invalid. "
                "Please provide a valid access_token."
            )
        else:
            pu.fail(
                "Status {}: Could not fetch the depositions."
                "Try again later.".format(r.status_code)
            )
def cli(ctx, service, history, inplace):
    """Generates markdown for the badge of a service. Currently available
    services are: CloudLab, Chameleon, Google Cloud Engine and Popper.
    """
    if history and service:
        raise BadArgumentUsage("--history can't be combined with other flags.")

    remote_url = pu.get_remote_url()
    if not remote_url:
        pu.fail("Failed to infer remote URL for git repository.")

    org, repo = remote_url.split('/')[-2:]

    if history:
        baseurl = pu.read_config().get('badge-server-url',
                                       'http://badges.falsifiable.us')
        try:
            r = requests.get('{}/{}/{}/list'.format(baseurl, org, repo))
            if r.json():
                pu.print_yaml(r.json())
            else:
                pu.info("No records to show")
        except requests.exceptions.RequestException:
            pu.fail("Could not communicate with the badge server")
        sys.exit(0)

    if not service and inplace:
        raise BadArgumentUsage("--inplace must be given with --service")

    if service is None:
        pu.fail('Please specify a service name.')
    if service not in services:
        pu.fail('Unknown service {}.'.format(service))

    if service == 'popper':
        # org/repo were already extracted above; no need to re-split.
        markup = '[![{}]({})]({})'.format(
            services[service][0],
            services[service][1].format(org, repo),
            services[service][2])
    else:
        markup = '[![{}]({})]({})'.format(*services[service])

    if not inplace:
        pu.info(markup)
        sys.exit(0)

    # Prepend the badge markup to the project's README.
    try:
        os.chdir(pu.get_project_root())
        with open('README.md', 'r+') as f:
            content = f.read()
            f.seek(0, 0)
            f.write(markup + '\n\n' + content)
    except IOError as e:
        if e.errno == ENOENT:
            pu.fail("README.md does not exist at the root of the project")
        else:
            # Previously any other I/O error (e.g. permission denied)
            # was silently swallowed; surface it instead.
            raise
def __init__(self):
    """The __init__ method of the base class is responsible for checking
    if there are no unstaged changes in the repository and fail otherwise.

    The __init__ method of the derived classes should call this method
    using the super function.

    The __init__ method of the derived class, however, is responsible for
    getting the previous relevant deposition from the service url, using
    the OAuth access token.
    """
    self.remote_url = pu.get_remote_url()
    if not self.remote_url:
        # NOTE(review): the original message ended mid-sentence
        # ("...relies on having"); completed it here — confirm wording.
        pu.fail(
            "Failed to fetch remote url for git repository. The "
            "'archive' command relies on having a remote URL "
            "configured for the repository."
        )
def update_metadata(self):
    """Reads required metadata from .popper.yml and pushes it to the
    Figshare record. This will only be called when no previous
    deposition is found.

    Fails when any required field (title, abstract, categories,
    keywords) is missing from the metadata section, or when the server
    rejects the update (anything other than HTTP 205).
    """
    record_id = self.deposition['id']
    config = pu.read_config()['metadata']

    if any(field not in config
           for field in ('title', 'abstract', 'categories', 'keywords')):
        pu.fail(
            "Metadata is not defined properly in .popper.yml. "
            "See the documentation for proper metadata format."
        )

    # Figshare calls the abstract a "description".
    data = {'description': config['abstract']}

    # Gather author names from the authorN entries, in key order.
    authors = []
    for key in sorted(config):
        if 'author' in key:
            name, email, affiliation = [
                part.strip() for part in config[key].split(',')
            ]
            authors.append({'name': name})
    if authors:
        data['authors'] = authors

    # Keywords arrive as one comma-separated string.
    data['tags'] = [kw.strip() for kw in config['keywords'].split(',')]

    # categories may be a single id or a comma-separated list of ids.
    try:
        categories = [int(config['categories'])]
    except ValueError:
        categories = [
            int(c.strip()) for c in config['categories'].split(',')
        ]
    data['categories'] = categories

    data['references'] = [pu.get_remote_url()]

    url = '{}/{}'.format(self.baseurl, record_id)
    r = requests.put(url, data=json.dumps(data), params=self.params)
    if r.status_code != 205:
        pu.fail(
            "Status {}: Failed to update metadata.".format(r.status_code)
        )
def update_metadata_from_yaml(self):
    """Reads required metadata from .popper.yml and pushes it to the
    Zenodo deposition. This will only be called when no previous
    deposition is found.

    Fails when any required field (title, upload_type, abstract,
    author1) is missing from the metadata section, or when the server
    rejects the update (anything other than HTTP 200).
    """
    record_id = self.deposition['id']
    config = pu.read_config()['metadata']

    if any(field not in config
           for field in ('title', 'upload_type', 'abstract', 'author1')):
        pu.fail(
            "Metadata is not defined properly in .popper.yml. "
            "See the documentation for proper metadata format."
        )

    data = {
        'title': config['title'],
        # Zenodo expects an HTML description.
        'description': '<p>' + config['abstract'] + '</p>',
    }

    # Gather creators from the authorN entries, in key order. Two-word
    # names are flipped to "Last, First" form.
    creators = []
    for key in sorted(config):
        if 'author' in key:
            name, email, affiliation = [
                part.strip() for part in config[key].split(',')
            ]
            words = name.split()
            if len(words) == 2:
                name = ', '.join(words[::-1])
            creators.append({'name': name, 'affiliation': affiliation})
    data['creators'] = creators

    # Keywords are optional; arrive as one comma-separated string.
    if 'keywords' in config:
        data['keywords'] = [
            kw.strip() for kw in config['keywords'].split(',')
        ]

    data['related_identifiers'] = [{
        "identifier": pu.get_remote_url(),
        "relation": "hasPart",
        "scheme": "url"
    }]

    data['upload_type'] = config['upload_type']
    if config['upload_type'] == 'publication':
        data['publication_type'] = config['publication_type']

    payload = {'metadata': data}
    url = '{}/{}'.format(self.baseurl, record_id)
    r = requests.put(
        url, data=json.dumps(payload), params=self.params,
        headers={'Content-Type': "application/json"}
    )
    if r.status_code != 200:
        pu.fail(
            "Status {}: Failed to update metadata.".format(r.status_code)
        )
def init_pipeline(self, pipeline_path, stages, envs):
    """Generates a README template for the newly initialized pipeline.

    Args:
        pipeline_path (str): The absolute path of the pipeline.
        stages (str): Contains all the stages of the pipeline separated
            by comma.
        envs (list): Contains a list of the environments on which the
            pipeline can be executed.
    """
    pipeline_name = pipeline_path.split('/')[-1]

    content = """# `{}`

<!--
NOTE TO AUTHORS: replace all the **TODO** marks with your own content.
-->

"""
    content = content.format(pipeline_name)

    if (len(stages)) > 0:
        content += """**TODO**: insert high-level description of the pipeline.

The pipeline consists of the following stages:

"""
        # NOTE: each iteration formats the whole accumulated string;
        # this works because earlier placeholders are already consumed.
        for stage in stages.split(','):
            content += """  * [`{0}`](./{0}.sh). **TODO**: describe `{0}` stage.
"""
            content = content.format(stage)

    content += """
# Obtaining the pipeline

To add this pipeline to your project using the
[`popper` CLI tool](https://github.com/systemslab/popper):

```bash
cd your-repo
popper add {0}/{1}/{2}
```

{3}

# Running the pipeline

To run the pipeline using the
[`popper` CLI tool](https://github.com/systemslab/popper):

```bash
cd {1}
popper run {2}
```
"""
    url = pu.get_remote_url()
    if url:
        if 'https://' in url:
            org = os.path.basename(os.path.dirname(url))
        else:
            # SSH-style remote, e.g. git@host:org/...
            org = url.split(':')[1]
        todomark = ''
    else:
        org = '<org>'
        todomark = '**TODO**: replace `org` appropriately.'
    content = content.format(org, self.repo_name, pipeline_name, todomark)

    content += """
The pipeline is executed on the following environment(s): `{}`. In addition,
the following environment variables are expected:

  * `<ENV_VAR1>`. Description of variable.
  * `<ENV_VAR2>`. Another description.

> **TODO**: rename or remove ENV_VAR1 and ENV_VAR2 appropriately.

For example, the following is an execution with all expected variables:
"""
    content = content.format(','.join(envs))

    content += """
```bash
export <ENV_VAR1>=value-for-<ENV_VAR_1>
export <ENV_VAR2>=value-for-<ENV_VAR_2>

popper run {}
```

> **TODO**: rename or remove `export` statements above appropriately.
"""
    content = content.format(pipeline_name)

    content += """
# Dependencies

**TODO**: add list of dependencies, for example:

  * Python.
  * C++ compiler.
  * [Docker](https://docker.com) (for generating plots).
  * etc.
"""
    self.write_readme(content, pipeline_path)