def check_env_cli_token(args):
    """Require the INFRABOX_CLI_TOKEN env var and copy it onto ``args.token``.

    Exits the process with status 1 when the variable is missing.
    """
    token = os.environ.get('INFRABOX_CLI_TOKEN')
    if token:
        args.token = token
        return
    logger.error('INFRABOX_CLI_TOKEN env var must be set')
    exit(1)
def add_project_token(args):
    """Create an auth token for the current project and print it once."""
    check_project_is_set(args)
    endpoint = args.url + api_projects_endpoint_url + args.project_id + '/tokens'
    payload = {
        'description': args.description,
        # TODO<Steffen> when scope push/pull functionality is implemented,
        # delete following 2 lines and uncomment next 2 lines
        'scope_push': True,
        'scope_pull': True
        # 'scope_push': args.scope_push,
        # 'scope_pull': args.scope_pull
    }

    response = post(endpoint, payload, get_user_headers(), verify=args.ca_bundle, timeout=60)
    if response.status_code != 200:
        logger.error(response.json()['message'])
        return

    # Print project token to the CLI (it is shown exactly once).
    logger.info('Authentication Token:'
                '\nPlease save your token at a secure place. We will not show it to you again.\n')
    logger.log(response.json()['data']['token'], print_header=False)
    return response
def delete_project_token(args):
    """Delete a project token, selecting by id first, then by description."""
    if args.id:
        delete_project_token_by_id(args)
        return
    if args.description:
        delete_project_token_by_description(args)
        return
    logger.error('Please, provide either token id or description.')
def build_and_run(args, job):
    """Execute one job according to its type and record it as a parent job."""
    kind = job['type']
    started = datetime.now()
    logger.info("Starting job %s" % job['name'])

    runners = {
        "docker-compose": build_and_run_docker_compose,
        "docker": build_and_run_docker,
    }
    if kind in runners:
        runners[kind](args, job)
    elif kind == "wait":
        # "wait" jobs exist only for dependency ordering; do nothing
        pass
    else:
        logger.error("Unknown job type")
        sys.exit(1)

    finished = datetime.now()

    # track as parent so later jobs can depend on this one
    parent_jobs.append({
        "name": job['name'],
        "state": 'finished',
        "start_date": str(started),
        "end_date": str(finished),
        "machine_config": job.get('machine_config', None),
        "depends_on": job.get('depends_on', [])
    })

    logger.info("Finished job %s" % job['name'])
def delete_secret(args):
    """Delete a secret either by id or by name.

    Logs an error when neither identifier was provided.
    """
    if args.id:
        delete_secret_by_id(args)
    elif args.name:
        delete_secret_by_name(args)
    else:
        # Fixed copy-paste error: this command selects secrets by id or
        # name, not tokens by id or description.
        logger.error('Please, provide either secret id or name.')
def delete_project(args):
    """Delete a project either by id or by name.

    Logs an error when neither identifier was provided.
    """
    if args.id:
        delete_project_by_id(args)
    elif args.name:
        delete_project_by_name(args)
    else:
        # Fixed copy-paste error: this command deletes projects, not tokens.
        logger.error('Please, provide either project id or name.')
def save_user_token(url, cookies_dict):
    """Persist the user token returned by a login response.

    Used as a ``cookies_handler`` callback: ``url`` is the remote that was
    contacted and ``cookies_dict`` the response cookies, which carry a
    ``token`` entry on successful authentication. Exits the process when
    authentication failed and no stored token can be reused.
    """
    config = local_config.get_config()
    if config is None:
        config = {}
    config.setdefault('remotes', {})

    # True when this remote was never seen before (or has nothing stored).
    is_new_remote_or_null = False
    remote_url = get_remote_url(url)
    if remote_url not in config['remotes'] \
            or config['remotes'][remote_url] is None:
        is_new_remote_or_null = True

    # Decide what are we going to do if user entered invalid username or password:
    # either use `current_user_token` if it exists or raise an error
    allow_login_if_current_user_token_is_set = False
    user_token = None
    if 'token' not in cookies_dict:
        if is_new_remote_or_null or not allow_login_if_current_user_token_is_set:
            logger.error('Unauthorized: invalid username and/or password.')
            exit(1)
        else:
            # Reuse the token previously stored for this remote.
            user_token = config['remotes'][remote_url]['current_user_token']
    else:
        user_token = cookies_dict['token']

    # Make this remote the active one and store the (new or reused) token.
    config['current_remote'] = remote_url
    config['remotes'].setdefault(remote_url, {})
    config['remotes'][remote_url]['current_user_token'] = user_token
    local_config.save_config(config)
    logger.info('Logged in successfully.')
def login(args):
    """Interactively log in to an InfraBox remote and store the user token.

    NOTE(review): the original source here was corrupted/masked
    (``'Password: '******'account/login'`` is not valid Python); the
    credential payload and login URL below are reconstructed — confirm
    against upstream history.
    """
    if args.remote_url:
        args.url = args.remote_url
    if args.remote_url and not validate_url(args.remote_url):
        logger.error('Invalid url.')
        exit(1)

    infraboxcli.env.check_env_url(args)

    email = args.email
    password = args.password
    if not email:
        email = raw_input("Email: ")
        # Don't allow to pass password without email
        password = None
    if not password:
        password = getpass.getpass('Password: ')

    data = {'email': email, 'password': password}
    url = args.url + 'account/login'
    # save_user_token persists the token cookie returned by the server.
    response = post(url, data, cookies_handler=save_user_token, verify=args.ca_bundle)
    return response
def check_env_project_id(args):
    """Require the INFRABOX_CLI_PROJECT_ID env var and copy it onto args.

    Exits the process with status 1 when the variable is missing.
    """
    project_id = os.environ.get('INFRABOX_CLI_PROJECT_ID')
    if project_id:
        args.project_id = project_id
        return
    logger.error('INFRABOX_CLI_PROJECT_ID env var must be set')
    exit(1)
def push(args):
    """Validate the local project, upload it, and optionally tail the build."""
    infraboxcli.env.check_project_root(args)
    infraboxcli.env.check_env_cli_token(args)

    if not args.url:
        logger.error('either --url or INFRABOX_URL must be set')
        sys.exit(1)

    if not os.path.isdir(args.project_root):
        logger.error('%s does not exist or is not a directory' % args.project_root)
        sys.exit(1)

    validate_infrabox_file(args)
    if args.validate_only:
        return

    result = upload_zip(args, zipdir(args))
    logger.info(result['url'])

    if args.show_console:
        show_console(result['build']['id'], args)
def validate(args):
    """Schema-validate the project's infrabox.json and report the result."""
    if not os.path.isdir(args.project_root):
        logger.error('%s does not exist or is not a directory' % args.project_root)
        sys.exit(1)

    validate_infrabox_json(args)
    # Fixed message grammar: "found infrabox.json" -> "found in infrabox.json".
    logger.info("No issues found in infrabox.json")
def validate_infrabox_json(args):
    """Load infrabox.json from the project root and schema-validate it."""
    args.project_root = os.path.abspath(args.project_root)
    json_path = os.path.join(args.project_root, 'infrabox.json')

    if not os.path.isfile(json_path):
        logger.error('%s does not exist' % json_path)
        sys.exit(1)

    with open(json_path, 'r') as fh:
        validate_json(json.load(fh))
def check_env_cli_token(args):
    """Require INFRABOX_CLI_TOKEN; populate ``args.token`` and ``args.project_id``.

    The project id is extracted from the token's JWT payload without
    verifying the signature — the CLI only needs the embedded id here,
    not proof of authenticity.
    """
    token = os.environ.get('INFRABOX_CLI_TOKEN', None)
    if not token:
        logger.error('INFRABOX_CLI_TOKEN env var must be set')
        exit(1)
    args.token = token

    # NOTE(review): the `verify=False` keyword was removed in PyJWT 2.x;
    # this call only works with PyJWT < 2.0 — confirm the pinned dependency.
    t = jwt.decode(token, verify=False)
    args.project_id = t['project']['id']
def load_infrabox_json(path):
    """Load and schema-validate an infrabox.json file, guarding against cycles.

    ``LOADED_FILES`` records every file already loaded so recursive includes
    are reported instead of looping forever.
    """
    if path in LOADED_FILES:
        # Fixed message grammar: "Recursive included" -> "Recursive include".
        logger.error('Recursive include detected with %s' % path)
        sys.exit(1)

    LOADED_FILES[path] = path

    with open(path) as f:
        data = json.load(f)
        validate_json(data)
        return data
def delete_project_by_id(args):
    """DELETE a project on the remote by its id and report the outcome."""
    infraboxcli.env.check_env_url(args)
    endpoint = args.url + api_projects_endpoint_url + args.id

    response = delete(endpoint, headers=get_user_headers(), verify=args.ca_bundle, timeout=60)
    message = response.json()['message']
    if response.status_code == 200:
        logger.info(message)
    else:
        logger.error(message)
    return response
def on_job_update(*args):
    """Socket event handler: track job state updates; exit once all jobs end.

    ``args[0]['data']['job']`` carries the updated job. New jobs are added to
    the global ``jobs`` registry and subscribed for console output; once no
    job is active anymore the process exits with 0 (all finished) or 1.
    """
    u = args[0]['data']
    job = u['job']
    job_id = job['id']

    if job_id not in jobs:
        # First sighting: subscribe to console output and assign a color.
        s.emit('listen:console', job_id)
        color = colors[len(jobs) % len(colors)]
        job['color'] = color
        jobs[job_id] = job

        global job_name_len
        job_name_len = max(job_name_len, len(job['name']))
    else:
        jobs[job_id]['state'] = job['state']

    # no jobs yet
    if not jobs:
        return

    # check if create job failed
    if len(jobs) == 1:
        for job_id in jobs:
            state = jobs[job_id]['state']
            name = jobs[job_id]['name']
            if state == 'failure' or state == 'error' or state == 'killed':
                logger.error("Job %s failed with '%s'" % (name, state))
                sys.exit(1)

    # wait until we received the real jobs
    if len(jobs) < 2:
        return

    rc = 0
    active = False
    for job_id in jobs:
        state = jobs[job_id]['state']
        if state == 'failure' or state == 'error':
            rc = 1
        elif state == 'scheduled' or state == 'queued' or state == 'running':
            active = True

    if not active:
        for job_id in jobs:
            state = jobs[job_id]['state']
            name = jobs[job_id]['name']
            if state == 'finished':
                # BUGFIX: previously interpolated `state` here, logging
                # "Job finished finished successfully" instead of the name.
                logger.info("Job %s finished successfully" % name)
            else:
                logger.error("Job %s failed with '%s'" % (name, state))
        sys.exit(rc)
def get_all_remotes():
    """Return the configured remote urls, or exit when none are available."""
    try:
        config = get_config()
        remotes = config['remotes'].keys()
        if not remotes:
            raise Exception('No remotes')
        return remotes
    # Narrowed from a bare `except:`, which also swallowed SystemExit and
    # KeyboardInterrupt.
    except Exception:
        logger.error('No available remotes. Please, log in.')
        exit(1)
def check_env_url(args):
    """Ensure ``args.url`` is set, falling back to the stored current remote.

    Returns True when the url was filled in from the local config; returns
    None when ``args.url`` was already set; exits the process when no url
    can be determined at all.
    """
    if not args.url:
        current_remote_url = get_current_remote_url()
        if current_remote_url:
            args.url = current_remote_url
            return True

        error_msg = textwrap.dedent("\
Remote URL is not specified. Either set INFRABOX_URL env var or specify an url via `--url` argument."
        )
        logger.error(error_msg)
        exit(1)
def list_jobs(args):
    """Print the names of all jobs defined in the project's infrabox.json."""
    args.project_root = os.path.abspath(args.project_root)
    json_path = os.path.join(args.project_root, 'infrabox.json')
    if not os.path.isfile(json_path):
        logger.error('%s does not exist' % json_path)
        sys.exit(1)

    definition = load_infrabox_json(args.infrabox_json)
    job_list = get_job_list(definition, args, base_path=args.project_root)

    # Alphabetical output, one job name per line.
    for job in sorted(job_list, key=lambda j: j['name']):
        print(job['name'])
def list_jobs(args):
    """Show the project's job tree via the workflow cache."""
    args.project_root = os.path.abspath(args.project_root)
    json_path = os.path.join(args.project_root, 'infrabox.json')
    if not os.path.isfile(json_path):
        logger.error('%s does not exist' % json_path)
        sys.exit(1)

    definition = load_infrabox_json(args.infrabox_json)
    job_list = get_job_list(definition, args, base_path=args.project_root)

    cache = WorkflowCache(args)
    cache.add_jobs(job_list)
    cache.print_tree()
def get_secret(args, name):
    """Look up ``name`` in the project's .infraboxsecrets.json file.

    Exits with an error when the secrets file or the named entry is missing.
    """
    secrets_file = os.path.join(args.project_root, '.infraboxsecrets.json')
    if not os.path.exists(secrets_file):
        logger.error("No secrets file found")
        sys.exit(1)

    with open(secrets_file) as fh:
        secrets = json.load(fh)

    try:
        return secrets[name]
    except KeyError:
        logger.error("%s not found in .infraboxsecrets.json" % name)
        sys.exit(1)
def graph(args):
    """Render the dependency graph of all jobs defined in the infrabox file."""
    check_project_root(args)
    args.project_root = os.path.abspath(args.project_root)

    infrabox_file_path = args.infrabox_file_path
    if not os.path.isfile(infrabox_file_path):
        logger.error('%s does not exist' % infrabox_file_path)
        sys.exit(1)

    definition = load_infrabox_file(args.infrabox_file_path)
    job_list = get_job_list(definition, args, infrabox_context=args.project_root)

    cache = WorkflowCache(args)
    cache.add_jobs(job_list)
    cache.print_graph()
def upload_zip(args, f):
    """Upload the zipped project and return the response's 'data' payload.

    Exits with an error when the server rejects the upload.
    """
    logger.info('Uploading ...')

    url = '%s/v1/project/%s/upload' % (args.host, args.project_id)
    files = {'project.zip': f}
    headers = {'Authorization': args.token}
    r = requests.post(url, files=files, headers=headers, timeout=120)

    if r.status_code != 200:
        # Robustness fix: the body of a failed upload is not guaranteed to
        # be JSON (e.g. a proxy error page). Previously r.json() ran before
        # the status check and its parse error masked the real failure.
        try:
            message = r.json()['message']
        except ValueError:
            message = r.text
        logger.error("Upload failed: %s" % message)
        sys.exit(1)

    return r.json()['data']
def validate_infrabox_file(args):
    """Load the infrabox file (JSON, falling back to YAML) and validate it.

    Made consistent with the version-aware YAML loading used elsewhere in
    this file: `yaml.load` without an explicit Loader is deprecated since
    PyYAML 5.1 and the Loader argument is required from PyYAML 6.0.
    """
    args.project_root = os.path.abspath(args.project_root)
    infrabox_file_path = args.infrabox_file_path
    if not os.path.isfile(infrabox_file_path):
        logger.error('%s does not exist' % infrabox_file_path)
        sys.exit(1)

    with open(infrabox_file_path, 'r') as f:
        try:
            data = json.load(f)
        except ValueError:
            # Not JSON; re-read the same file as YAML.
            f.seek(0)
            yaml_version = tuple(int(p) for p in yaml.__version__.split('.')[:2])
            if sys.version_info.major == 2 or yaml_version < (5, 1):
                data = yaml.load(f)
            else:
                data = yaml.load(f, Loader=yaml.FullLoader)

        validate_json(data)
def load_infrabox_file(path):
    """Load and validate an infrabox file (JSON or YAML), detecting cycles.

    Made consistent with the version-aware YAML loading used elsewhere in
    this file (PyYAML >= 5.1 deprecates `yaml.load` without a Loader; 6.0
    requires it). Also fixed the message grammar ("Recursive included").
    """
    if path in LOADED_FILES:
        logger.error('Recursive include detected with %s' % path)
        sys.exit(1)

    LOADED_FILES[path] = path

    with open(path) as f:
        try:
            data = json.load(f)
        except ValueError:
            # Not JSON; re-read the same file as YAML.
            f.seek(0)
            yaml_version = tuple(int(p) for p in yaml.__version__.split('.')[:2])
            if sys.version_info.major == 2 or yaml_version < (5, 1):
                data = yaml.load(f)
            else:
                data = yaml.load(f, Loader=yaml.FullLoader)

        validate_json(data)
        return data
def download_file(url, filename, args):
    """Stream a job output file from ``url`` into ``filename``.

    A 404 means the job produced no output and is silently ignored; any
    other non-200 status aborts the process.
    """
    headers = {'Authorization': 'token ' + args.token}
    response = requests.get(url, headers=headers, stream=True, timeout=5, verify=args.ca_bundle)

    if response.status_code == 404:
        # no file exists
        return
    if response.status_code != 200:
        logger.error("Failed to download output of job")
        sys.exit(1)

    with open(filename, 'wb') as out:
        for chunk in response.iter_content(chunk_size=1024):
            # skip keep-alive chunks, which arrive empty
            if chunk:
                out.write(chunk)
def get_current_user_token():
    """Return the stored token for the current remote, or exit when absent."""
    try:
        config = local_config.get_config()

        current_remote = config['current_remote']
        if not current_remote:
            raise Exception('Current remote not set')

        current_user_token = config['remotes'][current_remote][
            'current_user_token']
        # Simplified the redundant `is None or not ...` check: falsy
        # already covers None.
        if not current_user_token:
            raise Exception('Current user token not found')
        return current_user_token
    # Narrowed from a bare `except:`, which also swallowed SystemExit and
    # KeyboardInterrupt.
    except Exception:
        logger.error('Could not load current user token. Please, log in.')
        exit(1)
def validate_infrabox_file(args):
    """Load the infrabox file (JSON, falling back to YAML) and validate it."""
    args.project_root = os.path.abspath(args.project_root)
    infrabox_file_path = args.infrabox_file_path
    if not os.path.isfile(infrabox_file_path):
        logger.error('%s does not exist' % infrabox_file_path)
        sys.exit(1)

    with open(infrabox_file_path, 'r') as f:
        try:
            data = json.load(f)
        except ValueError:
            # Not JSON; re-read the same file as YAML.
            f.seek(0)
            # BUGFIX: the version check used lexicographic string comparison,
            # so PyYAML "6.0" < "5.1" evaluated True and yaml.load() was
            # called without the Loader argument it requires since 6.0.
            yaml_version = tuple(int(p) for p in yaml.__version__.split('.')[:2])
            if sys.version_info.major == 2 or yaml_version < (5, 1):
                data = yaml.load(f)
            else:
                data = yaml.load(f, Loader=yaml.FullLoader)

        validate_json(data)
def get_jobs(self, job_name=None, children=False):
    """Return jobs matching ``job_name`` (all jobs when it is falsy).

    With ``children`` set, the dependencies of each match are appended
    recursively. Exits with an error when nothing matches.
    """
    if not job_name:
        return self.jobs

    matches = []
    for job in self.jobs:
        if job['name'] != job_name:
            continue
        matches.append(job)
        if children:
            for dep in job.get('depends_on', []):
                matches += self.get_jobs(dep['name'], children)

    if not matches:
        logger.error("job %s not found in infrabox.json" % job_name)
        sys.exit(1)

    return matches
def load_infrabox_file(path):
    """Load and validate an infrabox file (JSON or YAML), detecting cycles."""
    if path in LOADED_FILES:
        # Fixed message grammar: "Recursive included" -> "Recursive include".
        logger.error('Recursive include detected with %s' % path)
        sys.exit(1)

    LOADED_FILES[path] = path

    with open(path) as f:
        try:
            data = json.load(f)
        except ValueError:
            # Not JSON; re-read the same file as YAML.
            f.seek(0)
            # BUGFIX: lexicographic version compare broke on PyYAML "6.0"
            # ("6.0" < "5.1" is True for strings), which then called
            # yaml.load() without the Loader argument it requires since 6.0.
            yaml_version = tuple(int(p) for p in yaml.__version__.split('.')[:2])
            if sys.version_info.major == 2 or yaml_version < (5, 1):
                data = yaml.load(f)
            else:
                data = yaml.load(f, Loader=yaml.FullLoader)

        validate_json(data)
        return data