def main(*args):
    """Main method of artman.

    Args:
        *args: Command-line arguments; when empty, ``sys.argv[1:]`` is used
            (entry-point invocation).
    """
    # If no arguments are sent, we are using the entry point; derive
    # them from sys.argv.
    if not args:
        args = sys.argv[1:]

    # Get to a normalized set of arguments.
    flags = parse_args(*args)
    user_config = loader.read_user_config(flags.user_config)
    _adjust_root_dir(flags.root_dir)
    pipeline_name, pipeline_kwargs = normalize_flags(flags, user_config)

    if flags.local:
        try:
            pipeline = pipeline_factory.make_pipeline(pipeline_name,
                                                      **pipeline_kwargs)
            # Hardcoded to run pipeline in serial engine, though not
            # necessarily.
            engine = engines.load(
                pipeline.flow, engine='serial', store=pipeline.kwargs)
            engine.run()
        # Catch Exception instead of using a bare `except:` so SystemExit
        # and KeyboardInterrupt propagate rather than being converted into
        # exit code 32.
        except Exception:
            logger.error(traceback.format_exc())
            sys.exit(32)
        finally:
            _change_owner(flags, pipeline_name, pipeline_kwargs)
    else:
        support.check_docker_requirements(flags.image)
        # Note: artman currently won't work if input directory doesn't
        # contain common-protos.
        logger.info('Running artman command in a Docker instance.')
        _run_artman_in_docker(flags)
def download(url, directory):
    """Download *url* into *directory*, creating the directory if needed.

    The download is skipped when the target file already exists.

    Args:
        url (str): URL to fetch with curl.
        directory (str): Destination directory.

    Returns:
        str: Local path of the (possibly pre-existing) downloaded file.
    """
    filename = os.path.basename(urllib.parse.urlsplit(url).path)
    # Use os.path.join: the original returned `directory + filename`, which
    # silently produced a wrong path when `directory` lacked a trailing '/'.
    dest = os.path.join(directory, filename)
    if not os.path.isfile(dest):
        # Create the directory in-process instead of shelling out to
        # `mkdir -p`.
        if not os.path.isdir(directory):
            os.makedirs(directory)
        logger.info('Downloading file from URL: %s' % url)
        subprocess.check_call(['curl', '-o', dest, '-sL', url])
    return dest
def execute(self, src_proto_path, import_proto_path, output_dir, api_name,
            api_version, organization_name, toolkit_path,
            desc_proto_path=None, excluded_proto_path=None):
    """Compile a proto descriptor set for the API.

    Args:
        src_proto_path (list): Directories holding the API's protos.
        import_proto_path (list): Directories holding imported protos.
        output_dir (str): Where the .desc file is written.
        api_name (str): API name, used to build the output file name.
        api_version (str): API version, used to build the output file name.
        organization_name (str): Organization, used in the file name.
        toolkit_path (str): Path to the toolkit (for protoc header params).
        desc_proto_path (list): Extra proto dirs to include in the
            descriptor. Defaults to no extras.
        excluded_proto_path (list): Protos to exclude. Defaults to none.
            (Changed from a mutable `[]` default to `None` to avoid the
            shared-mutable-default pitfall; behavior is unchanged.)

    Returns:
        str: Path of the generated descriptor file.
    """
    desc_proto_path = desc_proto_path or []
    excluded_proto_path = excluded_proto_path or []
    desc_protos = list(
        protoc_utils.find_protos(src_proto_path + desc_proto_path,
                                 excluded_proto_path))
    header_proto_path = import_proto_path + desc_proto_path
    header_proto_path.extend(src_proto_path)
    desc_out_file = task_utils.api_full_name(
        api_name, api_version, organization_name) + '.desc'
    logger.info('Compiling descriptors for {0}'.format(desc_protos))
    self.exec_command(['mkdir', '-p', output_dir])
    # DescGen doesn't use _group_by_dirname right now because
    # - it doesn't have to
    # - and multiple invocations would overwrite the desc_out_file
    self.exec_command(
        ['protoc'] +
        protoc_utils.protoc_header_params(header_proto_path, toolkit_path) +
        protoc_utils.protoc_desc_params(output_dir, desc_out_file) +
        desc_protos)
    return os.path.join(output_dir, desc_out_file)
def main(*args):
    """Main method of artman.

    Args:
        *args: Command-line arguments; when empty, ``sys.argv[1:]`` is used
            (entry-point invocation).
    """
    # If no arguments are sent, we are using the entry point; derive
    # them from sys.argv.
    if not args:
        args = sys.argv[1:]

    # Get to a normalized set of arguments.
    flags = parse_args(*args)
    user_config = loader.read_user_config(flags.user_config)
    _adjust_root_dir(flags.root_dir)
    pipeline_name, pipeline_kwargs = normalize_flags(flags, user_config)

    if flags.local:
        try:
            pipeline = pipeline_factory.make_pipeline(pipeline_name, False,
                                                      **pipeline_kwargs)
            # Hardcoded to run pipeline in serial engine, though not
            # necessarily.
            engine = engines.load(
                pipeline.flow, engine='serial', store=pipeline.kwargs)
            engine.run()
        # Catch Exception instead of using a bare `except:` so SystemExit
        # and KeyboardInterrupt propagate rather than being converted into
        # exit code 32.
        except Exception:
            logger.error(traceback.format_exc())
            sys.exit(32)
        finally:
            _change_owner(flags, pipeline_name, pipeline_kwargs)
    else:
        support.check_docker_requirements(flags.image)
        # Note: artman currently won't work if input directory doesn't contain
        # shared configuration files (e.g. gapic/packaging/dependencies.yaml).
        # This will make artman less useful for non-Google APIs.
        # TODO(ethanbao): Fix that by checking the input directory and
        # pulling the shared configuration files if necessary.
        logger.info('Running artman command in a Docker instance.')
        _run_artman_in_docker(flags)
def _cancel_task_lease(task_client, task):
    """Return the lease on a pulled task so another worker can take it."""
    request_body = {
        'scheduleTime': task['scheduleTime'],
        'responseView': 'FULL',
    }
    tasks_api = task_client.projects().locations().queues().tasks()
    response = tasks_api.cancelLease(
        name=task['name'], body=request_body).execute()
    logger.info('Cancel task request returned %s' % response)
    return response
def execute(self, local_paths, files_dict=None):
    """Materialize the googleapis repo under local_paths['reporoot'].

    If the repo root already exists (artman running locally) this is a
    no-op. Otherwise the public googleapis repo is downloaded and any
    caller-supplied extra files are written over it.

    Args:
        local_paths (dict): Must contain 'reporoot'.
        files_dict (dict): Mapping of relative path -> base64-encoded
            content to overlay on the repo. Defaults to no extra files.
            (Changed from a mutable `{}` default; behavior unchanged.)

    Returns:
        str: Path of the downloaded googleapis directory, or None if the
        repo root already existed.
    """
    files_dict = files_dict or {}
    repo_root = local_paths['reporoot']
    if os.path.exists(os.path.realpath(os.path.expanduser(repo_root))):
        # Do nothing if the repo_root exists. The repo_root exists if
        # artman is running locally.
        return
    logger.info('root repo: %s' % repo_root)
    # The original wrapped this in `try/except OSError as e: raise e`,
    # which is a no-op; let the error propagate naturally.
    os.makedirs(repo_root)
    testfile = urllib.request.FancyURLopener()
    tmp_repo_file = os.path.join(repo_root, "file.zip")
    testfile.retrieve(
        "https://github.com/googleapis/googleapis/archive/master.zip",
        tmp_repo_file)
    # Use a context manager so the archive is closed even on error.
    with zipfile.ZipFile(tmp_repo_file, 'r') as zip_ref:
        zip_ref.extractall(repo_root)
    os.remove(tmp_repo_file)
    shutil.move(os.path.join(repo_root, "googleapis-master"),
                os.path.join(repo_root, "googleapis"))
    remote_repo_dir = os.path.join(repo_root, "googleapis")
    # Write/overwrite the additional files into the remote_repo_dir so that
    # user can include additional files which are not in the public repo.
    for f, content in files_dict.items():
        filename = os.path.join(remote_repo_dir, f)
        if not os.path.exists(os.path.dirname(filename)):
            os.makedirs(os.path.dirname(filename))
        # base64.b64decode returns bytes; the original opened the file in
        # text mode ("w+"), which raises TypeError on Python 3. Write the
        # decoded bytes through a binary handle instead.
        with open(filename, "wb") as out_file:
            out_file.write(base64.b64decode(content))
    return remote_repo_dir
def execute(self, root_dir, files_dict=None):
    """Materialize the googleapis repo one level above *root_dir*.

    If the parent of root_dir already exists (artman running locally)
    this is a no-op. Otherwise the public googleapis repo is downloaded
    and any caller-supplied extra files are written over it.

    Args:
        root_dir (str): The googleapis directory; its parent is used as
            the repo root.
        files_dict (dict): Mapping of relative path -> base64-encoded
            content to overlay on the repo. Defaults to no extra files.
            (Changed from a mutable `{}` default; behavior unchanged.)

    Returns:
        str: Path of the downloaded googleapis directory, or None if the
        repo root already existed.
    """
    files_dict = files_dict or {}
    repo_root = os.path.abspath(os.path.join(root_dir, os.pardir))
    if os.path.exists(os.path.realpath(os.path.expanduser(repo_root))):
        # Do nothing if the repo_root exists. The repo_root exists if
        # artman is running locally.
        return
    logger.info('root repo: %s' % repo_root)
    # The original wrapped this in `try/except OSError as e: raise e`,
    # which is a no-op; let the error propagate naturally.
    os.makedirs(repo_root)
    testfile = urllib.request.FancyURLopener()
    tmp_repo_file = os.path.join(repo_root, "file.zip")
    testfile.retrieve(
        "https://github.com/googleapis/googleapis/archive/master.zip",
        tmp_repo_file)
    # Use a context manager so the archive is closed even on error.
    with zipfile.ZipFile(tmp_repo_file, 'r') as zip_ref:
        zip_ref.extractall(repo_root)
    os.remove(tmp_repo_file)
    shutil.move(os.path.join(repo_root, "googleapis-master"),
                os.path.join(repo_root, "googleapis"))
    remote_repo_dir = os.path.join(repo_root, "googleapis")
    # Write/overwrite the additional files into the remote_repo_dir so that
    # user can include additional files which are not in the public repo.
    for f, content in files_dict.items():
        filename = os.path.join(remote_repo_dir, f)
        if not os.path.exists(os.path.dirname(filename)):
            os.makedirs(os.path.dirname(filename))
        # base64.b64decode returns bytes; writing them through a UTF-8
        # text handle (as the original did) raises TypeError. Use binary.
        with open(filename, "wb") as out_file:
            out_file.write(base64.b64decode(content))
    return remote_repo_dir
def main(*args):
    """Main method of artman.

    Args:
        *args: Command-line arguments; when empty, ``sys.argv[1:]`` is used
            (entry-point invocation).
    """
    # If no arguments are sent, we are using the entry point; derive
    # them from sys.argv.
    if not args:
        args = sys.argv[1:]

    # Get to a normalized set of arguments.
    flags = parse_args(*args)
    user_config = loader.read_user_config(flags.user_config)
    _adjust_root_dir(flags.root_dir)
    pipeline_name, pipeline_kwargs = normalize_flags(flags, user_config)

    if flags.local:
        try:
            pipeline = pipeline_factory.make_pipeline(pipeline_name,
                                                      **pipeline_kwargs)
            # Hardcoded to run pipeline in serial engine, though not
            # necessarily.
            engine = engines.load(
                pipeline.flow, engine='serial', store=pipeline.kwargs)
            engine.run()
        # Catch Exception instead of using a bare `except:` so SystemExit
        # and KeyboardInterrupt propagate rather than being converted into
        # exit code 32.
        except Exception:
            logger.error(traceback.format_exc())
            sys.exit(32)
        finally:
            _change_owner(flags, pipeline_name, pipeline_kwargs)
    else:
        support.check_docker_requirements(flags.image)
        # Note: artman currently won't work if input directory doesn't
        # contain common-protos.
        logger.info('Running artman command in a Docker instance.')
        _run_artman_in_docker(flags)
def execute(self, gapic_code_dir):
    """Run the PHP code formatters over the generated GAPIC sources."""
    code_dir = os.path.abspath(gapic_code_dir)
    logger.info('Formatting file using php-cs-fixer in %s.' % code_dir)
    subprocess.call(['php-cs-fixer', 'fix', gapic_code_dir])
    logger.info('Formatting file using phpcbf in %s.' % code_dir)
    subprocess.call(
        ['phpcbf', '--standard=PSR2', '--no-patch', gapic_code_dir])
def _ack_task(task_client, task):
    """Acknowledge a finished task so the queue drops it."""
    tasks_api = task_client.projects().locations().queues().tasks()
    response = tasks_api.acknowledge(
        name=task['name'],
        body={'scheduleTime': task['scheduleTime']}).execute()
    logger.info('Acknowledge task request returned %s' % response)
    return response
def _run_artman_in_docker(flags):
    """Executes artman command.

    Args:
        root_dir: The input directory that will be mounted to artman docker
            container as local googleapis directory.

    Returns:
        The output directory with artman-generated files.
    """
    ARTMAN_CONTAINER_NAME = 'artman-docker'
    root_dir = flags.root_dir
    output_dir = flags.output_dir
    artman_config_dirname = os.path.dirname(flags.config)
    docker_image = flags.image

    # Quote each forwarded argument so values containing spaces or shell
    # metacharacters survive the `/bin/bash -c "artman ..."` round trip.
    # (The original joined sys.argv unquoted, which broke such arguments;
    # this matches the quoting used elsewhere in this file.)
    inner_artman_cmd_str = ' '.join(
        ["'" + arg + "'" for arg in sys.argv[1:]])
    # Because artman now supports setting root dir in either command line or
    # user config, make sure `--root-dir` flag gets explicitly passed to the
    # artman command running inside Artman Docker container.
    if '--root-dir' not in inner_artman_cmd_str:
        inner_artman_cmd_str = '--root-dir %s %s' % (
            root_dir, inner_artman_cmd_str)

    # TODO(ethanbao): Such folder to folder mounting won't work on windows.
    base_cmd = [
        'docker', 'run', '--name', ARTMAN_CONTAINER_NAME, '--rm', '-i', '-t',
        '-e', 'HOST_USER_ID=%s' % os.getuid(),
        '-e', 'HOST_GROUP_ID=%s' % os.getgid(),
        '-e', '%s=True' % RUNNING_IN_ARTMAN_DOCKER_TOKEN,
        '-v', '%s:%s' % (root_dir, root_dir),
        '-v', '%s:%s' % (output_dir, output_dir),
        '-v', '%s:%s' % (artman_config_dirname, artman_config_dirname),
        '-w', root_dir
    ]
    base_cmd.extend([docker_image, '/bin/bash', '-c'])

    inner_artman_debug_cmd_str = inner_artman_cmd_str
    # Because debug_cmd is run inside the Docker image, we want to
    # make sure --local is set
    if '--local' not in inner_artman_debug_cmd_str:
        inner_artman_debug_cmd_str = '--local %s' % inner_artman_debug_cmd_str
    debug_cmd = list(base_cmd)
    debug_cmd.append('"artman %s; bash"' % inner_artman_debug_cmd_str)

    cmd = base_cmd
    cmd.append('artman --local %s' % (inner_artman_cmd_str))
    try:
        output = subprocess.check_output(cmd)
        logger.info(output.decode('utf8'))
        return output_dir
    except subprocess.CalledProcessError as e:
        logger.error(e.output.decode('utf8'))
        logger.error(
            'Artman execution failed. For additional logging, re-run the '
            'command with the "--verbose" flag')
        sys.exit(32)
    finally:
        logger.debug('For further inspection inside docker container, run `%s`'
                     % ' '.join(debug_cmd))
def _run_artman_in_docker(flags):
    """Executes artman command.

    Args:
        root_dir: The input directory that will be mounted to artman docker
            container as local googleapis directory.

    Returns:
        The output directory with artman-generated files.
    """
    ARTMAN_CONTAINER_NAME = 'artman-docker'
    root_dir = flags.root_dir
    output_dir = flags.output_dir
    artman_config_dirname = os.path.dirname(flags.config)
    docker_image = flags.image

    # Single-quote each forwarded argument so values with spaces survive
    # the `/bin/bash -c "artman ..."` round trip inside the container.
    inner_artman_cmd_str = ' '.join(["'" + arg + "'" for arg in sys.argv[1:]])
    # Because artman now supports setting root dir in either command line or
    # user config, make sure `--root-dir` flag gets explicitly passed to the
    # artman command running inside Artman Docker container.
    if '--root-dir' not in inner_artman_cmd_str:
        inner_artman_cmd_str = '--root-dir %s %s' % (
            root_dir, inner_artman_cmd_str)

    # TODO(ethanbao): Such folder to folder mounting won't work on windows.
    # Mount the input, output and config directories at identical paths
    # inside the container, and propagate the host uid/gid so generated
    # files are owned by the invoking user.
    base_cmd = [
        'docker', 'run', '--name', ARTMAN_CONTAINER_NAME, '--rm', '-i', '-t',
        '-e', 'HOST_USER_ID=%s' % os.getuid(),
        '-e', 'HOST_GROUP_ID=%s' % os.getgid(),
        '-e', '%s=True' % RUNNING_IN_ARTMAN_DOCKER_TOKEN,
        '-v', '%s:%s' % (root_dir, root_dir),
        '-v', '%s:%s' % (output_dir, output_dir),
        '-v', '%s:%s' % (artman_config_dirname, artman_config_dirname),
        '-w', root_dir
    ]
    base_cmd.extend([docker_image, '/bin/bash', '-c'])

    inner_artman_debug_cmd_str = inner_artman_cmd_str
    # Because debug_cmd is run inside the Docker image, we want to
    # make sure --local is set
    if '--local' not in inner_artman_debug_cmd_str:
        inner_artman_debug_cmd_str = '--local %s' % inner_artman_debug_cmd_str
    # debug_cmd keeps an interactive bash alive after the artman run so a
    # developer can inspect the container; it is only logged, never run here.
    debug_cmd = list(base_cmd)
    debug_cmd.append('"artman %s; bash"' % inner_artman_debug_cmd_str)

    cmd = base_cmd
    cmd.append('artman --local %s' % (inner_artman_cmd_str))
    try:
        output = subprocess.check_output(cmd)
        logger.info(output.decode('utf8'))
        return output_dir
    except subprocess.CalledProcessError as e:
        logger.error(e.output.decode('utf8'))
        logger.error(
            'Artman execution failed. For additional logging, re-run the '
            'command with the "--verbose" flag')
        sys.exit(32)
    finally:
        logger.debug('For further inspection inside docker container, run `%s`'
                     % ' '.join(debug_cmd))
def execute(self, language, staging_lang_api_dir, staging_code_dir):
    """Copy the generated code into the staging language/API directory."""
    logger.info('Copying %s/* to %s.' % (staging_code_dir,
                                         staging_lang_api_dir))
    self.exec_command(['mkdir', '-p', staging_lang_api_dir])
    for entry in os.listdir(staging_code_dir):
        self.exec_command(
            ['cp', '-rf',
             os.path.join(staging_code_dir, entry),
             staging_lang_api_dir])
def _find_protobuf_path(toolkit_path):
    """Fetch and locate protobuf source"""
    # Cache the result in the module-level _protobuf_path so the gradle
    # task only runs once per process.
    global _protobuf_path
    if _protobuf_path:
        return _protobuf_path
    logger.info('Searching for latest protobuf source')
    _protobuf_path = task_utils.get_gradle_task_output(
        'showProtobufPath', toolkit_path)
    return _protobuf_path
def _find_protobuf_path(toolkit_path):
    """Fetch and locate protobuf source.

    The discovered path is cached in the module-level ``_protobuf_path``
    so the gradle task only runs once per process.
    """
    global _protobuf_path
    if not _protobuf_path:
        logger.info('Searching for latest protobuf source')
        # Ask the toolkit's gradle build where the protobuf source lives.
        _protobuf_path = task_utils.get_gradle_task_output(
            'showProtobufPath', toolkit_path)
    return _protobuf_path
def execute(self, grpc_code_dir):
    """Apply PHP proto renames inside every generated *GrpcClient.php."""
    for path in protoc_utils.list_files_recursive(grpc_code_dir):
        if not path.endswith('GrpcClient.php'):
            continue
        logger.info('Performing replacements in: %s' % (path, ))
        with io.open(path, encoding='UTF-8') as src:
            updated = protoc_utils.php_proto_rename(src.read())
        with io.open(path, 'w', encoding='UTF-8') as dst:
            dst.write(updated)
def execute(self, api_name, api_version, language, organization_name,
            output_dir, gapic_code_dir, grpc_code_dir):
    """Copy generated gRPC code into the GAPIC package's lib/ directory."""
    final_output_dir = os.path.join(gapic_code_dir, 'lib')
    logger.info('Copying %s/* to %s.' % (grpc_code_dir, final_output_dir))
    if not os.path.exists(final_output_dir):
        self.exec_command(['mkdir', '-p', final_output_dir])
    for name in sorted(os.listdir(grpc_code_dir)):
        self.exec_command(
            ['cp', '-rf', os.path.join(grpc_code_dir, name),
             final_output_dir])
def execute(self, grpc_code_dir):
    """Rewrite proto references in generated PHP GrpcClient files."""
    targets = (f for f in protoc_utils.list_files_recursive(grpc_code_dir)
               if f.endswith('GrpcClient.php'))
    for target in targets:
        logger.info('Performing replacements in: %s' % (target,))
        with io.open(target, encoding='UTF-8') as handle:
            contents = handle.read()
        with io.open(target, 'w', encoding='UTF-8') as handle:
            handle.write(protoc_utils.php_proto_rename(contents))
def execute(self, api_name, api_version, language, organization_name,
            output_dir, gapic_code_dir, grpc_code_dir):
    """Copy each entry of the gRPC output into the GAPIC lib/ directory."""
    destination = os.path.join(gapic_code_dir, 'lib')
    logger.info('Copying %s/* to %s.' % (grpc_code_dir, destination))
    if not os.path.exists(destination):
        self.exec_command(['mkdir', '-p', destination])
    for name in sorted(os.listdir(grpc_code_dir)):
        source = os.path.join(grpc_code_dir, name)
        self.exec_command(['cp', '-rf', source, destination])
def execute(self, bucket_name, src_path, dest_path):
    """Upload src_path to gs://<bucket_name>/<dest_path>.

    Returns:
        tuple: (bucket_name, dest_path, public_url) of the uploaded blob.
    """
    logger.info('Start blob upload')
    client = storage.Client()
    bucket = client.get_bucket(bucket_name)
    blob = bucket.blob(dest_path)
    # Open in binary mode: Blob.upload_from_file expects a byte stream.
    # The original opened a UTF-8 text handle, which breaks for non-text
    # content and yields str chunks instead of bytes.
    with io.open(src_path, 'rb') as f:
        blob.upload_from_file(f)
    logger.info('Uploaded to %s' % blob.public_url)
    return bucket_name, dest_path, blob.public_url
def _pull_task(task_client, queue_name):
    """Lease at most one task from the given queue."""
    lease_request = {
        # Expire after 300 secs.
        "leaseDuration": {"seconds": 300, "nanos": 0},
        "maxTasks": 1,
        "responseView": "FULL",
        "name": "%s" % queue_name,
    }
    tasks_api = task_client.projects().locations().queues().tasks()
    tasks = tasks_api.pull(name=queue_name, body=lease_request).execute()
    logger.info('Pulling tasks request returned %s' % tasks)
    return tasks
def execute(self, gapic_code_dir):
    """Format generated PHP code with php-cs-fixer (twice) and phpcbf."""
    abs_code_dir = os.path.abspath(gapic_code_dir)
    logger.info('Formatting file using php-cs-fixer in %s.' % abs_code_dir)
    subprocess.call(['php-cs-fixer', 'fix', gapic_code_dir])
    # We require a second call to php-cs-fixer because instances of @type
    # have been converted to @var. We cannot disable this conversion in
    # the first call without affecting other aspects of the formatting.
    second_pass = ['php-cs-fixer', 'fix', gapic_code_dir,
                   '--fixers=phpdoc_var_to_type']
    subprocess.call(second_pass)
    logger.info('Formatting file using phpcbf in %s.' % abs_code_dir)
    subprocess.call(
        ['phpcbf', '--standard=PSR2', '--no-patch', gapic_code_dir])
def execute(self, gapic_code_dir, grpc_code_dir):
    """Move generated gRPC protos under gapic_code_dir/proto."""
    final_output_dir = os.path.join(gapic_code_dir, 'proto')
    if not os.path.exists(final_output_dir):
        self.exec_command(['mkdir', '-p', final_output_dir])
    logger.info('Moving %s/* to %s.' % (grpc_code_dir, final_output_dir))
    for entry in sorted(os.listdir(grpc_code_dir)):
        source = os.path.join(grpc_code_dir, entry)
        destination = os.path.join(final_output_dir, entry)
        self.exec_command(['mv', source, destination])
    # The source tree is now empty; remove it.
    self.exec_command(['rm', '-r', grpc_code_dir])
    return final_output_dir
def execute(self, gapic_code_dir, grpc_code_dir):
    """Relocate generated gRPC protos into gapic_code_dir/proto."""
    destination_root = os.path.join(gapic_code_dir, 'proto')
    if not os.path.exists(destination_root):
        self.exec_command(['mkdir', '-p', destination_root])
    logger.info('Moving %s/* to %s.' % (grpc_code_dir, destination_root))
    for name in sorted(os.listdir(grpc_code_dir)):
        self.exec_command(
            ['mv', os.path.join(grpc_code_dir, name),
             os.path.join(destination_root, name)])
    # The now-empty source directory is no longer needed.
    self.exec_command(['rm', '-r', grpc_code_dir])
    return destination_root
def execute(self, gapic_code_dir):
    """Format generated Python files in place with yapf.

    Raises:
        subprocess.CalledProcessError: If yapf exits with a code other
            than 0 (no changes) or 2 (files reformatted).
    """
    logger.info('Formatting files in %s.' % os.path.abspath(gapic_code_dir))
    target_files = []
    for root, _dirs, files in os.walk(gapic_code_dir):
        for filename in files:
            if filename.endswith('.py'):
                target_files.append(
                    os.path.abspath(os.path.join(root, filename)))
    # Guard against an empty list: invoking `yapf -i` with no file
    # arguments fails instead of being a harmless no-op.
    if not target_files:
        return
    # yapf returns code 2 when it formats, so we can't use `check_call`.
    exit_code = subprocess.call(['yapf', '-i'] + target_files)
    if exit_code not in [0, 2]:
        raise subprocess.CalledProcessError(exit_code, 'yapf')
def execute(self, gapic_code_dir, toolkit_path):
    """Format generated Java sources with the toolkit's Java formatter."""
    logger.info('Formatting files in %s.' % os.path.abspath(gapic_code_dir))
    # TODO(shinfan): Move gradle task into requirement
    path = task_utils.get_gradle_task_output('showJavaFormatterPath',
                                             toolkit_path)
    target_files = []
    for root, _dirs, files in os.walk(gapic_code_dir):
        for filename in files:
            if filename.endswith('.java'):
                target_files.append(
                    os.path.abspath(os.path.join(root, filename)))
    # Skip the formatter when there is nothing to format; invoking it
    # with no file arguments would fail rather than no-op.
    if not target_files:
        return
    self.exec_command(['java', '-jar', path, '--replace'] + target_files)
def execute(self, bucket_name, src_path, dest_path):
    """Upload src_path to gs://<bucket_name>/<dest_path>.

    Returns:
        tuple: (bucket_name, dest_path, public_url) of the uploaded blob.
    """
    logger.info('Start blob upload')
    client = storage.Client()
    bucket = client.get_bucket(bucket_name)
    blob = bucket.blob(dest_path)
    # Open in binary mode: Blob.upload_from_file expects a byte stream.
    # The original text-mode handle ('r') breaks for non-text content and
    # yields str instead of bytes.
    with open(src_path, 'rb') as f:
        blob.upload_from_file(f)
    logger.info('Uploaded to %s' % blob.public_url)
    return bucket_name, dest_path, blob.public_url
def execute(self, gapic_code_dir):
    """Format generated Python files in place with yapf.

    Raises:
        subprocess.CalledProcessError: If yapf exits with a code other
            than 0 (no changes) or 2 (files reformatted).
    """
    logger.info('Formatting files in %s.' % os.path.abspath(gapic_code_dir))
    target_files = []
    for root, _dirs, files in os.walk(gapic_code_dir):
        for filename in files:
            if filename.endswith('.py'):
                target_files.append(
                    os.path.abspath(os.path.join(root, filename)))
    # Guard against an empty list: invoking `yapf -i` with no file
    # arguments fails instead of being a harmless no-op.
    if not target_files:
        return
    # yapf returns code 2 when it formats, so we can't use `check_call`.
    exit_code = subprocess.call(['yapf', '-i'] + target_files)
    if exit_code not in [0, 2]:
        raise subprocess.CalledProcessError(exit_code, 'yapf')
def _run_artman_in_docker(flags):
    """Executes artman command.

    Args:
        input_dir: The input directory that will be mounted to artman docker
            container as local googleapis directory.

    Returns:
        The output directory with artman-generated files.
    """
    ARTMAN_CONTAINER_NAME = 'artman-docker'
    input_dir = flags.input_dir
    output_dir = flags.output_dir
    artman_config_dirname = os.path.dirname(flags.config)
    # Host-side artman user config directory, mounted into the container
    # at /home/.artman below.
    user_config = os.path.join(os.path.expanduser('~'), '.artman')
    docker_image = flags.image
    # Forward the original CLI arguments to the artman run inside Docker.
    inner_artman_cmd_str = ' '.join(sys.argv[1:])

    # TODO(ethanbao): Such folder to folder mounting won't work on windows.
    # Mount input/output/config dirs at identical container paths and pass
    # the host uid/gid so generated files end up owned by the caller.
    base_cmd = [
        'docker', 'run', '--name', ARTMAN_CONTAINER_NAME, '--rm', '-i', '-t',
        '-e', 'HOST_USER_ID=%s' % os.getuid(),
        '-e', 'HOST_GROUP_ID=%s' % os.getgid(),
        '-e', '%s=True' % RUNNING_IN_ARTMAN_DOCKER_TOKEN,
        '-v', '%s:%s' % (input_dir, input_dir),
        '-v', '%s:%s' % (output_dir, output_dir),
        '-v', '%s:%s' % (artman_config_dirname, artman_config_dirname),
        '-v', '%s:/home/.artman' % user_config,
        '-w', input_dir, docker_image, '/bin/bash', '-c'
    ]
    # debug_cmd keeps an interactive bash alive after the run for manual
    # inspection; it is only logged in the `finally` below, never executed.
    debug_cmd = list(base_cmd)
    debug_cmd.append('"artman2 %s; bash"' % inner_artman_cmd_str)

    cmd = base_cmd
    cmd.append('artman2 --local %s' % (inner_artman_cmd_str))
    try:
        output = subprocess.check_output(cmd)
        logger.info(output.decode('utf8'))
        return output_dir
    except subprocess.CalledProcessError as e:
        logger.error(e.output.decode('utf8'))
        logger.error(
            'Artman execution failed. For additional logging, re-run the '
            'command with the "--verbose" flag')
        raise
    finally:
        logger.debug(
            'For further inspection inside docker container, run `%s`' %
            ' '.join(debug_cmd))
def execute(self, gapic_code_dir):
    """Run the PHP formatters: php-cs-fixer twice, then phpcbf."""
    abs_code_dir = os.path.abspath(gapic_code_dir)
    logger.info('Formatting file using php-cs-fixer in %s.' % abs_code_dir)
    subprocess.call(['php-cs-fixer', 'fix', gapic_code_dir])
    # We require a second call to php-cs-fixer because instances of @type
    # have been converted to @var. We cannot disable this conversion in
    # the first call without affecting other aspects of the formatting.
    second_pass = [
        'php-cs-fixer', 'fix', gapic_code_dir, '--fixers=phpdoc_var_to_type'
    ]
    subprocess.call(second_pass)
    logger.info('Formatting file using phpcbf in %s.' % abs_code_dir)
    subprocess.call(
        ['phpcbf', '--standard=PSR2', '--no-patch', gapic_code_dir])
def _pull_and_execute_tasks(task_client, queue_name):
    """Pull leased tasks from the queue and execute each one."""
    pull_task_response = _pull_task(task_client, queue_name)
    tasks = pull_task_response.get('tasks', [])
    if not tasks:
        # Back off for 10 seconds when the pull returns no tasks.
        # (The previous comment said 30 seconds, but the code sleeps 10.)
        logger.debug('There is no pending task. Sleep for 10 seconds.')
        time.sleep(10)
    for task in tasks:
        task_id, tmp_root, artman_user_config, log_file_path = _prepare_dir()
        log_file_handler = None
        try:
            log_file_handler = _setup_logger(log_file_path)
            logger.info('Starting to execute task %s' % task)
            # Drop tasks that have already been retried too many times.
            if int(task['taskStatus']['attemptDispatchCount']) > MAX_ATTEMPTS:
                logger.info('Delete task which exceeds max attempts.')
                _delete_task(task_client, task)
                continue
            _execute_task(artman_user_config, task)
            _ack_task(task_client, task)
            logger.info('Task execution finished')
        except Exception as e:
            # Log the traceback and give the lease back so the task can be
            # retried later.
            logger.error('\n'.join(traceback.format_tb(sys.exc_info()[2])))
            _cancel_task_lease(task_client, task)
        finally:
            logger.info('Cleanup tmp directory %s' % tmp_root)
            # Use task id as log name
            _write_to_cloud_logging(task_id, log_file_path)
            _cleanup(tmp_root, log_file_handler)
def _pull_task(task_client, queue_name):
    """Lease a single task (300 second lease) from the queue."""
    lease_body = {
        "maxTasks": 1,
        "leaseDuration": {
            "seconds": 300,
            "nanos": 0
        },  # Expire after 300 secs.
        "responseView": "FULL",
        "name": "%s" % queue_name,
    }
    api = task_client.projects().locations().queues().tasks()
    tasks = api.pull(name=queue_name, body=lease_body).execute()
    logger.info('Pulling tasks request returned %s' % tasks)
    return tasks
def _run_artman_in_docker(flags):
    """Executes artman command.

    Args:
        root_dir: The input directory that will be mounted to artman docker
            container as local googleapis directory.

    Returns:
        The output directory with artman-generated files.
    """
    ARTMAN_CONTAINER_NAME = 'artman-docker'
    root_dir = flags.root_dir
    output_dir = flags.output_dir
    artman_config_dirname = os.path.dirname(flags.config)
    # Host-side artman user config directory, mounted into the container
    # at /home/.artman below.
    user_config = os.path.join(os.path.expanduser('~'), '.artman')
    docker_image = flags.image
    # Forward the original CLI arguments to the artman run inside Docker.
    inner_artman_cmd_str = ' '.join(sys.argv[1:])

    # TODO(ethanbao): Such folder to folder mounting won't work on windows.
    # Mount input/output/config dirs at identical container paths and pass
    # the host uid/gid so generated files end up owned by the caller.
    base_cmd = [
        'docker', 'run', '--name', ARTMAN_CONTAINER_NAME, '--rm', '-i', '-t',
        '-e', 'HOST_USER_ID=%s' % os.getuid(),
        '-e', 'HOST_GROUP_ID=%s' % os.getgid(),
        '-e', '%s=True' % RUNNING_IN_ARTMAN_DOCKER_TOKEN,
        '-v', '%s:%s' % (root_dir, root_dir),
        '-v', '%s:%s' % (output_dir, output_dir),
        '-v', '%s:%s' % (artman_config_dirname, artman_config_dirname),
        '-v', '%s:/home/.artman' % user_config,
        '-w', root_dir, docker_image, '/bin/bash', '-c'
    ]
    # debug_cmd keeps an interactive bash alive after the run for manual
    # inspection; it is only logged in the `finally` below, never executed.
    debug_cmd = list(base_cmd)
    debug_cmd.append('"artman %s; bash"' % inner_artman_cmd_str)

    cmd = base_cmd
    cmd.append('artman --local %s' % (inner_artman_cmd_str))
    try:
        output = subprocess.check_output(cmd)
        logger.info(output.decode('utf8'))
        return output_dir
    except subprocess.CalledProcessError as e:
        logger.error(e.output.decode('utf8'))
        logger.error(
            'Artman execution failed. For additional logging, re-run the '
            'command with the "--verbose" flag')
        raise
    finally:
        logger.debug('For further inspection inside docker container, run `%s`'
                     % ' '.join(debug_cmd))
def execute(self, gapic_code_dir, toolkit_path):
    """Format generated Java sources with the toolkit's Java formatter."""
    logger.info('Formatting files in %s.' % os.path.abspath(gapic_code_dir))
    # TODO(shinfan): Move gradle task into requirement
    path = task_utils.get_gradle_task_output(
        'showJavaFormatterPath', toolkit_path)
    target_files = []
    for root, _dirs, files in os.walk(gapic_code_dir):
        for filename in files:
            if filename.endswith('.java'):
                target_files.append(
                    os.path.abspath(os.path.join(root, filename)))
    # Skip the formatter when there is nothing to format; invoking it
    # with no file arguments would fail rather than no-op.
    if not target_files:
        return
    self.exec_command(
        ['java', '-jar', path, '--replace'] + target_files)
def _configure_local_paths(local_paths):
    """Return a copy of user_config with local_paths set.

    Args:
        local_paths (dict): The starting local_paths portion of user config.

    Returns:
        dict: The new local_paths dictionary.
    """
    answer = copy(local_paths)

    # Ask the user for a repository root; loop until a non-empty value is
    # provided.
    while not answer.get('reporoot'):
        logger.info('First, we need to know where you store most code on your '
                    'local machine.')
        logger.info('Other paths (example: toolkit) will derive from this, '
                    'but most are individually configurable.')
        logger.info('The use of ${REPOROOT} in GAPIC YAMLs will point here.')
        logger.info('Note: Use of ~ is fine here.')
        answer['reporoot'] = six.moves.input('Local code path: ')
        answer['reporoot'] = answer['reporoot'].rstrip('/').strip()

    # Set up dependent directories; an empty answer keeps the
    # {reporoot}/{dep} default.
    reporoot = answer['reporoot']
    for dep in ('api-client-staging', 'googleapis', 'toolkit'):
        location = six.moves.input(
            'Path for {0} (default: {1}/{0}): '.format(dep, reporoot)
        ).rstrip('/').strip()
        if location:
            answer[dep.replace('-', '_')] = location

    # Done; return the answer.
    return answer
def _pull_and_execute_tasks(task_client, queue_name):
    """Pull leased tasks from the queue and execute each one."""
    pull_task_response = _pull_task(task_client, queue_name)
    tasks = pull_task_response.get('tasks', [])
    if not tasks:
        # Back off for 10 seconds when the pull returns no tasks.
        # (The previous comment said 30 seconds, but the code sleeps 10.)
        logger.debug('There is no pending task. Sleep for 10 seconds.')
        time.sleep(10)
    for task in tasks:
        task_id, tmp_root, artman_user_config, log_file_path = _prepare_dir()
        log_file_handler = None
        try:
            log_file_handler = _setup_logger(log_file_path)
            logger.info('Starting to execute task %s' % task)
            # Drop tasks that have already been retried too many times.
            if int(task['taskStatus']['attemptDispatchCount']) > MAX_ATTEMPTS:
                logger.info('Delete task which exceeds max attempts.')
                _delete_task(task_client, task)
                continue
            _execute_task(artman_user_config, task)
            _ack_task(task_client, task)
            logger.info('Task execution finished')
        except Exception as e:
            # Log the traceback and give the lease back so the task can be
            # retried later.
            logger.error('\n'.join(traceback.format_tb(sys.exc_info()[2])))
            _cancel_task_lease(task_client, task)
        finally:
            logger.info('Cleanup tmp directory %s' % tmp_root)
            # Use task id as log name
            _write_to_cloud_logging(task_id, log_file_path)
            _cleanup(tmp_root, log_file_handler)
def execute(self, bucket_name, path, output_dir):
    """Download gs://<bucket_name>/<path> into output_dir.

    Logs an error and returns early when the blob does not exist.
    """
    client = storage.Client()
    bucket = client.get_bucket(bucket_name)
    blob = bucket.get_blob(path)
    if not blob:
        logger.error('Cannot find the output from GCS.')
        return
    filename = os.path.join(output_dir, path)
    # The original wrapped makedirs in `try: ... except: raise`, which is
    # a no-op; just let errors propagate.
    if not os.path.exists(os.path.dirname(filename)):
        os.makedirs(os.path.dirname(filename))
    # Blob.download_to_file writes raw bytes, so the destination must be
    # opened in binary mode. The original opened a UTF-8 text handle,
    # which raises TypeError on the first chunk.
    with io.open(filename, 'wb') as f:
        blob.download_to_file(f)
    logger.info('File downloaded to %s.' % f.name)
def execute(self, bucket_name, path, output_dir):
    """Download gs://<bucket_name>/<path> into output_dir.

    Logs an error and returns early when the blob does not exist.
    """
    client = storage.Client()
    bucket = client.get_bucket(bucket_name)
    blob = bucket.get_blob(path)
    if not blob:
        logger.error('Cannot find the output from GCS.')
        return
    filename = os.path.join(output_dir, path)
    # The original wrapped makedirs in `try: ... except: raise`, which is
    # a no-op; just let errors propagate.
    if not os.path.exists(os.path.dirname(filename)):
        os.makedirs(os.path.dirname(filename))
    # Blob.download_to_file writes raw bytes, so the destination must be
    # opened in binary mode ("wb"), not text mode ("w").
    with open(filename, "wb") as f:
        blob.download_to_file(f)
    logger.info('File downloaded to %s.' % f.name)
def _print_log(pipeline_id):
    """Fetch and surface the remote pipeline execution's cloud log."""
    # The log takes a while to become available after a failure, so wait
    # 30 seconds before fetching it.
    logger.critical(
        'The remote pipeline execution failed. It will wait for 30 '
        'seconds before fetching the log for remote pipeline execution.',
    )
    time.sleep(30)
    client = logging.Client()
    pipeline_logger = client.logger(pipeline_id)
    entries, _token = pipeline_logger.list_entries()
    for entry in entries:
        logger.error(entry.payload)
    logger.info(
        'You can always run the following command to fetch the log entry:\n'
        '    gcloud beta logging read "logName=projects/vkit-pipeline/logs/%s"'
        % pipeline_id,
    )
def execute(self, gapic_code_dir):
    """Format generated PHP sources: two php-cs-fixer passes, then phpcbf."""
    abs_code_dir = os.path.abspath(gapic_code_dir)
    logger.info('Formatting file using php-cs-fixer in %s.' % abs_code_dir)
    first_pass = [
        'php-cs-fixer', 'fix',
        '--rules=@Symfony,-phpdoc_annotation_without_dot',
        gapic_code_dir,
    ]
    subprocess.call(first_pass)
    # We require a second call to php-cs-fixer because instances of @type
    # have been converted to @var. We cannot disable this conversion in
    # the first call without affecting other aspects of the formatting.
    second_pass = [
        'php-cs-fixer', 'fix',
        '--rules={"phpdoc_no_alias_tag" : {"replacements" : '
        '{"var" : "type"}}}',
        gapic_code_dir,
    ]
    subprocess.call(second_pass)
    logger.info('Formatting file using phpcbf in %s.' % abs_code_dir)
    subprocess.call(
        ['phpcbf', '--standard=PSR2', '--no-patch', gapic_code_dir])
def execute(self, api_name, api_version, organization_name, language,
            go_import_base, output_dir, gapic_code_dir):
    """Copy generated .pb files into the GAPIC tree, rewriting Go imports."""
    pkg_dir = protoc_utils.prepare_grpc_pkg_dir(
        output_dir, api_name, api_version, organization_name, language)
    logger.info(pkg_dir)
    for pbfile in self.find_pb_files(pkg_dir):
        rel_path = os.path.relpath(pbfile, pkg_dir)
        out_file = os.path.join(gapic_code_dir, 'proto', rel_path)
        logger.info('outfile {}'.format(out_file))
        out_dir = os.path.dirname(out_file)
        if not os.path.exists(out_dir):
            os.makedirs(out_dir)
        # Rewrite import paths line by line while copying.
        with open(pbfile) as fin, open(out_file, 'w') as fout:
            for line in fin:
                fout.write(self.modify_imports(go_import_base, line))
def execute(self, gapic_code_dir):
    """Format generated PHP code with php-cs-fixer (two passes) and phpcbf."""
    abs_code_dir = os.path.abspath(gapic_code_dir)
    logger.info('Formatting file using php-cs-fixer in %s.' % abs_code_dir)
    subprocess.call(['php-cs-fixer', 'fix',
                     '--rules=@Symfony,-phpdoc_annotation_without_dot',
                     gapic_code_dir])
    # We require a second call to php-cs-fixer because instances of @type
    # have been converted to @var. We cannot disable this conversion in
    # the first call without affecting other aspects of the formatting.
    var_to_type_rules = ('--rules={"phpdoc_no_alias_tag" : {"replacements" : '
                         '{"var" : "type"}}}')
    subprocess.call(['php-cs-fixer', 'fix', var_to_type_rules,
                     gapic_code_dir])
    logger.info('Formatting file using phpcbf in %s.' % abs_code_dir)
    subprocess.call(
        ['phpcbf', '--standard=PSR2', '--no-patch', gapic_code_dir])
def _print_log(pipeline_id):
    """Pull the remote pipeline's log entries and echo them locally."""
    # Cloud logging lags the failure, so pause before reading it.
    logger.critical(
        'The remote pipeline execution failed. It will wait for 30 '
        'seconds before fetching the log for remote pipeline execution.',
    )
    time.sleep(30)
    client = logging.Client()
    remote_logger = client.logger(pipeline_id)
    entries, _ = remote_logger.list_entries()
    for entry in entries:
        logger.error(entry.payload)
    logger.info(
        'You can always run the following command to fetch the log entry:\n'
        '    gcloud beta logging read "logName=projects/vkit-pipeline/logs/%s"'
        % pipeline_id,
    )
def _configure_publish(publish=None):
    """Determine and return the default publisher.

    Args:
        publish (str): The current default publisher (may be None).

    Returns:
        str: The new default publisher.
    """
    # Set up publishing defaults. Loop rather than recurse (as the
    # original did) so repeated invalid input cannot exhaust the
    # recursion limit.
    while True:
        logger.info('Where do you want to publish code by default?')
        logger.info('The common valid options are "github" and "local".')
        publish = six.moves.input('Default publisher: ').lower()
        # A publisher is valid iff a matching artman publish task module
        # can be imported.
        try:
            importlib.import_module('artman.tasks.publish.%s' % publish)
            return publish
        except ImportError:
            logger.error('Invalid publisher.')
def configure(log_level=logging.INFO):
    """Allow the user to write a new configuration file.

    Returns:
        int: An exit status.
    """
    user_config = UserConfig()

    # Walk the user through basic configuration.
    setup_logging(log_level)
    logger.info('Welcome to artman. We will get you configured.')
    logger.info(
        'When this is done, config will be stored in ~/.artman/config.yaml.')
    logger.info('')

    # Go through each step.
    # These are split out to make testing them easier.
    user_config.local.CopyFrom(_configure_local_config())

    # Create the config directory; ignore the error if it already exists.
    config_dir = os.path.expanduser('~/.artman/')
    try:
        os.makedirs(config_dir)
    except OSError:
        pass
    _write_pb_to_yaml(user_config, os.path.join(config_dir, 'config.yaml'))
    logger.success('Configuration written successfully to '
                   '~/.artman/config.yaml.')
def post_remote_pipeline_job_and_wait(pipeline, jobboard_name):
    """Post a pipeline job to the jobboard and block until it completes.

    Saves the flow factory details into the persistence backend, posts a
    job referencing them on the jobboard, then polls the job state once a
    second until it reaches COMPLETE.

    Args:
        pipeline: The pipeline whose flow should be executed remotely;
            its ``name`` and ``kwargs`` are stored for the remote worker.
        jobboard_name (str): Name of the jobboard to post the job on.

    Returns:
        The posted job object, after it has reached the COMPLETE state.
    """
    my_name = POSTER_NAME
    logger.info("Starting poster with name: %s" % my_name)
    persist_backend = backend_helper.default_persistence_backend()
    with contextlib.closing(persist_backend):
        with contextlib.closing(persist_backend.get_connection()) as conn:
            # Ensure the persistence backend schema is up to date.
            conn.upgrade()
        jobboard = backend_helper.get_jobboard(my_name, jobboard_name)
        jobboard.connect()
        with contextlib.closing(jobboard):
            # Create information in the persistence backend about the
            # unit of work we want to complete and the factory that
            # can be called to create the tasks that the work unit needs
            # to be done.
            lb = logbook.LogBook("post-from-%s" % my_name)
            flow_uuid = uuidutils.generate_uuid()
            fd = logbook.FlowDetail("flow-of-%s" % my_name, flow_uuid)
            lb.add(fd)
            with contextlib.closing(persist_backend.get_connection()) as conn:
                conn.save_logbook(lb)
            # Record how the remote worker can reconstruct the flow.
            engines.save_factory_details(fd,
                                         pipeline_factory.make_pipeline_flow,
                                         [pipeline.name, True],
                                         pipeline.kwargs,
                                         backend=persist_backend)
            # Post, and be done with it!
            jb = jobboard.post("job-from-%s" % my_name, book=lb)
            logger.info('Posted: %s' % jb)
            # TODO(cbao): Move wait until into a separate method.
            # TODO(lukesneeringer): ...and fix the logging.
            state = states.UNCLAIMED
            print('Job status: %s' % state)
            # Poll once per second, printing only on state transitions.
            while state != states.COMPLETE:
                if (jb.state != state):
                    state = jb.state
                    print('Job status: %s' % state)
                time.sleep(1)
            return jb
def configure(log_level=logging.INFO):
    """Allow the user to write a new configuration file.

    Returns:
        int: An exit status.
    """
    user_config = UserConfig()

    # Walk the user through basic configuration.
    setup_logging(log_level)
    for message in (
            'Welcome to artman. We will get you configured.',
            'When this is done, config will be stored in ~/.artman/config.yaml.',
            ''):
        logger.info(message)

    # Go through each step.
    # These are split out to make testing them easier.
    user_config.local.CopyFrom(_configure_local_config())

    # Make sure the config directory exists before writing into it.
    config_dir = os.path.expanduser('~/.artman/')
    try:
        os.makedirs(config_dir)
    except OSError:
        pass
    _write_pb_to_yaml(user_config, os.path.join(config_dir, 'config.yaml'))
    logger.success('Configuration written successfully to '
                   '~/.artman/config.yaml.')
def execute(self, src_proto_path, import_proto_path, output_dir, api_name,
            api_version, organization_name, toolkit_path,
            desc_proto_path=None, excluded_proto_path=None):
    """Compile a protobuf descriptor set for the API.

    Args:
        src_proto_path (list): Directories containing the API's protos.
        import_proto_path (list): Directories of imported dependency protos.
        output_dir (str): Directory the .desc file is written into.
        api_name (str): Name of the API.
        api_version (str): Version of the API.
        organization_name (str): Organization that owns the API.
        toolkit_path (str): Path to the toolkit checkout.
        desc_proto_path (list): Extra proto dirs to include in the
            descriptor set (defaults to none).
        excluded_proto_path (list): Protos to exclude (defaults to none).

    Returns:
        str: Path to the generated descriptor file.
    """
    # Bug fix: ``excluded_proto_path`` previously used a mutable default
    # argument ([]); both optionals now default to None and are normalized
    # here.
    desc_proto_path = desc_proto_path or []
    excluded_proto_path = excluded_proto_path or []
    desc_protos = list(
        protoc_utils.find_protos(src_proto_path + desc_proto_path,
                                 excluded_proto_path))
    header_proto_path = import_proto_path + desc_proto_path
    header_proto_path.extend(src_proto_path)
    desc_out_file = task_utils.api_full_name(
        api_name, api_version, organization_name) + '.desc'
    logger.info('Compiling descriptors for {0}'.format(desc_protos))
    self.exec_command(['mkdir', '-p', output_dir])
    # DescGen doesn't use _group_by_dirname right now because
    # - it doesn't have to
    # - and multiple invocations would overwrite the desc_out_file
    self.exec_command(
        ['protoc'] +
        protoc_utils.protoc_header_params(header_proto_path, toolkit_path) +
        protoc_utils.protoc_desc_params(output_dir, desc_out_file) +
        desc_protos)
    return os.path.join(output_dir, desc_out_file)
def _prepare_dir(source_repo="https://github.com/googleapis/googleapis.git"):
    """Prepare the temporary folder for task execution.

    It downloads the googleapis repo and adds a one-time artman config yaml.
    TODO(ethanbao): support loading more input files from heterogeneous data
    sources.

    Args:
        source_repo (str): Git URL of the repository to clone.

    Returns:
        tuple: (task_id, repo_root, artman_user_config, log_path)
    """
    task_id = str(uuid.uuid4())[0:8]
    repo_root = '/tmp/artman/%s' % task_id
    logger.info('Prepare a temporary root repo: %s' % repo_root)
    # Fix: the previous ``except OSError as e: raise e`` was a no-op
    # wrapper; let any OSError propagate with its natural traceback.
    os.makedirs(repo_root)
    logger.info('Checking out fresh clone of %s.' % source_repo)
    googleapis_dir = os.path.join(repo_root, "googleapis")
    # NOTE(review): this removes .git/config relative to the *current*
    # working directory — presumably intentional; confirm.
    subprocess.check_output(['rm', '-f', '.git/config'])
    git_clone_args = ['git', 'clone', source_repo, googleapis_dir]
    output = subprocess.check_output(git_clone_args)
    if output:
        output_logger.success(output.decode('utf8'))
    artman_user_config = os.path.join(repo_root, 'artman-config.yaml')
    with io.open(artman_user_config, 'w+') as file_:
        file_.write(u'---\n')
        file_.write(u'local_paths:\n')
        file_.write(u'  reporoot: %s\n' % repo_root)
        toolkit_home = os.environ.get('TOOLKIT_HOME')
        if toolkit_home:
            file_.write(u'  toolkit: %s \n' % toolkit_home)
        file_.write(u'publish: noop \n')
    log_path = os.path.join(repo_root, 'artman.log')
    with io.open(log_path, 'w+') as file_:
        file_.write(u'-------- Beginning of %s -----------\n' % task_id)
    return task_id, repo_root, artman_user_config, log_path
def _prepare_dir(source_repo="https://github.com/googleapis/googleapis.git"):
    """Prepare the temporary folder for task execution.

    It downloads the googleapis repo and adds a one-time artman config yaml.
    TODO(ethanbao): support loading more input files from heterogeneous data
    sources.

    Args:
        source_repo (str): Git URL of the repository to clone.

    Returns:
        tuple: (task_id, repo_root, artman_user_config, log_path)
    """
    task_id = str(uuid.uuid4())[0:8]
    repo_root = '/tmp/artman/%s' % task_id
    logger.info('Prepare a temporary root repo: %s' % repo_root)
    # Fix: the previous ``except OSError as e: raise e`` was a no-op
    # wrapper; let any OSError propagate with its natural traceback.
    os.makedirs(repo_root)
    logger.info('Checking out fresh clone of %s.' % source_repo)
    googleapis_dir = os.path.join(repo_root, "googleapis")
    # NOTE(review): this removes .git/config relative to the *current*
    # working directory — presumably intentional; confirm.
    subprocess.check_output(['rm', '-f', '.git/config'])
    git_clone_args = ['git', 'clone', source_repo, googleapis_dir]
    output = subprocess.check_output(git_clone_args)
    if output:
        output_logger.success(output.decode('utf8'))
    artman_user_config = os.path.join(repo_root, 'artman-config.yaml')
    with io.open(artman_user_config, 'w+') as file_:
        file_.write(u'---\n')
        file_.write(u'local_paths:\n')
        file_.write(u'  reporoot: %s\n' % repo_root)
        toolkit_home = os.environ.get('TOOLKIT_HOME')
        if toolkit_home:
            file_.write(u'  toolkit: %s \n' % toolkit_home)
        file_.write(u'publish: noop \n')
    log_path = os.path.join(repo_root, 'artman.log')
    with io.open(log_path, 'w+') as file_:
        file_.write(u'-------- Beginning of %s -----------\n' % task_id)
    return task_id, repo_root, artman_user_config, log_path
def _configure_github_config():
    """Determine and return artman user GitHub config.

    Returns:
        GitHubConfig: The new GitHub configuration.
    """
    answer = GitHubConfig()
    logger.info('Since you intend to publish to GitHub, you need to '
                'supply credentials.')
    logger.info('Create an access token at: '
                'https://github.com/settings/tokens')
    logger.info('It needs the "repo" scope and nothing else.')
    # NOTE(review): the original source here was corrupted (a credential
    # scrubber replaced part of the code with '******'); the standard
    # prompt loops are reconstructed below — confirm against upstream.
    while not answer.username:
        answer.username = six.moves.input('GitHub username: ')
    while not answer.token:
        # Use getpass so the token is not echoed to the terminal.
        answer.token = getpass.getpass('GitHub token (input is hidden): ')
    return answer
def _configure_github_config():
    """Determine and return artman user GitHub config.

    Returns:
        GitHubConfig: The new GitHub configuration.
    """
    answer = GitHubConfig()
    logger.info('Since you intend to publish to GitHub, you need to '
                'supply credentials.')
    logger.info('Create an access token at: '
                'https://github.com/settings/tokens')
    logger.info('It needs the "repo" scope and nothing else.')
    # NOTE(review): the original source here was corrupted (a credential
    # scrubber replaced part of the code with '******'); the standard
    # prompt loops are reconstructed below — confirm against upstream.
    while not answer.username:
        answer.username = six.moves.input('GitHub username: ')
    while not answer.token:
        # Use getpass so the token is not echoed to the terminal.
        answer.token = getpass.getpass('GitHub token (input is hidden): ')
    return answer
def configure(log_level=logging.INFO):
    """Allow the user to write a new configuration file.

    Returns:
        int: An exit status.
    """
    user_config = {}

    # Walk the user through basic configuration.
    setup_logging(log_level)
    logger.info('Welcome to artman. We will get you configured.')
    logger.info('When this is done, config will be stored in ~/.artman/config.yaml.')
    logger.info('')

    # Go through each step.
    # These are split out to make testing them easier.
    user_config['local_paths'] = _configure_local_paths(
        user_config.get('local_paths', {}))
    publisher = _configure_publish()
    user_config['publish'] = publisher
    if publisher == 'github':
        user_config['github'] = _configure_github(
            user_config.get('github', {}))

    # Write the final configuration.
    config_yaml = yaml.dump(
        user_config,
        block_seq_indent=2,
        default_flow_style=False,
        indent=2,
    )
    if isinstance(config_yaml, six.binary_type):
        config_yaml = config_yaml.decode('utf8')

    # Make sure the config directory exists, then write the file.
    try:
        os.makedirs(os.path.expanduser('~/.artman/'))
    except OSError:
        pass
    with io.open(os.path.expanduser('~/.artman/config.yaml'), 'w+') as file_:
        file_.write(u'---\n')
        file_.write(config_yaml)
    logger.success('Configuration written successfully to '
                   '~/.artman/config.yaml.')
def _configure_github(github):
    """Determine and return the GitHub configuration.

    Args:
        github (dict): The current GitHub configuration.

    Returns:
        dict: The new GitHub configuration.
    """
    # Copy so the caller's dict is not mutated.
    answer = copy(github)
    logger.info('Since you intend to publish to GitHub, you need to '
                'supply credentials.')
    logger.info('Create an access token at: '
                'https://github.com/settings/tokens')
    logger.info('It needs the "repo" scope and nothing else.')
    # NOTE(review): the original source here was corrupted (a credential
    # scrubber replaced part of the code with '******'); the standard
    # prompt loops are reconstructed below — confirm against upstream.
    while not answer.get('username'):
        answer['username'] = six.moves.input('GitHub username: ')
    while not answer.get('token'):
        # Use getpass so the token is not echoed to the terminal.
        answer['token'] = getpass.getpass('GitHub token (input is hidden): ')
    return answer