def main(args):
    """
    Store the defect results in the specified input list as bug reports in
    the database.
    """
    logger.setup_logger(args.verbose if 'verbose' in args else None)

    if not host_check.check_zlib():
        raise Exception("zlib is not available on the system!")

    # To ensure the help message prints the default folder properly,
    # the 'default' for 'args.input' is a string, not a list.
    # But we need lists for the foreach here to work.
    if isinstance(args.input, str):
        args.input = [args.input]

    if 'name' not in args:
        LOG.debug("Generating name for analysis...")
        generated = __get_run_name(args.input)
        if generated:
            setattr(args, 'name', generated)
        else:
            LOG.error("No suitable name was found in the inputs for the "
                      "analysis run. Please specify one by passing the "
                      "--name run_name argument in the invocation.")
            sys.exit(2)  # argparse returns error code 2 for bad invocations.

    LOG.info("Storing analysis results for run '" + args.name + "'")

    if 'force' in args:
        LOG.info("argument --force was specified: the run with name '" +
                 args.name + "' will be deleted.")

    protocol, host, port, product_name = split_product_url(args.product_url)

    # Before any transmission happens, check if we have the PRODUCT_STORE
    # permission to prevent a possibly long ZIP operation only to get an
    # error later on.
    product_client = libclient.setup_product_client(protocol, host, port,
                                                    product_name)
    product_id = product_client.getCurrentProduct().id

    auth_client, _ = libclient.setup_auth_client(protocol, host, port)
    has_perm = libclient.check_permission(
        auth_client, Permission.PRODUCT_STORE, {'productID': product_id})
    if not has_perm:
        LOG.error("You are not authorised to store analysis results in "
                  "product '{0}'".format(product_name))
        sys.exit(1)

    # Setup connection to the remote server.
    client = libclient.setup_client(args.product_url, product_client=False)

    LOG.debug("Initializing client connecting to {0}:{1}/{2} done.".format(
        host, port, product_name))

    _, zip_file = tempfile.mkstemp('.zip')
    LOG.debug("Will write mass store ZIP to '{0}'...".format(zip_file))

    try:
        assemble_zip(args.input, zip_file, client)

        if os.stat(zip_file).st_size > MAX_UPLOAD_SIZE:
            LOG.error("The result list to upload is too big (max: {})."
                      .format(sizeof_fmt(MAX_UPLOAD_SIZE)))
            sys.exit(1)

        with open(zip_file, 'rb') as zf:
            b64zip = base64.b64encode(zf.read())

        context = generic_package_context.get_context()

        trim_path_prefixes = args.trim_path_prefix if \
            'trim_path_prefix' in args else None

        client.massStoreRun(args.name,
                            args.tag if 'tag' in args else None,
                            str(context.version),
                            b64zip,
                            'force' in args,
                            trim_path_prefixes)

        LOG.info("Storage finished successfully.")
    except RequestFailed as reqfail:
        if reqfail.errorCode == ErrorCode.SOURCE_FILE:
            header = ['File', 'Line', 'Checker name']
            table = twodim_to_str('table', header,
                                  [c.split('|') for c in reqfail.extraInfo])
            LOG.warning("Setting the review statuses for some reports failed "
                        "because of invalid source code comments: "
                        "{0}\n {1}".format(reqfail.message, table))
        sys.exit(1)
    except Exception as ex:
        LOG.error("Storage failed: " + str(ex))
        sys.exit(1)
    finally:
        # Remove the temporary ZIP whether the upload succeeded or not.
        os.remove(zip_file)
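
# A minimal sketch (an assumption for illustration, not the actual CLI
# wiring) of how main() could be driven from argparse. CodeChecker registers
# this subcommand through its own command framework; the hypothetical parser
# below only shows the arguments main() reads from 'args'. Note the use of
# argparse.SUPPRESS as the default: when a flag is not given, the attribute
# is absent from the Namespace, which is what makes the "'name' in args"
# style membership checks in main() work.
def _example_parser():
    """ Illustrative parser for the arguments consumed by main(). """
    import argparse
    parser = argparse.ArgumentParser(description="Store analysis results.")
    # A string default (not a list) keeps the help message readable;
    # main() normalizes it to a list.
    parser.add_argument('input', nargs='*', default='./reports')
    parser.add_argument('--name', default=argparse.SUPPRESS)
    parser.add_argument('--tag', default=argparse.SUPPRESS)
    parser.add_argument('--force', action='store_true',
                        default=argparse.SUPPRESS)
    parser.add_argument('--trim-path-prefix', dest='trim_path_prefix',
                        default=argparse.SUPPRESS)
    parser.add_argument('--url', dest='product_url',
                        default='localhost:8001/Default')
    parser.add_argument('--verbose', default=argparse.SUPPRESS)
    return parser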
def get_analysis_statistics(inputs, limits):
    """
    Collects analyzer statistics files from the given inputs and returns
    their paths.
    """
    statistics_files = []
    for input_path in inputs:
        input_path = os.path.abspath(input_path)

        if not os.path.exists(input_path):
            raise OSError(errno.ENOENT,
                          "Input path does not exist", input_path)

        dirs = []
        if os.path.isfile(input_path):
            files = [input_path]
        else:
            _, dirs, files = next(os.walk(input_path), ([], [], []))

        for inp_f in files:
            if inp_f == 'compile_cmd.json':
                compilation_db = os.path.join(input_path, inp_f)
                compilation_db_size = \
                    limits.get(StoreLimitKind.COMPILATION_DATABASE_SIZE)

                if os.stat(compilation_db).st_size > compilation_db_size:
                    LOG.debug("Compilation database is too big (max: %s).",
                              sizeof_fmt(compilation_db_size))
                else:
                    LOG.debug("Copying file '%s' to analyzer statistics "
                              "ZIP...", compilation_db)
                    statistics_files.append(compilation_db)
            elif inp_f in ['compiler_includes.json',
                           'compiler_target.json',
                           'metadata.json']:
                analyzer_file = os.path.join(input_path, inp_f)
                statistics_files.append(analyzer_file)

        for inp_dir in dirs:
            if inp_dir == 'failed':
                failure_zip_limit = limits.get(StoreLimitKind.FAILURE_ZIP_SIZE)

                failed_dir = os.path.join(input_path, inp_dir)
                _, _, files = next(os.walk(failed_dir), ([], [], []))

                # Collect failure ZIPs until their total size reaches the
                # upload limit.
                failed_files_size = 0
                for f in files:
                    failure_zip = os.path.join(failed_dir, f)
                    failure_zip_size = os.stat(failure_zip).st_size
                    failed_files_size += failure_zip_size

                    if failed_files_size > failure_zip_limit:
                        LOG.debug("Reached the maximum uploadable failure "
                                  "ZIP size (max: %s).",
                                  sizeof_fmt(failure_zip_limit))
                        break
                    else:
                        LOG.debug("Copying failure zip file '%s' to analyzer "
                                  "statistics ZIP...", failure_zip)
                        statistics_files.append(failure_zip)

    return statistics_files
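
# A minimal usage sketch (an illustrative helper, not part of the original
# module): archive the file paths collected by get_analysis_statistics().
# In CodeChecker itself the statistics files are packed into the mass-store
# ZIP by assemble_zip(); the helper name and the 'limits' mapping here are
# assumptions for demonstration only.
def _write_statistics_zip_example(inputs, limits, zip_path):
    """ Illustrative helper: pack the collected statistics files. """
    import zipfile
    with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as archive:
        for stat_file in get_analysis_statistics(inputs, limits):
            # Store each file under its base name. A real implementation
            # would need unique archive names, since several inputs may
            # each contain e.g. a 'metadata.json'.
            archive.write(stat_file, os.path.basename(stat_file))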