def forbidtoken(files, config_name):
    """Reject staged files containing a forbidden token.

    The token detector, the warning text and the default file patterns are
    looked up in the module-level ``tr`` table under ``config_name``; the
    patterns may be overridden through the 'forbidtoken-hook.<name>' option.
    Returns True when at least one file matched (hook must abort).
    """
    patterns = common.get_option('forbidtoken-hook.' + config_name,
                                 default=tr[config_name][2]).split()
    common.note('Checking for "' + config_name + '" tokens on ' +
                ', '.join(patterns) + ' files')
    detect = tr[config_name][0]
    failed = False
    checked = 0

    def matching_lines(text):
        # Yield the 1-based numbers of the lines where the detector fires.
        for num, hit in enumerate(re.finditer(".*\n", text, re.MULTILINE), 1):
            if detect(hit.group()):
                yield num

    for candidate in files:
        if not any(candidate.fnmatch(p) for p in patterns):
            continue
        common.trace('Checking ' + str(candidate.path) + '...')
        data = candidate.contents
        if not common.binary(data) and detect(data.decode()):
            # Print the generic warning only once, before the first offender.
            if not failed:
                common.error(WARNING % (tr[config_name][1]))
            for num in matching_lines(data.decode()):
                common.error(FILEWARN % (candidate.path, num))
            failed = True
        checked += 1
    if failed:
        common.error('Hook "' + config_name + '" failed.')
    common.note('%d file(s) checked.' % checked)
    return failed
def filesize(files):
    """Reject staged files larger than the configured size limit.

    The limit comes from 'filesize-hook.max-size' (default 1 MiB); when
    'filesize-hook.type' is "binary", only binary files are checked.
    Returns True when at least one file exceeds the limit (hook must abort).
    """
    max_size = int(common.get_option('filesize-hook.max-size',
                                     default=1024 ** 2))
    binary_only = common.get_option(
        'filesize-hook.type', "all").strip().lower() == "binary"
    common.note('Checking files size...')
    oversized = []
    checked = 0
    for item in files:
        # In "binary" mode, skip anything that is not a binary file.
        if binary_only and not common.binary(item.contents):
            continue
        common.trace('Checking ' + str(item.path) + ' size...')
        checked += 1
        if item.size > max_size:
            oversized.append(item)
    common.note('%d file(s) checked.' % checked)
    if not oversized:
        return False
    common.error(WARNING % max_size)
    for item in oversized:
        common.error(FILEWARN % (item.path, item.size, max_size))
    return True
def main():
  """Archives the selected payload to the isolate server, then downloads and
  runs it locally through run_isolated.py.

  Returns 0 on success, or the failing subprocess's exit code.
  """
  args = common.parse_args(use_isolate_server=True, use_swarming=False)
  # NOTE(review): unicode() implies this example targets Python 2 — confirm.
  tempdir = unicode(tempfile.mkdtemp(prefix=u'hello_world'))
  try:
    isolated_sha1 = common.archive(
        tempdir, args.isolate_server, args.verbose, args.which)
    common.note(
        'Downloading from %s and running in a temporary directory'
        % args.isolate_server)
    # Separate caches for isolated content and named caches.
    cachei = os.path.join(tempdir, u'cachei')
    cachen = os.path.join(tempdir, u'cachen')
    common.run([
        'run_isolated.py',
        '--cache', cachei.encode('utf-8'),
        '--named-cache-root', cachen.encode('utf-8'),
        '--isolate-server', args.isolate_server,
        '--isolated', isolated_sha1,
        '--no-log',
        # Everything after '--' is the task's own command line.
        '--',
        args.which + u'.py',
        'Dear 💩',
        '${ISOLATED_OUTDIR}',
    ], args.verbose)
    return 0
  except subprocess.CalledProcessError as e:
    return e.returncode
  finally:
    shutil.rmtree(tempdir)
def main():
  """Fully offline flow: isolates hello_world into a local hashtable
  directory, then runs it from there via run_isolated.py (no server).

  Returns 0 on success, or the failing subprocess's exit code.
  """
  options = common.parse_args(use_isolate_server=False, use_swarming=False)
  tempdir = tempfile.mkdtemp(prefix='hello_world')
  try:
    # All the files are put in a temporary directory. This is optional and
    # simply done so the current directory doesn't have the following files
    # created:
    # - hello_world.isolated
    # - hello_world.isolated.state
    # - cache/
    # - hashtable/
    cachedir = os.path.join(tempdir, 'cache')
    hashtabledir = os.path.join(tempdir, 'hashtable')
    isolateddir = os.path.join(tempdir, 'isolated')
    isolated = os.path.join(isolateddir, 'hello_world.isolated')
    os.mkdir(isolateddir)
    common.note('Archiving to %s' % hashtabledir)
    # TODO(maruel): Parse the output from run() to get 'isolated_sha1'.
    common.run([
        'isolate.py',
        'hashtable',
        '--isolate', os.path.join('payload', 'hello_world.isolate'),
        '--isolated', isolated,
        '--outdir', hashtabledir,
        '--config-variable', 'OS', 'Yours',
    ], options.verbose)
    common.note(
        'Running the executable in a temporary directory from the hash table'
    )
    # Hash the .isolated file ourselves since run() output is not parsed.
    with open(isolated, 'rb') as f:
      isolated_sha1 = hashlib.sha1(f.read()).hexdigest()
    common.run(
        [
          'run_isolated.py',
          '--cache', cachedir,
          '--indir', hashtabledir,
          '--hash', isolated_sha1,
          # TODO(maruel): Should not require this.
          '--namespace', 'default',
          '--no-log',
        ],
        options.verbose)
    return 0
  except subprocess.CalledProcessError as e:
    return e.returncode
  finally:
    shutil.rmtree(tempdir)
def main():
  """Isolates the payload, triggers it as 2 Swarming shards, collects the
  outputs into example_result/ and dumps every result file to stdout.

  Returns 0 on success, or the failing subprocess's exit code.
  """
  options = common.parse_args(use_isolate_server=True, use_swarming=True)
  try:
    tempdir = tempfile.mkdtemp(prefix=u'hello_world')
    try:
      _, hashval = common.isolate(tempdir, options.isolate_server,
                                  options.swarming_os, options.verbose)
      json_file = os.path.join(tempdir, 'task.json')
      common.note('Running on %s' % options.swarming)
      cmd = [
        'swarming.py',
        'trigger',
        '--swarming', options.swarming,
        '--isolate-server', options.isolate_server,
        '--dimension', 'os', options.swarming_os,
        '--task-name', options.task_name,
        '--dump-json', json_file,
        '--isolated', hashval,
        '--shards', '2',
      ]
      if options.idempotent:
        cmd.append('--idempotent')
      if options.priority is not None:
        cmd.extend(('--priority', str(options.priority)))
      # Everything after '--' is forwarded to the task's command line.
      cmd.extend(('--', '${ISOLATED_OUTDIR}'))
      common.run(cmd, options.verbose)
      common.note('Getting results from %s' % options.swarming)
      common.run([
          'swarming.py',
          'collect',
          '--swarming', options.swarming,
          '--json', json_file,
          '--task-output-dir', 'example_result',
      ], options.verbose)
      # Print the content of every file produced by the shards.
      for root, _, files in os.walk('example_result'):
        for name in files:
          p = os.path.join(root, name)
          with open(p, 'rb') as f:
            print('%s content:' % p)
            print(f.read())
      return 0
    finally:
      shutil.rmtree(tempdir)
  except subprocess.CalledProcessError as e:
    return e.returncode
def main():
  """Validates the .isolate locally, then lets `swarming.py run` archive,
  trigger and collect the task in a single command.

  Returns 0 on success, or the failing subprocess's exit code.
  """
  options = common.parse_args(use_isolate_server=True, use_swarming=True)
  tempdir = tempfile.mkdtemp(prefix=u'hello_world')
  try:
    # All the files are put in a temporary directory. This is optional and
    # simply done so the current directory doesn't have the following files
    # created:
    # - hello_world.isolated
    # - hello_world.isolated.state
    isolated = os.path.join(tempdir, 'hello_world.isolated')
    common.note(
        'Creating hello_world.isolated. Note that this doesn\'t archives '
        'anything.')
    common.run([
        'isolate.py',
        'check',
        '--isolate', os.path.join('payload', 'hello_world.isolate'),
        '--isolated', isolated,
        '--config-variable', 'OS', options.swarming_os,
    ], options.verbose)
    common.note('Running the job remotely. This:\n'
                ' - archives to %s\n'
                ' - runs and collect results via %s' %
                (options.isolate_server, options.swarming))
    cmd = [
      'swarming.py',
      'run',
      '--swarming', options.swarming,
      '--isolate-server', options.isolate_server,
      '--dimension', 'os', options.swarming_os,
      '--task-name', options.task_name,
      isolated,
    ]
    if options.idempotent:
      cmd.append('--idempotent')
    if options.priority is not None:
      cmd.extend(('--priority', str(options.priority)))
    common.run(cmd, options.verbose)
    return 0
  except subprocess.CalledProcessError as e:
    return e.returncode
  finally:
    shutil.rmtree(tempdir)
def main():
  """Archives hello_world to the isolate server with isolate.py, then pulls
  it back down and runs it locally with run_isolated.py.

  Returns 0 on success, or the failing subprocess's exit code.
  """
  options = common.parse_args(use_isolate_server=True, use_swarming=False)
  tempdir = tempfile.mkdtemp(prefix='hello_world')
  try:
    # All the files are put in a temporary directory. This is optional and
    # simply done so the current directory doesn't have the following files
    # created:
    # - hello_world.isolated
    # - hello_world.isolated.state
    # - cache/
    cachedir = os.path.join(tempdir, 'cache')
    isolateddir = os.path.join(tempdir, 'isolated')
    isolated = os.path.join(isolateddir, 'hello_world.isolated')
    os.mkdir(isolateddir)
    common.note('Archiving to %s' % options.isolate_server)
    # TODO(maruel): Parse the output from run() to get 'isolated_sha1'.
    # Note that --config-variable OS is not specified and nobody cares.
    common.run([
        'isolate.py',
        'archive',
        '--isolate', os.path.join('payload', 'hello_world.isolate'),
        '--isolated', isolated,
        '--isolate-server', options.isolate_server,
    ], options.verbose)
    common.note(
        'Downloading from %s and running in a temporary directory'
        % options.isolate_server)
    # Hash the .isolated file ourselves since run() output is not parsed.
    with open(isolated, 'rb') as f:
      isolated_sha1 = hashlib.sha1(f.read()).hexdigest()
    common.run([
        'run_isolated.py',
        '--cache', cachedir,
        '--isolate-server', options.isolate_server,
        '--hash', isolated_sha1,
        '--no-log',
    ], options.verbose)
    return 0
  except subprocess.CalledProcessError as e:
    return e.returncode
  finally:
    shutil.rmtree(tempdir)
def main():
  """Archives the payload directory, hand-crafts a custom .isolated that
  includes it, archives that too, and runs the result on Swarming.

  Returns 0 on success, or the failing subprocess's exit code.
  """
  options = common.parse_args(use_isolate_server=True, use_swarming=True)
  try:
    common.note(
        'Archiving directory \'payload\' to %s' % options.isolate_server)
    # First token of the output is the hash of the archived tree.
    payload_isolated_sha1 = common.capture(
        [
          'isolateserver.py',
          'archive',
          '--isolate-server', options.isolate_server,
          'payload',
        ]).split()[0]
    common.note(
        'Archiving custom .isolated file to %s' % options.isolate_server)
    handle, isolated = tempfile.mkstemp(
        prefix=u'hello_world', suffix=u'.isolated')
    os.close(handle)
    try:
      # Minimal .isolated structure: run hello_world.py from the included
      # payload tree.
      data = {
        'algo': 'sha-1',
        'command': ['python', 'hello_world.py', 'Custom'],
        'includes': [payload_isolated_sha1],
        'version': '1.0',
      }
      with open(isolated, 'wb') as f:
        json.dump(data, f, sort_keys=True, separators=(',',':'))
      isolated_sha1 = common.capture(
          [
            'isolateserver.py',
            'archive',
            '--isolate-server', options.isolate_server,
            isolated,
          ]).split()[0]
    finally:
      common.note('Deleting temporary file, it is not necessary anymore.')
      os.remove(isolated)
    # Now trigger as usual. You could look at run_example_swarming_involved
    # for the involved way but use the short way here.
    common.note('Running %s on %s' % (isolated_sha1, options.swarming))
    cmd = [
      'swarming.py',
      'run',
      '--swarming', options.swarming,
      '--isolate-server', options.isolate_server,
      '--dimension', 'os', options.swarming_os,
      '--dimension', 'pool', 'default',
      '--task-name', options.task_name,
      isolated_sha1,
    ]
    if options.idempotent:
      cmd.append('--idempotent')
    if options.priority is not None:
      cmd.extend(('--priority', str(options.priority)))
    common.run(cmd, options.verbose)
    return 0
  except subprocess.CalledProcessError as e:
    return e.returncode
def main():
  """Isolates the payload, triggers 2 Swarming shards in the 'default' pool,
  collects outputs into example_result/ and dumps each file to stdout.

  Returns 0 on success, or the failing subprocess's exit code.
  """
  options = common.parse_args(use_isolate_server=True, use_swarming=True)
  try:
    tempdir = tempfile.mkdtemp(prefix=u'hello_world')
    try:
      _, hashval = common.isolate(
          tempdir, options.isolate_server, options.swarming_os,
          options.verbose)
      json_file = os.path.join(tempdir, 'task.json')
      common.note('Running on %s' % options.swarming)
      cmd = [
        'swarming.py',
        'trigger',
        '--swarming', options.swarming,
        '--isolate-server', options.isolate_server,
        '--dimension', 'os', options.swarming_os,
        '--dimension', 'pool', 'default',
        '--task-name', options.task_name,
        '--dump-json', json_file,
        '--isolated', hashval,
        '--shards', '2',
      ]
      if options.idempotent:
        cmd.append('--idempotent')
      if options.priority is not None:
        cmd.extend(('--priority', str(options.priority)))
      # Everything after '--' is forwarded to the task's command line.
      cmd.extend(('--', '${ISOLATED_OUTDIR}'))
      common.run(cmd, options.verbose)
      common.note('Getting results from %s' % options.swarming)
      common.run(
          [
            'swarming.py',
            'collect',
            '--swarming', options.swarming,
            '--json', json_file,
            '--task-output-dir', 'example_result',
          ], options.verbose)
      # Print the content of every file produced by the shards.
      for root, _, files in os.walk('example_result'):
        for name in files:
          p = os.path.join(root, name)
          with open(p, 'rb') as f:
            print('%s content:' % p)
            print(f.read())
      return 0
    finally:
      shutil.rmtree(tempdir)
  except subprocess.CalledProcessError as e:
    return e.returncode
def main():
  """Fully offline flow: isolates hello_world into a local hashtable
  directory, then runs it from there via run_isolated.py (no server).

  Returns 0 on success, or the failing subprocess's exit code.
  """
  options = common.parse_args(use_isolate_server=False, use_swarming=False)
  tempdir = tempfile.mkdtemp(prefix='hello_world')
  try:
    # All the files are put in a temporary directory. This is optional and
    # simply done so the current directory doesn't have the following files
    # created:
    # - hello_world.isolated
    # - hello_world.isolated.state
    # - cache/
    # - hashtable/
    cachedir = os.path.join(tempdir, 'cache')
    hashtabledir = os.path.join(tempdir, 'hashtable')
    isolateddir = os.path.join(tempdir, 'isolated')
    isolated = os.path.join(isolateddir, 'hello_world.isolated')
    os.mkdir(isolateddir)
    common.note('Archiving to %s' % hashtabledir)
    # TODO(maruel): Parse the output from run() to get 'isolated_sha1'.
    common.run(
        [
          'isolate.py',
          'hashtable',
          '--isolate', os.path.join('payload', 'hello_world.isolate'),
          '--isolated', isolated,
          '--outdir', hashtabledir,
          '--config-variable', 'OS', 'Yours',
        ],
        options.verbose)
    common.note(
        'Running the executable in a temporary directory from the hash table')
    # Hash the .isolated file ourselves since run() output is not parsed.
    with open(isolated, 'rb') as f:
      isolated_sha1 = hashlib.sha1(f.read()).hexdigest()
    common.run(
        [
          'run_isolated.py',
          '--cache', cachedir,
          '--indir', hashtabledir,
          '--hash', isolated_sha1,
          # TODO(maruel): Should not require this.
          '--namespace', 'default',
          '--no-log',
        ],
        options.verbose)
    return 0
  except subprocess.CalledProcessError as e:
    return e.returncode
  finally:
    shutil.rmtree(tempdir)
def main():
  """Validates the .isolate locally, then lets `swarming.py run` archive,
  trigger and collect the task in a single command.

  Returns 0 on success, or the failing subprocess's exit code.
  """
  options = common.parse_args(use_isolate_server=True, use_swarming=True)
  tempdir = tempfile.mkdtemp(prefix=u'hello_world')
  try:
    # All the files are put in a temporary directory. This is optional and
    # simply done so the current directory doesn't have the following files
    # created:
    # - hello_world.isolated
    # - hello_world.isolated.state
    isolated = os.path.join(tempdir, 'hello_world.isolated')
    common.note(
        'Creating hello_world.isolated. Note that this doesn\'t archives '
        'anything.')
    common.run(
        [
          'isolate.py',
          'check',
          '--isolate', os.path.join('payload', 'hello_world.isolate'),
          '--isolated', isolated,
          '--config-variable', 'OS', options.swarming_os,
        ],
        options.verbose)
    common.note(
        'Running the job remotely. This:\n'
        ' - archives to %s\n'
        ' - runs and collect results via %s' %
        (options.isolate_server, options.swarming))
    cmd = [
      'swarming.py',
      'run',
      '--swarming', options.swarming,
      '--isolate-server', options.isolate_server,
      '--dimension', 'os', options.swarming_os,
      '--task-name', options.task_name,
      isolated,
    ]
    if options.idempotent:
      cmd.append('--idempotent')
    if options.priority is not None:
      cmd.extend(('--priority', str(options.priority)))
    common.run(cmd, options.verbose)
    return 0
  except subprocess.CalledProcessError as e:
    return e.returncode
  finally:
    shutil.rmtree(tempdir)
def main():
  """Isolates the payload and runs it on Swarming via `swarming.py run`,
  decorating the output and saving a task summary JSON locally.

  Returns 0 on success, or the failing subprocess's exit code.
  """
  options = common.parse_args(use_isolate_server=True, use_swarming=True)
  tempdir = tempfile.mkdtemp(prefix=u'hello_world')
  try:
    isolated_hash = common.isolate(
        tempdir, options.isolate_server, options.swarming_os,
        options.verbose)
    common.note('Running the job remotely. This:\n'
                ' - archives to %s\n'
                ' - runs and collect results via %s' %
                (options.isolate_server, options.swarming))
    cmd = [
      'swarming.py',
      'run',
      '--swarming', options.swarming,
      '--isolate-server', options.isolate_server,
      '--dimension', 'os', options.swarming_os,
      '--dimension', 'pool', 'default',
      '--task-name', options.task_name,
      '--task-summary-json', 'example_result.json',
      '--decorate',
      '--isolated', isolated_hash,
    ]
    if options.idempotent:
      cmd.append('--idempotent')
    if options.priority is not None:
      cmd.extend(('--priority', str(options.priority)))
    if options.service_account:
      cmd.extend(('--service-account', options.service_account))
    common.run(cmd, options.verbose)
    # Show the summary written by --task-summary-json.
    with open('example_result.json', 'rb') as f:
      print('example_result.json content:')
      print(f.read())
    return 0
  except subprocess.CalledProcessError as e:
    return e.returncode
  finally:
    shutil.rmtree(tempdir)
def main():
  """Archives hello_world to the isolate server with isolate.py, then pulls
  it back down and runs it locally with run_isolated.py.

  Returns 0 on success, or the failing subprocess's exit code.
  """
  options = common.parse_args(use_isolate_server=True, use_swarming=False)
  tempdir = tempfile.mkdtemp(prefix='hello_world')
  try:
    # All the files are put in a temporary directory. This is optional and
    # simply done so the current directory doesn't have the following files
    # created:
    # - hello_world.isolated
    # - hello_world.isolated.state
    # - cache/
    cachedir = os.path.join(tempdir, 'cache')
    isolateddir = os.path.join(tempdir, 'isolated')
    isolated = os.path.join(isolateddir, 'hello_world.isolated')
    os.mkdir(isolateddir)
    common.note('Archiving to %s' % options.isolate_server)
    # TODO(maruel): Parse the output from run() to get 'isolated_sha1'.
    # Note that --config-variable OS is not specified and nobody cares.
    common.run(
        [
          'isolate.py',
          'archive',
          '--isolate', os.path.join('payload', 'hello_world.isolate'),
          '--isolated', isolated,
          '--isolate-server', options.isolate_server,
        ],
        options.verbose)
    common.note(
        'Downloading from %s and running in a temporary directory'
        % options.isolate_server)
    # Hash the .isolated file ourselves since run() output is not parsed.
    with open(isolated, 'rb') as f:
      isolated_sha1 = hashlib.sha1(f.read()).hexdigest()
    common.run(
        [
          'run_isolated.py',
          '--cache', cachedir,
          '--isolate-server', options.isolate_server,
          '--hash', isolated_sha1,
          '--no-log',
        ],
        options.verbose)
    return 0
  except subprocess.CalledProcessError as e:
    return e.returncode
  finally:
    shutil.rmtree(tempdir)
def check_commit_messages(commit_messages):
    """Validate a batch of commit messages (title and author checks).

    Each entry is expected in the "--pretty=format:%h:%aE:%s" layout, i.e.
    "<hash>:<author-email>:<title>"; malformed entries are skipped.
    Returns the list of per-check outcomes (True means an error was found).
    """
    outcome = [False]
    for message in commit_messages:
        # Split commit message according to "--pretty=format:%h:%aE:%s";
        # the title itself may contain ':' so only split twice.
        parts = message.split(':', 2)
        if len(parts) == 3:
            sha, author, title = parts
            outcome.append(__check_commit_title(sha, title))
            outcome.append(__check_commit_author(sha, author))
    common.note('%d commit(s) checked, %d error(s) found.' %
                (len(commit_messages), outcome.count(True)))
    return outcome
def main():
  """Isolates the payload and runs it on Swarming via `swarming.py run`,
  decorating the output and saving a task summary JSON locally.

  Returns 0 on success, or the failing subprocess's exit code.
  """
  options = common.parse_args(use_isolate_server=True, use_swarming=True)
  tempdir = tempfile.mkdtemp(prefix=u"hello_world")
  try:
    isolated, _ = common.isolate(
        tempdir, options.isolate_server, options.swarming_os,
        options.verbose)
    common.note(
        "Running the job remotely. This:\n"
        " - archives to %s\n"
        " - runs and collect results via %s"
        % (options.isolate_server, options.swarming)
    )
    cmd = [
      "swarming.py",
      "run",
      "--swarming", options.swarming,
      "--isolate-server", options.isolate_server,
      "--dimension", "os", options.swarming_os,
      "--task-name", options.task_name,
      "--task-summary-json", "example_result.json",
      "--decorate",
      isolated,
    ]
    if options.idempotent:
      cmd.append("--idempotent")
    if options.priority is not None:
      cmd.extend(("--priority", str(options.priority)))
    common.run(cmd, options.verbose)
    # Show the summary written by --task-summary-json.
    with open("example_result.json", "rb") as f:
      print("example_result.json content:")
      print(f.read())
    return 0
  except subprocess.CalledProcessError as e:
    return e.returncode
  finally:
    shutil.rmtree(tempdir)
def check_file(file): common.note('Checking with ' + CPPCHECK_PATH + ' file: ' + file) # Invoke cppcheck for source code files p = subprocess.Popen([CPPCHECK_PATH, \ '--suppress=missingInclude', \ '--suppress=noExplicitConstructor', \ '--suppress=unmatchedSuppression', \ '--suppress=unusedFunction', \ '--enable=all', '--quiet', \ '--template={file}@!@{line}@!@{severity}@!@{message}', \ file], \ stdout=subprocess.PIPE, stderr=subprocess.STDOUT) out, err = p.communicate() if err is not None: print(err) if out is not None: print(out) if p.wait() != 0: common.error('Cppcheck failure on file: ' + file) common.error('Aborting') return True if out: common.error('Cppcheck failure on file: ' + file) for line in out.splitlines(): words = re.findall('(.+)@!@(.+)@!@(.+)@!@(.+)', line) if (words): num_line = words[0][1] severity = words[0][2] message = words[0][3] common.error('[%s] line %s: %s' % (severity, num_line, message)) common.error(SEPARATOR) return True return False
def main():
  """Isolates the payload and runs it on Swarming ('default' pool) via
  `swarming.py run`, decorating output and saving a task summary JSON.

  Returns 0 on success, or the failing subprocess's exit code.
  """
  options = common.parse_args(use_isolate_server=True, use_swarming=True)
  tempdir = tempfile.mkdtemp(prefix=u'hello_world')
  try:
    isolated, _ = common.isolate(
        tempdir, options.isolate_server, options.swarming_os,
        options.verbose)
    common.note(
        'Running the job remotely. This:\n'
        ' - archives to %s\n'
        ' - runs and collect results via %s' %
        (options.isolate_server, options.swarming))
    cmd = [
      'swarming.py',
      'run',
      '--swarming', options.swarming,
      '--isolate-server', options.isolate_server,
      '--dimension', 'os', options.swarming_os,
      '--dimension', 'pool', 'default',
      '--task-name', options.task_name,
      '--task-summary-json', 'example_result.json',
      '--decorate',
      isolated,
    ]
    if options.idempotent:
      cmd.append('--idempotent')
    if options.priority is not None:
      cmd.extend(('--priority', str(options.priority)))
    common.run(cmd, options.verbose)
    # Show the summary written by --task-summary-json.
    with open('example_result.json', 'rb') as f:
      print('example_result.json content:')
      print(f.read())
    return 0
  except subprocess.CalledProcessError as e:
    return e.returncode
  finally:
    shutil.rmtree(tempdir)
def main():
  """Archives hello_world, triggers it on Swarming, then collects results.

  Returns 0 on success, or a non-zero exit status when a subprocess fails.
  """
  options = common.parse_args(use_isolate_server=True, use_swarming=True)
  try:
    tempdir = tempfile.mkdtemp(prefix=u'hello_world')
    try:
      # All the files are put in a temporary directory. This is optional and
      # simply done so the current directory doesn't have the following files
      # created:
      # - hello_world.isolated
      # - hello_world.isolated.state
      isolated = os.path.join(tempdir, 'hello_world.isolated')
      common.note('Archiving to %s' % options.isolate_server)
      common.run(
          [
            'isolate.py',
            'archive',
            '--isolate', os.path.join('payload', 'hello_world.isolate'),
            '--isolated', isolated,
            '--isolate-server', options.isolate_server,
            '--config-variable', 'OS', options.swarming_os,
          ],
          options.verbose)
      # Hash the .isolated file to know what to trigger.
      with open(isolated, 'rb') as f:
        hashval = hashlib.sha1(f.read()).hexdigest()
      json_file = os.path.join(tempdir, 'task.json')
      common.note('Running on %s' % options.swarming)
      cmd = [
        'swarming.py',
        'trigger',
        '--swarming', options.swarming,
        '--isolate-server', options.isolate_server,
        '--dimension', 'os', options.swarming_os,
        '--task-name', options.task_name,
        '--dump-json', json_file,
        '--isolated', hashval,
      ]
      if options.idempotent:
        cmd.append('--idempotent')
      if options.priority is not None:
        cmd.extend(('--priority', str(options.priority)))
      common.run(cmd, options.verbose)
      common.note('Getting results from %s' % options.swarming)
      common.run(
          [
            'swarming.py',
            'collect',
            '--swarming', options.swarming,
            '--json', json_file,
          ],
          options.verbose)
      return 0
    finally:
      shutil.rmtree(tempdir)
  except subprocess.CalledProcessError as e:
    # Bug fix: the original did `print e.returncode or 1` (a Python-2 print
    # statement) which discarded the exit status — main() returned None and
    # the process exited 0 even on failure. Return a non-zero status instead,
    # consistent with the sibling examples; `or 1` keeps the status non-zero
    # even if returncode is 0/None.
    return e.returncode or 1
def main():
  """Archives hello_world, triggers it on Swarming by task name, then
  collects results.

  Returns 0 on success, or a non-zero exit status when a subprocess fails.
  """
  options = common.parse_args(use_isolate_server=True, use_swarming=True)
  try:
    tempdir = tempfile.mkdtemp(prefix='hello_world')
    try:
      # All the files are put in a temporary directory. This is optional and
      # simply done so the current directory doesn't have the following files
      # created:
      # - hello_world.isolated
      # - hello_world.isolated.state
      isolated = os.path.join(tempdir, 'hello_world.isolated')
      common.note('Archiving to %s' % options.isolate_server)
      common.run(
          [
            'isolate.py',
            'archive',
            '--isolate', os.path.join('payload', 'hello_world.isolate'),
            '--isolated', isolated,
            '--isolate-server', options.isolate_server,
            '--config-variable', 'OS', options.isolate_os,
          ],
          options.verbose)
      # Hash the .isolated file to know what to trigger.
      with open(isolated, 'rb') as f:
        hashval = hashlib.sha1(f.read()).hexdigest()
    finally:
      shutil.rmtree(tempdir)
    # At this point, the temporary directory is not needed anymore.
    tempdir = None
    task_name = common.unique_task_name()
    common.note('Running on %s' % options.swarming)
    common.run(
        [
          'swarming.py',
          'trigger',
          '--swarming', options.swarming,
          '--isolate-server', options.isolate_server,
          '--dimension', 'os', options.swarming_os,
          '--task-name', task_name,
          hashval,
        ],
        options.verbose)
    common.note('Getting results from %s' % options.swarming)
    common.run(
        [
          'swarming.py',
          'collect',
          '--swarming', options.swarming,
          task_name,
        ],
        options.verbose)
    return 0
  except subprocess.CalledProcessError as e:
    # Bug fix: the original did `print e.returncode or 1` (a Python-2 print
    # statement) which discarded the exit status — main() returned None and
    # the process exited 0 even on failure. Return a non-zero status instead,
    # consistent with the sibling examples.
    return e.returncode or 1
def codingstyle(files, enable_reformat, check_lgpl, check_commits_date):
    """Check (and optionally reformat) the coding style of the given files.

    Runs uncrustify plus the project's own checks (license header, include
    sorting) on every staged file matching the configured patterns.
    Mutates the module globals `repoRoot` and `UNCRUSTIFY_PATH`.
    Returns (abort, reformatted_list), except on uncrustify startup failure
    where it returns a bare [] — NOTE(review): inconsistent return shape,
    callers must cope with both.
    """
    source_patterns = common.get_option('codingstyle-hook.source-patterns', default='*.cpp *.cxx *.c').split()
    header_patterns = common.get_option('codingstyle-hook.header-patterns', default='*.hpp *.hxx *.h').split()
    misc_patterns = common.get_option('codingstyle-hook.misc-patterns', default='*.cmake *.txt *.xml *.json').split()
    code_patterns = source_patterns + header_patterns
    include_patterns = code_patterns + misc_patterns
    sort_includes = common.get_option('codingstyle-hook.sort-includes', default="true", type='--bool') == "true"

    global repoRoot
    repoRoot = common.get_repo_root()
    if repoRoot is None:
        common.warn("Cannot find 'fw4spl' repository structure")
        parent_repo = ""
    else:
        parent_repo = os.path.abspath(os.path.join(repoRoot, os.pardir))

    fw4spl_configured_projects = common.get_option('codingstyle-hook.additional-projects', default=None)
    fw4spl_projects = []
    if fw4spl_configured_projects is None:
        # no additional-projects specified in config file. Default is parent repository folder
        fw4spl_projects.append(parent_repo)
    else:
        fw4spl_projects = fw4spl_configured_projects.split(";")
        # adds current repository folder to the additional-projects specified in config file.
        fw4spl_projects.append(repoRoot)

    # normalize pathname
    fw4spl_projects = list(map(os.path.normpath, fw4spl_projects))
    # remove duplicates
    fw4spl_projects = list(set(fw4spl_projects))

    global UNCRUSTIFY_PATH
    # Command-line argument overrides the configured uncrustify path.
    if common.g_uncrustify_path_arg is not None and len(common.g_uncrustify_path_arg) > 0:
        UNCRUSTIFY_PATH = common.g_uncrustify_path_arg
    else:
        UNCRUSTIFY_PATH = common.get_option('codingstyle-hook.uncrustify-path', default=UNCRUSTIFY_PATH,
                                            type='--path').strip()
    common.note('Using uncrustify: ' + UNCRUSTIFY_PATH)

    # Sanity check: make sure uncrustify can actually be launched.
    if common.execute_command(UNCRUSTIFY_PATH + ' -v -q').status != 0:
        common.error('Failed to launch uncrustify.\n')
        return []

    checked = set()
    reformatted_list = []
    sortincludes.find_libraries_and_bundles(fw4spl_projects)

    ret = False
    count = 0
    reformat_count = 0
    for f in files:
        # Skip duplicates and files outside the configured patterns.
        if f in checked or not any(f.fnmatch(p) for p in include_patterns):
            continue
        content = f.contents
        if not common.binary(content):
            # Do this last because contents of the file will be modified by uncrustify
            # Thus the variable content will no longer reflect the real content of the file
            file_path = os.path.join(repoRoot, f.path)
            if os.path.isfile(file_path):
                res = format_file(file_path, enable_reformat, code_patterns, header_patterns, misc_patterns,
                                  check_lgpl, sort_includes, f.status, check_commits_date)
                count += 1
                if res == FormatReturn.Modified:
                    reformatted_list.append(f.path)
                    reformat_count += 1
                elif res == FormatReturn.Error:
                    # Error in reformatting
                    ret = True
        checked.add(f)

    common.note('%d file(s) checked, %d file(s) reformatted.' % (count, reformat_count))
    return ret, reformatted_list
def fix_license_year(path, enable_reformat, status, check_commits_date):
    """Ensure `path` carries exactly one LGPL license header with an
    up-to-date copyright year.

    - status: git status letter of the file ('A' means newly added).
    - check_commits_date: forwarded to common.get_file_datetime() to pick
      the reference date.
    Returns a FormatReturn value: NotModified, Modified (the file was
    rewritten; only when enable_reformat is True) or Error.
    """
    with open(path, 'r', encoding='utf-8') as source_file:
        content = source_file.read()
    common.trace('Checking for LGPL license in: ' + path)
    YEAR = common.get_file_datetime(path, check_commits_date).year

    # Look for the license pattern
    licence_number = len(re.findall(LICENSE, content, re.MULTILINE))
    if licence_number > 1:
        common.error("There should be just one licence header per file in :" + FILEWARN(path) + ".")
        return FormatReturn.Error
    elif licence_number < 1:
        if enable_reformat:
            # Turn the LICENSE regex into a literal header for the current year.
            lic = LICENSE
            lic = lic.replace("(.*)", "%s" % YEAR)
            lic = lic.replace("\\", "")
            with open(path, 'wb') as source_file:
                # Bug fix: the file is opened in binary mode, so the header
                # must be encoded before writing. The original wrote the raw
                # str (TypeError on Python 3) while already encoding the
                # following content write.
                source_file.write((lic + "\n\n").encode())
                source_file.write(content.encode())
            common.note('LGPL license header fixed in : ' + FILEWARN(path) + '.')
            return FormatReturn.Modified
        else:
            common.error("There should be at least one licence header per file in :" + FILEWARN(path) + ".")
            return FormatReturn.Error

    # Here, exactly one license header is present and must be checked.
    LICENSE_YEAR = r"(.*)FW4SPL - Copyright \(C\) IRCAD, ([0-9]+)."
    LICENSE_YEAR_RANGE = r"(.*)FW4SPL - Copyright \(C\) IRCAD, ([0-9]+)-([0-9]+)."

    # Check date
    if re.search(LICENSE_YEAR_RANGE, content):
        # Year range: keep the starting year, bump the end year.
        LICENSE_YEAR_REPLACE = r"\1FW4SPL - Copyright (C) IRCAD, \2-" + str(YEAR) + "."
        str_new_file = re.sub(LICENSE_YEAR_RANGE, LICENSE_YEAR_REPLACE, content)
    else:
        match = re.search(LICENSE_YEAR, content)
        if match:
            if status == 'A' or match.group(2) == str(YEAR):
                # Newly added file, or year already current: single year.
                LICENSE_YEAR_REPLACE = r"\1FW4SPL - Copyright (C) IRCAD, " + str(YEAR) + "."
                str_new_file = re.sub(LICENSE_YEAR, LICENSE_YEAR_REPLACE, content)
            else:
                # Existing file with an older single year: turn it into a range.
                LICENSE_YEAR_REPLACE = r"\1FW4SPL - Copyright (C) IRCAD, \2-" + str(YEAR) + "."
                str_new_file = re.sub(LICENSE_YEAR, LICENSE_YEAR_REPLACE, content)
        else:
            common.error('Licence year format in : ' + FILEWARN(path) + ' is not correct.')
            return FormatReturn.Error

    if str_new_file != content:
        if enable_reformat:
            common.note('Licence year fixed in : ' + FILEWARN(path))
            with open(path, 'wb') as source_file:
                source_file.write(str_new_file.encode())
            return FormatReturn.Modified
        else:
            common.error('Licence year in : ' + FILEWARN(path) + ' is not up-to-date.')
            return FormatReturn.Error

    return FormatReturn.NotModified
def main():
  """Archives the selected payload, triggers it on Swarming with --raw-cmd,
  collects the results into a temp dir and dumps every output file.

  Returns 0 on success, or the failing subprocess's exit code.
  """
  args = common.parse_args(use_isolate_server=True, use_swarming=True)
  try:
    tempdir = tempfile.mkdtemp(prefix=u'hello_world')
    try:
      isolated_hash = common.archive(
          tempdir, args.isolate_server, args.verbose, args.which)
      # NOTE(review): paths are encoded to bytes — presumably for Python 2
      # subprocess argument handling; confirm.
      json_file = os.path.join(tempdir, 'task.json').encode('utf-8')
      common.note('Running on %s' % args.swarming)
      cmd = [
        'swarming.py',
        'trigger',
        '--swarming', args.swarming,
        '--isolate-server', args.isolate_server,
        '--task-name', args.task_name,
        '--dump-json', json_file,
        '--isolated', isolated_hash,
        '--raw-cmd',
      ]
      # Dimensions come as (key, value) pairs from the command line.
      for k, v in args.dimensions:
        cmd.extend(('--dimension', k, v))
      if args.idempotent:
        cmd.append('--idempotent')
      if args.priority is not None:
        cmd.extend(('--priority', str(args.priority)))
      if args.service_account:
        cmd.extend(('--service-account', args.service_account))
      # Everything after '--' is the task's own command line.
      cmd.extend(
          ('--', args.which + u'.py', 'Dear 💩', '${ISOLATED_OUTDIR}'))
      common.run(cmd, args.verbose)
      common.note('Getting results from %s' % args.swarming)
      resdir = os.path.join(tempdir, 'results').encode('utf-8')
      common.run([
          'swarming.py',
          'collect',
          '--swarming', args.swarming,
          '--json', json_file,
          '--task-output-dir', resdir,
      ], args.verbose)
      # Print the content of every collected output file.
      for root, _, files in os.walk(resdir):
        for name in files:
          p = os.path.join(root, name)
          with open(p, 'rb') as f:
            print('%s content:' % p)
            print(f.read())
      return 0
    finally:
      shutil.rmtree(tempdir)
  except subprocess.CalledProcessError as e:
    return e.returncode
def fix_header_guard(path, enable_reformat):
    """Enforce '#pragma once' (and remove old-style #ifndef guards) in a
    C++ header file.

    When enable_reformat is True the file is rewritten in place; otherwise
    problems are only reported. Returns a FormatReturn value.
    """
    ret = FormatReturn()

    with open(path, 'r', encoding='utf-8') as source_file:
        content = source_file.read()

    # Regex for '#pragma once'
    single_comment = "(\/\/([^(\n|\r)]|\(|\))*)"
    multi_comment = "(\/\*([^\*\/]|\*[^\/]|\/)*\*\/)"
    useless_char = "\t| |\r"
    pragma_once = "#pragma(" + useless_char + ")+once"
    all_before_pragma = ".*" + pragma_once + "(" + useless_char + ")*\n"

    # Remove old style
    # Build the legacy guard macro name (e.g. __NS_CLASS_HPP__) from the
    # path components after the 'include'/'test' directory.
    path_upper = path.upper()
    path_upper = path_upper.replace("\\", "/")
    substrings = path_upper.split('/');
    find = False;
    res = "__";
    for i in range(0, len(substrings)):
        if substrings[i] == "INCLUDE":
            find = True;
        elif substrings[i] == "TEST":
            res += substrings[i - 1].upper() + "_UT_";
        elif find:
            res += substrings[i].upper() + "_";
    expected_guard = res.split('.');
    if len(re.findall("HXX", expected_guard[1], re.DOTALL)) != 0:
        expected_guard[0] += "_HXX__";
    else:
        expected_guard[0] += "_HPP__";
    expected_guard = expected_guard[0]

    # Remove all about expected guard
    # Strip every #ifndef/#define/#endif line mentioning the legacy guard.
    while len(re.findall("#(ifndef|define|endif)((" + useless_char + ")|(/\*)|(\/\/))*" + expected_guard + "[^\n]*",
                         content, re.DOTALL)) != 0:
        match2 = re.search("#(ifndef|define|endif)((" + useless_char + ")|(/\*)|(\/\/))*" + expected_guard + "[^\n]*",
                           content, re.DOTALL)
        if enable_reformat:
            content = content.replace(match2.group(0), "")
            common.note("Old style of header guard fixed : " + match2.group(0) + "in file : " + FILEWARN(path) + ".")
            with open(path, 'wb') as source_file:
                source_file.write(content.encode())
            ret.add(FormatReturn.Modified)
        else:
            common.error("Old style of header guard found : " + match2.group(0) + "in file : " + FILEWARN(path) + ".")
            ret.add(FormatReturn.Error)
            return ret.value

    # Number of occurrences of '#pragma once'
    pragma_number = len(re.findall(pragma_once, content, re.MULTILINE))
    if pragma_number > 1:
        common.error("There should be just one '#pragma once' per file in :" + FILEWARN(path) + ".")
        ret.add(FormatReturn.Error)
        return ret.value
    elif pragma_number < 1:
        # Add 'pragma once'
        if enable_reformat:
            # Keep the leading comment/whitespace block, then insert the
            # pragma right after it.
            match = re.search("(" + single_comment + "|" + multi_comment + "|" + useless_char + "|\n)*", content,
                              re.MULTILINE)
            with open(path, 'wb') as source_file:
                source_file.write(match.group(0).encode())
                source_file.write(b"#pragma once\n\n")
                source_file.write(content.replace(match.group(0), "").encode())
            common.note("'#pragma once' fixed in :" + FILEWARN(path))
            ret.add(FormatReturn.Modified)
            return ret.value
        else:
            common.error("There should be at least one '#pragma once' per file in :" + FILEWARN(path) + ".")
            ret.add(FormatReturn.Error)
            return ret.value

    # Here, it has only one occurrences that must be checked
    # Get all string before first '#pragma once'
    out = re.search(all_before_pragma, content, re.DOTALL).group(0)
    # Remove '#pragma once'
    match2 = re.search(pragma_once, out, re.DOTALL)
    out = out.replace(match2.group(0), "")
    # Remove multi line comments
    while len(re.findall(multi_comment, out, re.DOTALL)) != 0:
        match2 = re.search(multi_comment, out, re.DOTALL)
        out = out.replace(match2.group(0), "")
    # Remove single line comments
    while len(re.findall(single_comment, out, re.DOTALL)) != 0:
        match2 = re.search(single_comment, out, re.DOTALL)
        out = out.replace(match2.group(0), "")
    # If it's not empty, they are an error
    if len(re.findall("[^\n]", out, re.DOTALL)) != 0:
        common.error(
            ("Unexpected : '%s' befor #pragma once in :" % re.search("^.+$", out, re.MULTILINE).group(0)) + FILEWARN(
                path) + ".")
        ret.add(FormatReturn.Error)
        return ret.value

    # Check space number between '#pragma' and 'once'
    # The pragma exists but is not exactly '#pragma once' (extra spaces/tabs).
    if len(re.findall("#pragma once", content, re.DOTALL)) == 0:
        if enable_reformat:
            # Get all string before first '#pragma once'
            out = re.search(all_before_pragma, content, re.DOTALL).group(0)
            # Remove '#pragma once'
            match2 = re.search(pragma_once, out, re.DOTALL)
            out2 = out.replace(match2.group(0), "")
            with open(path, 'wb') as source_file:
                source_file.write(out2.encode())
                source_file.write(b"#pragma once\n")
                source_file.write(content.replace(out, "").encode())
            ret.add(FormatReturn.Modified)
            return ret.value
        else:
            common.error("Needed : '#pragma once', actual : '" + re.search(pragma_once, content, re.DOTALL).group(
                0) + "' in file :" + FILEWARN(path) + ".")
            ret.add(FormatReturn.Error)
            return ret.value

    ret.add(FormatReturn.NotModified)
    return ret.value
def main(argv):
    """Entry point for the WebDAV client CLI.

    Parses command-line options, dispatches --help/--version, then runs
    the requested operation and prints its result (or exits non-zero).
    """
    # Hard-coded option defaults; every other known option defaults to False.
    option_defaults = {
        "api": "webdav.json",
        "credentials-file": "credentials.json",
        "printf": "{date} {size:r} {path}",
        "timeout": 86400
    }
    # Long option spec -> short option spec ('=' / ':' mark a required value).
    long_to_short = {
        "overwrite": "o", "headers": "", "head": "", "no-parse": "",
        "recursive": "R", "sort": "", "reverse": "r", "dirs-first": "t",
        "files-only": "f", "dirs-only": "d", "summary": "u",
        "list-empty": "e", "checksum": "", "human": "h", "confirm": "y",
        "exists": "", "no-path": "", "verbose": "v", "no-verify": "k",
        "hide-root": "", "debug": "", "dry-run": "n", "quiet": "q",
        "no-colors": "", "api=": "", "credentials-file=": "c:",
        "printf=": "p:", "help": "", "version": ""
    }
    # Same mapping with the '='/':' value markers stripped off.
    bare_names = dict(
        (long_name.replace('=', ''), short_name.replace(':', ''))
        for long_name, short_name in long_to_short.items())
    # Every option without an explicit default starts out as False.
    for name in bare_names:
        if name not in option_defaults:
            option_defaults[name] = False
    common.options = ClientOptions(copy.deepcopy(option_defaults),
                                   copy.deepcopy(option_defaults))
    # Parse the command line; unknown options abort via error().
    short_spec = "".join(s for s in long_to_short.values() if s)
    try:
        opts, args = getopt.gnu_getopt(argv, short_spec,
                                       list(long_to_short.keys()))
    except getopt.GetoptError as e:
        error(e, 1)
    # The first positional argument names the operation (may be absent).
    operation = args[0] if args else ""
    # Record each parsed option; value-less flags become True.
    for opt, arg in opts:
        long_name = opt[2:]
        short_name = opt[1:]
        if long_name in bare_names:
            common.options[long_name] = arg if arg else True
        elif short_name in bare_names.values():
            # Translate the short form back to its long option name.
            long_name = next(
                k for k, v in bare_names.items() if v == short_name)
            common.options[long_name] = arg if arg else True
    # Build the client and honour --help / --version before anything else.
    wd = WebDAVClient(operation, common.options)
    if common.options['help']:
        help(wd, operation, long_to_short)
        sys.exit(0)
    elif common.options['version']:
        version()
        sys.exit(0)
    # An operation is mandatory once help/version are out of the way.
    if operation == "":
        usage()
        sys.exit(1)
    # Validate operation arguments and load credentials before running.
    if not wd.setargs(operation, args[1:]) or \
            not wd.credentials(common.options['credentials-file']):
        sys.exit(1)
    # Execute the operation; a falsy result means failure.
    res = wd.run()
    if res:
        if wd.request.hassuccess() and (wd.results is None
                                        or type(wd.results) is bool):
            note("%s successful" % operation)
        else:
            # The payload may be XML data; write it raw to stdout.
            sys.stdout.write(wd.format())
            sys.stdout.flush()
    else:
        sys.exit(1)
def main():
    """Archive the hello_world isolate, trigger it on Swarming and collect
    the results.

    Returns:
        0 on success; on subprocess failure, the failing command's exit
        status (or 1 when that status is falsy) so the caller exits non-zero.
    """
    options = common.parse_args(use_isolate_server=True, use_swarming=True)
    try:
        tempdir = tempfile.mkdtemp(prefix=u'hello_world')
        try:
            # All the files are put in a temporary directory. This is optional
            # and simply done so the current directory doesn't have the
            # following files created:
            # - hello_world.isolated
            # - hello_world.isolated.state
            isolated = os.path.join(tempdir, 'hello_world.isolated')
            common.note('Archiving to %s' % options.isolate_server)
            common.run(
                [
                    'isolate.py',
                    'archive',
                    '--isolate', os.path.join('payload', 'hello_world.isolate'),
                    '--isolated', isolated,
                    '--isolate-server', options.isolate_server,
                    '--config-variable', 'OS', options.swarming_os,
                ], options.verbose)
            # The task is addressed by the SHA-1 of the .isolated file.
            with open(isolated, 'rb') as f:
                hashval = hashlib.sha1(f.read()).hexdigest()

            json_file = os.path.join(tempdir, 'task.json')
            common.note('Running on %s' % options.swarming)
            cmd = [
                'swarming.py',
                'trigger',
                '--swarming', options.swarming,
                '--isolate-server', options.isolate_server,
                '--dimension', 'os', options.swarming_os,
                '--task-name', options.task_name,
                '--dump-json', json_file,
                '--isolated', hashval,
            ]
            if options.idempotent:
                cmd.append('--idempotent')
            if options.priority is not None:
                cmd.extend(('--priority', str(options.priority)))
            common.run(cmd, options.verbose)

            common.note('Getting results from %s' % options.swarming)
            common.run(
                [
                    'swarming.py',
                    'collect',
                    '--swarming', options.swarming,
                    '--json', json_file,
                ], options.verbose)
            return 0
        finally:
            shutil.rmtree(tempdir)
    except subprocess.CalledProcessError as e:
        # BUG FIX: the failure status was printed ('print e.returncode or 1')
        # instead of returned, so main() yielded None and the process exited
        # with status 0 even when a subprocess failed. Propagate a non-zero
        # exit code, matching the sibling entry points.
        return e.returncode or 1