def main():
    """CLI entry point: parse arguments and run one increment task.

    Positional arguments name the input file and output directory; the
    optional flags carry benchmarking/timing parameters through to
    ``increment()``.
    """
    print('Incrementation CLI started')
    parser = argparse.ArgumentParser(description="BigBrain incrementation")
    parser.add_argument('filename', type=str,
                        help='the file to be incremented')
    parser.add_argument('output_dir', type=str,
                        help='the output directory')
    parser.add_argument('--benchmark_file', type=str, default=None,
                        help='the file to write benchmarks to')
    parser.add_argument('--start', type=float, default=0,
                        help='start time of the application')
    parser.add_argument('--delay', type=float, default=0,
                        help='task duration time (in s)')
    parser.add_argument('--avg', type=str,
                        help='average chunk to be added to incremented chunk')
    args = parser.parse_args()

    # Best-effort creation of the output directory: swallow only
    # filesystem errors (typically "already exists"), not every exception.
    try:
        md(args.output_dir)
    except OSError:
        pass

    # NOTE(review): args.delay is passed twice while --start is parsed but
    # never used -- the first occurrence looks like it was meant to be
    # args.start. TODO: confirm against increment()'s signature.
    increment(args.filename, args.output_dir, args.delay,
              args.benchmark_file, args.delay, args.avg)
def __init__(self, migrate_dir, **options):
    """Set up the migration router.

    :param migrate_dir: directory holding migration modules; created
        (with a warning) if it does not exist.
    :param options: router options; may be overridden by a ``conf.py``
        found inside ``migrate_dir``. ``DATABASE`` may be a database
        instance or a connection string; ``LOGGING`` sets the log level.

    Exits the process (``sys.exit(1)``) when no usable database is
    configured.
    """
    LOGGER.setLevel(options.get('LOGGING', 'WARNING'))

    if not op.exists(migrate_dir):
        # Logger.warn() is a deprecated alias of warning().
        LOGGER.warning('Migration directory: %s does not exist.', migrate_dir)
        md(migrate_dir)

    # Load optional per-directory configuration; public (non-underscore)
    # names from conf.py override the passed-in options.
    config = {}
    if op.exists(op.join(migrate_dir, 'conf.py')):
        with open(op.join(migrate_dir, 'conf.py')) as f:
            exec_in(f.read(), config, config)
        for key in config:
            if not key.startswith('_'):
                options[key] = config[key]
    else:
        LOGGER.warning('Configuration file `conf.py` not found in migration directory')

    self.migrate_dir = migrate_dir

    # Accept either a ready database object or a connection string.
    self.db = options.get('DATABASE')
    if not isinstance(
            self.db, (SqliteDatabase, MySQLDatabase, PostgresqlDatabase)) and self.db:
        self.db = connect(self.db)

    try:
        assert self.db
        self.proxy.initialize(self.db)
        assert self.proxy.database
    except (AttributeError, AssertionError):
        LOGGER.error("Invalid database: %s", self.db)
        sys.exit(1)
    except Exception:
        # Deliberately best-effort: other initialization errors are ignored.
        pass

    # Ensure the migration-history table exists (no-op if already present).
    self.db.create_tables([MigrateHistory], safe=True)
def __init__(self, migrate_dir, **options):
    """Set up the migration router.

    :param migrate_dir: directory holding migration modules; created
        (with a warning) if it does not exist.
    :param options: router options; may be overridden by a ``conf.py``
        found inside ``migrate_dir``. ``DATABASE`` may be a database
        instance or a connection string; ``LOGGING`` sets the log level.

    Exits the process (``sys.exit(1)``) when no usable database is
    configured.
    """
    LOGGER.setLevel(options.get('LOGGING', 'WARNING'))

    if not op.exists(migrate_dir):
        # Logger.warn() is a deprecated alias of warning().
        LOGGER.warning('Migration directory: %s does not exist.', migrate_dir)
        md(migrate_dir)

    # Load optional per-directory configuration; public (non-underscore)
    # names from conf.py override the passed-in options.
    config = {}
    if op.exists(op.join(migrate_dir, 'conf.py')):
        with open(op.join(migrate_dir, 'conf.py')) as f:
            exec_in(f.read(), config, config)
        for key in config:
            if not key.startswith('_'):
                options[key] = config[key]
    else:
        LOGGER.warning(
            'Configuration file `conf.py` not found in migration directory'
        )

    self.migrate_dir = migrate_dir

    # Accept either a ready database object or a connection string.
    self.db = options.get('DATABASE')
    if not isinstance(
            self.db, (SqliteDatabase, MySQLDatabase, PostgresqlDatabase)) and self.db:
        self.db = connect(self.db)

    try:
        assert self.db
        self.proxy.initialize(self.db)
        assert self.proxy.database
        # Ensure the migration-history table exists.
        MigrateHistory.create_table()
    except (AttributeError, AssertionError):
        LOGGER.error("Invalid database: %s", self.db)
        sys.exit(1)
    except Exception:
        # Deliberately best-effort: create_table() failures (e.g. the table
        # already exists) are ignored.
        pass
def main():
    """CLI entry point: run one increment task and record its duration.

    Measures wall-clock time around the whole run and appends a
    ``task_duration`` record to the benchmark file via ``write_bench``.
    """
    start = time.time()
    print('Incrementation CLI started')
    parser = argparse.ArgumentParser(description="BigBrain incrementation")
    parser.add_argument('filename', type=str,
                        help='the file to be incremented')
    parser.add_argument('output_dir', type=str,
                        help='the output directory')
    parser.add_argument('--benchmark_file', type=str, default=None,
                        help='the file to write benchmarks to')
    parser.add_argument('--start', type=float, default=0,
                        help='start time of the application')
    parser.add_argument('--delay', type=float, default=0,
                        help='task duration time (in s)')
    args = parser.parse_args()

    # Best-effort creation of the output directory: swallow only
    # filesystem errors (typically "already exists"), not every exception.
    try:
        md(args.output_dir)
    except OSError:
        pass

    # NOTE(review): args.delay is passed twice while --start is parsed but
    # never used -- the first occurrence looks like it was meant to be
    # args.start. TODO: confirm against increment()'s signature.
    increment(args.filename, args.output_dir, args.delay,
              args.benchmark_file, args.delay)

    end = time.time()
    write_bench(args.benchmark_file, "task_duration", start, end,
                op.basename(args.output_dir), get_ident())
from os import listdir as ld
from os import makedirs as md
from os.path import isdir as d
from re import findall
from shutil import copyfile as cp

# Version-bump script: find the highest existing vN directory under `root`,
# create v(N+1), and copy the previous version's files into it, renaming
# any "vN" occurrences in file names to "vN+1".
root = 'C:/Users/Ritik/Desktop/2021 Autumn Semester/Assignments/AI/ArtificialIntelligenceProject/Algorithms/'

maxver = 0
fmaxver = None  # path of the *highest*-versioned directory, if any
for f in ld(root):
    if d(root + f) and f[0] == 'v':
        x = findall('v([0-9]*).*', f)
        # Guard against a bare "v" directory (empty capture -> int('') crash).
        if x and x[0]:
            num = int(x[0])
            # Bug fix: track fmaxver only for the directory that actually
            # holds the maximum version. The original reassigned it on every
            # v* directory, so it could point at an older version.
            if num > maxver:
                maxver = num
                fmaxver = root + f + '/'

ver = maxver + 1 if maxver else 1
md(root + f'v{ver}')

# Bug fix: only copy when a previous version exists. The original ran the
# copy loop unconditionally and raised NameError when fmaxver was undefined.
if fmaxver is not None:
    for f in ld(fmaxver):
        # Renamed from `d` to avoid shadowing the isdir alias imported above.
        if f'v{maxver}' in f:
            dst = f.replace(f'v{maxver}', f'v{ver}')
        else:
            dst = f
        cp(fmaxver + f, root + f'v{ver}/' + dst)
#!/usr/bin/env python from shutil import copy from os import system as run, mkdir as md, makedirs as mds from os import chdir as cd, listdir as ls, name as osname from os.path import exists, isfile, isdir # update from svn if possible run('svn up') # create build dir for cmake if not exists('build'): md('build') # prepare bin dir and data files if not exists('bin'): md('bin') if not exists('bin/data'): md('bin/data') for i in ls('data'): src = 'data/' + i if isfile(src): print 'copying:', src copy(src, 'bin/data') # configure cmake cd('build') if osname == 'nt': run('cmake .. -G "MinGW Makefiles"') else: run('cmake ..') # start make
#!/usr/bin/env python from shutil import copy from os import system as run, mkdir as md, makedirs as mds from os import chdir as cd, listdir as ls, name as osname from os.path import exists, isfile, isdir # update from svn if possible run('svn up') # create build dir for cmake if not exists('build'): md('build') # prepare bin dir and data files if not exists('bin'): md('bin') if not exists('bin/data'): md('bin/data') for i in ls('data'): src='data/'+i if isfile(src): print 'copying:', src copy(src, 'bin/data') # configure cmake cd('build') if osname=='nt': run('cmake .. -G "MinGW Makefiles"') else: run('cmake ..') # start make
def fs_migrations(self):
    """Scan migrations in file system.

    Returns a sorted list of migration names (file names matching
    ``self.filemask`` with the ``.py`` suffix stripped). If the migration
    directory is missing it is created and an empty list is returned.
    """
    if not op.exists(self.migrate_dir):
        # Logger.warn() is a deprecated alias of warning().
        self.app.logger.warning('Migration directory: %s does not exist.',
                                self.migrate_dir)
        md(self.migrate_dir)
        # Bug fix: a freshly created directory contains no migrations, so
        # return an empty list instead of None -- callers can always
        # iterate the result.
        return []
    # f[:-3] strips '.py'; the original ''.join(f[:-3]) was a no-op wrapper
    # around a string that is already a string.
    return sorted(f[:-3] for f in ls(self.migrate_dir)
                  if self.filemask.match(f))