Example #1
def setUp(self):
    out = common.runcmd_output("cd %s; ./teracli showts|grep kReady" %
                               (const.teracli_dir),
                               ignore_status=True)
    #assert( len(out.split('\n')) == len(const.tabletnode_list) )
    common.check_core()
    common.cleanup()
Example #2
def setup():
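    # Run the platform-specific build for this OS, then clean up with common.cleanup()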

    os_name = platform.system()
    if os_name == "Windows":
        import win_build
        win_build.build()
    elif os_name == "Linux":
        import linux_build
        linux_build.build()

    common.cleanup()
Example #3
def main(b_date, e_date, clean):

    try:
        # initialize logger
        logger_reports = setup_logger("REPORTS", REPORT_LOG_FILE_PATH,
                                      REPORT_LOG_SIZE, REPORT_LOG_FILES,
                                      LOG_TO_CONSOLE)
        logger_reports.info("**** reports script started ****")

        # If the date arguments are None (i.e. the script was started by the scheduler or by user mistake), execute this block
        if (b_date is None) or (e_date is None):

            #get today's date
            today = datetime.date.today()

            # get the first day of this month, then step back one day to land in the previous month
            first_day_current_month = today.replace(day=1)
            last_month = first_day_current_month - datetime.timedelta(days=1)

            # Calculate the number of days in the previous month
            num_days = calendar.monthrange(last_month.year,
                                           last_month.month)[-1]

            # Find the first and last days of the month
            first_day = datetime.datetime(last_month.year, last_month.month, 1)
            last_day = datetime.datetime(last_month.year, last_month.month,
                                         num_days)

        else:
            # If the date arguments are provided, use them as the first and last day
            first_day = b_date
            last_day = e_date

        # Call reports function with begin and end date
        for name in REPORT_DEVICE_LIST:
            reports(first_day, last_day, name)

        if clean:
            # purge old reports
            cleanup(DAYS_TO_KEEP_REPORTS, REPORT_LOCAL_FILE_PATH, "REPORTS",
                    "Reports")

            #purge old HDR files
            cleanup(DAYS_TO_KEEP_HDR, HDR_LOCAL_PATH, "REPORTS", "HDR")

    except Exception:
        logger_reports.exception("!!! Exception Occurred !!!")

    else:
        logger_reports.info("**** reports script finished ****\n")

    finally:
        logger_reports.handlers.pop()
Example #4
def host_setup():
    cleanup()
    # Spawn the python host
    vim.command(
        'let pyhost_id = ' +
        'rpcstart("python", ["-c", "import neovim; neovim.start_host()"])')
    ok(vim.eval('g:pyhost_id'))
    # Use rpcrequest to wait for the host setup (rpcstart returns a channel id,
    # but the channel registers handlers for the python_* methods only after a
    # short delay)
    ok(vim.eval('rpcrequest(g:pyhost_id, "python_eval", "10")') == 10)
    # Verify the feature
    ok(vim.eval('has("python")'))
    # Import the vim module
    vim.command('python import vim')
    # Ensure the python host was updated accordingly
    ok(vim.eval('pyeval("vim.channel_id") == g:pyhost_id'))
Example #5
File: undeci.py Project: axtl/deci
def _fec_decode(ns):
    logging.debug('UNFEC pass started')
    tmpd = tempfile.mkdtemp(dir=os.getcwd())
    logging.debug('created tempdir at %s' % tmpd)

    # walk first input dir, decode as we go along
    for root, dirs, files in os.walk(ns.inputs[0]):
        unrooted = os.path.relpath(root, ns.inputs[0])
        logging.debug('unrooted path: %s' % unrooted)
        for dname in dirs:
            osubdir = os.path.join(tmpd, dname)
            os.mkdir(osubdir)
            logging.debug('created: %s' % osubdir)
        for f in files:
            # get real name
            rname = re.split(r'\.[0-9]*_[0-9]*\.fec$', f,
                             flags=re.IGNORECASE)[0]
            logging.debug('processing chunks for file: %s' % rname)
            # get all the file chunks into a list
            fecs = []
            for indir in ns.inputs:
                gpath = common.fec_glob(os.path.join(indir, unrooted, rname))
                fecs.extend(glob.glob(gpath))
            logging.debug('FEC chunks found for %s: %s' % (rname, fecs))
            fec_fds = [open(fec, 'rb') for fec in fecs]
            try:
                outpath = os.path.join(tmpd, unrooted, rname)
                outfd = open(outpath, 'wb')
                filefec.decode_from_files(outfd, fec_fds, False)
                logging.debug('decoded successfully to %s' % outpath)
            except filefec.InsufficientShareFilesError as e:
                logging.debug('failed to write %s' % outpath)
                sys.stderr.write(repr(e))
                common.cleanup(tmpd)
                sys.exit(ERR['INSUF_SHARES'])

    # all done, rename to output dir
    if os.path.exists(ns.output) and ns.force:
        shutil.rmtree(ns.output)
        logging.debug('removed existing output dir at %s' % ns.output)
    shutil.move(tmpd, ns.output)
    logging.debug('renamed temp dir %s to output dir %s' % (tmpd, ns.output))
    logging.info('UNFEC pass completed')
Example #6
File: setup.py Project: nicole-mcg/OUI
def setup():
    if not os.path.isdir("{}/OUI-engine".format(common.LIB_PATH)):

        ouiEngineBranch = "master"
        if "-B" in sys.argv or "--branch" in sys.argv:
            branchIndex = (sys.argv.index("-B") if "-B" in sys.argv else
                           sys.argv.index("--branch")) + 1
            ouiEngineBranch = sys.argv[branchIndex]

        print("## Cloning OUI engine into ./lib/ (branch {})".format(
            ouiEngineBranch))
        common.exec([
            'git', 'clone', "-b", ouiEngineBranch,
            'https://github.com/nik-m2/OUI-engine.git', 'lib/OUI-engine'
        ], "Failed to clone OUI engine")

    print("Downloading Google Test")
    for binary_info in LIB_INFO:
        file_util.download_and_unzip(binary_info)

    common.cleanup()
Example #7
def process_directory(dir_path, acct=None):
    
    target_dir = Path(dir_path)
    list_of_tweets = list()
    
    cnx = eventdb.create_connection('social')

    if not acct:
        acct_path = f"{dir_path}/acct/account.js"
        acct = get_account_id(acct_path) if Path(acct_path).exists() else None
    
    for target_file in target_dir.iterdir():

        if target_file.is_file():
            with open(target_file, "r", errors="replace") as handle:
                contents = handle.read()
                list_of_tweets += parse_js_text(contents, acct)

    eventdb.insert_tweets(list_of_tweets, cnx)
    eventdb.close_connection(cnx)

    common.cleanup(dir_path)
Example #8
File: stats.py Project: ryant26/ece420Lab4
def run(ws, starting_row, problem_size):
    write_header(ws, starting_row)
    common.run_datagen(problem_size)
    counter = 0

    for i in range(1, min(problem_size + 1, 201)):
        if (problem_size % i == 0):
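            # Record the divisor in column A and average the runtime of common.run_main over five trials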
            counter += 1
            average = 0
            ws['A' + str(starting_row + counter)] = i

            for j in range(5):
                common.run_main(i)
                runtime = common.get_runtime()
                ws[columns[j + 1] + str(starting_row + counter)] = runtime
                average += runtime

            average = average / 5
            ws[columns[6] + str(starting_row + counter)] = average

    common.cleanup()
    return counter + 2
Example #9
def run(ws, starting_row, problem_size):
	write_header(ws, starting_row)
	common.run_datagen(problem_size)
	counter = 0

	for i in range(1, min(problem_size+1, 201)):
		if (problem_size % i == 0):
			counter += 1
			average = 0
			ws['A'+str(starting_row + counter)] = i
			
			for j in range(5):
				common.run_main(i)
				runtime = common.get_runtime()
				ws[columns[j+1]+str(starting_row + counter)] = runtime
				average += runtime

			average = average / 5
			ws[columns[6]+str(starting_row + counter)] = average

	
	common.cleanup()
	return counter + 2
Example #10
File: test_data.py Project: xuxic/coverity
def setUp(self):
    out = common.runcmd_output("cd %s; ./teracli showts|grep kReady" % (const.teracli_dir), ignore_status=True)
    #assert( len(out.split('\n')) == len(const.tabletnode_list) )
    common.check_core()
    common.cleanup()
Example #11
    prefix = backup[2]

    filename = now + "_" + tablename
    filename_gz = filename + ".gz"
    filename_sql = filename + ".sql"
   
    ssh = paramiko.SSHClient()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    pkey = paramiko.ECDSAKey.from_private_key_file(ssh_private_key_file)
    ssh.connect(hostname, 22, username, pkey=pkey)
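
    # Dump the table on the remote host, fetch the dump over SFTP, and delete
    # the remote copy; then gzip it locally, upload it to the object store,
    # and finally call cleanup() on the bucket prefix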

    command = "mysqldump " + tablename + " > " + filename_sql
    cli_exec(ssh, command)
    sftp = ssh.open_sftp()
    sftp.get(filename_sql, filename_sql)
    sftp.close()
    command = "rm " +  filename_sql
    cli_exec(ssh, command)

    with open(filename_sql, 'rb') as f_in:
        with gzip.open(filename_gz, 'wb') as f_out:
            shutil.copyfileobj(f_in, f_out) 
    os.remove(filename_sql)               

    putfile(minioClient, filename_gz, bucketname, prefix, filename_gz)
    os.remove(filename_gz)

    cleanup(minioClient, bucketname, prefix)
    

Example #12
from common import cleanup
from setup_ldap import setup
from setup_users import add_existing_users, add_new_users

if input("Do you need to setup LDAP? y/N ") == 'y':
    setup()
if input("Do you want to add users? y/N ") == 'y':
    add_existing_users()
    add_new_users()
if input("Delete temporary data? y/N ") == 'y':
    cleanup()
Example #13
from keras.layers.recurrent import SimpleRNN
from keras.callbacks import EarlyStopping, ModelCheckpoint
from sklearn.model_selection import train_test_split
from sklearn import metrics

options = cmdargv.parse_argv(sys.argv, ANN_NAME)

# read file
print('===== read file =====')
df = pd.read_csv(options.dataset)
print(df.info())
common.dropp_columns_regex(df, options.exclude)

# dealing with: NaN, ∞, -∞
print('===== cleanup =====')
dropped_columns = common.cleanup(df)
print('dropped_columns: {}'.format(dropped_columns))

# encode
print('===== encode =====')


def encode(df):
    columns = enc.encode_numeric_zscore(df)
    print('Z-scored columns: \n  {}'.format(columns))
    classes = enc.encode_text_index(df, 'marker')
    print('marker classes: \n  {}'.format(classes))


encode(df)
Example #14
                self.player.play(playlist, windowed=True)
            else:
                self.changing = True
                self.player.playprevious()
        elif action == 2:
            if int(playlist.size()) == (int(playlist.getposition())+1):
                self.player.pause()
                self.changing = True
                self.file = "REPEATING"
                self.player.play(playlist, windowed=True)
            else:
                self.changing = True
                self.player.playnext()

if __name__ == '__main__':
    c.cleanup()
    if len(sys.argv) == 1:
        c.beta("Starting Screensaver from Executable")
        Start()
    elif 'auto' in sys.argv[1]:
        c.beta("Starting Screensaver from Preview or Automatically")
        Start()
    elif 'download' in sys.argv[1]:
        c.beta("DOWNLOAD_DEBUG: User clicked on Manual Download")
        if c.download_folder != "":
            c.beta("DOWNLOAD_DEBUG: User has a download folder set")
            local.download()
        else:
            c.beta("DOWNLOAD_DEBUG: User has not set download folder.")
            c.ok("Please set a Download Location in the General Tab of Settings.")
            c.beta("DOWNLOAD_DEBUG: User has accepted the notification")
Example #15
                        )
                        failed += 1
                    else:
                        print(
                            "OK: {0}[C{1}t{2}]: X={3}, E={4}, Y={5}, t={6}, p={7}".format(
                                name, ik, it, X, E, Y, (X - Y) / E, p
                            )
                        )
    if failed > 0:
        sys.exit(1)


with open(JSONFILE) as f:
    data = json.load(f)

ID = ".".join(os.path.basename(JSONFILE).split(".")[0:-1])

param = data["Parameter"]
ref = data["Result"]

exename = 'dla'

param["kpoints"] = {"ksteps": 1}

nset = param["parameter"]["nset"]
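# Clean previous outputs, generate the input file, run the dla executable, and
# evaluate the result against the reference data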
cleanup(ID)
geninp(param, SEED, name=ID)
run(exename, BINDIR=BINDIR, name=ID)
evaluate(ref, nset, alpha, name=ID)
sys.exit(0)
Example #16
			common.run_main(problem_Size)
			if (common.is_result_correct()):
				out = "Correct"
			else:
				out = "Wrong!!!"
				all_correct = False

			print("Threads", i, ":", out)

	if (all_correct):
		print("TEST WAS SUCCESSFUL!")
	else:
		print("TEST FAILED!!")

	return all_correct

def perform_multiple_sizes(sizes):
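	# Run perform_run for every requested size and report whether all of them passed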
	result = True
	for i in sizes:
		result = result and perform_run(i)

	print("=====================================================================")

	if (result):
		print("All problem sizes passed")
	else:
		print("There were failures!!!!")

perform_multiple_sizes([3, 5, 10, 15, 20, 30, 40, 50, 100, 100, 100, 200, 300])
common.cleanup()
Example #17
def setup(self):
    common.cleanup()
Example #18
def process_from_file(file_path):
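    # Unpack the archive into a working directory, import the Foursquare check-ins for user 1, then remove the working directory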
    current_user = common.UserPreferences(1)
    process_dir = common.unpack_and_store_files(file_path, "output")
    checkin_import = foursquareImporter(process_dir)
    checkin_import.add_to_database(current_user)
    common.cleanup(process_dir)
Example #19
    shadersDir = os.path.join(testPath, "shadersDir")
    print shadersDir
    if not os.path.exists(shadersDir):
        os.mkdir(shadersDir)
    return curDir, testPath, shadersDir

if __name__ == '__main__':
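    # Resolve the test configuration, clean the test and shader directories,
    # run the trace, and diff the rendered images against the gold set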
    arch = args.arch
    buildType = args.buildType
    test = args.testName
    #    testName = os.path.join("apl", "tests", "traces", test)
    configSect = "%s_%s_%s" % ('TRex', osType, 'smoke')
    testName = os.path.join(config.get(configSect, 'testPrefix'), test)
    print testName
    artifactPath = setLLP(arch, buildType)
    curDir, testPath, shadersDir = getTestDir(testName)
    print testPath, shadersDir
    print "Cleaning up test directory %s" % testPath
    cleanup(testPath)
    print "Cleaning up shaders directory %s" % shadersDir
    cleanup(shadersDir)
    testPath = run_drv_trex(testName, buildType, testPath, artifactPath,
                            configSect)
    testSect = "%s_%s_%s_%s" % ('TRex', osType, 'smoke', 'tests')
    #    expNumImages = config.get(testSect, 'num_images')
    #    check_num_images(expNumImages)
    goldDir = config.get(configSect, 'gold_dir')
    traceDir = testName.split(".")[0]
    goldPath = os.path.join(curDir, traceDir, goldDir)
    diff_images(testPath, goldPath)
Example #20
def setup(self):
    common.cleanup()
Example #21
    "U": 0.0,
    "mu": 0.0,
}

param["parameter"] = {"beta": 1.0, "ntau": 5, "nset": 100}

param["kpoints"] = {}

exename = 'dla'

# if param["hamiltonian"]["model"] == "boson":
#     exename = "dla_B"
# else:
#     exename = "dla_H"

cleanup(ID)
cleanup("{0}_restarted".format(ID))

geninp(param, SEED, simtime=simtime, name=ID)
run(exename, BINDIR=BINDIR, name=ID)
geninp(param, 2 * SEED, simtime="INF", name=ID)
run(exename, BINDIR=BINDIR, name=ID)
for nm in ["res", "sf", "cf", "ck"]:
    shutil.move("{0}_{1}.dat".format(nm, ID),
                "{0}_{1}_restarted.dat".format(nm, ID))
os.remove("res_{0}.dat.0.cjob".format(ID))

geninp(param, SEED, simtime=0.0, name=ID)
run(exename, BINDIR=BINDIR, name=ID)

evaluate(ID)
Example #22
File: deci.py Project: axtl/deci
def _fec_encode(ns):
    logging.info('FEC pass started')

    tmpd = tempfile.mkdtemp(dir=os.getcwd())
    logging.debug('created temp dir at %s' % tmpd)

    # total shares
    tshares = len(ns.outputs)

    for root, dirs, files in os.walk(ns.input):
        # output dir, name mapping
        od = root.replace(ns.input, os.path.basename(tmpd))
        # recreate tree structure in temp dir
        for dname in dirs:
            osubdir = os.path.join(tmpd, dname)
            os.mkdir(osubdir)
            logging.debug('created %s' % osubdir)
        for f in files:
            fpath = os.path.join(root, f)
            logging.debug('processing file: %s' % fpath)
            with open(os.path.join(root, f)) as fd:
                fsize = os.path.getsize(fpath)
                logging.debug('FEC %s (%d bytes)' % (fpath, fsize))
                filefec.encode_to_files(fd, fsize, od, f, ns.shares, tshares,
                                        '.fec', ns.force, False)

    logging.info('FEC pass completed')
    logging.info('Distribution pass started')
    for root, dirs, files in os.walk(ns.input):
        unrooted = os.path.relpath(root, ns.input)
        logging.debug('unrooted path: %s' % unrooted)
        # map dir tree structure unto output directories
        for outdir in ns.outputs:
            for dname in dirs:
                try:
                    osubdir = os.path.join(outdir, dname)
                    os.mkdir(osubdir)
                    logging.debug('created %s' % osubdir)
                except OSError:
                    logging.debug('exists: %s' % osubdir)
        for f in files:
            # glob on FEC output files to build list of things to distribute
            gexpr = common.fec_glob(f)
            gpath = os.path.join(tmpd, unrooted, gexpr)
            logging.debug('glob path for %s: %s' % (f, gpath))
            fecs = [os.path.basename(fec) for fec in glob.glob(gpath)]
            logging.debug('FEC chunks for %s: %s' % (f, fecs))
            if len(fecs) != tshares:
                logging.debug('len(fecs)=%d;shares=%d' % (len(fecs), tshares))
                sys.stdout.write('Chunks and output dir counts mismatch\n')
                common.cleanup(tmpd)
                sys.exit(ERR['CHUNK_COUNT_MISMATCH'])
            # spread chunks over output dirs
            for idx, fec in enumerate(fecs):
                ofec = os.path.join(ns.outputs[idx], unrooted, fec)
                if not ns.force and os.path.exists(ofec):
                    logging.debug('chunk collision: %s' % ofec)
                    sys.stderr.write('Some chunks with the same name exist\n')
                    common.cleanup(tmpd)
                    sys.exit(ERR['NO_OVERWRITE'])
                ifec = os.path.join(tmpd, unrooted, fec)
                logging.debug('input FEC for %s: %s' % (f, ifec))
                shutil.copyfile(ifec, ofec)
                logging.debug('wrote %s' % ofec)

    logging.info('Distribution pass completed')
    common.cleanup(tmpd)
Example #23
            print("Threads", i, ":", out)

    if (all_correct):
        print("TEST WAS SUCCESSFUL!")
    else:
        print("TEST FAILED!!")

    return all_correct


def perform_multiple_sizes(sizes):
    result = True
    for i in sizes:
        result = result and perform_run(i)

    print(
        "====================================================================="
    )

    if (result):
        print("All problem sizes passed")
    else:
        print("There were failures!!!!")


# perform_multiple_sizes([3, 5, 10, 15, 20, 30, 40, 50, 100, 100, 100, 200, 300])
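# Generate data for a single problem size (1112), run the correctness check, then clean up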
common.run_datagen(1112)
perform_run(1112)
common.cleanup()
Example #24
def process_from_file(file_path):
    current_user = common.UserPreferences(1)
    process_dir = common.unpack_and_store_files(file_path, "output")
    sleep_import = FitbitSleepImporter(process_dir)
    sleep_import.add_to_database(current_user)
    common.cleanup(process_dir)