Example #1
    def test():
        """Run the unit tests."""

        # Run the tests in each of the virtual environments defined in Project.test_python_versions,
        # or, if that is not defined, in Project.wheel_python_versions. If neither is defined,
        # run the tests in the current environment.

        venvs = VirtualenvInfo('test_python_versions', 'wheel_python_versions')
        coverage = '--cov-report term-missing --cov={package}'.format(package=Project.package)
        reports = '--junitxml={quality}/tests.xml'.format(quality=Project.quality_dir)
        mkdir_p(Project.tests_dir)

        if not venvs.in_virtualenv and venvs.defined:
            for venv_info in venvs.infos():
                info('Running unit tests using the {venv} virtual environment.'.format(venv=venv_info.venv))
                venv_info.run('py.test {coverage} {reports} {tests_dir}'.format(coverage=coverage,
                                                                                reports=reports,
                                                                                tests_dir=Project.tests_dir),
                              verbose=True)
        else:
            with LocalShell() as local:
                info('Running unit tests using the current python environment')
                local.run("py.test {coverage} {reports} {tests_dir}".format(coverage=coverage,
                                                                            reports=reports,
                                                                            tests_dir=Project.tests_dir),
                          verbose=True)
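
Every example in this collection calls a helper named mkdir_p, which mirrors the shell's mkdir -p: create a directory and any missing parents, and succeed silently if the directory already exists. The helper itself is never shown, so below is a minimal sketch of what such a function typically looks like; on Python 3.2+ the body can be reduced to os.makedirs(path, exist_ok=True).

import errno
import os

def mkdir_p(path):
    # Create path and any missing parents; tolerate it already existing.
    try:
        os.makedirs(path)
    except OSError as exc:
        # Re-raise anything other than "already exists as a directory".
        if exc.errno != errno.EEXIST or not os.path.isdir(path):
            raise
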
Example #2
def feagen_run_with_configs(global_config, bundle_config, dag_output_path=None,
                            no_bundle=False):
    """Generate feature with configurations.

    global_config (collections.Mapping): global configuration
        generator_class: string
        data_bundles_dir: string
        generator_kwargs: collections.Mapping

    bundle_config (collections.Mapping): bundle configuration
        name: string
        structure: collections.Mapping
        structure_config: collections.Mapping
    """
    if not isinstance(global_config, collections.Mapping):
        raise ValueError("global_config should be a "
                         "collections.Mapping object.")
    if not isinstance(bundle_config, collections.Mapping):
        raise ValueError("bundle_config should be a "
                         "collections.Mapping object.")
    data_generator = get_data_generator_from_config(global_config)
    data_keys = get_data_keys_from_structure(bundle_config['structure'])
    data_generator.generate(data_keys, dag_output_path)

    if not no_bundle:
        mkdir_p(global_config['data_bundles_dir'])
        bundle_path = join(global_config['data_bundles_dir'],
                           bundle_config['name'] + '.h5')
        data_generator.bundle(
            bundle_config['structure'], data_bundle_hdf_path=bundle_path,
            structure_config=bundle_config['structure_config'])
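
Both Mapping arguments are typically loaded from YAML files. A hypothetical invocation, assuming PyYAML and config files like the ones written by init_config in a later example:

import yaml

with open(".dagianrc/config.yml") as f:
    global_config = yaml.safe_load(f)
with open(".dagianrc/bundle_config.yml") as f:
    bundle_config = yaml.safe_load(f)
feagen_run_with_configs(global_config, bundle_config)
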
Example #3
 def make_exec_file(self, config):
     outputPath = config.exec_file_path
     cmd_str = config.cmd_str
     mkdir_p(os.path.dirname(outputPath))
     with open(outputPath, "w") as outputFile:
         outputFile.write(cmd_str)
Example #4
 def __init__(self, suite_dir=None, new_mode=False):
     if suite_dir is None:
         suite_dir = os.getcwd()
     self.db_file_name = os.path.join(suite_dir, self.DB_FILE_BASE_NAME)
     # create the host directory if necessary
     try:
         mkdir_p( suite_dir )
     except Exception as x:
         raise Exception("ERROR: " + str(x))
Example #5
def setup_experiments(auto_var):
    exp_name = 'experiment01'
    mkdir_p(f"./results/{exp_name}")
    auto_var.register_experiment(f'{exp_name}', run_experiment01,
            {'file_format': 'pickle', 'result_file_dir': f'./results/{exp_name}'})
    exp_name = 'restrictedImgnet'
    mkdir_p(f"./results/{exp_name}")
    auto_var.register_experiment(f'{exp_name}', run_restrictedImgnet,
            {'file_format': 'pickle', 'result_file_dir': f'./results/{exp_name}'})
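
The two registrations differ only in the experiment name and run function, so the same setup can be written as a loop over (name, function) pairs; a later example (_register_experiment) extracts exactly this pattern into a helper. A sketch using only names from the original:

def setup_experiments(auto_var):
    for exp_name, run_fn in [('experiment01', run_experiment01),
                             ('restrictedImgnet', run_restrictedImgnet)]:
        mkdir_p(f"./results/{exp_name}")
        auto_var.register_experiment(exp_name, run_fn,
                {'file_format': 'pickle', 'result_file_dir': f'./results/{exp_name}'})
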
Example #7
def init_config():
    mkdir_p(".dagianrc")
    default_global_config = """\
generator_class: feature_generator.FeatureGenerator
data_bundles_dir: data_bundles

# The additional arguments that will be given when initiating the data generator
# object.
generator_kwargs:
  h5py_hdf_dir:
    h5py
  pandas_hdf_dir:
    pandas.h5
"""
    default_bundle_config = """\
# The name of this bundle. This will be the file name of the data bundle.
# Another suggested usage is to comment out this line, so the name will be
# obtained from the file name of this config, that is, the name will be the same
# as the config file name without the extension.
name: default

# The structure of the data bundle. All the involved data will be generated and
# put into the global data file first (if data not exist), and then be bundled
# according to this structure, and then write to the data bundle file.
structure:
  id: id
  label: label
  features:
  - feature_1
  - feature_2

# Special configuration for the structure. Here we set concat=True for
# 'features'. It means that the data list in 'features' will be concatenated
# into a dataset.
structure_config:
  features:
    concat: True
"""
    default_global_config_path = join(".dagianrc", "config.yml")
    if exists(default_global_config_path):
        print("Warning: %s exists so it's not generated." %
              default_global_config_path)
    else:
        with open(default_global_config_path, "w") as fp:
            fp.write(default_global_config)

    default_bundle_config_path = join(".dagianrc", "bundle_config.yml")
    if exists(default_bundle_config_path):
        print("Warning: %s exists so it's not generated." %
              default_bundle_config_path)
    else:
        with open(default_bundle_config_path, "w") as fp:
            fp.write(default_bundle_config)
Example #8
 def __init__( self, suite, run_mode='live', clock=None, start_tag=None, stop_tag=None ):
     self.run_mode = run_mode
     self.clock = clock
     self.start_tag = start_tag
     self.stop_tag = stop_tag
     globals = gcfg
     self.dir = os.path.join( globals.cfg['task hosts']['local']['run directory'], suite, 'state' ) 
     self.path = os.path.join( self.dir, 'state' )
     try:
         mkdir_p( self.dir )
     except Exception as x:
         # TODO: handle error
         raise
Example #9
def daemonize( suite, port ):
    """
    ATTRIBUTION: based on a public-domain code recipe by Jurgen Hermann:
    http://code.activestate.com/recipes/66012-fork-a-daemon-process-on-unix/
    """

    # Do the UNIX double-fork magic, see Stevens' "Advanced
    # Programming in the UNIX Environment" for details (ISBN 0201563177)

    sout = suite_output( suite )
    try:
        mkdir_p( sout.get_dir() )
    except Exception as x:
        sys.exit(str(x))
Example #10
 def make_condor_file(self,condorConfig):
     outputPath = condorConfig.condor_file_path
     mkdir_p(os.path.dirname(outputPath))
     condor_file_content = condor_file_template.format(
             exec_file_path = condorConfig.exec_file_path,
             arguments = condorConfig.arguments,
             input = condorConfig.input,
             output = condorConfig.output,
             error = condorConfig.error,
             log = condorConfig.log,
             njob = condorConfig.njob,
             )
     with open(outputPath, "w") as outputFile:
         outputFile.write(condor_file_content)
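
condor_file_template is not shown in this snippet. A hypothetical placeholder with the same format fields, following standard HTCondor submit-file syntax, might look like:

condor_file_template = """\
executable = {exec_file_path}
arguments  = {arguments}
input      = {input}
output     = {output}
error      = {error}
log        = {log}
queue {njob}
"""
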
Example #11
    def __init__(self, suite_dir=None, new_mode=False, primary_db=True):
        if suite_dir is None:
            suite_dir = os.getcwd()
        if primary_db:
            prefix = os.path.join(suite_dir, 'state')
        else:
            prefix = suite_dir

        self.db_file_name = os.path.join(prefix, self.DB_FILE_BASE_NAME)
        self.db_dump_name = os.path.join(prefix, self.DB_DUMP_BASE_NAME)
        # create the host directory if necessary
        try:
            mkdir_p( suite_dir )
        except Exception as x:
            raise Exception("ERROR: " + str(x))
Example #12
    def __init__(self, suite, config, initial_oldest_ctime, start_tag):
        self.config = config
        self.initial_oldest_ctime = initial_oldest_ctime
        self.start_tag = start_tag

        title = 'suite ' + suite + ' run-time dependency graph'
        # create output directory if necessary
        odir = config['visualization']['runtime graph']['directory']
        # raises OSError:
        mkdir_p(odir)

        self.file = os.path.join(odir, 'runtime-graph.dot')
        self.graph = CGraph(title, config['visualization'])
        self.finalized = False
        self.cutoff = config['visualization']['runtime graph']['cutoff']
Example #13
def convert_dir(oldDir, newDir):
    dat = open('{}.dat'.format(newDir), 'w')
    mkdir_p(newDir)
    for svgFile in os.listdir(oldDir):
        try:
            dat.write('{}: '.format(svgFile))
            convert_svg(os.path.join(oldDir, svgFile),
                        os.path.join(newDir, svgFile),
                        dataWriter=dat)
            dat.write('\n')

        except Exception:
            print('Could not convert {}'.format(svgFile))

    dat.close()
Example #15
    def __init__(self,
                 before_experiment_hooks=None,
                 after_experiment_hooks=None,
                 settings: Dict = None,
                 logging_level: int = logging.WARNING) -> None:
        """
        settings : {
            'server_url': 'http://127.0.0.1:8080/nn_attack/',
            'result_file_dir': './results/',
            'file_format': 'json' or 'pickle'
        }
        """
        logger.setLevel(logging_level)

        self.experiments: Dict[str, Any] = {}
        self.variables: Dict[str, dict] = {}
        self.var_shown_name: Dict[str, Dict[str, str]] = {}
        self.var_description: Dict[str, str] = {}
        self.var_class: Dict[str, Any] = {}
        self.var_value: Dict[str, Any] = {}

        self.inter_var: Dict[str, Any] = {}
        self.result_fields: List[str] = []
        self.settings: Dict
        self.settings = {'file_format': 'json'}
        if isinstance(settings, dict):
            self.settings.update(settings)
        if ('result_file_dir'
                in self.settings) and self.settings['result_file_dir']:
            mkdir_p(self.settings['result_file_dir'])

        try:
            self.repo = git.Repo(search_parent_directories=True)
            self.var_value['git_hash'] = self.repo.head.object.hexsha
        except git.exc.InvalidGitRepositoryError:
            logger.warning("Git repository not found.")

        try:
            self.var_value['hostname'] = socket.gethostname()
        except Exception:
            logger.warning("Unable to get hostname.")

        self.after_experiment_hooks = after_experiment_hooks
        self.before_experiment_hooks = before_experiment_hooks

        self._read_only: bool = False
        self._no_hooks: bool = False
Example #16
 def generate( self, dir ):
     pfile = os.path.join(dir, 'passphrase')
     if os.path.isfile( pfile ):
         try:
             self.get( pfile )
             return
         except SecurityError:
             pass
     # Note: Perhaps a UUID might be better here?
     char_set = string.ascii_uppercase + string.ascii_lowercase + string.digits
     self.passphrase = ''.join(random.sample(char_set, 20))
     mkdir_p(dir)
     with open(pfile, 'w') as f:
         f.write(self.passphrase)
     # set passphrase file permissions to owner-only
     os.chmod(pfile, 0o600)
     if flags.verbose:
         print('Generated suite passphrase file on',
               user + '@' + get_hostname() + ':', pfile)
Example #17
 def generate(self, dir):
     pfile = os.path.join(dir, 'passphrase')
     if os.path.isfile(pfile):
         try:
             self.get(pfile)
             return
         except SecurityError:
             pass
     # Note: Perhaps a UUID might be better here?
     char_set = string.ascii_uppercase + string.ascii_lowercase + string.digits
     self.passphrase = ''.join(random.sample(char_set, 20))
     mkdir_p(dir)
     with open(pfile, 'w') as f:
         f.write(self.passphrase)
     # set passphrase file permissions to owner-only
     os.chmod(pfile, 0o600)
     if self.verbose:
         print('Generated suite passphrase file on',
               user + '@' + get_hostname() + ':', pfile)
Example #18
def main():
	input_fn = sys.argv[1]
	#input_fn = 'clip009.mp4'
	#input_fn = 'clip010.mp4'
	
	output_fn = os.path.splitext(input_fn)[0] + '_output.mp4'
	output_tracking_result_fn = os.path.splitext(input_fn)[0] + '_tracking_result.csv'
	output_background_fn = os.path.splitext(input_fn)[0] + '_background.bmp'
	tmp_dir = '__tmp/'
	tmp_dir_cluster = os.path.join(tmp_dir, 'cluster/')
	tmp_dir_extraction = os.path.join(tmp_dir, 'extraction/')
	output_temp_file_zip = input_fn + '.__tmp'
	mkdir_p(tmp_dir)
	make_clean_dir(tmp_dir_cluster) # dangerous
	make_clean_dir(tmp_dir_extraction) # dangerous
	print('auto_background starts...')
	#dangerous: race condition(ignore it)
	if os.path.basename(output_background_fn) in os.listdir('.'):
		print('use cached background file: ' + output_background_fn)
	else:
		auto_background(input_fn, output_background_fn)
	print('auto_background finished')
	print('ball_extraction starts...')
	ball_extraction(input_fn, output_background_fn, tmp_dir_extraction)
	print('ball_extraction finished')
	print('ball_clustering starts...')
	ball_clustering(tmp_dir_extraction, tmp_dir_cluster, 'bmp')
	print('ball_clustering finished')
	print('tracking_from_auto_template starts...')
	if os.path.basename(output_tracking_result_fn) in os.listdir('.'):
		print('use cached tracking result csv file: ' + output_tracking_result_fn)
	else:
		tracking_from_auto_template(input_fn, output_background_fn, tmp_dir_cluster, output_tracking_result_fn)
	print('tracking_from_auto_template finished')
	print('creating output video...')
	create_tracking_video_from_detection_result_csv(input_fn, output_tracking_result_fn, output_fn)
	print('output video %s created' % output_fn)
	print('creating temp file archive...')
	shutil.make_archive(output_temp_file_zip, 'zip', tmp_dir)
	print('temp file archive %s.zip created' % output_temp_file_zip)
	return
Example #19
    def test_json_file(self):
        settings = {'file_format': 'json', 'result_file_dir': 'test'}
        mkdir_p(settings['result_file_dir'])
        auto_var = AutoVar(
            settings=settings,
            after_experiment_hooks=[
                partial(save_result_to_file, get_name_fn=default_get_file_name)
            ],
        )
        auto_var.add_variable_class(OrdVarClass())
        auto_var.set_variable_value_by_dict({'ord': '1'})

        def experiment(auto_var):
            return {'test': auto_var.get_var('ord')}

        _ = auto_var.run_single_experiment(experiment, with_hook=True)

        with open("test/1.json", 'r') as f:
            ret = json.load(f)
        self.assertEqual(ret['test'], auto_var.get_var('ord'))
        shutil.rmtree(settings['result_file_dir'])
Example #20
 def make_crab_file(self, crabConfig):
     outputPath = crabConfig.crab_file_path
     mkdir_p(os.path.dirname(outputPath))
     crab_file_content = crab_file_template.format(
         taskName=crabConfig.taskName,
         JobType_plugName=crabConfig.JobType_plugName,
         JobType_psetName=crabConfig.JobType_psetName,
         JobType_scriptExe=crabConfig.JobType_scriptExe,
         JobType_inputFiles=crabConfig.JobType_inputFiles,
         JobType_outputFiles=crabConfig.JobType_outputFiles,
         JobType_maxMemoryMB=crabConfig.JobType_maxMemoryMB,
         Data_outputPrimaryDataset=crabConfig.Data_outputPrimaryDataset,
         Data_unitsPerJob=crabConfig.Data_unitsPerJob,
         Data_totalUnits=crabConfig.Data_totalUnits,
         Data_publication=crabConfig.Data_publication,
         Data_outputDatasetTag=crabConfig.Data_outputDatasetTag,
         Data_outLFNDirBase=crabConfig.Data_outLFNDirBase,
         Site_storageSite=crabConfig.Site_storageSite,
     )
     with open(outputPath, "w") as outputFile:
         outputFile.write(crab_file_content)
Example #21
def draw_dag(nx_dag, path):
    if dirname(path) != '':
        mkdir_p(dirname(path))
    agraph = nx.nx_agraph.to_agraph(nx_dag)
    for edge in agraph.edges_iter():
        if edge.attr['nonskipped_data'] is None:
            edge.attr['label'] = edge.attr['data_definitions']
        else:
            edge.attr['label'] = ""
            if edge.attr['nonskipped_data'] not in ["set()", "set([])"]:
                edge.attr['label'] += edge.attr['nonskipped_data']
            if (edge.attr['skipped_data'] not in ["set()", "set([])"]
                    and edge.attr['skipped_data'] is not None):
                edge.attr['label'] += "(%s skipped)" % edge.attr['skipped_data']
    for node in agraph.nodes_iter():
        if node.attr['skipped'] == "True":
            node.attr['label'] = node.attr['func_name'] + " (skipped)"
            node.attr['fontcolor'] = 'grey'
        else:
            node.attr['label'] = node.attr['func_name']
    agraph.layout('dot')
    agraph.draw(path)
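
A hypothetical usage sketch, assuming networkx and pygraphviz are installed and that the graph's nodes and edges carry the string attributes draw_dag reads:

import networkx as nx

dag = nx.DiGraph()
dag.add_node("load", skipped="False", func_name="load_data")
dag.add_node("clean", skipped="True", func_name="clean_data")
dag.add_edge("load", "clean", nonskipped_data="{'raw'}", skipped_data="set()",
             data_definitions="raw")
draw_dag(dag, "output/dag.png")  # creates output/ via mkdir_p before drawing
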
Example #22
 def __init__(self, hdf_path):
     hdf_dir = os.path.dirname(hdf_path)
     if hdf_dir != '':
         mkdir_p(hdf_dir)
     self.h5f = h5py.File(hdf_path, 'a')
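
The class name is cropped out of this snippet, so H5pyDataHandler below is a placeholder; a hypothetical usage assuming h5py is installed:

handler = H5pyDataHandler("data/h5py/global.h5")  # parent dir created via mkdir_p
handler.h5f.create_dataset("feature_1", data=[1, 2, 3])
handler.h5f.close()
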
Example #23
        os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
        os.environ["CUDA_VISIBLE_DEVICES"] = "-1"

    # Load Training Data
    X_train, y_train, X_val, y_val = load_training_data(args.track)

    print(X_train.shape[0], 'training samples.')
    print(X_val.shape[0], 'validation samples.')

    # Training loop variables
    epochs = 100
    batch_size = 50

    model = create_model()

    mkdir_p("weights")
    weights_file = "weights/{}.hdf5".format(args.track)
    if os.path.isfile(weights_file):
        model.load_weights(weights_file)

    model.compile(loss=customized_loss, optimizer=optimizers.adam(lr=0.0001))
    checkpointer = ModelCheckpoint(monitor='val_loss',
                                   filepath=weights_file,
                                   verbose=1,
                                   save_best_only=True,
                                   mode='min')
    earlystopping = EarlyStopping(monitor='val_loss', patience=20)
    model.fit(X_train,
              y_train,
              batch_size=batch_size,
              epochs=epochs,
Example #24
    def get_var_with_argument(self, var_name: str, argument: str, *args,
                              **kwargs):
        if self.variables[var_name]["type"] == "val":
            return argument
        else:
            cache_dir = None
            if argument in self.variables[var_name]["argument_fn"]:
                func = self.variables[var_name]["argument_fn"][argument]
                cache_dir = self.variables[var_name]['cache_dirs'][argument]
                required_vars = self.variables[var_name]['required_vars'][
                    argument]
            else:
                m = None
                for arg_template, func in self.variables[var_name][
                        "argument_fn"].items():
                    m = re.fullmatch(arg_template, argument)
                    if m is not None:
                        kwargs.update(m.groupdict())
                        cache_dir = self.variables[var_name]['cache_dirs'][
                            arg_template]
                        required_vars = self.variables[var_name][
                            'required_vars'][arg_template]
                        break
                if m is None:
                    raise ValueError('Argument "%s" not matched in Variable '
                                     '"%s".' % (argument, var_name))

            kwargs['auto_var'] = self
            named_args = inspect.getfullargspec(func)[0]
            if 'var_value' in named_args:
                kwargs['var_value'] = self.var_value
            if 'inter_var' in named_args:
                kwargs['inter_var'] = self.inter_var

            if cache_dir is not None:
                mkdir_p(cache_dir)
                #var_used = {var_name: self.var_value[var_name]}
                var_used = {var_name: argument}
                if required_vars is not None:
                    for var in required_vars:
                        if var not in self.var_value:
                            raise ValueError(
                                'Variable "%s" required by Variable '
                                '"%s" is not set.' % (var, argument))

                        var_used[var] = self.var_value[var]
                required_variables = [
                    str(v) for k, v in sorted(var_used.items())
                ]
                cache_filename = os.path.join(
                    cache_dir, '-'.join(required_variables) + '.pkl')

                if os.path.exists(cache_filename):
                    try:
                        func_outputs = joblib.load(cache_filename)
                        logger.info(
                            f"using result from cache file {cache_filename} ..."
                        )
                    except Exception:
                        os.unlink(cache_filename)
                        func_outputs = func(*args, **kwargs)
                        logger.info(
                            f"dumping cache file to {cache_filename} ...")
                        joblib.dump(func_outputs, cache_filename)
                else:
                    func_outputs = func(*args, **kwargs)
                    logger.info(f"dumping cache file to {cache_filename} ...")
                    joblib.dump(func_outputs, cache_filename)
            else:
                func_outputs = func(*args, **kwargs)

            return func_outputs
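
The cache filename above encodes the variable values involved: the argument itself plus each required variable, sorted by variable name and joined with '-'. A small worked example of that scheme with hypothetical values:

var_used = {'ord': '2', 'dataset': 'mnist'}  # argument plus required vars
required_variables = [str(v) for k, v in sorted(var_used.items())]
cache_filename = os.path.join('cache_dir', '-'.join(required_variables) + '.pkl')
# keys sort as ['dataset', 'ord'] -> values ['mnist', '2'] -> cache_dir/mnist-2.pkl
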
Example #25
 def create_directory( self, d, name ):
     try:
         mkdir_p( d )
     except Exception as x:
         print(str(x), file=sys.stderr)
         raise GlobalConfigError('Failed to create directory "' + name + '"')
Example #26
def main(date_range=1):
    session = requests.Session()
    adapters = requests.adapters.HTTPAdapter(max_retries=3)
    session.mount("http://", adapters)
    session.mount("https://", adapters)

    pdf_root_dir = OUTPUT_DIR + '/pdf'
    xbrl_root_dir = OUTPUT_DIR + '/xbrl'
    mkdir_p(OUTPUT_DIR)
    mkdir_p(pdf_root_dir)
    mkdir_p(xbrl_root_dir)
    dbname = '%s/sqlite3.db' % (OUTPUT_DIR)
    with closing(sqlite3.connect(dbname)) as conn:
        cur = conn.cursor()
        query = '''
        CREATE TABLE IF NOT EXISTS td_net (
            id INTEGER PRIMARY KEY AUTOINCREMENT, 
            date DATETIME, 
            item_id text, 
            stock_code text, 
            stock_code_long text, 
            company_name text, 
            title text,
            content text,
            xbrl text,
            pdf text,
            security text,
            refresh_info text
        );
        '''
        cur.execute(query)

        now = datetime.datetime.now()
        start_date = datetime.datetime(now.year, now.month, now.day)
        current_date = start_date
        for i in range(date_range):
            current_date -= datetime.timedelta(days=1)
            logging.info('Collecting data on %s' % (current_date))
            datestring = current_date.strftime('%Y%m%d')
            result = session.get(
                'https://www.release.tdnet.info/inbs/I_list_001_%s.html' %
                (datestring))
            time.sleep(0.5)
            soup = BeautifulSoup(result.content, 'lxml')
            if soup.find(id='pager-box-top'):
                page_size = len(
                    soup.find(id='pager-box-top').find_all('div')) - 3
                pdf_dir = pdf_root_dir + '/%s' % (datestring)
                xbrl_dir = xbrl_root_dir + '/%s' % (datestring)
                mkdir_p(pdf_dir)
                mkdir_p(xbrl_dir)
                record_data(session, cur, conn, current_date, pdf_dir,
                            xbrl_dir, soup)
                for a in range(page_size - 1):
                    result = session.get(
                        'https://www.release.tdnet.info/inbs/I_list_0%02d_%s.html'
                        % (a + 2, datestring))
                    time.sleep(0.5)
                    soup = BeautifulSoup(result.content, 'lxml')
                    record_data(session, cur, conn, current_date, pdf_dir,
                                xbrl_dir, soup)
    BaseObject("phi3",xmin=-np.pi,xmax=np.pi,binwidth=np.pi/20,xlabel="#phi_{3}"+" [radians]",ylabel=str("%.5f" % (np.pi/20))+" [radians]",xminrange=-3.5,xmaxrange=3.5,ymaxrange=0.04),
    BaseObject("phi4",xmin=-np.pi,xmax=np.pi,binwidth=np.pi/20,xlabel="#phi_{4}"+" [radians]",ylabel=str("%.5f" % (np.pi/20))+" [radians]",xminrange=-3.5,xmaxrange=3.5,ymaxrange=0.04),
    BaseObject("phi5",xmin=-np.pi,xmax=np.pi,binwidth=np.pi/20,xlabel="#phi_{5}"+" [radians]",ylabel=str("%.5f" % (np.pi/20))+" [radians]",xminrange=-3.5,xmaxrange=3.5,ymaxrange=0.04),
    BaseObject("phi6",xmin=-np.pi,xmax=np.pi,binwidth=np.pi/20,xlabel="#phi_{6}"+" [radians]",ylabel=str("%.5f" % (np.pi/20))+" [radians]",xminrange=-3.5,xmaxrange=3.5,ymaxrange=0.04),
    BaseObject("pto1",xmin=0.0,xmax=120.0,binwidth=1.0,xlabel="p_{T,1} [GeV]",ylabel=str("%.5f" % 1.0)+" [GeV]",xminrange=0.0,xmaxrange=120.0,ymaxrange=0.1),
    BaseObject("pto2",xmin=0.0,xmax=120.0,binwidth=1.0,xlabel="p_{T,2} [GeV]",ylabel=str("%.5f" % 1.0)+" [GeV]",xminrange=0.0,xmaxrange=120.0,ymaxrange=0.1),
    BaseObject("pto3",xmin=0.0,xmax=120.0,binwidth=1.0,xlabel="p_{T,3} [GeV]",ylabel=str("%.5f" % 1.0)+" [GeV]",xminrange=0.0,xmaxrange=120.0,ymaxrange=0.1),
    BaseObject("pto4",xmin=0.0,xmax=120.0,binwidth=1.0,xlabel="p_{T,4} [GeV]",ylabel=str("%.5f" % 1.0)+" [GeV]",xminrange=0.0,xmaxrange=120.0,ymaxrange=0.1),
    BaseObject("costheta1",xmin=-1.0,xmax=1.0,binwidth=0.05,xlabel="cos(#theta_{1})",ylabel=str("%.5f" % 0.05),xminrange=-1.2,xmaxrange=1.2,ymaxrange=0.05),
    BaseObject("costheta2",xmin=-1.0,xmax=1.0,binwidth=0.05,xlabel="cos(#theta_{2})",ylabel=str("%.5f" % 0.05),xminrange=-1.2,xmaxrange=1.2,ymaxrange=0.05),
    BaseObject("costhetastar",xmin=-1.0,xmax=1.0,binwidth=0.05,xlabel="cos(#theta_{*})",ylabel=str("%.5f" % 0.05),xminrange=-1.2,xmaxrange=1.2,ymaxrange=0.05),
    BaseObject("phi",xmin=-np.pi,xmax=np.pi,binwidth=np.pi/20,xlabel="#phi"+" [radians]",ylabel=str("%.5f" % (np.pi/20))+" [radians]",xminrange=-3.5,xmaxrange=3.5,ymaxrange=0.04),
    BaseObject("phi1",xmin=-np.pi,xmax=np.pi,binwidth=np.pi/20,xlabel="#phi_{1}"+" [radians]",ylabel=str("%.5f" % (np.pi/20))+" [radians]",xminrange=-3.5,xmaxrange=3.5,ymaxrange=0.04),
    ]

mkdir_p(outPlotDir)
if extratitle != "": 
    extratitle = ", " + extratitle

f1 = TFile(inputFilePath1,"READ")
f2 = TFile(inputFilePath2,"READ")
t1 = f1.Get("lheEvents_tchan")
t2 = f2.Get("lheEvents_tchan")
for p in plotlist:
    kinem = p.name
    xmin = p.xmin
    xmax = p.xmax
    binwidth = p.binwidth
    xlabel = p.xlabel
    ylabel = p.ylabel
    xminrange = p.xmin
Example #28
 def __init__(self, pickle_dir):
     mkdir_p(pickle_dir)
     self.pickle_dir = pickle_dir
Example #29
def download_data(url, output_path):
    mkdir_p(dirname(output_path))
    os.system("wget {} -O {}".format(url, output_path))
Example #30
TARGET_DIR_TST = "/tmp2/RestrictedImageNet/val/"

ds = ImageNet("/tmp2/ImageNet")
meta = torch.load("/tmp2/ImageNet/meta.bin")

inv_map = {v: k for k, v in meta[0].items()}

class_dict = {
    "dog": (151, 269),
    "cat": (281, 286),
    "frog": (30, 33),
    "turtle": (33, 38),
    "bird": (80, 101),
    "primate": (365, 383),
    "fish": (389, 398),
    "crab": (118, 122),
    "insect": (300, 320),
}

for class_name, idx in class_dict.items():
    mkdir_p(join(TARGET_DIR_TRN, class_name))
    mkdir_p(join(TARGET_DIR_TST, class_name))
    for k in ds.classes[idx[0]:idx[1]]:
        fn = inv_map[k]
        os.system(
            f"cp -rf {join(IMGNET_DIR_TRN, fn)}/* {join(TARGET_DIR_TRN, class_name)}"
        )
        os.system(
            f"cp -rf {join(IMGNET_DIR_TST, fn)}/* {join(TARGET_DIR_TST, class_name)}"
        )
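
The cp -rf calls can be replaced with shutil to avoid shelling out; a sketch using the same names as above (a swapped-in technique; dirs_exist_ok needs Python 3.8+):

import shutil

shutil.copytree(join(IMGNET_DIR_TRN, fn), join(TARGET_DIR_TRN, class_name),
                dirs_exist_ok=True)
shutil.copytree(join(IMGNET_DIR_TST, fn), join(TARGET_DIR_TST, class_name),
                dirs_exist_ok=True)
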
Example #31
 def __init__(self, hdf_path):
     hdf_dir = os.path.dirname(hdf_path)
     if hdf_dir != '':
         mkdir_p(hdf_dir)
     self.hdf_store = pd.HDFStore(hdf_path)
Example #32
RESULT_DIR = "shake_results"

TOKENIZER_FILE = RESULT_DIR + "/tokenizer.pickle"
WEIGHTS_FILE = RESULT_DIR + "/shake-weights.hdf5"

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument('data', help='Data file of (line, author, work) tuples. Use generate_data.py in data/shakespeare to generate new datasets.')
    parser.add_argument('--train', help='Run training.', action='store_true')
    parser.add_argument('--evaluate_test', help='Run evaluations on the test split.', action='store_true')
    parser.add_argument('--evaluate_val', help='Run evaluations on the val split.', action='store_true')
    parser.add_argument('--evaluate_disputed', help='Run model on disputed W.S. works.', action='store_true')
    args = parser.parse_args()

    mkdir_p(RESULT_DIR)

    if os.path.isfile(TOKENIZER_FILE):
        print("======= Loading Tokenizer =======")
        with open(TOKENIZER_FILE, 'rb') as handle:
            tokenizer, data_tuples, author_id_map, works_id_map, lines_by_author_and_work = pickle.load(handle)

    else:
        print("======= Loading Plays =======")
        print()

        with open(args.data, 'r') as data_handle:
            all_lines = [l.strip() for l in data_handle.readlines()]

        # strip "// Metadata", extract json metadata object, strip "\n // (<fields>)"
        metadata = json.loads(all_lines[1])
Example #33
    def save(self, sha256sum, message, file):
        engine = create_engine('sqlite:///telescam.db')
        Base.metadata.bind = engine
        DBSession = sessionmaker(bind=engine)
        session = DBSession()
        data = self.koodous_link_existing_analysis_json(sha256sum)
        koodous_url = self.koodous_link_existing_analysis(sha256sum)
        if data is None:
            logger.debug(
                'Received empty json response at save from koodous_link_existing_analysis_json'
            )
            return False
        try:
            new_certificate = None
            try:
                new_certificate = session.query(Certificate).filter(
                    Certificate.sha1 == data['androguard']['certificate']
                    ['sha1']).first()
                logger.debug(
                    "Checking if current certificate exists in the database")
            except KeyError:
                logger.debug(
                    "Koodous couldn't extract the certificate; corrupted APK, using default certificate"
                )
                new_certificate = session.query(Certificate).filter(
                    Certificate.sha1 == '-').first()
            if new_certificate is None:
                logger.debug("Certificate didn't exist")
                new_certificate = Certificate(
                    sha1=data['androguard']['certificate']['sha1'],
                    not_before=data['androguard']['certificate']['not_before'],
                    not_after=data['androguard']['certificate']['not_after'],
                    subjectdn=data['androguard']['certificate']['subjectDN'],
                    issuerdn=data['androguard']['certificate']['issuerDN'],
                    serial=data['androguard']['certificate']['serial'])
                session.add(new_certificate)

            new_apk = session.query(APK).filter(
                APK.sha256 == data['sha256']).first()
            logger.debug("Checking if current apk exists in the database")
            if new_apk is None:
                logger.debug("apk didn't exist")
                # Save apk
                local_filename = self.FILES_DIR + message.document.file_id + '.apk'
                try:
                    logger.debug("Saving to disk")
                    mkdir_p(os.path.dirname(local_filename))
                    with open(local_filename, 'wb') as new_file:
                        new_file.write(file)
                except Exception as e:
                    logger.error('Failed to save apk to disk: %s' %
                                 local_filename,
                                 exc_info=True)
                    raise
                new_apk = APK(
                    app_name=data['androguard']['app_name'],
                    package_name=data['androguard']['package_name'],
                    version_code=data['androguard']['version_code'],
                    displayed_version=data['androguard']['displayed_version'],
                    local_package=local_filename,
                    koodous_url=koodous_url,
                    sha256=data['sha256'],
                    certificate=new_certificate)
                session.add(new_apk)

            new_submission = Submission(
                submitted_to_username=message.chat.username,
                submitted_to_title=message.chat.title,
                submitted_to_id=message.chat.id,
                forwarded_from_username=message.forward_from.username
                if message.forward_from is not None else None,
                forwarded_from_firstname=message.forward_from.first_name
                if message.forward_from is not None else None,
                forwarded_from_lastname=message.forward_from.last_name
                if message.forward_from is not None else None,
                forwarded_from_id=message.forward_from.id
                if message.forward_from is not None else None,
                submitted_by_username=message.from_user.username,
                submitted_by_firstname=message.from_user.first_name,
                submitted_by_lastname=message.from_user.last_name,
                submitted_by_id=message.from_user.id,
                message_text=message.text,
                filename=message.document.file_name,
                apk=new_apk)
            session.add(new_submission)
            logger.debug("Adding submission details to database")
            try:
                session.commit()
                logger.debug("Saved changes to database")
                return True
            except Exception as e:
                logger.error('Failed to save changes to the database',
                             exc_info=True)
                raise
Example #34
def _register_experiment(auto_var, exp_name):
    mkdir_p(f"./results/{exp_name}")
    auto_var.register_experiment(f'{exp_name}', getattr(experiments, f'run_{exp_name}'),
            {'file_format': 'pickle', 'result_file_dir': f'./results/{exp_name}'})
Example #35
def createFolders_callback(data):
	
	params = dict(data);

	root = params.pop('moldb_root');
	prototype = params.pop('moldb_prototype');
	printOnly = params.pop('moldb_printOnly');
	project_name = params.pop('moldb_projectName');

	visited={}
	m = buildImplementation(params,prototype,visited);

	meth = [];

	for pid in sorted(visited.keys()):
		meth.append(visited[pid]);

	folder = root + '/' + m.getRef();

	relroot = '$MOLDB_PROJECTS/'+project_name+'/Methods';	
	relfolder = relroot + '/' + m.getRef();	

	print(folder)

	if printOnly: return

	# mkdir
	mkdir_p(folder);

	#chain.implementation	
	chain = Chain();
	chain.fromMethods(meth,m.getHash(),project_name,False);
		
	f=open(folder + '/chain.imp','w');
	f.write( chain.toString() );
	f.close();



	#PARAMETERS
	paramStr = 'PROJECT='+project_name+'\n';
	paramStr += 'FOLDER='+relfolder+'\n'
	paramStr += 'PROTOTYPES=$MOLDB_PROJECTS/'+project_name+'/Prototypes\n';
	paramStr += paramsString(params);
	
	f=open(folder+'/PARAMETERS','w');
	f.write(paramStr);
	f.close();

	os.system('chmod +x '+folder+ '/PARAMETERS');
	

	#DEPENDENCIES
	depStr = dependenciesString(m.dependencies,relroot);
	f=open(folder+'/DEPENDENCIES','w');
	f.write(depStr);
	f.close();
	
	os.system('chmod +x '+ folder +'/DEPENDENCIES');

	#Scripts
	strPrm = '#!/bin/bash\n'
	strPrm += '# METHOD '+m.name+'\n';
	strPrm += '# ID' + str(m.getHash())+'\n';
	strPrm += '# --- Dependencies --- \n'
	strPrm += '. ./DEPENDENCIES\n'
#	strPrm += dependenciesString(m.dependencies,root);
	
	strPrm += '# --- Method Parameters ---\n'
	strPrm += '. ./PARAMETERS\n'
#	strPrm += 'PROJECT='+root+'\n'
#	strPrm += 'FOLDER=' + folder +'\n'

	strPrm += paramsString(params);	
	strPrm += '\n # ---- Script ----\n'



	for script in m.scripts:
		f=open(folder+'/'+script.name,'w');
		f.write(strPrm+script.text);
		f.close();

		os.system('chmod +x '+folder+'/'+script.name);
Example #36
def make_clean_dir(path):
	mkdir_p(path)
	shutil.rmtree(path) # dangerous
	mkdir_p(path)
	return
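
Since the rmtree step is flagged "# dangerous" both here and at the call sites, a slightly more defensive variant is sketched below (an assumption, not the author's code): it refuses to wipe anything outside a declared sandbox directory.

import os
import shutil

def make_clean_dir_safe(path, sandbox="__tmp"):
    # Only allow cleaning paths at or under the sandbox root.
    abspath = os.path.abspath(path)
    sandbox_abs = os.path.abspath(sandbox)
    if abspath != sandbox_abs and not abspath.startswith(sandbox_abs + os.sep):
        raise ValueError("refusing to remove %r outside %r" % (abspath, sandbox_abs))
    shutil.rmtree(abspath, ignore_errors=True)
    mkdir_p(abspath)
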
Example #37
                            mass = m,
                            ) 
                            for m in [4,5,7,15,20,25,30,] ],
                    color = ROOT.kBlue,
                    ),
                ]
treeName    = "lheEvents_tchan"
histName    = "acc"
n_tot_evts  = 10000.
outputPath  = "/home/lucien/public_html/Higgs/ALP/AcceptanceStudy/2019-11-26_hToZX/plot.pdf"
y_range     = [0.,1.]
selection   = "(massZ2 > %s) && (massZ2 < %s)"
width       = 0.02

# ________________________________________________________________________________________________________________________ ||
mkdir_p(os.path.dirname(outputPath))
c = ROOT.TCanvas()
leg = ROOT.TLegend(0.70,0.65,0.89,0.87)
for i,hist_cfg in enumerate(hist_cfgs):
    hist_cfg.hist = ROOT.TH1D(hist_cfg.name,"",len(hist_cfg.cfgs),-0.5,len(hist_cfg.cfgs)-0.5)
    leg.AddEntry(hist_cfg.hist,hist_cfg.name,"l")
    for ibin,cfg in enumerate(hist_cfg.cfgs):
        f = ROOT.TFile(cfg.inputPath,"READ")
        t = f.Get(treeName)
        #acc = float(t.GetEntries())/float(n_tot_evts)
        selectionStr = selection%(str(cfg.mass*(1.-width)),str(cfg.mass*(1.+width)))
        acc = float(t.GetEntries(selectionStr)/float(n_tot_evts))
        hist_cfg.hist.SetBinContent(ibin+1,acc)
        hist_cfg.hist.SetBinError(ibin+1,math.sqrt(1./float(t.GetEntries())+1./float(n_tot_evts))*acc)
        hist_cfg.hist.GetXaxis().SetBinLabel(ibin+1,cfg.x_label)
        f.Close()
Example #38
 def mkdir( self ):
     try:
         mkdir_p( self.dir )
     except Exception as x:
         # TODO: handle error
         raise
Example #39
# Copyright:   (c) Tiffany 2019
#-------------------------------------------------------------------------------
print("importing libraries ...")
import pandas as pd
import numpy as np
import csv
import sys
from mkdir_p import mkdir_p  # helper for "mkdir -p" (creates the folder if it does not already exist)

iupac_code = {
    "A", "C", "G", "T", "W", "S", "M", "K", "R", "Y", "B", "D", "H", "V", "N"
}
length = sys.argv[1]

OUTPUT = "./IUPAC_input/"
mkdir_p(OUTPUT)


#read csv
def readcsv(infile):
    print("reading csv...")
    df = pd.read_csv(infile, index_col=0)
    return df


def gen_iupac_nt_input():
    for code1 in iupac_code:
        for code2 in iupac_code:
            for code3 in iupac_code:
                output_df = pd.DataFrame()
                iucode = code1 + code2 + code3