Example #1
def tempdir() -> Iterator[str]:
    with tempfile.TemporaryDirectory() as name:
        yield name
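On its own this generator does nothing until something drives it; in a test suite this exact shape is usually a pytest fixture. A minimal sketch, assuming pytest (the decorator and the test below are illustrative additions, not part of the original snippet):

import os
import tempfile
from typing import Iterator

import pytest

@pytest.fixture
def tempdir() -> Iterator[str]:
    # Code before the yield runs at setup, code after it at teardown;
    # the context manager removes the whole directory tree on exit.
    with tempfile.TemporaryDirectory() as name:
        yield name

def test_writes_file(tempdir: str) -> None:
    path = os.path.join(tempdir, 'out.txt')
    with open(path, 'w') as f:
        f.write('hello')
    assert os.path.exists(path)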
Example #2
        os.mkdir(workerrec['dbfarm'])
        workerrec['proc'] = process.server(mapiport=workerrec['port'], dbname=workerrec['dbname'], dbfarm=workerrec['dbfarm'], stdin=process.PIPE, stdout=process.PIPE)
        workerrec['conn'] = pymonetdb.connect(database=workerrec['dbname'], port=workerport, autocommit=True)
        filename = fn_template.format(workerrec['num'])
        t = threading.Thread(target=worker_load, args=[filename, workerrec, cmovies, ratings_table_def_fk])
        t.start()
        workerrec['loadthread'] = t

    for wrec in workers:
        wrec['loadthread'].join()

# Start supervisor database
supervisorport = freeport()
supervisorproc = None
workers = []
with tempfile.TemporaryDirectory() as TMPDIR:
    os.mkdir(os.path.join(TMPDIR, "supervisor"))
    with process.server(mapiport=supervisorport, dbname="supervisor", dbfarm=os.path.join(TMPDIR, "supervisor"), stdin=process.PIPE, stdout=process.PIPE) as supervisorproc:
        supervisorconn = pymonetdb.connect(database='supervisor', port=supervisorport, autocommit=True)
        supervisor_uri = "mapi:monetdb://localhost:{}/supervisor".format(supervisorport)
        c = supervisorconn.cursor()

        # Create the movies table and load the data
        movies_filename = os.getenv("TSTDATAPATH") + "/netflix_data/movies.csv"
        movies_create = "CREATE TABLE movies {}".format(MOVIES_TABLE_DEF)
        c.execute(movies_create)
        load_movies = "COPY INTO movies FROM '{}' USING DELIMITERS ',','\n','\"'".format(movies_filename)
        c.execute(load_movies)

        # Declare the ratings merge table on supervisor
        mtable = "CREATE MERGE TABLE ratings {}".format(RATINGS_TABLE_DEF)
Example #3
    c.execute(screateq)
    c.execute(sloadq)

    rtable = repltable + workerrec['tpf']
    rcreateq = 'create table %s %s' % (rtable, replicatedtabledef)
    rloadq = 'copy into %s from \'%s\'' % (rtable, workerrec['repldata'].replace('\\', '\\\\'))
    c.execute(rcreateq)
    c.execute(rloadq)

ssbmpath = os.path.join(os.environ['TSTSRCBASE'], 'sql', 'benchmarks', 'ssbm', 'Tests')
ssbmdatapath = os.path.join(ssbmpath, 'SF-0.01')

masterport = freeport()
masterproc = None
workers = []
with tempfile.TemporaryDirectory() as tmpdir:
    os.mkdir(os.path.join(tmpdir, 'master'))
    with process.server(mapiport=masterport, dbname="master", dbfarm=os.path.join(tmpdir, 'master'), stdin=process.PIPE, stdout=process.PIPE) as masterproc:
        masterconn = pymonetdb.connect(database='', port=masterport, autocommit=True)

        # split lineorder table into one file for each worker
        # this is as portable as an anvil
        lineordertbl = os.path.join(ssbmdatapath, 'lineorder.tbl')
        lineorderdir = os.path.join(tmpdir, 'lineorder')
        if os.path.exists(lineorderdir):
            import shutil
            shutil.rmtree(lineorderdir)
        os.makedirs(lineorderdir)
        with open(lineordertbl, 'r') as f:
            inputData = f.read().split('\n')
        linesperslice = len(inputData) // nworkers + 1
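Both MonetDB examples above call a freeport() helper that is not shown. A common sketch, assuming all it has to do is pick an unused TCP port (binding to port 0 makes the kernel choose one):

import socket

def freeport() -> int:
    # Bind to port 0 so the OS assigns a free ephemeral port, then release it.
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
        s.bind(('localhost', 0))
        return s.getsockname()[1]

The usual caveat applies: the port is only known to be free at the moment of the check and could be claimed by another process before the server binds it.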
Example #4
    def __init__(self):
        self.logger = logging.getLogger(LB_Processor.__name__)

        self._log_dir = tempfile.TemporaryDirectory(prefix='lb.processor')
        self._log_dir_name = self._log_dir.name
Example #5
def certificates_tempdir():
    custom_filepath = '/tmp/nucypher-test-certificates-'
    cert_tmpdir = tempfile.TemporaryDirectory(prefix=custom_filepath)
    try:
        yield cert_tmpdir.name
    finally:
        cert_tmpdir.cleanup()  # clean up even if the consumer raises
Example #6
    def setUp(self):
        self.dir = tempfile.TemporaryDirectory()
Example #7
    def setUp(self):
        self._directory = tempfile.TemporaryDirectory()
        self.directory = Path(self._directory.name).resolve(strict=True).absolute()
        self.file = self.directory / 'test'
        self.file.touch()
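Several setUp examples in this listing create a TemporaryDirectory and rely on the object's finalizer for removal. A minimal sketch of the more deterministic unittest idiom, using addCleanup (class and attribute names here are illustrative):

import tempfile
import unittest
from pathlib import Path

class ExampleTest(unittest.TestCase):
    def setUp(self):
        self._directory = tempfile.TemporaryDirectory()
        # addCleanup guarantees removal after each test, pass or fail.
        self.addCleanup(self._directory.cleanup)
        self.directory = Path(self._directory.name).resolve(strict=True)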
Example #8
def get_plugin(cuda_file):
    cuda_file_base = os.path.basename(cuda_file)
    cuda_file_name, cuda_file_ext = os.path.splitext(cuda_file_base)

    # Already in cache?
    if cuda_file in _plugin_cache:
        return _plugin_cache[cuda_file]

    # Setup plugin.
    if verbose:
        print('Setting up TensorFlow plugin "%s": ' % cuda_file_base, end='', flush=True)
    try:
        # Hash CUDA source.
        md5 = hashlib.md5()
        with open(cuda_file, 'rb') as f:
            md5.update(f.read())
        md5.update(b'\n')

        # Hash headers included by the CUDA code by running it through the preprocessor.
        if not do_not_hash_included_headers:
            if verbose:
                print('Preprocessing... ', end='', flush=True)
            with tempfile.TemporaryDirectory() as tmp_dir:
                tmp_file = os.path.join(tmp_dir, cuda_file_name + '_tmp' + cuda_file_ext)
                _run_cmd(_prepare_nvcc_cli('"%s" --preprocess -o "%s" --keep --keep-dir "%s"' % (cuda_file, tmp_file, tmp_dir)))
                with open(tmp_file, 'rb') as f:
                    bad_file_str = ('"' + cuda_file.replace('\\', '/') + '"').encode('utf-8') # __FILE__ in error check macros
                    good_file_str = ('"' + cuda_file_base + '"').encode('utf-8')
                    for ln in f:
                        if not ln.startswith(b'# ') and not ln.startswith(b'#line '): # ignore line number pragmas
                            ln = ln.replace(bad_file_str, good_file_str)
                            md5.update(ln)
                    md5.update(b'\n')

        # Select compiler options.
        compile_opts = ''
        if os.name == 'nt':
            compile_opts += '"%s"' % os.path.join(tf.sysconfig.get_lib(), 'python', '_pywrap_tensorflow_internal.lib')
        elif os.name == 'posix':
            compile_opts += '"%s"' % os.path.join(tf.sysconfig.get_lib(), 'python', '_pywrap_tensorflow_internal.so')
            compile_opts += ' --compiler-options \'-fPIC -D_GLIBCXX_USE_CXX11_ABI=0\''
        else:
            assert False # not Windows or Linux, w00t?
        compile_opts += ' --gpu-architecture=%s' % _get_cuda_gpu_arch_string()
        compile_opts += ' --use_fast_math'
        nvcc_cmd = _prepare_nvcc_cli(compile_opts)

        # Hash build configuration.
        md5.update(('nvcc_cmd: ' + nvcc_cmd).encode('utf-8') + b'\n')
        md5.update(('tf.VERSION: ' + tf.VERSION).encode('utf-8') + b'\n')
        md5.update(('cuda_cache_version_tag: ' + cuda_cache_version_tag).encode('utf-8') + b'\n')

        # Compile if not already compiled.
        bin_file_ext = '.dll' if os.name == 'nt' else '.so'
        bin_file = os.path.join(cuda_cache_path, cuda_file_name + '_' + md5.hexdigest() + bin_file_ext)
        if not os.path.isfile(bin_file):
            if verbose:
                print('Compiling... ', end='', flush=True)
            with tempfile.TemporaryDirectory() as tmp_dir:
                tmp_file = os.path.join(tmp_dir, cuda_file_name + '_tmp' + bin_file_ext)
                _run_cmd(nvcc_cmd + ' "%s" --shared -o "%s" --keep --keep-dir "%s"' % (cuda_file, tmp_file, tmp_dir))
                os.makedirs(cuda_cache_path, exist_ok=True)
                intermediate_file = os.path.join(cuda_cache_path, cuda_file_name + '_' + uuid.uuid4().hex + '_tmp' + bin_file_ext)
                shutil.copyfile(tmp_file, intermediate_file)
                os.rename(intermediate_file, bin_file) # atomic

        # Load.
        if verbose:
            print('Loading... ', end='', flush=True)
        plugin = tf.load_op_library(bin_file)

        # Add to cache.
        _plugin_cache[cuda_file] = plugin
        if verbose:
            print('Done.', flush=True)
        return plugin

    except:
        if verbose:
            print('Failed!', flush=True)
        raise
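The compile step above avoids publishing a half-written cache entry by building into a temporary file and renaming it into place; a rename within one filesystem is atomic, so readers either see the old state or the complete new file. The same idea in isolation, as a generic sketch (function and paths are illustrative):

import os
import tempfile

def atomic_write(path: str, data: bytes) -> None:
    # Create the temp file in the destination directory so the final
    # os.replace stays on one filesystem and therefore remains atomic.
    fd, tmp_path = tempfile.mkstemp(dir=os.path.dirname(path) or '.')
    try:
        with os.fdopen(fd, 'wb') as f:
            f.write(data)
        os.replace(tmp_path, path)
    except BaseException:
        if os.path.exists(tmp_path):
            os.remove(tmp_path)
        raise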
Example #9
    def setUp(self):
        self.tmp_test = tempfile.TemporaryDirectory()
Example #10
    def test_workflow_loading_headless(self):
        with tempfile.TemporaryDirectory() as temp_dir:
            for wf in self.workflow_list:
                yield self.start_workflow_load_project_headless, wf, temp_dir
Example #11
def test_minimal_save():
    signal = Signal1D([0, 1])
    with tempfile.TemporaryDirectory() as tmp:
        signal.save(os.path.join(tmp, 'testfile.emd'))
Example #12
# TemporaryFile: an anonymous temporary file, deleted as soon as it is closed.
temp = tempfile.TemporaryFile()
print(temp)
print(temp.name)
temp.close()

# NamedTemporaryFile: like TemporaryFile, but with a guaranteed visible name.
temp = tempfile.NamedTemporaryFile()
print(temp)
print(temp.name)
temp.close()

# prefix/suffix are added around the random middle part of the name.
temp = tempfile.NamedTemporaryFile(prefix='pre_', suffix='_suf')
print(temp.name)
temp.close()

# Write, rewind, and read back.
temp = tempfile.TemporaryFile()
temp.write(b'foo bar')
temp.seek(0)
print(temp.read())
temp.close()

# TemporaryDirectory should be cleaned up explicitly (or used as a context manager).
temp_dir = tempfile.TemporaryDirectory()
print(temp_dir)
temp_dir.cleanup()

# mkstemp returns a raw (fd, path) pair; see the note after this example.
secure_temp = tempfile.mkstemp(prefix="pre_", suffix="_suf")
print(secure_temp)

# Override the default temp location; the path must actually exist to be usable.
tempfile.tempdir = "/temp"
print(tempfile.gettempdir())

# The context-manager form closes (and deletes) the file automatically.
with tempfile.TemporaryFile() as fp:
    fp.write(b'Hello world!')
    fp.seek(0)
    fp.read()
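The note promised above: unlike the higher-level helpers, tempfile.mkstemp returns an open OS-level file descriptor together with the path, and it never deletes the file for you. A minimal sketch of responsible usage:

import os
import tempfile

fd, path = tempfile.mkstemp(prefix='pre_', suffix='_suf')
try:
    with os.fdopen(fd, 'w') as f:  # os.fdopen takes ownership of the descriptor
        f.write('data')
finally:
    os.remove(path)  # the caller is responsible for removal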
Example #13
    def test_hub_dir(self):
        with tempfile.TemporaryDirectory('hub_dir') as dirname:
            torch.hub.set_dir(dirname)
            self.assertEqual(torch.hub.get_dir(), dirname)
Example #14
    def wrapper(*args, **kwargs):
        with tempfile.TemporaryDirectory(prefix='htx_') as temp_dir:
            return func(temp_dir=temp_dir, *args, **kwargs)
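This wrapper is the inner half of a decorator that hands every call its own scratch directory. A fuller sketch of how the surrounding decorator presumably looks (with functools.wraps added to preserve the wrapped function's metadata):

import functools
import tempfile

def with_temp_dir(func):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        # The directory exists only for the duration of this call.
        with tempfile.TemporaryDirectory(prefix='htx_') as temp_dir:
            return func(*args, temp_dir=temp_dir, **kwargs)
    return wrapper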
Example #15
    def test_state_with_id_handler(self):
        num_cells = 20
        model_type = pt_gs_k.PTGSKModel
        model = self.build_model(model_type, pt_gs_k.PTGSKParameter, num_cells,
                                 2)
        cids_unspecified = api.IntVector()
        cids_1 = api.IntVector([1])
        cids_2 = api.IntVector([2])

        model_state_12 = model.state.extract_state(cids_unspecified)  # this is how to get all states from the model
        model_state_1 = model.state.extract_state(cids_1)  # this is how to get only specified states from the model
        model_state_2 = model.state.extract_state(cids_2)
        self.assertEqual(len(model_state_1) + len(model_state_2), len(model_state_12))
        self.assertGreater(len(model_state_1), 0)
        self.assertGreater(len(model_state_2), 0)
        for i in range(len(model_state_1)):  # verify selective extract catchment 1
            self.assertEqual(model_state_1[i].id.cid, 1)
        for i in range(len(model_state_2)):  # verify selective extract catchment 2
            self.assertEqual(model_state_2[i].id.cid, 2)
        for i in range(len(model_state_12)):
            model_state_12[i].state.kirchner.q = 100 + i
        model.state.apply_state(model_state_12, cids_unspecified)  # this is how to put all states into the model
        ms_12 = model.state.extract_state(cids_unspecified)
        for i in range(len(ms_12)):
            self.assertAlmostEqual(ms_12[i].state.kirchner.q, 100 + i)
        for i in range(len(model_state_2)):
            model_state_2[i].state.kirchner.q = 200 + i
        unapplied = model.state.apply_state(model_state_2, cids_2)  # this is how to put a limited set of states into the model
        self.assertEqual(len(unapplied), 0)
        ms_12 = model.state.extract_state(cids_unspecified)
        for i in range(len(ms_12)):
            if ms_12[i].id.cid == 1:
                self.assertAlmostEqual(ms_12[i].state.kirchner.q, 100 + i)

        ms_2 = model.state.extract_state(cids_2)
        for i in range(len(ms_2)):
            self.assertAlmostEqual(ms_2[i].state.kirchner.q, 200 + i)

        # serialization support, to and from bytes

        state_bytes = ms_2.serialize_to_bytes()  # first make some bytes out of the state
        with tempfile.TemporaryDirectory() as tmpdirname:
            file_path = str(path.join(tmpdirname, "pt_gs_k_state_test.bin"))
            api.byte_vector_to_file(file_path, state_bytes)  # stash it into a file
            state_bytes = api.byte_vector_from_file(file_path)  # get it back from the file as a ByteVector
        ms_2x = pt_gs_k.deserialize_from_bytes(state_bytes)  # then restore it from bytes to a StateWithIdVector

        self.assertIsNotNone(ms_2x)
        for i in range(len(ms_2x)):
            self.assertAlmostEqual(ms_2x[i].state.kirchner.q, 200 + i)
Example #16
    def setUp(self):
        self._tempdir = tempfile.TemporaryDirectory()
        self._temp_filepath = path.join(self._tempdir.name, '000.jpg.kpt')
Example #17
    def test_tempdirectory(self):
        '''Fake TemporaryDirectory class is bound'''
        with tempfile.TemporaryDirectory() as td:
            with open('%s/fake_file.txt' % td, 'w') as f:
                self.assertTrue(self.fs.Exists(td))
Example #18
    def setUp(self):
        self._tempdir = tempfile.TemporaryDirectory()
        self._samples_dirpath = path.abspath(
            path.join(path.dirname(__file__), '..', 'samples', 'maupertuis'))
        self._kapture_dirpath = path.join(self._samples_dirpath, 'kapture')
Example #19
def build(l4t_path, public_sources,
          # todo: xconfig=None,
          arch=ARCH,
          cross_prefix=tegrity.toolchain.get_cross_prefix(),
          load_kconfig=None,
          localversion=None,
          menuconfig=None,
          module_archive=None,
          public_sources_sha512=None,
          save_kconfig=None,
          ):

    logger.info("Preparing to build kernel")

    # set some envs
    os.environ['CROSS_COMPILE'] = cross_prefix
    logger.debug(f'CROSS_COMPILE: {cross_prefix}')
    localversion = localversion if localversion else DEFAULT_LOCALVERSION
    os.environ['LOCALVERSION'] = localversion
    logger.debug(f'LOCALVERSION: {localversion}')

    # set up some initial paths
    logger.debug(f"Linux_for_Tegra path: {l4t_path}")
    l4t_kernel_path = tegrity.utils.join_and_check(l4t_path, "kernel")
    logger.debug(f"L4T kernel path: {l4t_kernel_path}")

    # create a temporary folder that self destructs at the end of the context.
    with tempfile.TemporaryDirectory() as tmp:

        # set up a temporary rootfs folder instead of a real one just to create
        # the kernel_supplements which will be installed by apply_binaries.sh
        rootfs = os.path.join(tmp, 'rootfs')
        logger.debug(f"creating temporary rootfs at: {rootfs}")
        os.makedirs(rootfs, 0o755)

        # Obtaining the Kernel Sources
        _get_source(tmp, public_sources, public_sources_sha512)

        # Building the kernel

        # 1. set kernel out path
        kernel_out = os.path.join(tmp, "kernel_out")

        # 2.5 set common make arguments
        make_common = [f"ARCH={arch}", f"O={kernel_out}"]

        # 3. Create the initial config
        config(make_common, kernel_out, load_kconfig)
        os.chdir(kernel_out)

        # 3.5 Customize initial configuration interactively (optional)
        if menuconfig:
            make_menuconfig(make_common)

        # 4 Build the kernel and all modules
        make_kernel(make_common)

        # 5 Backup and replace old kernel with new kernel
        replace_kernel(kernel_out, l4t_kernel_path)

        # 6 Replace dtb folder with dts folder
        replace_dtb(kernel_out, l4t_kernel_path)

        # 7 Install kernel modules
        make_modules_install(make_common, rootfs)

        # 8 Archive modules
        archive_modules(rootfs, module_archive, l4t_kernel_path)

        # 8.5 Archive config
        archive_kconfig(kernel_out, save_kconfig)
Example #20
def test_invalid_project_config_hooks():
    with tempfile.TemporaryDirectory() as tmpdir:
        config = {"foo": {HOOKS_PROPERTY: {"pre": "nonexistentfile.sh"}}}
        assert not check_project_configuration(config, hookdir=tmpdir)
Example #21
    def setUp(self):
        self._tempdir = tempfile.TemporaryDirectory()
        self.tempdir = Path(self._tempdir.name).resolve(strict=True).absolute()
        self.existing_file = self.ensure_file(self.tempdir / 'test.py')
        self.nonexistent_file = (self.tempdir / 'does_not_exist.py').absolute()
        self.reloader = self.RELOADER_CLS()
Example #22
def test_invalid_project_config_hooknames():
    with tempfile.TemporaryDirectory() as tmpdir:
        config = {"foo": {HOOKS_PROPERTY: {"blah": "value"}}}
        assert not check_project_configuration(config, hookdir=tmpdir)
Example #23
def temp_dir_path():
    temp_dir = tempfile.TemporaryDirectory(prefix='nucypher-test-')
    try:
        yield temp_dir.name
    finally:
        temp_dir.cleanup()  # clean up even if the consumer raises
Example #24
def test_invalid_project_config_nonexec_hook():
    with tempfile.TemporaryDirectory() as tmpdir:
        with open(os.path.join(tmpdir, "foo.sh"), 'w+') as tmpfile:
            tmpfile.write("foo\n")
            config = {"foo": {HOOKS_PROPERTY: {"pre": "foo.sh"}}}
            assert not check_project_configuration(config, hookdir=tmpdir)
Example #25
    def test_pt_tf_model_equivalence(self):
        if not is_torch_available():
            return

        import torch
        import transformers

        config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()

        for model_class in self.all_model_classes:
            pt_model_class_name = model_class.__name__[2:]  # Skip the "TF" at the beginning
            pt_model_class = getattr(transformers, pt_model_class_name)

            config.output_hidden_states = True
            tf_model = model_class(config)
            pt_model = pt_model_class(config)

            # Check we can load pt model in tf and vice-versa with model => model functions
            tf_model = transformers.load_pytorch_model_in_tf2_model(tf_model, pt_model, tf_inputs=inputs_dict)
            pt_model = transformers.load_tf2_model_in_pytorch_model(pt_model, tf_model)

            # Check predictions on first output (logits/hidden-states) are close enough given low-level computational differences
            pt_model.eval()
            pt_inputs_dict = dict(
                (name, torch.from_numpy(key.numpy()).to(torch.long)) for name, key in inputs_dict.items()
            )
            with torch.no_grad():
                pto = pt_model(**pt_inputs_dict)
            tfo = tf_model(inputs_dict, training=False)
            tf_hidden_states = tfo[0].numpy()
            pt_hidden_states = pto[0].numpy()

            # Compute the NaN masks up front so zeroing one tensor does not hide NaNs from the other.
            pt_nans = np.isnan(pt_hidden_states)
            tf_nans = np.isnan(tf_hidden_states)
            pt_hidden_states[tf_nans] = 0
            tf_hidden_states[tf_nans] = 0
            pt_hidden_states[pt_nans] = 0
            tf_hidden_states[pt_nans] = 0

            max_diff = np.amax(np.abs(tf_hidden_states - pt_hidden_states))
            # Debug info (remove when fixed)
            if max_diff >= 2e-2:
                print("===")
                print(model_class)
                print(config)
                print(inputs_dict)
                print(pt_inputs_dict)
            self.assertLessEqual(max_diff, 2e-2)

            # Check we can load pt model in tf and vice-versa with checkpoint => model functions
            with tempfile.TemporaryDirectory() as tmpdirname:
                pt_checkpoint_path = os.path.join(tmpdirname, "pt_model.bin")
                torch.save(pt_model.state_dict(), pt_checkpoint_path)
                tf_model = transformers.load_pytorch_checkpoint_in_tf2_model(tf_model, pt_checkpoint_path)

                tf_checkpoint_path = os.path.join(tmpdirname, "tf_model.h5")
                tf_model.save_weights(tf_checkpoint_path)
                pt_model = transformers.load_tf2_checkpoint_in_pytorch_model(pt_model, tf_checkpoint_path)

            # Check predictions on first output (logits/hidden-states) are close enough given low-level computational differences
            pt_model.eval()
            pt_inputs_dict = dict(
                (name, torch.from_numpy(key.numpy()).to(torch.long)) for name, key in inputs_dict.items()
            )
            with torch.no_grad():
                pto = pt_model(**pt_inputs_dict)
            tfo = tf_model(inputs_dict)
            tfo = tfo[0].numpy()
            pto = pto[0].numpy()
            tfo[np.isnan(tfo)] = 0
            pto[np.isnan(pto)] = 0
            max_diff = np.amax(np.abs(tfo - pto))
            self.assertLessEqual(max_diff, 2e-2)
Example #26
def get_positives_DE(run_date) -> pandas.DataFrame:
    """Retrieve the table of positives & deaths for all German regions.

    Parameters
    ----------
    run_date : pandas.Timestamp
        use the data as it was released on that day

    Returns
    -------
    result : pandas.DataFrame
        [region, date]-indexed table that has a row for every region & date combination in [2020-03-01, run_date - 1].
        Contains the columns "positive" and "deaths", i.e. the number of NEW positives/deaths for each day/region.
        The "all" region is the sum over all states.
    """
    date_str = run_date.strftime('%Y-%m-%d')
    with tempfile.TemporaryDirectory() as td:
        fp_tempfile = pathlib.Path(td, 'data_arcgis.csv')
        if run_date.date() < datetime.datetime.utcnow().date():
            release_id = (run_date + pandas.DateOffset(1)).strftime('%Y-%m-%d')
            release_url = f'https://github.com/ihucos/rki-covid19-data/releases/download/{release_id}/data.csv'
            # For explanations of the columns, see https://www.arcgis.com/home/item.html?id=f10774f1c63e40168479a1feb6c7ca74
            # the CSV is automatically released at 01 AM Berlin local time, but has one day offset to the RKI data
            _log.info('Downloading German data from %s', release_url)
            with open(fp_tempfile, 'wb') as file:
                file.write(requests.get(release_url).content)
            encoding = 'utf-8'
        else:
            _log.info('Downloading RKI COVID-19 dataset from ArcGIS')
            from arcgis.gis import GIS
            anon_gis = GIS()
            features = anon_gis.content.get('dd4580c810204019a7b8eb3e0b329dd6').tables[0].query()
            features.save(save_location=td, out_name='download.csv')
            shutil.copy2(os.path.join(td, 'download.csv'), fp_tempfile)
            encoding = 'unicode_escape' if os.name == 'nt' else 'utf-8'
        if CSV_SAVEPATH:
            shutil.copy2(fp_tempfile, CSV_SAVEPATH)
        df = pandas.read_csv(
            fp_tempfile,
            usecols=['Bundesland', 'Meldedatum', 'Datenstand', 'AnzahlFall', 'AnzahlTodesfall'],
            encoding=encoding
        )
    _log.info('Data was loaded for the following regions: %s', df.Bundesland.unique())
    df.Meldedatum = pandas.to_datetime(df.Meldedatum, unit='ms')
    assert len(set(df.Datenstand)) == 1
    datenstand = df.Datenstand[0]
    assert run_date.strftime('%d.%m.%Y') in df.Datenstand[0]

    # transform to multi-indexed dataframe with required columns
    _log.info('Transforming to multi-indexed dataframe')
    df_sparse = df.rename(columns={
        'Meldedatum': 'date',
        'Bundesland': 'region',
        'AnzahlFall': 'new_cases',
        'AnzahlTodesfall': 'new_deaths',
    }).replace(DE_REGION_CODES).groupby(['region', 'date']).sum().sort_index()

    # make sure that the result has rows for every region/date combination.
    _log.info('Inserting 0-counts for missing dates')
    full_index = pandas.date_range(
        '2020-03-01',
        run_date - pandas.DateOffset(2)
        # ToDo: use max(run_date-2, date in data)
        #max(run_date - pandas.DateOffset(2), 
    )
    df_full = pandas.concat({
        region : df_sparse.xs(region).reindex(full_index, fill_value=0)
        for region in DE_REGIONS
        if region != 'all'
    }, names=['region', 'date'])

    # add region "all" that is the sum over all states
    df_all = df_full.sum(level='date')
    df_all.insert(0, column='region', value='all')
    df_all = df_all.reset_index().set_index(['region', 'date'])
    df_merged = pandas.concat([df_full, df_all]).sort_index()

    return df_merged
Example #27
    def tcp_listen_python3(self, listen_ssl=False):
        """Open a TCP socket on a given port and print incoming
        data to stdout."""
        class DeenTcpHandler(socketserver.StreamRequestHandler):
            def handle(self):
                self.data = self.rfile.readline().strip()
                print(self.data.decode())
        class TcpServerSsl(socketserver.TCPServer):
            def __init__(self,
                         server_address,
                         RequestHandlerClass,
                         certfile,
                         keyfile,
                         ssl_version=ssl.PROTOCOL_TLS_SERVER,
                         bind_and_activate=True):
                socketserver.TCPServer.__init__(self, server_address,
                                                RequestHandlerClass,
                                                bind_and_activate)
                self.certfile = certfile
                self.keyfile = keyfile
                self.ssl_version = ssl_version

            def get_request(self):
                newsocket, fromaddr = self.socket.accept()
                connstream = ssl.wrap_socket(newsocket,
                                             server_side=True,
                                             certfile=self.certfile,
                                             keyfile=self.keyfile,
                                             ssl_version=self.ssl_version)
                return connstream, fromaddr
        server = None
        if listen_ssl:
            # The certificate chain and private key need to
            # be available as actual files that can be opened
            # with fopen(3).
            with tempfile.TemporaryDirectory() as tmpdirname:
                cert_chain = tmpdirname + '/cert_chain.pem'
                server_key = tmpdirname + '/server_key.pem'
                with open(cert_chain, 'wb') as f:
                    f.write(self.server_cert_pem)
                with open(server_key, 'wb') as f:
                    f.write(self.server_key_pem)
                try:
                    server = TcpServerSsl(self.listen_socket,
                                          DeenTcpHandler,
                                          cert_chain,
                                          server_key)
                except OSError as e:
                    self.error = e
                    self.log.error(self.error)
                    self.log.debug(self.error, exc_info=True)
                    return

        else:
            try:
                server = socketserver.TCPServer(self.listen_socket,
                                                DeenTcpHandler)
            except OSError as e:
                self.error = e
                self.log.error(self.error)
                self.log.debug(self.error, exc_info=True)
                return
        message = 'Listening on TCP port ' + str(self.listen_port)
        if listen_ssl:
            message += ' (SSL)'
        print(message)
        try:
            server.serve_forever()
        except KeyboardInterrupt:
            pass
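A portability note on the example above: ssl.wrap_socket was deprecated in Python 3.7 and removed in 3.12. A sketch of the same get_request hook written against ssl.SSLContext, the supported replacement (attribute names mirror the class above):

            def get_request(self):
                newsocket, fromaddr = self.socket.accept()
                context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
                context.load_cert_chain(certfile=self.certfile,
                                        keyfile=self.keyfile)
                connstream = context.wrap_socket(newsocket, server_side=True)
                return connstream, fromaddr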
Example #28
import unittest
import tempfile

from dicomml.cases.case import DicommlCase

from tests import sample_case_config


class TestDicommlCase(unittest.TestCase):

    def test_create(self):
        case = DicommlCase(**sample_case_config())
        self.assertIsInstance(case, DicommlCase)

    def test_save_load(self):
        case = DicommlCase(**sample_case_config())
        with tempfile.TemporaryDirectory() as temp_folder:
            zipfile = case.save(path=temp_folder)
            case_loaded = DicommlCase.load(zipfile)
        self.assertEqual(case, case_loaded)

    def test_export(self):
        case = DicommlCase(**sample_case_config())
        exports = case.export()
        self.assertCountEqual(
            list(exports.keys()),
            ['images', 'truth'])
        self.assertEqual(exports['images'].shape, (1, 10, 120, 120))
Example #29
def main():
    if sys.platform.startswith("linux"):
        os_ = "linux"
    elif sys.platform == "darwin":
        os_ = "macos"
    elif sys.platform == "win32":
        os_ = "windows"
    else:
        raise NotImplementedError(f"sys.platform '{sys.platform}' is not supported yet.")

    p = argparse.ArgumentParser(description="Convert wheel to be independent of python implementation and ABI")
    p.set_defaults(prog=Path(sys.argv[0]).name)
    p.add_argument("WHEEL_FILE", help="Path to wheel file.")
    p.add_argument(
        "-w",
        "--wheel-dir",
        dest="WHEEL_DIR",
        help=('Directory to store delocated wheels (default: "wheelhouse/")'),
        default="wheelhouse/",
    )

    args = p.parse_args()

    file = Path(args.WHEEL_FILE).resolve(strict=True)
    wheelhouse = Path(args.WHEEL_DIR).resolve()
    wheelhouse.mkdir(parents=True, exist_ok=True)

    with tempfile.TemporaryDirectory() as tmpdir_:
        tmpdir = Path(tmpdir_)
        # use the platform specific repair tool first
        if os_ == "linux":
            subprocess.run(["auditwheel", "repair", "-w", str(tmpdir), str(file)], check=True, stdout=subprocess.PIPE)
        elif os_ == "macos":
            subprocess.run(
                ["delocate-wheel", "--require-archs", "x86_64,arm64", "-w", str(tmpdir), str(file)],
                check=True,
                stdout=subprocess.PIPE,
            )
        elif os_ == "windows":
            # no specific tool, just copy
            shutil.copyfile(file, tmpdir / file.name)
        files = list(tmpdir.glob("*.whl"))
        assert len(files) == 1, files
        file = files[0]

        # we need to handle macOS x86_64 & arm64 here for now, let's use additional_platforms for this.
        additional_platforms = []
        if os_ == "macos":
            # delocate-wheel --require-archs does not seem to check executables...
            with tempfile.TemporaryDirectory() as tmpdir2_:
                tmpdir2 = Path(tmpdir2_)
                with zipfile.ZipFile(file, 'r') as zip_ref:
                    zip_ref.extractall(tmpdir2)
                exe = list(tmpdir2.glob("**/bin/ninja"))
                assert len(exe) == 1, exe
                subprocess.run(["lipo", str(exe[0]), "-verify_arch", "x86_64", "arm64"], check=True, stdout=subprocess.PIPE)

            # first, get the target macOS deployment target from the wheel
            match = re.match(r"^.*-macosx_(\d+)_(\d+)_.*\.whl$", file.name)
            assert match is not None, f"Couldn't match on {file.name}"
            target = tuple(map(int, match.groups()))

            # given pip support for universal2 was added after x86_64 introduction
            # let's also add x86_64 platform.
            additional_platforms.append("macosx_{}_{}_x86_64".format(*target))

            # given pip support for universal2 was added after arm64 introduction
            # let's also add arm64 platform.
            arm64_target = target
            if arm64_target < (11, 0):
                arm64_target = (11, 0)
            additional_platforms.append("macosx_{}_{}_arm64".format(*arm64_target))

            if target < (11, 0):
                # There were also issues with pip not picking up some universal2 wheels, so tag twice
                additional_platforms.append("macosx_11_0_universal2")

        # make this a py2.py3 wheel
        convert_to_generic_platform_wheel(
            str(file),
            out_dir=str(wheelhouse),
            py2_py3=True,
            additional_platforms=additional_platforms,
        )
Example #30
def before_scenario(context, current_scenario):
    context.temporary_directory = tempfile.TemporaryDirectory()