Example #1
    def __init__(self, *args, **kwargs):
        if 'nmpi' in kwargs:
            self.nmpi = kwargs['nmpi']
            del kwargs['nmpi']
        else:
            self.nmpi = 1

        if 'mpicompiler' in kwargs:
            self.compiler = kwargs['mpicompiler']
            del kwargs['mpicompiler']
        else:
            self.compiler = None

        super().__init__(*args, **kwargs)

        # build inside a temporarily modified environment, then restore the
        # original afterwards
        environ_backup = dict(environ)

        self.set_compiler()
        super().build(*args)

        environ.clear()
        environ.update(environ_backup)

        self.executable = 'start.exe'
        self.create_executable(**kwargs)

        super().clean_module()
Example #2
def source(script, update=True, clean=True):
    """
    Source variables from a shell script
    import them in the environment (if update==True)
    and report only the script variables (if clean==True)
    """
    global environ

    environ_back = None

    if clean:
        environ_back = dict(environ)
        environ.clear()

    # universal_newlines=True makes the output str rather than bytes, so the
    # str split below works under Python 3
    pipe = Popen(". %s; env" % script, stdout=PIPE, shell=True,
                 universal_newlines=True)
    data = pipe.communicate()[0]

    env = dict(line.split("=", 1) for line in data.splitlines())

    if clean:
        # remove unwanted minimal vars
        env.pop('LINES', None)
        env.pop('COLUMNS', None)
        environ.update(environ_back)

    if update:
        environ.update(env)

    return env
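
A usage sketch for the helper above (the script path is hypothetical): with update=False the caller only inspects what the script would export, while update=True merges those variables into os.environ as well.

# Hypothetical script path; `source` is the helper defined above.
script_vars = source("./config.sh", update=False, clean=True)
print(script_vars.get("APP_MODE"))

# Or merge the script's variables straight into the current process:
source("./config.sh", update=True)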
Example #3
    def on_context_leave(self):
        ''' Called when the context manager entered with
        :func:`magic.enter_context` is exited. Undoes everything that
        :meth:`on_context_enter` did and more.

        * Stops the Craftr Runtime Server
        * Restores the :data:`os.environ` dictionary
        * Removes all ``craftr.ext.`` modules from :data:`sys.modules` and
          ensures they are in :attr:`Session.modules` (they are expected to
          be put there by the :class:`ext.CraftrImporter`).
        '''

        self._stop_server()

        # Restore the original values of os.environ.
        self.env = environ.copy()
        environ.clear()
        environ.update(self._old_environ)
        del self._old_environ

        sys.meta_path.remove(self.ext_importer)
        for key, module in list(sys.modules.items()):
            if key.startswith('craftr.ext.'):
                name = key[11:]
                assert name in self.modules and self.modules[
                    name] is module, key
                del sys.modules[key]
                try:
                    # Remove the module from the `craftr.ext` modules contents, too.
                    delattr(ext, name.split('.')[0])
                except AttributeError:
                    pass
Example #4
    def on_context_enter(self, prev):
        ''' Called when entering the Session context with
        :func:`magic.enter_context`. Does the following things:

        * Sets up :data:`os.environ` with the values from :attr:`Session.env`
        * Adds the :attr:`Session.ext_importer` to :data:`sys.meta_path`
        * Starts the Craftr Runtime Server (:attr:`Session.server`) and sets
          the ``CRAFTR_RTS`` environment variable

        .. note:: A copy of the original :data:`os.environ` is saved and later
          restored in :meth:`on_context_leave`. The :data:`os.environ` object
          cannot be replaced by another object, which is why we change its
          values in-place.
        '''

        if prev is not None:
            raise RuntimeError('session context can not be nested')

        # os.environ cannot be replaced wholesale; we must update the
        # dictionary in place instead.
        self._old_environ = environ.copy()
        environ.clear()
        environ.update(self.env)
        self.env = environ

        sys.meta_path.append(self.ext_importer)
Example #5
def update_env(**kwargs):
    """Update environment and then set it back."""
    start = environ.copy()
    environ.update(**kwargs)
    yield
    environ.clear()
    environ.update(**start)
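
The generator above is presumably wrapped with contextlib.contextmanager in its original module (the decorator is not shown), and as written the restore step is skipped if the body raises. A minimal self-contained sketch of the same idea with an exception-safe restore:

from contextlib import contextmanager
from os import environ


@contextmanager
def update_env(**kwargs):
    """Temporarily update os.environ, restoring it even on error."""
    saved = environ.copy()
    environ.update(kwargs)
    try:
        yield
    finally:
        environ.clear()
        environ.update(saved)


# Usage:
with update_env(APP_MODE="test"):
    assert environ["APP_MODE"] == "test"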
Example #6
  def on_context_leave(self):
    ''' Called when the context manager entered with
    :func:`magic.enter_context` is exited. Undoes everything that
    :meth:`on_context_enter` did and more.

    * Stops the Craftr Runtime Server
    * Restores the :data:`os.environ` dictionary
    * Removes all ``craftr.ext.`` modules from :data:`sys.modules` and
      ensures they are in :attr:`Session.modules` (they are expected to
      be put there by the :class:`ext.CraftrImporter`).
    '''

    self._stop_server()

    # Restore the original values of os.environ.
    self.env = environ.copy()
    environ.clear()
    environ.update(self._old_environ)
    del self._old_environ

    sys.meta_path.remove(self.ext_importer)
    for key, module in list(sys.modules.items()):
      if key.startswith('craftr.ext.'):
        name = key[11:]
        assert name in self.modules and self.modules[name] is module, key
        del sys.modules[key]
        try:
          # Remove the module from the `craftr.ext` modules contents, too.
          delattr(ext, name.split('.')[0])
        except AttributeError:
          pass
Example #7
  def on_context_enter(self, prev):
    ''' Called when entering the Session context with
    :func:`magic.enter_context`. Does the following things:

    * Sets up :data:`os.environ` with the values from :attr:`Session.env`
    * Adds the :attr:`Session.ext_importer` to :data:`sys.meta_path`
    * Starts the Craftr Runtime Server (:attr:`Session.server`) and sets
      the ``CRAFTR_RTS`` environment variable

    .. note:: A copy of the original :data:`os.environ` is saved and later
      restored in :meth:`on_context_leave`. The :data:`os.environ` object
      cannot be replaced by another object, which is why we change its
      values in-place.
    '''

    if prev is not None:
      raise RuntimeError('session context can not be nested')

    # os.environ cannot be replaced wholesale; we must update the
    # dictionary in place instead.
    self._old_environ = environ.copy()
    environ.clear()
    environ.update(self.env)
    self.env = environ

    sys.meta_path.append(self.ext_importer)
    self.update()
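
The four Craftr snippets above all follow the same save/restore discipline around os.environ. A stripped-down sketch of just that part, with every Craftr-specific detail (server, importer, module bookkeeping) omitted:

from os import environ


class EnvScope:
    """Minimal sketch of the enter/leave pair above: os.environ cannot be
    rebound, so both methods mutate it in place."""

    def __init__(self, env=None):
        self.env = dict(env if env is not None else environ)

    def on_context_enter(self):
        self._old_environ = environ.copy()
        environ.clear()
        environ.update(self.env)

    def on_context_leave(self):
        self.env = environ.copy()
        environ.clear()
        environ.update(self._old_environ)
        del self._old_environ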
Example #8
def env(**vars):
    original = dict(environ)
    environ.update(vars)
    try:
        yield
    finally:
        environ.clear()
        environ.update(original)
Example #9
def tests_setup_and_teardown():
    original_environment = dict(environ)
    environ.update({
        "TWILIO_ACCOUNT_SID": "TEST",
        "TWILIO_AUTH_TOKEN": "TOKEN",
    })
    yield
    environ.clear()
    environ.update(original_environment)
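
The snippet omits its decorator; for the setup/teardown to run around every test it would presumably be registered as a pytest fixture. A sketch under that assumption, using autouse so no test has to request it explicitly:

import pytest
from os import environ


@pytest.fixture(autouse=True)
def tests_setup_and_teardown():
    original_environment = dict(environ)
    environ.update({
        "TWILIO_ACCOUNT_SID": "TEST",
        "TWILIO_AUTH_TOKEN": "TOKEN",
    })
    yield
    environ.clear()
    environ.update(original_environment)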
Example #10
    async def wrapper(*args, **kwargs):  # type: ignore
        _environ_copy = environ.copy()
        try:
            return await func(*args, **kwargs)
        finally:
            environ.clear()
            environ.update(_environ_copy)
            reload(consts)
            reload(utils)
Example #11
def test_uploads_ods_metadata_when_date_anchor_month_asid_lookup_is_not_available(
):
    _disable_werkzeug_logging()

    fake_s3, fake_ods_portal, s3_client = _setup()
    fake_s3.start()
    fake_ods_portal.start()

    year = 2020
    current_month = 2
    previous_month = 1

    input_bucket = _build_fake_s3_bucket(S3_INPUT_ASID_LOOKUP_BUCKET_NAME,
                                         s3_client)
    input_asid_csv = _build_input_asid_csv()
    input_bucket.upload_fileobj(input_asid_csv,
                                f"{year}/{previous_month}/asidLookup.csv.gz")

    output_bucket = _build_fake_s3_bucket(S3_OUTPUT_ODS_METADATA_BUCKET_NAME,
                                          s3_client)

    try:
        environ["DATE_ANCHOR"] = "2020-02-27T18:44:49Z"

        main()

        output_path = f"v4/{year}/{current_month}/organisationMetadata.json"
        actual = _read_s3_json_file(output_bucket, output_path)

        assert actual["year"] == year
        assert actual["month"] == current_month
        assert actual["practices"] == EXPECTED_PRACTICES
        assert actual["ccgs"] == EXPECTED_CCGS

        expected_metadata = {
            "date-anchor": "2020-02-27T18:44:49+00:00",
            "asid-lookup-month": "2020-1",
            "build-tag": "61ad1e1c",
        }
        actual_s3_metadata = _read_s3_metadata(output_bucket, output_path)

        assert actual_s3_metadata == expected_metadata

    finally:
        input_bucket.objects.all().delete()
        input_bucket.delete()

        output_bucket.objects.all().delete()
        output_bucket.delete()

        fake_ods_portal.stop()
        fake_s3.stop()
        environ.clear()
Example #12
def test_config_initialization():
    """Tests the config initialization."""
    cur_env = environ.copy()
    environ['IRC_ADMINS'] = 'somebody'
    config = Config()
    environ.clear()
    environ.update(cur_env)
    expected = [
        'host', 'port', 'nick', 'username', 'autojoins', 'includes',
        'database', 'irc3.plugins.command', 'irc3.plugins.command.masks'
    ]
    assert all(key in config for key in expected)
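
The manual copy/clear/update here works because Config() reads the environment before it is restored. The same test can delegate the cleanup to pytest's built-in monkeypatch fixture; a sketch (Config and the expected keys come from the snippet above):

def test_config_initialization(monkeypatch):
    """Tests the config initialization, with cleanup handled by monkeypatch."""
    monkeypatch.setenv('IRC_ADMINS', 'somebody')
    config = Config()
    expected = [
        'host', 'port', 'nick', 'username', 'autojoins', 'includes',
        'database', 'irc3.plugins.command', 'irc3.plugins.command.masks'
    ]
    assert all(key in config for key in expected)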
Example #13
def search(text):
    """
    search for text in search widgets like on Problems page
    """
    tc('search ' + text)
    e = g.wait.until(EC.element_to_be_clickable(
        (By.XPATH, "//input[@name='keywords']")))
    e.clear()  # the input may retain previous text
    e.send_keys(text)
    g.wait.until(EC.element_to_be_clickable(
        (By.XPATH, "//input[@name='dosearch']"))).click()
    g.wait.until(EC.staleness_of(e))
Example #14
def block_processor(tmpdir_factory):
    environ.clear()
    environ['DB_DIRECTORY'] = tmpdir_factory.mktemp('db',
                                                    numbered=True).strpath
    environ['DAEMON_URL'] = ''
    env = Env(LBC)
    bp = LBC.BLOCK_PROCESSOR(env, None, None)
    yield bp
    for attr in dir(bp):  # hack to close dbs on tear down
        obj = getattr(bp, attr)
        if isinstance(obj, Storage):
            obj.close()
Example #15
def test_uploads_classified_transfers_given__no__start_and_end_datetimes_and_no_cutoff(
        datadir):
    fake_s3, s3_client = _setup()
    fake_s3.start()

    output_transfer_data_bucket = _build_fake_s3_bucket(
        S3_OUTPUT_TRANSFER_DATA_BUCKET_NAME, s3_client)
    input_spine_data_bucket = _build_fake_s3_bucket(
        S3_INPUT_SPINE_DATA_BUCKET_NAME, s3_client)
    input_ods_metadata_bucket = _build_fake_s3_bucket(
        S3_INPUT_ODS_METADATA_BUCKET_NAME, s3_client)

    _upload_files_to_spine_data_bucket(input_spine_data_bucket, datadir)
    _upload_files_to_ods_metadata_bucket(input_ods_metadata_bucket, datadir)

    try:
        main()

        expected_transfers_output_key = "transfers.parquet"
        expected_metadata = {
            "cutoff-days": "0",
            "build-tag": "abc456",
            "start-datetime": "2019-12-31T00:00:00+00:00",
            "end-datetime": "2020-01-01T00:00:00+00:00",
            "ods-metadata-month": "2019-12",
        }
        year = 2019
        month = 12
        day = 31

        expected_transfers = _read_parquet_columns_json(
            datadir / "expected_outputs" /
            f"{year}-{month}-{day}-transferParquet.json")

        s3_filename = f"{year}-{month}-{day}-{expected_transfers_output_key}"
        s3_output_path = f"v7/cutoff-0/{year}/{month}/{day}/{s3_filename}"

        actual_transfers = read_s3_parquet(output_transfer_data_bucket,
                                           s3_output_path)

        assert actual_transfers == expected_transfers

        actual_metadata = _read_s3_metadata(output_transfer_data_bucket,
                                            s3_output_path)

        assert actual_metadata == expected_metadata

    finally:
        output_transfer_data_bucket.objects.all().delete()
        output_transfer_data_bucket.delete()
        fake_s3.stop()
        environ.clear()
Example #16
def activate_venv(venv_dir):
    """Set up the environment to use the provided venv (POSIX only!).

    :param Union[str, pathlib.Path] venv_dir: the virtual environment's
        root directory
    """
    start_env = environ.copy()
    environ['VIRTUAL_ENV'] = str(venv_dir)
    environ['__PYVENV_LAUNCHER__'] = '{}/bin/python'.format(venv_dir)
    with add_to_path(path.join(venv_dir, 'bin')):
        yield
    environ.clear()
    environ.update(start_env)
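
A usage sketch, assuming activate_venv is exposed through contextlib.contextmanager, that add_to_path prepends the directory, and that the venv path below exists (all assumptions, none shown in the snippet). Note that, like some of the earlier generators, the restore step only runs if the body completes without raising.

from subprocess import run

# Hypothetical venv location; `activate_venv` is the helper defined above.
with activate_venv('/tmp/example-venv'):
    # `python` now resolves to the venv's interpreter via the modified PATH
    run(['python', '--version'], check=True)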
Example #17
def text_convert(filepath: str) -> bool:
    if not use_text_converter:
        # passthrough
        return True

    mimetype = magic.from_file(filepath)

    if mimetype == "text/plain":
        # text is already plaintext, we're done!
        return True

    # always add a .txt extension if it is missing
    head, ext = path.splitext(filepath)
    if ext.lower() == '.txt':
        # when unpacking a ZIP-archive, the filenames don't change
        head, ext = path.splitext(filepath[:-4])

    try:
        if ext.lower() == '.doc':
            # settings for Antiword
            old_environ = dict(environ)
            try:
                environ['ANTIWORDHOME'] = '/usr/share/antiword'
                environ['LC_ALL'] = 'nl_NL@euro IS-8859-15'
                text = textract.process(
                    filepath, extension=ext, encoding='utf_8')
            finally:
                environ.clear()
                environ.update(old_environ)
        else:
            text = textract.process(filepath, extension=ext, encoding='utf_8')
        success = True
    except Exception as error:
        text = f"Unexpected {type(error)}: {error}"
        success = False

    if isinstance(text, bytes):
        text = text.decode('utf-8')

    # combine consecutive newlines
    # 1 x \n = newline within paragraph
    # 2 x \n = paragraph separation
    text = re.sub(r'\n{3,}', '\n\n', text)

    # overwrite the source file, CLAM assumes the source and target
    # location are the same
    with open(filepath, 'wb') as target:
        target.write(text.encode('utf-8'))

    return success
Example #18
def run_test(db_dir):
    environ.clear()
    environ['DB_DIRECTORY'] = db_dir
    environ['DAEMON_URL'] = ''
    environ['COIN'] = 'XRJV1Cash'
    env = Env()
    db = DB(env)
    # Test abstract compaction
    check_hashX_compaction(db)
    # Now test in with random data
    histories = create_histories(db)
    check_written(db, histories)
    compact_history(db)
    check_written(db, histories)
Example #19
def run_test(db_dir):
    environ.clear()
    environ['DB_DIRECTORY'] = db_dir
    environ['DAEMON_URL'] = ''
    environ['COIN'] = 'BitcoinCash'
    env = Env()
    history = DB(env).history
    # Test abstract compaction
    check_hashX_compaction(history)
    # Now test in with random data
    histories = create_histories(history)
    check_written(history, histories)
    compact_history(history)
    check_written(history, histories)
Example #20
def source_file(script, update=True, clean=True):
    global environ
    if clean:
        environ_back = dict(environ)
        environ.clear()
    # universal_newlines=True makes the output str rather than bytes, so the
    # str split below works under Python 3
    pipe = Popen('. %s; env' % script, stdout=PIPE, shell=True,
                 universal_newlines=True)
    data = pipe.communicate()[0]
    env = dict(line.split('=', 1) for line in data.splitlines())
    if clean:
        env.pop('LINES', None)
        env.pop('COLUMNS', None)
        # restore os.environ in place; rebinding the global name would detach
        # it from the real process environment
        environ.clear()
        environ.update(environ_back)
    if update:
        environ.update(env)
Example #21
async def run_test(db_dir):
    environ.clear()
    environ['DB_DIRECTORY'] = db_dir
    environ['DAEMON_URL'] = ''
    environ['COIN'] = 'BitcoinSV'
    db = DB(Env())
    await db.open_for_serving()
    history = db.history

    # Test abstract compaction
    check_hashX_compaction(history)
    # Now test in with random data
    histories = create_histories(history)
    check_written(history, histories)
    compact_history(history)
    check_written(history, histories)
Example #22
def modify_environ(values, keys_to_remove=None):
    """
    Modify the environment for a test, adding/updating values in dict `values` and
    removing any environment variables mentioned in list `keys_to_remove`.
    """
    old_environ = environ.copy()
    try:
        if values:
            environ.update(values)
        if keys_to_remove:
            for key in keys_to_remove:
                if key in environ:
                    del environ[key]
        yield
    finally:
        environ.clear()
        environ.update(old_environ)
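
A usage sketch for the helper above, assuming it is decorated with contextlib.contextmanager in its original module (the variable names are only illustrative):

from os import environ

with modify_environ({"FEATURE_FLAG": "on"}, keys_to_remove=["HTTP_PROXY"]):
    assert environ["FEATURE_FLAG"] == "on"
    assert "HTTP_PROXY" not in environ
# afterwards the original environment is back, even if the block raised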
Example #23
def env_restored(unset = []):
    """
    Temporarily remove the given environment variables, then restore the
    original environment afterwards.
    """

    # preserve the current environment
    from os import environ

    old = environ.copy()

    # unset the requested variables
    for name in unset:
        del environ[name]

    yield

    # then restore the preserved copy
    environ.clear()
    environ.update(old)
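
A usage sketch for env_restored, assuming it is wrapped with contextlib.contextmanager (the variable name is illustrative and must exist beforehand, since the bare del would otherwise raise KeyError):

from os import environ

environ["SCRATCH_VAR"] = "1"
with env_restored(unset=["SCRATCH_VAR"]):
    assert "SCRATCH_VAR" not in environ
assert environ["SCRATCH_VAR"] == "1"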
Example #24
def env_restored(unset=[]):
    """
    Temporarily remove the given environment variables, then restore the
    original environment afterwards.
    """

    # preserve the current environment
    from os import environ

    old = environ.copy()

    # unset the requested variables
    for name in unset:
        del environ[name]

    yield

    # then restore the preserved copy
    environ.clear()
    environ.update(old)
Example #25
def test_uploads_classified_transfers_given_start_and_end_datetime_and_cutoff(
        datadir):
    fake_s3, s3_client = _setup()
    fake_s3.start()

    output_transfer_data_bucket = _build_fake_s3_bucket(
        S3_OUTPUT_TRANSFER_DATA_BUCKET_NAME, s3_client)
    input_spine_data_bucket = _build_fake_s3_bucket(
        S3_INPUT_SPINE_DATA_BUCKET_NAME, s3_client)
    input_ods_metadata_bucket = _build_fake_s3_bucket(
        S3_INPUT_ODS_METADATA_BUCKET_NAME, s3_client)

    _upload_files_to_spine_data_bucket(input_spine_data_bucket, datadir)
    _upload_files_to_ods_metadata_bucket(input_ods_metadata_bucket, datadir)

    try:
        environ["START_DATETIME"] = "2019-12-02T00:00:00Z"
        environ["END_DATETIME"] = "2020-01-01T00:00:00Z"
        environ["CONVERSATION_CUTOFF_DAYS"] = "14"

        main()

        days_with_data = [2, 3, 5, 19, 20, 30, 31]
        expected_days = [(2019, 12, day) for day in range(2, 32)]
        expected_transfers_output_key = "transfers.parquet"

        for (year, data_month, data_day) in expected_days:
            month = add_leading_zero(data_month)
            day = add_leading_zero(data_day)

            if data_day in days_with_data:
                expected_transfers = _read_parquet_columns_json(
                    datadir / "expected_outputs" /
                    f"{year}-{month}-{day}-transferParquet.json")
            else:
                expected_transfers = _read_parquet_columns_json(
                    datadir / "expected_outputs" /
                    "template-transferParquet.json")

            s3_filename = f"{year}-{month}-{day}-{expected_transfers_output_key}"
            s3_output_path = f"v7/cutoff-14/{year}/{month}/{day}/{s3_filename}"

            actual_transfers = read_s3_parquet(output_transfer_data_bucket,
                                               s3_output_path)

            assert actual_transfers == expected_transfers

            actual_metadata = _read_s3_metadata(output_transfer_data_bucket,
                                                s3_output_path)

            expected_metadata = {
                "cutoff-days": "14",
                "build-tag": "abc456",
                "start-datetime": f"{year}-{month}-{day}T00:00:00+00:00",
                "end-datetime": _end_datetime_metadata(year, data_month,
                                                       data_day),
                "ods-metadata-month": f"{year}-{month}",
            }

            assert actual_metadata == expected_metadata

    finally:
        output_transfer_data_bucket.objects.all().delete()
        output_transfer_data_bucket.delete()
        fake_s3.stop()
        environ.clear()
Example #26
    def tearDown(self):
        # Restore environment variables
        environ.clear()
        for k, v in self.environ_save.items():
            environ[k] = v
Example #27
def clear_env():
    environ.clear()
Example #28
def reset_environment_after_test():
    old_environ = dict(environ)
    yield
    environ.clear()
    environ.update(old_environ)
Example #29
    def tearDown(self):
        # Restore environment variables
        environ.clear()
        for k, v in self.environ_save.items():
            environ[k] = v
Example #30
        make(f'ZIPFLAGS="{ZIPFLAGS}" LIBS="{LIBS}" CXX="{CXX} -std=c++11"')


def build_all():
    build_rar2fs()
    build_fusezip()


def copy_all():
    targetDir = f'{BUILT_PRODUCTS_DIR}/{FULL_PRODUCT_NAME}/Contents/Executables'
    run(f'mkdir -p "{targetDir}"')
    run(f'cp rar2fs/rar2fs "{targetDir}"')
    run(f'cp fuse-zip/fuse-zip "{targetDir}"')


if __name__ == '__main__':
    PATH = environ['PATH']
    HOME = environ['HOME']
    environ.clear()
    environ['PATH'] = PATH
    environ['HOME'] = HOME
    if len(argv) != 2:
        raise Exception('Wrong number of arguments')
    with cd(DERIVED_FILES_DIR):
        if argv[1] == 'build':
            build_all()
        elif argv[1] == 'copy':
            copy_all()
        else:
            raise Exception('Wrong arguments')
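
The __main__ block above clears everything and keeps only PATH and HOME. That pattern generalizes to a small allow-list helper; a sketch (the default names kept are just an example):

from os import environ


def scrub_environ(keep=('PATH', 'HOME')):
    """Drop every environment variable except the ones listed in `keep`."""
    preserved = {name: environ[name] for name in keep if name in environ}
    environ.clear()
    environ.update(preserved)


# Roughly equivalent to the PATH/HOME handling in the __main__ block above.
scrub_environ()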
Example #31
    def __clearEnviron(self):
        environ.clear()