Example #1
0
def initialize(driver_url):
    """Create and return a Remote WebDriver session at *driver_url*.

    When ``Config.DESIRED_CAPABILITIES`` is empty, a default Chrome
    profile preferring Japanese (``ja_JP``) pages is used; otherwise
    each ``key=value`` entry is parsed into the capability dict.
    Proxy capabilities are attached when any proxy setting is present.
    """
    if not Config.DESIRED_CAPABILITIES:
        chrome_opts = webdriver.ChromeOptions()
        chrome_opts.add_experimental_option(
            'prefs', {'intl.accept_languages': 'ja_JP'})
        cap = chrome_opts.to_capabilities()
    else:
        cap = {}
        for entry in Config.DESIRED_CAPABILITIES:
            key, value = entry.split("=")
            key = key.strip("\"'")
            value = maybe_bool(value.strip("\"'"))
            merge(cap, construct_dict(key, value))

    if Config.HTTP_PROXY or Config.HTTPS_PROXY or Config.NO_PROXY:
        proxy = Proxy()
        proxy.proxyType = ProxyType.MANUAL
        proxy.httpProxy = Config.HTTP_PROXY
        proxy.sslProxy = Config.HTTPS_PROXY
        proxy.noProxy = Config.NO_PROXY
        proxy.add_to_capabilities(cap)

    return webdriver.Remote(command_executor=driver_url,
                            desired_capabilities=cap)
Example #2
0
 def append(self, keypath: str, value: Any) -> None:
     '''Append *value* to the list located at *keypath*.'''
     # Wrap the value in a list, then nest it under each path segment
     # from the innermost key outward, and merge the result into self.
     nested: Any = [value]
     for segment in reversed(keypath.split(DpathMixin.separator)[1:]):
         nested = {segment: nested}
     dpath.merge(self, nested)
Example #3
0
    def read_params(self,
                    flatten: bool = True,
                    **kwargs: typing.Any) -> Dict[str, typing.Any]:
        """Return the parameters selected by ``self.params`` from the file.

        With ``flatten=True`` each dotted path maps directly to its
        value; otherwise the nested structure around every matching
        path is merged into a single dict.  A missing params file and
        missing keys are silently skipped.
        """
        try:
            config = self.read_file()
        except MissingParamsFile:
            config = {}

        if not self.params:
            return config

        ret: Dict[str, typing.Any] = {}

        if flatten:
            for key in self.params:
                try:
                    ret[key] = dpath.util.get(config, key, separator=".")
                except KeyError:
                    pass
            return ret

        from dpath.util import merge

        for key in self.params:
            found = dpath.util.search(config, key, separator=".")
            merge(ret, found, separator=".")
        return ret
Example #4
0
    def create(self, user_id, challenge_id, stack, service, flag=True):
        """Deploy a per-user challenge stack and return its metadata.

        Args:
            user_id: owner of the new instance.
            challenge_id: challenge being instantiated.
            stack: JSON-serialisable docker-stack definition; treated
                as a template containing ``chad_*`` placeholders.
            service: name of the service that receives the flag secret.
            flag: a str is used verbatim; True requests the next
                generated flag; any other int is forwarded to
                ``next_flag`` (presumably selecting a specific flag —
                confirm against the flag source); falsy deploys
                without a flag.

        Returns:
            dict with the encoded instance ``id`` and, when a flag was
            requested, the ``flag`` value.

        Raises:
            InstanceExistsError: if this user already has an instance
                of this challenge.
        """
        result = {'id': self.ids.encode(user_id, challenge_id)}
        name = stack_name(user_id, challenge_id)
        if name in self.stacks.ls():
            raise InstanceExistsError(
                f'An instance of challenge ID {challenge_id} already exists for user ID {user_id}'
            )

        self.ensure_gateway_up(user_id)

        # Values substituted for the chad_* template placeholders below.
        stack_context = {
            'chad_id': result['id'],
            'chad_docker_registry': self.docker_registry
        }

        # Round-trip through JSON so safe_substitute can fill
        # placeholders anywhere inside the nested stack definition.
        stack_template = Template(json.dumps(stack))
        stack = json.loads(stack_template.safe_substitute(**stack_context))

        # Docker Swarm overlay networks don't support multicast, so
        # attach the pre-existing per-user network (chad_<user_id>)
        # using the configured network plugin instead.
        dpath.new(
            stack, f'networks/challenge', {
                'driver': self.network_plugin,
                'external': True,
                'name': f'chad_{user_id}'
            })
        if flag:
            # The `flag is True` test must come before the isinstance
            # int check because bool is a subclass of int.
            if isinstance(flag, str):
                result['flag'] = flag
            elif flag is True:
                result['flag'] = self.flags.next_flag()
            elif isinstance(flag, int):
                result['flag'] = self.flags.next_flag(flag)
            # The tempfile must stay open until after deploy: closing
            # a NamedTemporaryFile deletes the file on disk.
            secret_tmp = tempfile.NamedTemporaryFile('w',
                                                     prefix='flag',
                                                     suffix='.txt',
                                                     encoding='ascii')
            secret_tmp.write(f'{result["flag"]}\n')
            secret_tmp.flush()

            # Expose the flag file to the target service as a secret.
            dpath.new(stack, 'secrets/flag/file', secret_tmp.name)
            dpath.merge(
                stack, {
                    'services': {
                        service: {
                            'secrets': [{
                                'source': 'flag',
                                'target': 'flag.txt',
                                'mode': 0o440
                            }]
                        }
                    }
                })

        self.redis.set(f'{name}_last_ping', int(time.time()))
        self.stacks.deploy(name, stack, registry_auth=True)
        if flag:
            # Safe to delete now — the secret has been deployed.
            secret_tmp.close()
        return result
Example #5
0
def load_config():
    """
    Fetch every URL listed in the ``CONFIG_URLS`` environment variable
    with [geddit](https://pypi.org/project/geddit/), parse each as YAML,
    and merge them all into a single dict (later URLs take precedence).
    """
    merged = {}
    urls = os.environ['CONFIG_URLS'].split()
    for url in urls:
        LOGGER.info(f'Loading conversions from {url}')
        merged = dpath.merge(merged, yaml.safe_load(geddit.geddit(url)))

    return merged
def save_snp_500_tickers(tickers: list) -> None:
    """Update the YAML market-coordinates config with the SNP500 tickers.

    Each ticker is registered under EQUITY/SINGLE STOCK using the equity
    defaults, preserving any data points already recorded for it, then
    the whole config is written back to market_coord_cfg.YAML.

    Args:
        tickers: ticker symbols to add to the market coordinates config.
    """
    mkt_class = "equity".upper()
    mkt_type = "single stock".upper()
    market_coordinates = mkt_classes.mkt_data_cfg()

    # Let's load the defaults, then overlay the tsdb YAML overrides on
    # top of the base defaults.
    defaults = mkt_coord_defaults.defaults.copy()
    mkt_default_cfg_load = mkt_classes.mkt_defaults_cfg()
    dp.merge(defaults, mkt_default_cfg_load)

    equity_defaults = [
        i for i in dp.search(
            defaults, '{0}/{1}'.format(mkt_class, mkt_type), yielded=True)
    ].pop()[1]

    for ticker in tickers:
        # Preserve any points already configured for this ticker.
        existing = [
            i for i in dp.search(market_coordinates,
                                 '{0}/{1}/{2}/points'.format(
                                     mkt_class, mkt_type, ticker),
                                 yielded=True)
        ]
        points_default = existing.pop()[1] if existing else []
        points = list(set(points_default))
        value = {'points': points}
        value.update(equity_defaults)

        xpath = '{0}/{1}/{2}'.format(mkt_class, mkt_type, ticker)
        dp.new(market_coordinates, xpath, value)

    mkt_data_cfg = {
        'market_coordinates': market_coordinates,
        'defaults': defaults
    }

    with open(mkt_classes.tsdb_path() + 'market_coord_cfg.YAML', "w") as f:
        yaml.dump(mkt_data_cfg, f)
    # NOTE: the original body ended with a bare string literal
    # ("added snp500 tickers to the config") — a no-op statement,
    # removed here and recorded as this comment instead.
Example #7
0
 def combine(self, document: Optional[Dict[str, Any]] = None) -> None:
     '''Merge *document* into this mapping in place.

     Fix: *document* defaults to None, but the original passed it
     straight to ``dpath.merge``, which cannot merge a None source;
     calling ``combine()`` with no argument is now a harmless no-op.
     '''
     if document is None:
         return
     # flags=2 is dpath's MERGE_REPLACE — presumably existing keys are
     # overwritten rather than combined; confirm against dpath docs.
     dpath.merge(self, document, flags=2)
Example #8
0
def merge(src1, src2, *srcs):
    """Return a new container of src1's type with every source merged in order."""
    result = type(src1)()
    for source in (src1, src2, *srcs):
        util.merge(result, source)
    return result
Example #9
0
async def mix_gm_data(prepared_data, gm_borders):
    """Merge GM border data into the ladder data and dump the result to JSON."""
    merge(prepared_data, gm_borders)
    with open("data_ladder_api_with_gm.json", "w") as out_file:
        json.dump(prepared_data, out_file, indent=4, sort_keys=True)
    return prepared_data
Example #10
0
def build(config: Dict) -> Package:
    """Build a datapackage.Package from a configuration dictionary.

    The configuration dictionary must contain the keys "metadata" and
    "files".

    Study information goes under "metadata".
    Example:
        {
            'metadata': {
                'name': 'ddionrails-study',
                'id': 'doi'
            }
        }
    The files to include in the Tabular Data Package go under "files":
    Example:
        {
            'files': [
                'concepts.csv'
            ]
        }

    See: examples/example-config.yml

    The resulting Tabular Data Package is written to disk as
    'datapackage.json' in the directory the command line tool is run.

    Args:
        config: The configuration of the Datapackage to be created.

    Raises:
        ValueError: if "metadata" or "files" is missing from config.
    """
    if "metadata" not in config or "files" not in config:
        raise ValueError("Config must contain 'metadata' and 'files'")

    # Start from the base descriptor on disk and overlay config values.
    metadata = config["metadata"]
    descriptor = read_yaml(DATAPACKAGE_BASE_FILE)
    descriptor["name"] = metadata.get("name")
    descriptor["id"] = metadata.get("id")
    descriptor["title"] = metadata.get("title")
    # Drop keys whose value is empty/None.
    descriptor = {k: v for k, v in descriptor.items() if v}

    package = Package(descriptor=descriptor)

    def _load_resource(name):
        # A "_strict" name means: build the basic Tabular Data Resource
        # first, then merge the stricter rules on top of it.
        if "_strict" in name:
            resource = read_tabular_data_resource(name.replace("_strict", ""))
            merge(resource, read_tabular_data_resource(name))
            return resource
        return read_tabular_data_resource(name)

    for name in (entry.split(".")[0] for entry in config["files"]):
        package.add_resource(_load_resource(name))

    package.commit()
    if not package.valid:
        for error in package.errors:
            LOGGER.error(error)
    return package
def _dictMergeWith(dst, src, **kwds):
    """Merge *src* into *dst* in place via ``du.merge`` and return *dst*.

    Extra keyword arguments are forwarded to ``du.merge``.
    """
    # Fix: this function was defined twice, byte-for-byte identically;
    # the second definition shadowed the first. Deduplicated to one.
    du.merge(dst, src, **kwds)
    return dst