def get_context(self, context_name):
    """Fetch a single context by name from the master's v2 API.

    :param context_name: name of the context; URL-quoted (including ``/``)
        before being embedded in the request path
    :return: ``Context`` parsed from the response on HTTP 200, or ``None``
        when the context does not exist (HTTP 404)
    :raises requests.HTTPError: for any other non-2xx response — consistent
        with the error handling in ``update_context``
    """
    url = 'http://{}:{}/v2/api/contexts/{}'.format(
        self.host, self.port, quote(context_name, safe=''))
    resp = requests.get(url)
    if resp.status_code == 200:
        return Context.from_json(resp.json())
    if resp.status_code == 404:
        # a missing context is an expected outcome, not an error
        return None
    # previously every non-200 status was silently mapped to None, which
    # hid server-side failures (5xx) from callers; surface them instead
    resp.raise_for_status()
    return None
def parse(self, name, cfg):
    """Build a ``Context`` from its HOCON configuration subtree.

    :type name str
    :param name: context name
    :type cfg pyhocon.config_tree.ConfigTree
    :param cfg: configuration subtree for this context
    :return: populated ``Context`` instance
    """
    def flatten(node, prefix):
        # Recursively flatten a nested ConfigTree into dotted spark keys,
        # e.g. spark.executor.memory -> "spark.executor.memory": "...".
        if not isinstance(node, ConfigTree):
            return {prefix: str(node)}
        flat = dict()
        for key in node.keys():
            dotted = key if prefix == '' else prefix + '.' + key
            flat.update(flatten(node[key], dotted))
        return flat

    spark_conf = flatten(cfg.get_config('spark-conf', ConfigTree()), '')
    return Context(
        name,
        cfg.get_int('max-parallel-jobs', 20),
        cfg.get_string('downtime', '120s'),
        spark_conf,
        cfg.get_string('worker-mode', 'shared'),
        cfg.get_string('run-options', ''),
        cfg.get_bool('precreated', False),
        cfg.get_string('streaming-duration', '1s'))
def parse(self, name, cfg):
    """Build a ``Function`` from its HOCON configuration subtree.

    :type name str
    :param name: function name
    :type cfg pyhocon.config_tree.ConfigTree
    :param cfg: configuration subtree for this function
    :return: populated ``Function`` instance
    """
    # 'class-name' has no default and is therefore required by the config.
    class_name = cfg.get_string('class-name')
    context = Context(cfg.get_string('context', 'default'))
    path = cfg.get_string('path', None)
    return Function(name, class_name, context, path)
def parse(self, name, cfg):
    """Build a ``Context`` from its HOCON configuration subtree.

    :type name str
    :param name: context name
    :type cfg pyhocon.config_tree.ConfigTree
    :param cfg: configuration subtree for this context
    :return: populated ``Context`` instance
    """
    # Spark-conf keys must be cleaned up before use (issue #16), hence the
    # dedicated extraction step that overrides the raw 'spark-conf' entry.
    plain_cfg = cfg.as_plain_ordered_dict()
    plain_cfg['spark-conf'] = self._extract_spark_conf(cfg)
    return Context(name, plain_cfg)
def update_context(self, context):
    """Create or update a context on the master via the v2 API.

    :param context: ``Context`` to send; serialized with ``to_json``
    :return: ``Context`` parsed from the server's response body
    :raises requests.HTTPError: when the server responds with an error status
    """
    endpoint = 'http://{}:{}/v2/api/contexts'.format(self.host, self.port)
    payload = context.to_json()
    response = requests.post(endpoint, json=payload)
    response.raise_for_status()
    return Context.from_json(response.json())