Beispiel #1
0
def read():
    """Load the stack.chat configuration.

    Layers three sources, lowest precedence first: built-in defaults,
    the user-level ~/.stack.chat.toml, then the project-local
    ./.stack.chat.toml.

    Returns:
        Config: the merged configuration.
    """
    import os.path

    default_config = Config({
        'credentials': {
            'stack-exchange': {
                'email': '',
                'password': '',
            },
        },
        'resources': {
            'db': 'sqlite:///:memory:',
        },
        'state': {
            'params': {
                '--server': 'se',
                '--room': '70095',
                '--user': '******',
            },
            'room-tailed-times': {
                'se': {},
                'so': {},
                'mse': {},
            },
        },
    })
    # BUG FIX: toml.loads() expects a string, but the old code passed the
    # open file object itself; open() also never expands '~', so the global
    # config path could not resolve.  Read the text and expanduser() the path.
    with open(os.path.expanduser('~/.stack.chat.toml')) as f:
        global_config = Config(toml.loads(f.read()))
    with open('./.stack.chat.toml') as f:
        local_config = Config(
            default_config, global_config, toml.loads(f.read()))

    return local_config
Beispiel #2
0
    def _parse_pipfile(self, contents):
        """Parse Pipfile text into a TOML data structure.

        Outline tables such as ``[packages.requests]`` are rewritten as
        inline tables first, so both layouts parse to the same shape.
        contoml is tried first; plain toml is the fallback parser.

        :param contents: raw Pipfile text
        :return: parsed TOML mapping (contoml or toml object)
        """
        # If any outline tables are present...
        if ("[packages." in contents) or ("[dev-packages." in contents):
            data = toml.loads(contents)
            # Convert all outline tables to inline tables.
            for section in ("packages", "dev-packages"):
                for package in data.get(section, {}):
                    # Convert things to inline tables — fancy :)
                    # Anything exposing .keys() is a (sub-)table here.
                    if hasattr(data[section][package], "keys"):
                        _data = data[section][package]
                        # NOTE(review): _get_empty_inline_table is private
                        # toml API -- may break across toml versions.
                        data[section][package] = toml._get_empty_inline_table(dict)
                        data[section][package].update(_data)
            # We lose comments here, but it's for the best.)
            try:
                return contoml.loads(toml.dumps(data, preserve=True))

            except RuntimeError:
                # contoml could not handle the round-tripped text.
                return toml.loads(toml.dumps(data, preserve=True))

        else:
            # Fallback to toml parser, for large files.
            try:
                return contoml.loads(contents)

            except Exception:
                return toml.loads(contents)
Beispiel #3
0
    def parsed_pipfile(self):
        """Read this project's Pipfile from disk and parse it.

        Outline tables (``[packages.<name>]``) are converted to inline
        tables so both layouts parse identically; contoml is preferred,
        plain toml is the fallback parser.

        :return: parsed TOML mapping (contoml or toml object)
        """
        # Open the pipfile, read it into memory.
        with open(self.pipfile_location) as f:
            contents = f.read()

        # If any outline tables are present...
        if ('[packages.' in contents) or ('[dev-packages.' in contents):

            data = toml.loads(contents)

            # Convert all outline tables to inline tables.
            for section in ('packages', 'dev-packages'):
                for package in data.get(section, {}):

                    # Convert things to inline tables — fancy :)
                    # Anything exposing .keys() is a (sub-)table here.
                    if hasattr(data[section][package], 'keys'):
                        _data = data[section][package]
                        # NOTE(review): _get_empty_inline_table is private
                        # toml API -- may break across toml versions.
                        data[section][package] = toml._get_empty_inline_table(dict)
                        data[section][package].update(_data)

            # We lose comments here, but it's for the best.)
            try:
                return contoml.loads(toml.dumps(data, preserve=True))
            except RuntimeError:
                # contoml could not handle the round-tripped text.
                return toml.loads(toml.dumps(data, preserve=True))

        else:
            # Fallback to toml parser, for large files.
            try:
                return contoml.loads(contents)
            except Exception:
                return toml.loads(contents)
    def test_normal(self, capsys, table_name, header, value, expected):
        """Render the table and check the TOML output parses to the expected data."""
        w = table_writer_class()
        w.table_name = table_name
        w.headers = header
        w.value_matrix = value
        w.write_table()

        captured_out, captured_err = capsys.readouterr()
        print_test_result(
            expected=expected, actual=captured_out, error=captured_err)

        # Compare parsed structures so formatting differences don't matter.
        assert toml.loads(expected) == toml.loads(captured_out)
Beispiel #5
0
def get_server():
    """Construct an IasProxyServer from the ias_proxy.toml config file.

    A missing config file is fatal by design: the IOError from open()
    propagates to the caller.  The TOML keys are validated against the
    exact expected set before the server is built.

    Raises:
        ValueError: if the config contains unknown keys or lacks any
            required key.
    """
    config_file = os.path.join(config.get_config_dir(), 'ias_proxy.toml')
    LOGGER.info('Loading IAS Proxy config from: %s', config_file)

    # Lack of a config file is a fatal error, so let the exception percolate
    # up to caller
    with open(config_file) as fd:
        proxy_config = toml.loads(fd.read())

    # Verify the integrity (as best we can) of the TOML configuration file
    valid_keys = {'proxy_name', 'proxy_port', 'ias_url', 'spid_cert_file'}
    found_keys = set(proxy_config.keys())

    invalid_keys = found_keys - valid_keys
    if invalid_keys:
        raise ValueError(
            'IAS Proxy config file contains the following invalid '
            'keys: {}'.format(', '.join(sorted(invalid_keys))))

    missing_keys = valid_keys - found_keys
    if missing_keys:
        raise ValueError(
            'IAS Proxy config file missing the following keys: '
            '{}'.format(', '.join(sorted(missing_keys))))

    return IasProxyServer(proxy_config)
Beispiel #6
0
    def __init__(self, context):
        """Load .servobuild (if present) and apply mach build defaults.

        Populates self.config with [tools], [build] and [android]
        defaults, resolves the cache and cargo-home directories relative
        to the repo top, and exposes the cache dir as context.sharedir.
        """
        self.context = context

        def get_env_bool(var, default):
            # Contents of env vars are strings by default. This returns the
            # boolean value of the specified environment variable, or the
            # specified default if the var doesn't contain True or False
            return {'True': True, 'False': False}.get(os.environ.get(var), default)

        def resolverelative(category, key):
            # Allow ~
            self.config[category][key] = path.expanduser(self.config[category][key])
            # Resolve relative paths
            self.config[category][key] = path.join(context.topdir,
                                                   self.config[category][key])

        if not hasattr(self.context, "bootstrapped"):
            self.context.bootstrapped = False

        # Per-checkout settings live in .servobuild at the repo root.
        config_path = path.join(context.topdir, ".servobuild")
        if path.exists(config_path):
            with open(config_path) as f:
                self.config = toml.loads(f.read())
        else:
            self.config = {}

        # Handle missing/default items
        self.config.setdefault("tools", {})
        # SERVO_CACHE_DIR env var wins over the default <topdir>/.servo.
        default_cache_dir = os.environ.get("SERVO_CACHE_DIR",
                                           path.join(context.topdir, ".servo"))
        self.config["tools"].setdefault("cache-dir", default_cache_dir)
        resolverelative("tools", "cache-dir")

        # CARGO_HOME env var wins over the default <topdir>/.cargo.
        default_cargo_home = os.environ.get("CARGO_HOME",
                                            path.join(context.topdir, ".cargo"))
        self.config["tools"].setdefault("cargo-home-dir", default_cargo_home)
        resolverelative("tools", "cargo-home-dir")

        context.sharedir = self.config["tools"]["cache-dir"]

        self.config["tools"].setdefault("use-rustup", True)
        self.config["tools"].setdefault("rustc-with-gold", get_env_bool("SERVO_RUSTC_WITH_GOLD", True))

        # [build] section defaults.
        self.config.setdefault("build", {})
        self.config["build"].setdefault("android", False)
        self.config["build"].setdefault("mode", "")
        self.config["build"].setdefault("debug-mozjs", False)
        self.config["build"].setdefault("ccache", "")
        self.config["build"].setdefault("rustflags", "")
        self.config["build"].setdefault("incremental", None)
        self.config["build"].setdefault("thinlto", False)
        self.config["build"].setdefault("webgl-backtrace", False)
        self.config["build"].setdefault("dom-backtrace", False)

        # [android] section defaults.
        self.config.setdefault("android", {})
        self.config["android"].setdefault("sdk", "")
        self.config["android"].setdefault("ndk", "")
        self.config["android"].setdefault("toolchain", "")
        # Set default android target
        self.handle_android_target("armv7-linux-androideabi")
Beispiel #7
0
def load_toml_rest_api_config(filename):
    """Build a RestApiConfig from a TOML file on disk.

    A non-existent file is not an error: an empty RestApiConfig is
    returned.  Unknown keys in the file raise
    RestApiConfigurationError, as does a failure to read the file.
    """
    if not os.path.exists(filename):
        LOGGER.info(
            "Skipping rest api loading from non-existent config file: %s",
            filename)
        return RestApiConfig()

    LOGGER.info("Loading rest api information from config: %s", filename)

    try:
        with open(filename) as fd:
            raw_config = fd.read()
    except IOError as e:
        raise RestApiConfigurationError(
            "Unable to load rest api configuration file: {}".format(str(e)))

    toml_config = toml.loads(raw_config)

    # Reject anything outside the recognized key set.
    recognized = {'bind', 'connect', 'timeout'}
    unrecognized = set(toml_config) - recognized
    if unrecognized:
        raise RestApiConfigurationError(
            "Invalid keys in rest api config: {}".format(
                ", ".join(sorted(unrecognized))))

    return RestApiConfig(
        bind=toml_config.get('bind'),
        connect=toml_config.get('connect'),
        timeout=toml_config.get('timeout'),
    )
Beispiel #8
0
def _get_dir(toml_config_setting,
             sawtooth_home_dir,
             windows_dir,
             default_dir):
    """Determines the directory path based on configuration.

    Arguments:
        toml_config_setting (str): The name of the config setting related
            to the directory which will appear in path.toml.
        sawtooth_home_dir (str): The directory under the SAWTOOTH_HOME
            environment variable.  For example, for 'data' if the data
            directory is $SAWTOOTH_HOME/data.
        windows_dir (str): The windows path relative to the computed base
            directory.
        default_dir (str): The default path on Linux.

    Returns:
        directory (str): The path.
    """
    # An explicit entry in path.toml takes precedence over everything.
    conf_file = os.path.join(get_config_dir(), 'path.toml')
    if os.path.exists(conf_file):
        with open(conf_file) as fd:
            settings = toml.loads(fd.read())
        if toml_config_setting in settings:
            return settings[toml_config_setting]

    # Otherwise fall back to the environment/platform-derived default.
    return _select_dir(sawtooth_home_dir, windows_dir, default_dir)
Beispiel #9
0
def main():
    """Demo loop: save a counter into Redis via RedisLua every 5 seconds.

    NOTE(review): Python 2 code (uses the `print i` statement).  Connection
    settings come from the [redis] table of config/ak_client.toml.
    """
    import time
    
    conf_fn = "config/ak_client.toml"

    with open(conf_fn, "r") as conf_fh:

        cfg = toml.loads(conf_fh.read())

        conf = cfg["redis"]    

    
    rclient = RedisLua(conf)
    
    
    key = "eqm011"
    
    i = 0
    
    while 1:
        
        result = "info:%s" %(i)
        # i%4 cycles through four slots -- presumably a field/bucket index;
        # confirm against RedisLua.save.
        rclient.save(key, i%4, result)
        i = i +1
        print i
        time.sleep(5)
Beispiel #10
0
def check_lock(file_name, contents):
    """Yield (level, message) lint findings for a Cargo .lock file.

    Reports crates that appear in multiple versions without being listed
    in config["ignore"]["packages"] (and listed crates that are no longer
    duplicated), plus any dependency on a package from
    config['blocked-packages'] by a crate outside that package's
    whitelist.  Non-.lock files yield nothing.
    """
    def find_reverse_dependencies(name, content):
        # Walk every package table (and the optional [root]) looking for a
        # dependency entry that names `name`.
        for package in itertools.chain([content.get("root", {})], content["package"]):
            for dependency in package.get("dependencies", []):
                if dependency.startswith("{} ".format(name)):
                    yield package["name"], dependency

    if not file_name.endswith(".lock"):
        # BUG FIX (PEP 479): `raise StopIteration` inside a generator
        # becomes a RuntimeError on Python 3.7+; a bare return ends
        # iteration safely on both Python 2 and 3.
        return

    # Package names to be neglected (as named by cargo)
    exceptions = config["ignore"]["packages"]

    content = toml.loads(contents)

    packages_by_name = {}
    for package in content.get("package", []):
        if "replace" in package:
            continue
        source = package.get("source", "")
        if source == r"registry+https://github.com/rust-lang/crates.io-index":
            source = "crates.io"
        packages_by_name.setdefault(package["name"], []).append((package["version"], source))

    # .items() instead of the Python-2-only .iteritems(); iteration result
    # is identical on both versions.
    for (name, packages) in packages_by_name.items():
        has_duplicates = len(packages) > 1
        duplicates_allowed = name in exceptions

        if has_duplicates == duplicates_allowed:
            continue

        if duplicates_allowed:
            message = 'duplicates for `{}` are allowed, but only single version found'.format(name)
        else:
            message = "duplicate versions for package `{}`".format(name)

        packages.sort()
        packages_dependencies = list(find_reverse_dependencies(name, content))
        for version, source in packages:
            short_source = source.split("#")[0].replace("git+", "")
            message += "\n\t\033[93mThe following packages depend on version {} from '{}':\033[0m" \
                       .format(version, short_source)
            for name, dependency in packages_dependencies:
                if version in dependency and short_source in dependency:
                    message += "\n\t\t" + name
        yield (1, message)

    # Check to see if we are transitively using any blocked packages
    for package in content.get("package", []):
        package_name = package.get("name")
        package_version = package.get("version")
        for dependency in package.get("dependencies", []):
            dependency = dependency.split()
            dependency_name = dependency[0]
            whitelist = config['blocked-packages'].get(dependency_name)
            if whitelist is not None:
                if package_name not in whitelist:
                    fmt = "Package {} {} depends on blocked package {}."
                    message = fmt.format(package_name, package_version, dependency_name)
                    yield (1, message)
Beispiel #11
0
    def _reload_mirrors(self, signum, frame, force=False):
        """Signal handler: re-read the config file and apply mirror changes.

        New or changed mirrors get fresh MirrorConfig/provider objects.
        Jobs that are already running are either terminated and restarted
        immediately (force=True) or sent "terminate" over their queue so
        the new configuration takes effect on the next run.

        :param signum: signal number (handler signature; not used further)
        :param frame: current stack frame (handler signature; not used)
        :param force: restart changed mirrors immediately when True
        """
        print("reload mirror configs, force restart: {}".format(force))

        with open(self._config_file) as f:
            self._settings = toml.loads(f.read())

        for mirror_opt in self._settings["mirrors"]:
            name = mirror_opt["name"]
            newMirCfg = MirrorConfig(self, mirror_opt)

            # Skip mirrors whose configuration has not changed.
            if name in self._mirrors:
                if newMirCfg.compare(self._mirrors[name]):
                    continue

            self._mirrors[name] = newMirCfg

            hooks = newMirCfg.hooks() + self.hooks()
            newProvider = newMirCfg.to_provider(hooks)
            self._providers[name] = newProvider

            if name in self.processes:
                q, p = self.processes[name]

                if force:
                    p.terminate()
                    print("Terminated Job: {}".format(name))
                    self.run_provider(name)
                else:
                    # Let the running job finish; it picks up the new
                    # config after handling the queued "terminate".
                    q.put("terminate")
                    print("New configuration queued to {}".format(name))
            else:
                print("New mirror: {}".format(name))
                self.run_provider(name)
Beispiel #12
0
    def read_config(self, config_file):
        """Initialize manager state from a TOML configuration file.

        Reads the [global] and [btrfs] tables, creates the bookkeeping
        structures for mirrors/providers/processes, and starts the
        control-server process (registered under "CTRL_SERVER").

        :param config_file: path to the TOML configuration file
        """
        self._config_file = config_file
        with open(self._config_file) as f:
            self._settings = toml.loads(f.read())

        self._inited = True
        self._mirrors = {}
        self._providers = {}
        self.processes = {}
        # Caps the number of concurrently running sync jobs.
        self.semaphore = Semaphore(self._settings["global"]["concurrent"])
        self.channel = Queue()
        self._hooks = []

        self.mirror_root = self._settings["global"]["mirror_root"]

        self.use_btrfs = self._settings["global"]["use_btrfs"]
        self.btrfs_service_dir_tmpl = self._settings["btrfs"]["service_dir"]
        self.btrfs_working_dir_tmpl = self._settings["btrfs"]["working_dir"]
        self.btrfs_gc_dir_tmpl = self._settings["btrfs"]["gc_dir"]

        self.status_file = self._settings["global"]["status_file"]
        self.status_manager = StatusManager(self, self.status_file)

        # The control server runs in its own process; commands flow back
        # and forth over the shared queues.
        self.ctrl_addr = self._settings["global"]["ctrl_addr"]
        self.ctrl_channel = Queue()
        p = Process(
            target=run_control_server,
            args=(self.ctrl_addr, self.channel, self.ctrl_channel),
        )
        p.start()
        self.processes["CTRL_SERVER"] = (self.ctrl_channel, p)
Beispiel #13
0
    def __init__(self, context):
        """Load .servobuild (if present) and apply build defaults.

        Resolves tool directories relative to the repo top, points the
        rust/cargo roots into the shared cache dir unless the system
        toolchains are configured, and fills the [build], [android] and
        [gonk] defaults.
        """
        self.context = context

        def resolverelative(category, key):
            # Allow ~
            self.config[category][key] = path.expanduser(self.config[category][key])
            # Resolve relative paths
            self.config[category][key] = path.join(context.topdir,
                                                   self.config[category][key])

        if not hasattr(self.context, "bootstrapped"):
            self.context.bootstrapped = False

        # Per-checkout settings live in .servobuild at the repo root.
        config_path = path.join(context.topdir, ".servobuild")
        if path.exists(config_path):
            with open(config_path) as f:
                self.config = toml.loads(f.read())
        else:
            self.config = {}

        # Handle missing/default items
        self.config.setdefault("tools", {})
        # SERVO_CACHE_DIR env var wins over the default <topdir>/.servo.
        default_cache_dir = os.environ.get("SERVO_CACHE_DIR",
                                           path.join(context.topdir, ".servo"))
        self.config["tools"].setdefault("cache-dir", default_cache_dir)
        resolverelative("tools", "cache-dir")

        self.config["tools"].setdefault("cargo-home-dir",
                                        path.join(context.topdir, ".cargo"))
        resolverelative("tools", "cargo-home-dir")

        context.sharedir = self.config["tools"]["cache-dir"]

        self.config["tools"].setdefault("system-rust", False)
        self.config["tools"].setdefault("system-cargo", False)
        self.config["tools"].setdefault("rust-root", "")
        self.config["tools"].setdefault("cargo-root", "")
        # Unless system toolchains are requested, derive the roots from
        # the version identifiers so each toolchain gets its own dir.
        if not self.config["tools"]["system-rust"]:
            self.config["tools"]["rust-root"] = path.join(
                context.sharedir, "rust", self.rust_path())
        if not self.config["tools"]["system-cargo"]:
            self.config["tools"]["cargo-root"] = path.join(
                context.sharedir, "cargo", self.cargo_build_id())
        self.config["tools"].setdefault("rustc-with-gold", True)

        # [build] section defaults.
        self.config.setdefault("build", {})
        self.config["build"].setdefault("android", False)
        self.config["build"].setdefault("mode", "")
        self.config["build"].setdefault("debug-mozjs", False)
        self.config["build"].setdefault("ccache", "")

        # [android] section defaults.
        self.config.setdefault("android", {})
        self.config["android"].setdefault("sdk", "")
        self.config["android"].setdefault("ndk", "")
        self.config["android"].setdefault("toolchain", "")
        self.config["android"].setdefault("target", "arm-linux-androideabi")

        # [gonk] section defaults.
        self.config.setdefault("gonk", {})
        self.config["gonk"].setdefault("b2g", "")
        self.config["gonk"].setdefault("product", "flame")
Beispiel #14
0
def main():
    """Read rows from the CSV named in conf.toml, render an image from
    each row's title, and email it to the row's address.

    NOTE(review): Python 2 code (print statement, 'rb' CSV mode,
    str.decode).  row[0] is the title, row[1] the recipient address.
    """

    conf_fn = "conf.toml"

    with open(conf_fn) as conf_fh:
        
        conf = toml.loads(conf_fh.read())


    with open(conf["app"]["csv"], 'rb') as fh:
        
        reader = csv.reader(fh)
        i = 0
        for row in reader:
            
            i = i +1
            
            # Row counter kept around (the break is disabled), so every
            # row is processed.
            if i > 1:
                pass
                #break
                
            title = row[0]

            img = gen_image(title.decode('utf-8'), conf)
            
            # Sleep before and after sending -- presumably rate limiting;
            # confirm against the mail provider's limits.
            gevent.sleep(0.5)
            sendmail = Sendmail(conf)
            
            sendmail.send(row[1], title, img)
            print title, row[1]
            gevent.sleep(0.5)
Beispiel #15
0
def parse_config(content):
    """Merge a tidy TOML configuration string into the global `config`.

    Appends the file's ignored directories/files, replaces the
    ignored-package list, normalizes path separators for the host OS,
    records per-directory expected extensions, and applies any known
    config overrides from the [configs] table.
    """
    config_file = toml.loads(content)
    exclude = config_file.get("ignore", {})
    # Add list of ignored directories to config
    config["ignore"]["directories"] += exclude.get("directories", [])
    # Add list of ignored files to config
    config["ignore"]["files"] += exclude.get("files", [])
    # Add list of ignored packages to config
    config["ignore"]["packages"] = exclude.get("packages", [])
    # Fix the paths (OS-dependent).  BUG FIX: use list comprehensions, not
    # map() -- on Python 3 map() returns a lazy one-shot iterator, which
    # would break the `+=` list concatenations above on any later call.
    config['ignore']['files'] = [
        os.path.join(*p.split('/')) for p in config['ignore']['files']]
    config['ignore']['directories'] = [
        os.path.join(*p.split('/')) for p in config['ignore']['directories']]

    # Add dict of dir, list of expected ext to config
    dirs_to_check = config_file.get("check_ext", {})
    # Fix the paths (OS-dependent)
    for path, exts in dirs_to_check.items():
        fixed_path = os.path.join(*path.split('/'))
        config['check_ext'][fixed_path] = exts

    # Override default configs
    user_configs = config_file.get("configs", [])
    for pref in user_configs:
        if pref in config:
            config[pref] = user_configs[pref]
Beispiel #16
0
    def __init__(self, context):
        """Load .servobuild (if present) and fill tool/build/android defaults.

        Ensures context.bootstrapped and context.sharedir exist, and
        resolves the rust/cargo roots under the share dir unless the
        system toolchains are configured.
        """
        self.context = context

        if not hasattr(self.context, "bootstrapped"):
            self.context.bootstrapped = False

        if not hasattr(self.context, "sharedir"):
            self.context.sharedir = path.join(path.expanduser("~/"), ".servo")

        config_path = path.join(context.topdir, ".servobuild")
        if path.exists(config_path):
            # BUG FIX: use a context manager so the file handle is closed
            # deterministically (the bare open().read() leaked it until GC).
            with open(config_path) as f:
                self.config = toml.loads(f.read())
        else:
            self.config = {}

        # Handle missing/default items
        self.config.setdefault("tools", {})
        self.config["tools"].setdefault("system-rust", False)
        self.config["tools"].setdefault("system-cargo", False)
        self.config["tools"].setdefault("rust-root", "")
        self.config["tools"].setdefault("cargo-root", "")
        # Unless system toolchains are requested, derive the roots from
        # the snapshot/build ids so each toolchain gets its own dir.
        if not self.config["tools"]["system-rust"]:
            self.config["tools"]["rust-root"] = path.join(
                context.sharedir, "rust", *self.rust_snapshot_path().split("/"))
        if not self.config["tools"]["system-cargo"]:
            self.config["tools"]["cargo-root"] = path.join(
                context.sharedir, "cargo", self.cargo_build_id())

        self.config.setdefault("build", {})
        self.config["build"].setdefault("android", False)

        self.config.setdefault("android", {})
        self.config["android"].setdefault("sdk", "")
        self.config["android"].setdefault("ndk", "")
        self.config["android"].setdefault("toolchain", "")
Beispiel #17
0
    def __init__(self):
        """Load ./conf.toml into self.conf."""
        with open("conf.toml") as fh:
            self.conf = toml.loads(fh.read())
def main() -> None:
    """Normalize a cargo vendor TOML config read from stdin and print it.

    The vendored-sources directory is replaced with the "@vendor@"
    placeholder (its real value is non-deterministic), sources are
    emitted in sorted order, and the rendered text is re-parsed to
    verify it round-trips to the same data.
    """
    data = toml.load(sys.stdin)

    assert list(data.keys()) == ["source"]

    # this value is non deterministic
    data["source"]["vendored-sources"]["directory"] = "@vendor@"

    inner = data["source"]
    lines = []
    for source in sorted(inner):
        lines.append("[source.{}]".format(quote(source)))
        if source == "vendored-sources":
            lines.append('"directory" = "@vendor@"\n')
        else:
            attrs = inner[source]
            lines.extend(
                "{} = {}".format(quote(key), quote(attrs[key]))
                for key in sorted(attrs))
        lines.append("")

    result = "\n".join(lines)
    # Round-trip check: the rendered TOML must parse back to `data`.
    real = toml.loads(result)
    assert real == data, "output = {} while input = {}".format(real, data)

    print(result)
Beispiel #19
0
    def __parse_config(self, configfile):
        """
        Extracts the defined configuration parameters for the parser.
        Important pre-condition is the user must configure elements and rules against a predefined enumeration.

        :param configfile: configuration file
        :return None:
        """
        if os.path.exists(configfile):
            # BUG FIX: context manager guarantees the handle is closed even
            # if toml.loads() raises (the old open()/close() pair leaked it
            # on error).
            with open(configfile) as cfg:
                topconfig = toml.loads(cfg.read())

            if 'Filters' in topconfig:
                if 'sources' in topconfig['Filters'][0]:
                    self._sources = topconfig['Filters'][0]['sources']

                if 'rules' in topconfig['Filters'][0]:
                    # Only rules from the accepted enumeration are kept;
                    # everything else is logged and dropped.
                    temp = topconfig['Filters'][0]['rules']
                    for x in temp:
                        if x.lower() in self._accepted_rules:
                            self._rules.append(x.lower())
                        else:
                            self._logger.warning("Incompatible rule for parser " + x)

                if 'start_offset' in topconfig['Filters'][0]:
                    self._line_offset = topconfig['Filters'][0]['start_offset']
    def setUp(self):
        """
        Initialize the system configuration and the user configuration.

        Note the forward slash replacements for the user configuration. This is due to the forward slash being a
        restricted character in TOML(package used to parse configuration files in LQMT).
        """
        # relative pathing variables. Replace function calls for Windows compatibility.
        self.directory = os.path.dirname(__file__)
        self.alerts = self.directory + "/test_data/"
        self.alerts = self.alerts.replace("\\", "/")
        self.logging = self.directory + "/test_data/test-logs/lqmt"
        self.logging = self.logging.replace("\\", "/")
        self.whitelist = self.directory + "/test_data/whitelist/whitelist.txt"
        self.whitelist = self.whitelist.replace("\\", "/")
        self.whitelist_db = self.directory + "/test_data/whitelist/whitelist.db"
        self.whitelist_db = self.whitelist_db.replace("\\", "/")

        # configurations initialized
        sysconf = SystemConfig()
        self.sys_config = sysconf.getConfig()
        # USERCONFIG is a TOML template; fill in the test paths and parse it.
        config = USERCONFIG.format(self.alerts, self.logging, self.whitelist, self.whitelist_db)
        self.toml_config = toml.loads(config)
        self.toml_config = self.toml_config["Tools"]["FlexText"][0]  # dirty way of parsing userconfig for ToolConfig
        self.user_config = LQMToolConfig(config)
        self.toolConfig = ToolConfig(self.toml_config, csvToolInfo={""}, unhandledCSV={""})
Beispiel #21
0
    def parse(self):
        """Read the Pipfile and return its structured contents.

        Missing sections fall back to defaults.  The result carries the
        source/requires metadata under '_meta' plus the 'default' and
        'develop' package groups (also recorded on self.groups).
        """
        with open(self.filename) as f:
            content = f.read()

        # Built-in defaults, overridden by whatever the Pipfile declares.
        defaults = {
            u'source': [{u'url': u'https://pypi.python.org/simple', u'verify_ssl': True, 'name': "pypi"}],
            u'packages': {},
            u'requires': {},
            u'dev-packages': {}
        }
        config = dict(defaults)
        config.update(toml.loads(content))

        # Structure the data for output.
        data = {
            '_meta': {
                'sources': config['source'],
                'requires': config['requires']
            },
        }

        # TODO: Validate given data here.
        self.groups['default'] = config['packages']
        self.groups['develop'] = config['dev-packages']

        data.update(self.groups)
        return data
Beispiel #22
0
def get_pages_from_dirs(dirs):
    """Build page dicts from content directories.

    Each directory must contain the metadata file named by
    conf["metadata_file"] (TOML) providing every key in
    conf["required_keys_in_page"], plus a "content_path" glob matching
    exactly one file whose text is stored under page["content"].
    Any violation is logged and aborts the program via exit().
    """
    pages = []
    for page_dir in dirs:  # renamed: `dir` shadowed the builtin
        path_to_metadata_file = path.join(page_dir, conf["metadata_file"])
        with open(path_to_metadata_file) as metadata_file:
            page = toml.loads(metadata_file.read())

        # Explicit membership test instead of probing with page[key] and
        # catching KeyError; reports the first missing key, as before.
        for key in conf["required_keys_in_page"]:
            if key not in page:
                message = "`{}` key is missing from metadata file!".format(key)
                logger.error(message)
                exit()

        content_path = path.join(page_dir, page["content_path"])
        matches = glob.glob(content_path)
        if len(matches) != 1:
            logger.error("Content path matched less or more than needed!")
            exit()

        with open(matches[0]) as content_file:
            page["content"] = content_file.read()

        pages.append(page)

    return pages
def get_config():
    """Parse and return the TOML config named by the first CLI argument."""
    with open(sys.argv[1]) as config_file:
        return toml.loads(config_file.read())
Beispiel #24
0
def read_toml(content):
    """Yield a WhitelistRule for each top-level entry in `content`.

    A dict-valued first attribute means the section header was written
    without quotes (producing a nested table), which is rejected.
    """
    check_section_header(content)
    parsed = toml.loads(content, collections.OrderedDict)
    for name, attrs in parsed.items():
        values = list(attrs.values())
        if len(values) and isinstance(values[0], dict):
            raise RuntimeError('malformed section header -- forgot quotes?', name)
        pname, version = split_name(name)
        yield WhitelistRule(pname=pname, version=version, **attrs)
Beispiel #25
0
def load_config(filename):
    """Parse the TOML file at `filename` and return it as a dict.

    Exits the program if the file does not exist.
    """
    if not path.exists(filename):
        # BUG FIX: print() call syntax works on Python 2 and 3 alike; the
        # old `print '...'` statement is a SyntaxError on Python 3 (and
        # this file also contains Python-3-only code).
        print('no such file: {0}'.format(filename))
        exit()
    with open(filename) as fid:
        config = toml.loads(fid.read())
    return config
Beispiel #26
0
def load_config():
    """Load benchmark profiles from a TOML file (argv[1] or benchmark.toml).

    Returns (items, options): items maps every non-default profile name
    to the 'default' profile overlaid with the item's own values, with
    string values template-substituted against the profile; options come
    from [options] or built-in defaults.
    """
    filename = sys.argv[1] if len(sys.argv) > 1 else 'benchmark.toml'
    with open(filename) as configfile:
        config = toml.loads(configfile.read())

    items = config['item']
    default = items['default']

    if 'options' in config:
        options = config['options']
    else:
        options = dict(ref_result="C",
                       time_field="elapsed",
                       show_diff_below=0.9,
                       verbose=True)

    ret_items = {}
    for name, item in items.items():
        if name == 'default':
            continue
        # Quoted TOML keys keep their quotes; evaluate them to plain strings.
        if name.startswith('"') and name.endswith('"'):
            import ast
            name = ast.literal_eval(name)
        profile = dict(default)
        profile.update(item)
        profile['name'] = name
        # Substitute ${...} placeholders in string values with the
        # profile's own fields.
        for key in list(profile):
            value = profile[key]
            if type(value) is str:
                profile[key] = Template(value).safe_substitute(**profile)
        ret_items[name] = profile
    return ret_items, options
Beispiel #27
0
def repack(host, targets, channel='stable', suffix=''):
  """Fetch the rust release manifest for `channel`, download the
  rustc/cargo/std packages for `host` and `targets`, install them into
  ./rustc, and tar the result as rustc-<host>[-<suffix>]-repack.<ext>.

  NOTE(review): passes req.content (bytes on Python 3) to toml.loads --
  this looks like Python 2 code; confirm before running on Python 3.
  """
  print("Repacking rust for %s..." % host)
  url = 'https://static.rust-lang.org/dist/channel-rust-' + channel + '.toml'
  req = requests.get(url)
  req.raise_for_status()
  manifest = toml.loads(req.content)
  # Only v2 manifests carry the package layout this script understands.
  if manifest['manifest-version'] != '2':
    print('ERROR: unrecognized manifest version %s.' % manifest['manifest-version'])
    return
  print('Using manifest for rust %s as of %s.' % (channel, manifest['date']))
  print('Fetching packages...')
  rustc = fetch_package(manifest, 'rustc', host)
  cargo = fetch_package(manifest, 'cargo', host)
  stds = fetch_std(manifest, targets)
  print('Installing packages...')
  tar_basename = 'rustc-' + host
  if suffix:
      tar_basename += '-' + suffix
  tar_basename += '-repack'
  install_dir = 'rustc'
  # Start from a clean install dir so stale files don't end up in the tar.
  subprocess.check_call(['rm', '-rf', install_dir])
  install(os.path.basename(rustc['url']), install_dir)
  install(os.path.basename(cargo['url']), install_dir)
  for std in stds:
    install(os.path.basename(std['url']), install_dir)
    pass
  print('Tarring %s...' % tar_basename)
  tar_options, tar_ext = tar_for_host(host)
  subprocess.check_call(['tar', tar_options, tar_basename + tar_ext, install_dir])
  subprocess.check_call(['rm', '-rf', install_dir])
Beispiel #28
0
def _loadfile(fname, logger):
    """Load a TOML config file, recursively merging any included files.

    [include] file/directory entries name additional configs that are
    merged into the result; parse errors in includes are logged rather
    than raised.  The merged dict is validated against the _confspec
    JSON schema.  A missing file yields an empty dict.

    Raises ImportError if the toml or jsonschema module is unavailable.
    """
    conf_dict = {}

    if os.path.exists(fname):
        # file exists
        if HAVE_TOML:
            # BUG FIX: with-block closes the handle deterministically (the
            # bare open().read() leaked it until GC).
            with open(fname) as f:
                conf_dict = toml.loads(f.read())
        else:
            raise ImportError("No module named toml")

        # Collect include targets: a single file and/or every file found
        # under an included directory tree (sorted for determinism).
        include_files = []
        if "include" in conf_dict.keys():
            if "file" in conf_dict["include"].keys():
                f = conf_dict["include"]["file"]
                include_files.append(os.path.expanduser(f))

            if "directory" in conf_dict["include"].keys():
                d = conf_dict["include"]["directory"]
                include_files = include_files + sorted([os.path.join(dp, f) for dp, dn, fn in os.walk(os.path.expanduser(d)) for f in fn])

        for f in include_files:
            try:
                _merge(conf_dict, _loadfile(f, logger))
            except Exception as e:
                logger.error("Config file parse error: " + str(f) + " " + str(e).split("\n")[0])

        if HAVE_JSONSCHEMA:
            validate(conf_dict, json.loads(_confspec))
        else:
            raise ImportError("No module named jsonschema")

    return conf_dict
Beispiel #29
0
def load_toml_path_config(filename):
    """Returns a PathConfig created by loading a TOML file from the
    filesystem.
    """
    # A missing config file is not an error; fall back to all defaults.
    if not os.path.exists(filename):
        LOGGER.info(
            "Skipping path loading from non-existent config file: %s",
            filename)
        return PathConfig()

    LOGGER.info("Loading path information from config: %s", filename)

    try:
        with open(filename) as fd:
            raw_config = fd.read()
    except IOError as e:
        raise LocalConfigurationError(
            "Unable to load path configuration file: {}".format(str(e)))

    toml_config = toml.loads(raw_config)

    # Only these three keys are meaningful; anything else is a typo.
    allowed = {'data_dir', 'key_dir', 'log_dir'}
    unknown = set(toml_config.keys()) - allowed
    if unknown:
        raise LocalConfigurationError("Invalid keys in path config: {}".format(
            ", ".join(sorted(unknown))))

    return PathConfig(
        config_dir=None,
        data_dir=toml_config.get('data_dir'),
        key_dir=toml_config.get('key_dir'),
        log_dir=toml_config.get('log_dir'),
    )
Beispiel #30
0
    def test_convert_example_test(self):
        """Parse the canonical TOML example document and compare the
        result against the expected Python dict."""

        toml_text = textwrap.dedent('''\
            # This is a TOML document. Boom.

            title = "TOML Example"

            [owner]
            name = "Tom Preston-Werner"
            organization = "GitHub"
            bio = "GitHub Cofounder & CEO\\nLikes tater tots and beer."
            dob = 1979-05-27T07:32:00Z # First class dates? Why not?

            [database] # comment after a category
            server = "192.168.1.1" # comment after a key
            ports = [ 8001, 8001, 8002 ]
            connection_max = 5000
            enabled = true

            [servers]

                # You can indent as you please.
                [servers.alpha]
                ip = "10.0.0.1"
                dc = "eqdc10"

                # Tabs or spaces. TOML don't care.
            \t[servers.beta]
            \tip = "10.0.0.2"
            \tdc = "eqdc10"

            [clients]
            # Line breaks are OK when inside arrays
            data = [
                ["gamma", "delta"],
                [1, 2]
            ]
        ''')
        want = {
            'title': "TOML Example",
            'owner': {
                'name': "Tom Preston-Werner",
                'organization': "GitHub",
                'bio': "GitHub Cofounder & CEO\nLikes tater tots and beer.",
                # TOML first-class datetimes come back as datetime objects.
                'dob': datetime.datetime(1979, 5, 27, 7, 32, 0),
            },
            'database': {
                'server': "192.168.1.1",
                'ports': [8001, 8001, 8002],
                'connection_max': 5000,
                'enabled': True,
            },
            'servers': {
                'alpha': {'ip': "10.0.0.1", 'dc': "eqdc10"},
                'beta': {'ip': "10.0.0.2", 'dc': "eqdc10"},
            },
            'clients': {'data': [["gamma", "delta"], [1, 2]]},
        }
        self.assertEqual(toml.loads(toml_text), want)
Beispiel #31
0
 def create_default_config(cls):
     """Write the default TOML config file if it does not exist yet.

     Fix: the original checked existence via ``cls.get_config_file_path()``
     but wrote via ``AppUtil.get_config_file_path()`` — two different
     lookups that could diverge in a subclass. Both now use ``cls`` and
     the path is computed once.
     """
     config_path = cls.get_config_file_path()
     if not os.path.exists(config_path):
         config = toml.loads(cls.default_config)
         with open(config_path, "w") as f:
             toml.dump(config, f)
Beispiel #32
0
 def __init__(self):
     """Initialise the parent class from the embedded TOML config string
     (``self.CONFIG``) rather than loading a file from a config directory."""
     parsed = toml.loads(self.CONFIG)
     super().__init__(parsed)
Beispiel #33
0
from setuptools import setup, find_packages
from os import path

from io import open

here = path.abspath(path.dirname(__file__))

# Get the long description from the README file
with open(path.join(here, "README.md"), encoding="utf-8") as f:
    long_description = f.read()

with open(path.join(here, "Pipfile"), encoding="utf-8") as f:
    import toml  # requires pyproject.toml file
    tobeinstalled = toml.loads(f.read())

# Turn a Pipfile entry into a requirement string: keep the bare name when
# the version is "*" or a table, otherwise append the version specifier.
dct = tobeinstalled['packages']  # dictionary
install_requires = []
for pkg in dct:
    spec = dct[pkg]
    if spec != "*" and not isinstance(spec, dict):
        install_requires.append(pkg + spec)
    else:
        install_requires.append(pkg)

dct = tobeinstalled['dev-packages']
dev_requires = []
for pkg in dct:
    spec = dct[pkg]
    if spec != "*" and not isinstance(spec, dict):
        dev_requires.append(pkg + spec)
    else:
        dev_requires.append(pkg)
extras_require = {'dev': dev_requires}
# Note: this requires a pyproject.toml file with the following contents:
# [build-system]
Beispiel #34
0
import os
import sys
from logging.config import fileConfig

import toml
from alembic import context
from sqlalchemy import engine_from_config, pool

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)

# Pull the database DSN from the application's own settings file so the
# alembic.ini value does not have to be kept in sync by hand.
with open("settings.toml") as conffile:
    toml_data = toml.loads(conffile.read())

config.set_main_option("sqlalchemy.url", toml_data["common"]["database_engine_dsn"])

# add your model's MetaData object here
# for 'autogenerate' support
# Fix: `os` was used below without ever being imported (NameError at
# import time); it is now imported at the top.
sys.path.insert(0, os.path.realpath(os.path.join(os.path.dirname(__file__), '..')))
from db import Base

target_metadata = Base.metadata


# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
Beispiel #35
0
    def __init__(self, context):
        """Load the optional .servobuild TOML file from the project root
        into ``self.config`` and fill in defaults for the ``tools``,
        ``build`` and ``android`` sections.

        Side effects: ensures ``context.bootstrapped`` exists, sets
        ``context.sharedir`` to the resolved cache dir, and applies the
        default android target via ``self.handle_android_target``.
        """
        self.context = context

        def get_env_bool(var, default):
            # Contents of env vars are strings by default. This returns the
            # boolean value of the specified environment variable, or the
            # specified default if the var doesn't contain True or False
            return {
                'True': True,
                'False': False
            }.get(os.environ.get(var), default)

        def resolverelative(category, key):
            # Allow ~
            self.config[category][key] = path.expanduser(
                self.config[category][key])
            # Resolve relative paths
            self.config[category][key] = path.join(context.topdir,
                                                   self.config[category][key])

        if not hasattr(self.context, "bootstrapped"):
            self.context.bootstrapped = False

        # A missing .servobuild simply means "use all defaults".
        config_path = path.join(context.topdir, ".servobuild")
        if path.exists(config_path):
            with open(config_path) as f:
                self.config = toml.loads(f.read())
        else:
            self.config = {}

        # Handle missing/default items
        self.config.setdefault("tools", {})
        # SERVO_CACHE_DIR env var overrides the default <topdir>/.servo.
        default_cache_dir = os.environ.get("SERVO_CACHE_DIR",
                                           path.join(context.topdir, ".servo"))
        self.config["tools"].setdefault("cache-dir", default_cache_dir)
        resolverelative("tools", "cache-dir")

        default_cargo_home = os.environ.get(
            "CARGO_HOME", path.join(context.topdir, ".cargo"))
        self.config["tools"].setdefault("cargo-home-dir", default_cargo_home)
        resolverelative("tools", "cargo-home-dir")

        context.sharedir = self.config["tools"]["cache-dir"]

        self.config["tools"].setdefault("use-rustup", True)
        self.config["tools"].setdefault(
            "rustc-with-gold", get_env_bool("SERVO_RUSTC_WITH_GOLD", True))

        # Build-section defaults; values from .servobuild win over these.
        self.config.setdefault("build", {})
        self.config["build"].setdefault("android", False)
        self.config["build"].setdefault("mode", "")
        self.config["build"].setdefault("debug-mozjs", False)
        self.config["build"].setdefault("ccache", "")
        self.config["build"].setdefault("rustflags", "")
        self.config["build"].setdefault("incremental", None)
        self.config["build"].setdefault("thinlto", False)
        self.config["build"].setdefault("webgl-backtrace", False)
        self.config["build"].setdefault("dom-backtrace", False)

        self.config.setdefault("android", {})
        self.config["android"].setdefault("sdk", "")
        self.config["android"].setdefault("ndk", "")
        self.config["android"].setdefault("toolchain", "")
        # Set default android target
        self.handle_android_target("armv7-linux-androideabi")
Beispiel #36
0
    def __init__(self, context):
        """Load the optional .servobuild TOML file from the project root
        into ``self.config`` and fill in defaults for the ``tools``,
        ``build``, ``android`` and ``gonk`` sections.

        Side effects: ensures ``context.bootstrapped`` exists and sets
        ``context.sharedir`` to the resolved cache dir. When system rust
        or cargo is not requested, the corresponding root directories are
        forced to locations under the shared cache dir.
        """
        self.context = context

        def resolverelative(category, key):
            # Allow ~
            self.config[category][key] = path.expanduser(
                self.config[category][key])
            # Resolve relative paths
            self.config[category][key] = path.join(context.topdir,
                                                   self.config[category][key])

        if not hasattr(self.context, "bootstrapped"):
            self.context.bootstrapped = False

        # A missing .servobuild simply means "use all defaults".
        config_path = path.join(context.topdir, ".servobuild")
        if path.exists(config_path):
            with open(config_path) as f:
                self.config = toml.loads(f.read())
        else:
            self.config = {}

        # Handle missing/default items
        self.config.setdefault("tools", {})
        default_cache_dir = os.environ.get("SERVO_CACHE_DIR",
                                           path.join(context.topdir, ".servo"))
        self.config["tools"].setdefault("cache-dir", default_cache_dir)
        resolverelative("tools", "cache-dir")

        self.config["tools"].setdefault("cargo-home-dir",
                                        path.join(context.topdir, ".cargo"))
        resolverelative("tools", "cargo-home-dir")

        context.sharedir = self.config["tools"]["cache-dir"]

        self.config["tools"].setdefault("system-rust", False)
        self.config["tools"].setdefault("system-cargo", False)
        self.config["tools"].setdefault("rust-root", "")
        self.config["tools"].setdefault("cargo-root", "")
        # Non-system toolchains live under the shared cache dir, keyed by
        # the pinned rust/cargo revision.
        if not self.config["tools"]["system-rust"]:
            self.config["tools"]["rust-root"] = path.join(
                context.sharedir, "rust", self.rust_path())
        if not self.config["tools"]["system-cargo"]:
            self.config["tools"]["cargo-root"] = path.join(
                context.sharedir, "cargo", self.cargo_build_id())
        self.config["tools"].setdefault("rustc-with-gold", True)

        self.config.setdefault("build", {})
        self.config["build"].setdefault("android", False)
        self.config["build"].setdefault("mode", "")
        self.config["build"].setdefault("debug-mozjs", False)
        self.config["build"].setdefault("ccache", "")

        self.config.setdefault("android", {})
        self.config["android"].setdefault("sdk", "")
        self.config["android"].setdefault("ndk", "")
        self.config["android"].setdefault("toolchain", "")
        self.config["android"].setdefault("target", "arm-linux-androideabi")

        self.config.setdefault("gonk", {})
        self.config["gonk"].setdefault("b2g", "")
        self.config["gonk"].setdefault("product", "flame")
Beispiel #37
0
def read_conf(path=""):
    """Read the TOML file at *path* and return the parsed dict."""
    with open(path) as handle:
        content = handle.read()
    return toml.loads(content)
Beispiel #38
0
from __future__ import print_function
from models.Dense3D import Dense3D
import torch
import toml
from training import Trainer
from validation import Validator
import torch.nn as nn
import os
from models.net2d import densenet121,densenet161,squeezenet1_1,vgg19_bn,resnet152,resnet152_plus



# Load the run configuration from the TOML options file in the CWD.
print("Loading options...")
with open('options_lip.toml', 'r') as optionsFile:
    options = toml.loads(optionsFile.read())

# Enable the cudnn auto-tuner only when both flags request it.
if(options["general"]["usecudnnbenchmark"] and options["general"]["usecudnn"]):
    print("Running cudnn benchmark...")
    torch.backends.cudnn.benchmark = True

# Restrict visible GPUs to the configured id(s) before any CUDA work.
os.environ['CUDA_VISIBLE_DEVICES'] = options["general"]['gpuid']

# Seed for reproducibility.
torch.manual_seed(options["general"]['random_seed'])

#Create the model.
if options['general']['use_3d']:
    model = Dense3D(options)##TODO:1
elif options['general']['use_slice']:
    if options['general']['use_plus']:
        model = resnet152_plus(options['general']['class_num'])
    else:
    def dump2netcdf(self, model_str='hysplit'):
        """
        dump the assembled data to a netcdf file by repeatedly calling :meth:`save_item`
        configuration (directories, names, etc) is given by the config file
        """

        # Unix timestamps (seconds since 1970-01-01) for every time step.
        timestamps = np.array([
            (dt - datetime.datetime(1970, 1, 1)).total_seconds()
            for dt in self.dt_list
        ])
        # Decimal hours since midnight UTC for the 'time' variable.
        hours_cn = np.array([
            dt.hour + dt.minute / 60. + dt.second / 3600.
            for dt in self.dt_list
        ])

        if not os.path.isdir(self.config['output_dir']):
            os.makedirs(self.config['output_dir'])

        ncfile = self.config['output_dir'] +\
                '{}_{}_{}-output.nc'.format(
                    self.dt_list[0].strftime('%Y%m%d'), self.config['station']['short_name'], model_str)
        # ncfile = "/home/devel/" +\
        #          '{}_hysplit_output.nc'.format(self.config['time']['begin_dt'].strftime('%Y%m%d'))

        #dataset = netCDF4.Dataset(ncfile, 'w', format='NETCDF4')
        # NETCDF3_CLASSIC kept for reader compatibility.
        dataset = netCDF4.Dataset(ncfile, 'w', format='NETCDF3_CLASSIC')

        # Dimensions shared by all variables below.
        dim_time = dataset.createDimension('time', len(self.dt_list))
        dim_height = dataset.createDimension('height', len(self.height_list))
        dim_age = dataset.createDimension(
            'time_age',
            abs(self.config['time']['tr_duration']) + 1)
        dim_cat = dataset.createDimension('categories', 7)
        dim_regions = dataset.createDimension('regions',
                                              len(list(self.geo_names.keys())))
        dim_lats = dataset.createDimension('lat_thres',
                                           len(list(self.lat_names.keys())))

        # times_cn = dataset.createVariable('time', np.float32, ('time',))
        # times_cn[:] = hours_cn.astype(np.float32)
        # times_cn.units = "hours since " + self.begin_dt.strftime('%Y-%m-%d') + "00:00:00 +00:00"
        # times_cn.long_name = "Decimal hours from midnight UTC"
        # times_cn.axis = "T"

        # Coordinate variables: timestamp, time (decimal hours), range, age.
        save_item(
            dataset, {
                'var_name': 'timestamp',
                'dimension': ('time', ),
                'vartype': 'i4',
                'arr': timestamps.astype(np.int32),
                'long_name': "Unix timestamp",
                'units': "s",
                'axis': 'T'
            })
        save_item(
            dataset, {
                'var_name':
                'time',
                'dimension': ('time', ),
                'arr':
                hours_cn.astype(np.float32),
                'long_name':
                "Decimal hours from midnight UTC",
                'units':
                "hours since {} 00:00:00 +00:00".format(
                    self.dt_list[0].strftime('%Y-%m-%d')),
                'axis':
                'T'
            })
        save_item(
            dataset, {
                'var_name': 'range',
                'dimension': ('height', ),
                'arr': np.array(self.height_list).astype(np.float32) / 1000.,
                'long_name': "Height",
                'units': "km",
                'axis': 'Z'
            })
        save_item(
            dataset, {
                'var_name': 'age',
                'dimension': ('time_age', ),
                'arr': self.raw_dict['age'][0, 0],
                'long_name': "Age of trajectory",
                'units': "h"
            })

        # Per-time-step bookkeeping: particle count and time resolution.
        save_item(
            dataset, {
                'var_name': 'no_part',
                'dimension': ('time', ),
                'arr': np.array(self.no_part),
                'long_name': "number particles/trajectories",
                'units': "no"
            })
        save_item(
            dataset, {
                'var_name': 'time_res',
                'dimension': ('time', ),
                'arr': np.array(self.time_res),
                'long_name': "backward time resolution",
                'units': "no"
            })

        # 2-D (time, height) statistics.
        for k in list(self.stat2d_dict.keys()):
            print(k, self.stat2d_dict.get(k).shape)
            dataset = save_item(
                dataset, {
                    'var_name': k,
                    'dimension': ('time', 'height'),
                    'arr': self.stat2d_dict.get(k).copy().astype(np.float32),
                    'long_name': k
                })

        # its sufficient to save the age of the trajectory once
        raw_data_keys = list(self.raw_dict.keys())
        raw_data_keys.remove('age')
        # chance to modify some parameter descriptions for better readability
        modified_params = {
            key: {
                'var_name': key.lower(),
                'long_name': "Hysplit " + key.lower()
            }
            for key in raw_data_keys
        }
        modified_params['height'] = {
            'var_name': 'traj_height',
            'long_name': "Hysplit height of air parcel"
        }
        if 'land_sfc_category' in list(modified_params.keys()):
            modified_params['land_sfc_category'][
                'long_name'] = "Modis land use category (simplified)"
        # Raw 3-D (time, height, time_age) trajectory data.
        for k in raw_data_keys:
            print(k, self.raw_dict.get(k).shape)
            dataset = save_item(
                dataset, {
                    'var_name': modified_params[k]['var_name'],
                    'dimension': ('time', 'height', 'time_age'),
                    'arr': self.raw_dict.get(k).copy().astype(np.float32),
                    'long_name': modified_params[k]['long_name']
                })

        # save the land use
        ls_data_keys = list(self.statls_dict.keys())
        print('ls_data_keys ', ls_data_keys)
        modified_params = {
            key: {
                'var_name': key,
                'long_name': "land surface " + key.lower(),
                'comment': str(self.ls_categories)
            }
            for key in ls_data_keys
        }
        for k in ls_data_keys:
            print(k, self.statls_dict.get(k).shape)
            dataset = save_item(
                dataset, {
                    'var_name': modified_params[k]['var_name'],
                    'dimension': ('time', 'height', 'categories'),
                    'arr': self.statls_dict.get(k),
                    'long_name': modified_params[k]['long_name'],
                    'comment': modified_params[k]['comment']
                })

        # Normalised residence time per land-surface category; cells with
        # non-finite results are flagged with -1.
        for k in [ky for ky in ls_data_keys if 'ens' in ky]:
            rel = self.statls_dict.get(k)
            no_below = self.stat2d_dict.get(k + "_no_below")
            no_below = np.repeat(no_below[:, :, np.newaxis],
                                 rel.shape[-1],
                                 axis=2)
            no_below[no_below < 0] = np.nan
            norm = np.array(
                self.no_part) * 10 * (24. / np.array(self.time_res))
            norm = np.repeat(norm[:, np.newaxis], rel.shape[1], axis=1)
            norm = np.repeat(norm[:, :, np.newaxis], rel.shape[2], axis=2)

            normed_time = rel * no_below / norm
            normed_time[~np.isfinite(normed_time)] = -1
            str_below = modified_params[k]['var_name'].replace("occ_ens_", "")
            var_name = "rt_normed_landsfc_" + str_below
            long_name = "normed residence time land surface " + str_below
            dataset = save_item(
                dataset, {
                    'var_name': var_name,
                    'dimension': ('time', 'height', 'categories'),
                    'arr': normed_time,
                    'long_name': long_name,
                    'comment': modified_params[k]['comment']
                })

        # and the geo names
        gn_data_keys = list(self.statgn_dict.keys())
        print('gn_data_keys ', gn_data_keys)
        modified_params = {
            key: {
                'var_name': key,
                'long_name': "geography names " + key.lower(),
                'comment': str(self.geo_names)
            }
            for key in gn_data_keys
        }
        for k in gn_data_keys:
            print(k, self.statgn_dict.get(k).shape)
            dataset = save_item(
                dataset, {
                    'var_name': modified_params[k]['var_name'],
                    'dimension': ('time', 'height', 'regions'),
                    'arr': self.statgn_dict.get(k),
                    'long_name': modified_params[k]['long_name'],
                    'comment': modified_params[k]['comment']
                })

        # Same normalisation as above, but for the named regions.
        for k in gn_data_keys:
            rel = self.statgn_dict.get(k)
            no_below = self.stat2d_dict.get(k + "_no_below")
            no_below = np.repeat(no_below[:, :, np.newaxis],
                                 rel.shape[-1],
                                 axis=2)
            no_below[no_below < 0] = np.nan
            norm = np.array(
                self.no_part) * 10 * (24. / np.array(self.time_res))
            norm = np.repeat(norm[:, np.newaxis], rel.shape[1], axis=1)
            norm = np.repeat(norm[:, :, np.newaxis], rel.shape[2], axis=2)

            normed_time = rel * no_below / norm
            normed_time[~np.isfinite(normed_time)] = -1
            str_below = modified_params[k]['var_name'].replace(
                "region_ens_", "")
            var_name = "rt_normed_region_" + str_below
            long_name = "normed residence time named region " + str_below
            dataset = save_item(
                dataset, {
                    'var_name': var_name,
                    'dimension': ('time', 'height', 'regions'),
                    'arr': normed_time,
                    'long_name': long_name,
                    'comment': modified_params[k]['comment']
                })

        # TODO make statlat optional statlat_dict
        lat_data_keys = list(self.statlat_dict.keys())
        print('lat_data_keys ', lat_data_keys)
        modified_params = {
            key: {
                'var_name': key,
                'long_name': "lat_thres " + key.lower(),
                'comment': str(self.lat_names)
            }
            for key in lat_data_keys
        }
        for k in lat_data_keys:
            print("self.statlat_dict.keys()", self.statlat_dict.keys())
            print(k, self.statlat_dict.get(k).shape)
            dataset = save_item(
                dataset, {
                    'var_name': modified_params[k]['var_name'],
                    'dimension': ('time', 'height', 'lat_thres'),
                    'arr': self.statlat_dict.get(k),
                    'long_name': modified_params[k]['long_name'],
                    'comment': modified_params[k]['comment']
                })

        # Same normalisation as above, but for the latitude thresholds.
        for k in lat_data_keys:
            rel = self.statlat_dict.get(k)
            no_below = self.stat2d_dict.get(k + "_no_below")
            no_below = np.repeat(no_below[:, :, np.newaxis],
                                 rel.shape[-1],
                                 axis=2)
            no_below[no_below < 0] = np.nan
            norm = np.array(
                self.no_part) * 10 * (24. / np.array(self.time_res))
            norm = np.repeat(norm[:, np.newaxis], rel.shape[1], axis=1)
            norm = np.repeat(norm[:, :, np.newaxis], rel.shape[2], axis=2)

            normed_time = rel * no_below / norm
            normed_time[~np.isfinite(normed_time)] = -1
            str_below = modified_params[k]['var_name'].replace("lat_ens_", "")
            var_name = "rt_normed_lat_" + str_below
            long_name = "normed residence time latitude " + str_below
            dataset = save_item(
                dataset, {
                    'var_name': var_name,
                    'dimension': ('time', 'height', 'lat_thres'),
                    'arr': normed_time,
                    'long_name': long_name,
                    'comment': modified_params[k]['comment']
                })

        # save_item(dataset, {'var_name': 'width', 'dimension': ('time', 'height'),
        #                     'arr': .corr_width_reg[:].filled(), 'long_name': "Spectral width",
        #                     'comment': "Wind profiler spectral width (standard deviation) corrected by cloud radar (only Bragg contribution)",
        #                     'units': "m s-1", 'units_html': "m s<sup>-1</sup>",
        #                     'missing_value': -99., 'plot_range': (0.01, 4.),
        #                     'plot_scale': "logarithmic"})

        # Global attributes come from the TOML metadata file and the config.
        with open('output_meta.toml') as output_meta:
            meta_info = toml.loads(output_meta.read())

        dataset.description = meta_info['description'][model_str]
        dataset.location = self.config['station']['name']
        if "moving" in self.config['station'].keys(
        ) and self.config['station']['moving'] == True:
            dataset.coordinates = "Moving Platform!"
        else:
            dataset.coordinates = (self.config['station']['lat'],
                                   self.config['station']['lon'])
        dataset.institution = meta_info["institution"]
        dataset.authors = meta_info["authors"]
        dataset.contact = meta_info["contact"]
        dataset.creation_time = datetime.datetime.utcnow().strftime(
            "%Y-%m-%d %H:%M UTC")
        dataset.day = self.dt_list[0].day
        dataset.month = self.dt_list[0].month
        dataset.year = self.dt_list[0].year
        dataset.git_commit, dataset.git_branch = get_git_hash()
        dataset.close()
        gc.collect()
Beispiel #40
0
def create_job(analysis,
               user=None,
               json_text='',
               json_data=None,
               name=None,
               state=Job.QUEUED,
               uid=None,
               save=True,
               fill_with=None):
    """
    Create (and optionally save) a Job for the given analysis/recipe.

    Note: Parameter 'fill_with' needs to be a flat key:value dictionary.
    """
    # Fix: the original used mutable default arguments (`json_data={}`,
    # `fill_with={}`), which are shared across calls and can leak state.
    json_data = json_data or {}
    fill_with = {} if fill_with is None else fill_with

    state = state or Job.QUEUED
    owner = user or analysis.project.owner
    project = analysis.project

    if json_data:
        json_text = hjson.dumps(json_data)
    else:
        json_text = json_text or analysis.json_text

    # Needs the json_data to set the summary.
    json_data = hjson.loads(json_text)

    # Generate a meaningful job title.
    # NOTE(review): this overwrites any caller-supplied `name`; the
    # parameter is effectively ignored — confirm that is intentional.
    name = make_job_title(recipe=analysis, data=json_data)
    uid = uid or util.get_uuid(8)

    # Create the job instance.
    job = Job(name=name,
              state=state,
              json_text=json_text,
              security=Job.AUTHORIZED,
              project=project,
              analysis=analysis,
              owner=owner,
              template=analysis.template,
              uid=uid)

    # Fill the json data.
    json_data = fill_json_data(job=job,
                               source_data=json_data,
                               project=project,
                               fill_with=fill_with)

    # Regenerate the title now that placeholders have been filled.
    name = make_job_title(recipe=analysis, data=json_data)
    # Update the json_text and name
    job.json_text = hjson.dumps(json_data)
    job.name = name

    if save:
        job.save()

        # Update the projects lastedit user when a job is created
        #job_count = project.job_set.filter(deleted=False).count()
        job_count = Job.objects.filter(deleted=False, project=project).count()
        Project.objects.filter(uid=project.uid).update(lastedit_user=owner,
                                                       lastedit_date=now(),
                                                       jobs_count=job_count)
        logger.info(f"Created job id={job.id} name={job.name}")

    return job
Beispiel #41
0
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with fedimg; if not, see http://www.gnu.org/licenses,
# or write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Authors:  David Gay <*****@*****.**>
#           Sayan Chowdhury <*****@*****.**>

import toml

# Read in config file at import time; a missing file raises IOError here.
with open("/etc/fedimg/fedimg-conf.toml") as conffile:
    config = toml.loads(conffile.read())

# Fedimg Consumer configurations
PROCESS_COUNT = config['general']['process_count']
STATUS_FILTER = ('FINISHED_INCOMPLETE', 'FINISHED')

ACTIVE_SERVICES = config['general']['active_services']
CLEAN_UP_ON_FAILURE = config['general']['clean_up_on_failure']
DELETE_IMAGES_ON_FAILURE = config['general']['delete_images_on_failure']

# AWS settings; the whole [aws] table is optional.
# NOTE(review): the `{}` fallback for the two usernames yields a dict
# where a string is expected — confirm '' or None was not intended.
AWS_UTIL_USER = config.get('aws', {}).get('util_username', {})
AWS_TEST_USER = config.get('aws', {}).get('test_username', {})
AWS_ACCESS_ID = config.get('aws', {}).get('access_id')
AWS_SECRET_KEY = config.get('aws', {}).get('secret_key')
AWS_VOLUME_SIZE = config.get('aws', {}).get('volume_size')
AWS_VOLUME_TYPES = config.get('aws', {}).get('volume_types')
Beispiel #42
0
from pathlib import Path
from time import time

from toml import TomlPreserveInlineDictEncoder as tpide
from toml import dumps, loads

PYPROJECT = "pyproject.toml"

# Load the poetry project file.
pyproject_path = Path(PYPROJECT)
pyproject = loads(pyproject_path.read_text())

# Replace everything after "<major>.<minor>" with the current unix
# timestamp, e.g. "1.2" + ".1699999999".
version = pyproject["tool"]["poetry"]["version"]
major_minor = version.split(".")[:2]
stamped = ".".join(major_minor + [str(int(time()))])

# Write the stamped version back, preserving inline tables.
pyproject["tool"]["poetry"]["version"] = stamped
pyproject_path.write_text(dumps(pyproject, tpide()))
Beispiel #43
0
def loads(text, tool='baldrick'):
    """Parse TOML *text* and wrap its [tool.<tool>] section in a Config."""
    parsed = toml.loads(text)
    return Config(parsed['tool'][tool])
Beispiel #44
0
 def loads(obj: str):
     """Deserialize a TOML document string into a dict."""
     parsed = toml.loads(obj)
     return parsed
Beispiel #45
0
def load():
    """Initialise the module-level inflect engine and register the plural
    overrides shipped in sizebot's plurals.ini (TOML) data file."""
    global engine
    engine = inflect.engine()
    overrides = toml.loads(pkg_resources.read_text(sizebot.data, "plurals.ini"))
    for singular, plural in overrides["plurals"].items():
        engine.defnoun(singular, plural)
Beispiel #46
0
def main():
    """Command-line entry point for Armature.

    Parses ``sys.argv`` for output-format options (--drawstuff, --sdf),
    an ARMS (TOML) input file and an optional save directory, then runs
    each matching creator and writes its output file.
    """
    args = sys.argv
    #options supported by Armature
    long_options = ["drawstuff", "sdf"]

    #Get all the options, if there is an invalid option, quit
    try:
        opts, args = getopt.gnu_getopt(sys.argv, "", long_options)
    except getopt.GetoptError as e:
        print(e)
        quit()

    file_to_parse = None
    where_to_save = None

    #If no input file was given
    if len(args) == 1:
        print("\
Please give one argument as the ARMS file to parse, and a second, optional, argument as the save location.\n\
\ni.e. >python Armature.py my/super/cool/file.arms where/i/want/to/save")
        quit()
    else:
        file_to_parse = Path(args[1])

    #If no options are given, ask for them
    if len(opts) < 1:
        print("\
Please give options to select the type of file you'd like Armature to produce. You may do multiple options.\n\
\ni.e. >python Armature.py --drawstuff --sdf my/super/cool/file.arms where/i/want/to/save\n\
\nAvailible Options:")
        for option in long_options:
            print(f'--{option}')
        quit()

    #Translate the ARMS syntax into a dictionary
    # (module-level so the conversion helpers below can read it)
    global arms
    arms = toml.loads(file_to_parse.read_text())

    # Expand macros/constants and normalise positions before export.
    convert_macros()
    convert_constants()

    make_shape_positions_relative_to_parents()
    make_joint_positions_relative_to_parent_shape()

    #Get all the creators
    creators = ArmsCreatorProvider.AllArmsCreators()

    #For every option
    for opt in opts:
        opt = opt[0]
        for creator in creators:
            #Write to a file if creator was requested in the command line options
            if creator.can_create(opt):
                #Get the save location (second positional arg, else CWD)
                if len(args) > 2:
                    where_to_save = Path(
                        f"{args[2]}/{file_to_parse.stem}{creator.file_extension}"
                    )
                else:
                    where_to_save = Path(
                        f"./{file_to_parse.stem}{creator.file_extension}")
                where_to_save.write_text(creator.create_file(arms))
Beispiel #47
0
 def deserialize(cls, s, **opts):
     """Parse TOML text *s* into a dict, forwarding *opts* to toml.loads."""
     parsed = toml.loads(s, **opts)
     return parsed
Beispiel #48
0
def main(argv=()) -> None:
    """Program entry point for all of the sail/sailboat terminal program.

    Locates the project's ``sailboat.toml`` (walking up the directory tree),
    parses command-line switches and the subcommand, dispatches to the
    matching plugin, then writes the (possibly updated) configuration back
    to ``sailboat.toml`` in a fixed, commented section order.

    Args:
        argv: command-line arguments; falls back to ``sys.argv`` when empty
            or ``None``.
    """
    # Move to project root, check for sailboat.toml, and then read it.
    if argv is None:
        argv = []
    if not argv:
        argv = sys.argv
    ordinal_path = os.getcwd()
    # Walk upwards until a sailboat.toml appears or we get too close to /.
    while not os.path.isfile('sailboat.toml') and os.getcwd().count(
            os.sep) > 2:
        os.chdir('..')
    if not os.path.isfile('sailboat.toml'):
        needs_setup = True
        data = {}
        os.chdir(ordinal_path)
    else:
        needs_setup = False
        with open('sailboat.toml', 'r', encoding="utf8") as file:
            data = toml.loads(file.read())
            if data == {}:
                needs_setup = True

    # Add required sections to data.
    for required_section in ("command", "build", "release", "git"):
        if required_section not in data:
            data[required_section] = {}

    # Search for options in argv: leading -x / --xyz arguments are switches,
    # everything from the first positional argument onward belongs to the
    # subcommand.
    switches = []
    for index, value in enumerate(argv[1:]):
        if value.startswith('--'):
            switches.append(value[2:])
        elif value.startswith('-'):
            switches.extend(value[1:])
        else:
            start_index = index
            options = [argv[0], *argv[1:][start_index:]]
            break
    else:
        options = [argv[0]]

    # Close the plugins registry file deterministically instead of leaking
    # the handle (the previous open(...).read() never closed it).
    with open(prefix + 'plugins.json') as plugins_file:
        plugins = json.load(plugins_file)

    if 'version' in switches or 'v' in switches:
        print(
            'Sailboat version {}\n\nHelp:\ncontact [email protected]\nor make an issue at cole-wilson/sailboat'
            .format(sailboat.__version__))
        sys.exit(0)
    # Refresh the plugin registry on request, or when it looks incomplete.
    if 'refresh' in switches or 'r' in switches or plugins == {} or (len(
            plugins.keys()) != 4):
        print("Refreshing plugins list (this could take a couple seconds...)")
        plugins = sailboat.refresh_plugins()
        print('Done!\n')
    # Build the help text from the registered core and project commands.
    helptext = "usage: " + sys.argv[0].split(
        os.sep
    )[-1] + " [options ...] [subcommand] [subcommand options ...]\n\n\tcore commands:"
    for command in plugins['core'].keys():
        if plugins['core'][command]['show']:
            helptext += "\n\t\t- " + term.cyan + command + term.normal + ": " + plugins[
                'core'][command]['description'].capitalize()
    helptext += "\n\t\t- " + term.cyan + "help" + term.normal + ": Display this message."
    if len(plugins['command'].keys()) != 0:
        helptext += "\n\n\tother commands:"
    for command in plugins['command'].keys():
        if plugins['command'][command]['show'] and command in data['command']:
            helptext += "\n\t\t- " + term.cyan + command + term.normal + ": " + plugins[
                'command'][command]['description'].capitalize()

    helptext += "\n"

    if len(options) < 2:
        print(helptext)
        return
    # A fresh project is forced through quickstart regardless of argv.
    if needs_setup:
        command = 'quickstart'
    else:
        command = options[1]
    if command == 'help':  # or 'help' in options or 'h' in options:
        print(helptext)
        return

    # Resolve the plugin class implementing the requested command.
    try:
        plugin_type, temp = sailboat.get_plugin(command)
    except sailboat.PluginNotFound:
        print(
            'sailboat: error: {} is not a valid command. Please make sure you have installed it.'
            .format(command))
        return
    if plugin_type != 'core' and command not in data['build'] and command not in data['release'] and command not in \
            data['command']:
        print(
            'sailboat: error: {} *is* a valid command, but it isn\'t installed on this project. Install it with the '
            '`add` command.'.format(command))
        return
    temp = temp(data=data, options=options[2:], name=command, prefix=prefix)
    ##############################
    temp.run()  # Run the plugin!
    ##############################
    # Core plugins may rewrite the whole config; other plugins may only
    # update their own entry.
    if plugin_type == 'core':
        data = temp.data
    else:
        data[plugin_type][command] = temp.data[plugin_type][command]

    # Split the config into sections so the file is rewritten in a
    # predictable, commented order.
    basic_data = {}
    resources = {'resources': {}}
    commands = {'command': {}}
    builds = {'build': {}}
    release = {'release': {}}
    other = {}

    with open('sailboat.toml', 'w+') as f:
        for key in data.keys():
            if not isinstance(data[key], dict):
                basic_data[key] = data[key]
            elif key == 'resources' and len(data[key].values()) > 0:
                resources['resources'] = data[key]
            elif key == 'command' and len(data[key].values()) > 0:
                commands['command'] = data[key]
            elif key == 'build' and len(data[key].values()) > 0:
                builds['build'] = data[key]
            elif key == 'release' and len(data[key].values()) > 0:
                release['release'] = data[key]
            elif key not in ('build', 'release', 'command', 'resources'):
                other[key] = data[key]
        # Drop sections that stayed empty so no empty TOML tables are written.
        resources = resources if resources != {'resources': {}} else {}
        commands = commands if commands != {'command': {}} else {}
        builds = builds if builds != {'build': {}} else {}
        release = release if release != {'release': {}} else {}
        other = other if other != {'other': {}} else {}
        o = [
            *map(toml.dumps,
                 [basic_data, resources, commands, builds, release, other])
        ]
        out = r"""#            _  _  _                _   
#  ___ __ _ (_)| || |__  ___  __ _ | |_ 
# (_-</ _` || || || '_ \/ _ \/ _` ||  _|
# /__/\__,_||_||_||_.__/\___/\__,_| \__|
                                      
# Basic Setup:
{}

# Resource Setup:
{}

# Plugin Commands:
{}

# Build Routines:
{}

# Release Routines:
{}

# Other:
{}

# Thank you for using Sailboat!"""
        if '_comments' in data and not data['_comments']:
            print()
            out = "{}\n{}\n{}\n{}\n{}\n{}\n"
        out = out.format(o[0], o[1], o[2], o[3], o[4], o[5])
        f.write(out)
Beispiel #49
0
### Initialization
# load libraries
import arcpy
import toml
import os
import sys

### Preliminary processing
# load parameters: shared settings plus the sliver-removal-specific ones
with open("code/parameters/general.toml") as conffile:
    general_params = toml.loads(conffile.read())

with open("code/parameters/remove-slivers.toml") as conffile:
    slivers_params = toml.loads(conffile.read())

# set environmental variables
# NOTE(review): 'threads' is assumed to be a valid parallelProcessingFactor
# value — confirm against the general.toml schema.
arcpy.env.parallelProcessingFactor = general_params['threads']
arcpy.env.overwriteOutput = True
# work inside the directory that contains the input dataset
arcpy.env.workspace = os.path.dirname(sys.argv[1])

# storing system cmds: argv[1] = input feature class, argv[2] = output path
input_PTH = sys.argv[1]
output_PTH = sys.argv[2]

### Main processing
# calculate area (square kilometres) into a new DOUBLE field named AREA
arcpy.AddField_management(input_PTH, 'AREA', 'DOUBLE')
arcpy.CalculateField_management(input_PTH, 'AREA',
                                '!shape.area@SQUAREKILOMETERS!', 'PYTHON_9.3')
# select slivers
Beispiel #50
0
try:
    # Prefer the third-party backport; fall back to the stdlib module.
    from importlib_resources import read_text
except ModuleNotFoundError:
    from importlib.resources import read_text

try:
    import progressbar
except ImportError:
    # progressbar is optional; this flag tells callers whether it loaded.
    PROGRESSBAR_EXISTS = False
else:
    PROGRESSBAR_EXISTS = True

logger = logging.getLogger(__name__)

# Index-file URLs parsed from TOML shipped as package data.
indices_urls = toml.loads(
    read_text("planetpy.pdstools.data", "indices_paths.toml"))


def list_available_index_files():
    """Print all known index files plus a hint on how to download one."""
    print(yaml.dump(indices_urls, default_flow_style=False))
    print(
        "Use indices.download('mission:instrument:index') to download an index file."
    )
    # Fixed: the example call previously lacked its closing parenthesis.
    print("For example: indices.download('cassini:uvis:moon_summary')")


def replace_url_suffix(url, new_suffix=".tab"):
    """Cleanest way to replace the suffix in an URL.

    Sometimes the indices have upper case filenames, this is taken care of here.
Beispiel #51
0
def test_array_sep():
    """Dump/load with a custom array separator must be a stable round trip."""
    sep_encoder = toml.TomlArraySeparatorEncoder(separator=",\t")
    original = {"a": [1, 2, 3]}
    first_pass = toml.loads(toml.dumps(original, encoder=sep_encoder))
    second_pass = toml.loads(toml.dumps(first_pass, encoder=sep_encoder))
    assert first_pass == second_pass
Beispiel #52
0
# GitHub locations of the two repositories this tooling works against.
GITHUB = "https://github.com"
LAB_ORG = "jupyterlab"
REPO_JUPYTERLAB = f"{GITHUB}/{LAB_ORG}/jupyterlab"
REPO_LUMINO = f"{GITHUB}/{LAB_ORG}/lumino"

# don't pollute the global state: keep yarn links inside this checkout
LINKS = (HERE / "repos/.yarn-links").resolve()
YARN = ["yarn", "--link-folder", LINKS]
PIP = ["python", "-m", "pip"]

# Installed JupyterLab application layout inside the active environment.
LAB_APP_DIR = pathlib.Path(sys.prefix) / "share/jupyter/lab"
LAB_APP_STATIC = LAB_APP_DIR / "static"
LAB_APP_INDEX = LAB_APP_STATIC / "index.html"

# repos.toml lists the repos to manage; PATHS maps each URL to its local
# checkout directory under ./repos (keyed by the URL's last path component).
REPOS_TOML = HERE / "repos.toml"
REPOS = toml.loads(REPOS_TOML.read_text())["repos"]
PATHS = {url: HERE / "repos" / pathlib.Path(url).name for url in REPOS}
# Local dev-server endpoints.
HOST = "127.0.0.1"
PORT = 8080
LAB_PORT = 9999

# Lumino packages with no published docs (excluded from doc checks).
MISSING_LUMINO_DOCS = [
    "default-theme",
    # TODO: https://github.com/jupyterlab/lumino/issues/154
    "polling",
]

def task_lint():
    """lint the source in _this_ repo"""
    all_py = [*HERE.glob("*.py"), *PA11Y.glob("*.py")]
 def load(self):
     """Read ``self.fileName`` as TOML into ``loaded_results`` and mark loaded."""
     with open(self.fileName) as results_file:
         raw_text = results_file.read()
     self.loaded_results = toml.loads(raw_text)
     self.loaded = True
Beispiel #54
0
def parse_with_toml(data):
    """Uses TOML syntax to parse data"""
    # Wrap the value in a dummy assignment so TOML's value grammar applies;
    # on any parse failure, hand back the raw input unchanged.
    wrapped = f"key={data}"
    try:
        parsed = toml.loads(wrapped)
        return parsed["key"]
    except (toml.TomlDecodeError, KeyError):
        return data
Beispiel #55
0
    def __init__(self, context):
        """Load the optional .servobuild config and fill in every default.

        Mutates ``context`` (sets ``bootstrapped`` and ``sharedir``) and
        builds ``self.config`` section by section: tools, build, android.
        """
        self.context = context

        def get_env_bool(var, default):
            # Contents of env vars are strings by default. This returns the
            # boolean value of the specified environment variable, or the
            # specified default if the var doesn't contain True or False
            return {
                'True': True,
                'False': False
            }.get(os.environ.get(var), default)

        def resolverelative(category, key):
            # Allow ~
            self.config[category][key] = path.expanduser(
                self.config[category][key])
            # Resolve relative paths
            self.config[category][key] = path.join(context.topdir,
                                                   self.config[category][key])

        if not hasattr(self.context, "bootstrapped"):
            self.context.bootstrapped = False

        # Read the per-checkout .servobuild TOML config, if one exists.
        config_path = path.join(context.topdir, ".servobuild")
        if path.exists(config_path):
            with open(config_path) as f:
                self.config = toml.loads(f.read())
        else:
            self.config = {}

        # Handle missing/default items
        self.config.setdefault("tools", {})
        default_cache_dir = os.environ.get("SERVO_CACHE_DIR",
                                           path.join(context.topdir, ".servo"))
        self.config["tools"].setdefault("cache-dir", default_cache_dir)
        resolverelative("tools", "cache-dir")

        default_cargo_home = os.environ.get(
            "CARGO_HOME", path.join(context.topdir, ".cargo"))
        self.config["tools"].setdefault("cargo-home-dir", default_cargo_home)
        resolverelative("tools", "cargo-home-dir")

        context.sharedir = self.config["tools"]["cache-dir"]

        self.config["tools"].setdefault("system-rust", False)
        self.config["tools"].setdefault("system-cargo", False)
        self.config["tools"].setdefault("rust-root", "")
        self.config["tools"].setdefault("cargo-root", "")
        # Without a system cargo, pin cargo-root to the cached build dir.
        if not self.config["tools"]["system-cargo"]:
            self.config["tools"]["cargo-root"] = path.join(
                context.sharedir, "cargo", self.cargo_build_id())
        self.config["tools"].setdefault(
            "rustc-with-gold", get_env_bool("SERVO_RUSTC_WITH_GOLD", True))

        # https://github.com/rust-lang/rust/pull/39754
        platforms_with_rustc_alt_builds = [
            "unknown-linux-gnu", "apple-darwin", "pc-windows-msvc"
        ]
        # Default to LLVM assertions unless this host has an alternate
        # (assertion-free) rustc build available, or the env var forces them.
        llvm_assertions_default = ("SERVO_RUSTC_LLVM_ASSERTIONS" in os.environ
                                   or host_platform()
                                   not in platforms_with_rustc_alt_builds)

        self.config.setdefault("build", {})
        self.config["build"].setdefault("android", False)
        self.config["build"].setdefault("mode", "")
        self.config["build"].setdefault("llvm-assertions",
                                        llvm_assertions_default)
        self.config["build"].setdefault("debug-mozjs", False)
        self.config["build"].setdefault("ccache", "")
        self.config["build"].setdefault("rustflags", "")
        self.config["build"].setdefault("incremental", False)

        self.config.setdefault("android", {})
        self.config["android"].setdefault("sdk", "")
        self.config["android"].setdefault("ndk", "")
        self.config["android"].setdefault("toolchain", "")
        self.config["android"].setdefault("platform", "android-18")
        self.config["android"].setdefault("target", "arm-linux-androideabi")

        self.set_use_stable_rust(False)
Beispiel #56
0
def parse_toml(filename):
    """Read *filename* via ``toml_file`` and return the parsed TOML data."""
    return toml.loads(toml_file(filename))
Beispiel #57
0
def check_config_file(config_file, print_text=True):
    """Validate the tidy TOML config file.

    Yields ``(config_file, line_number, message)`` tuples for every problem
    found: unknown tables, unknown keys, and ignore entries pointing at
    files or directories that do not exist. Exits the process when the
    config file itself is missing.
    """
    # Check if config file exists
    if not os.path.exists(config_file):
        print("%s config file is required but was not found" % config_file)
        sys.exit(1)

    # Load configs from servo-tidy.toml
    with open(config_file) as content:
        conf_file = content.read()
        lines = conf_file.splitlines(True)

    if print_text:
        # Fixed: was a Python 2 print statement, a SyntaxError on Python 3.
        print('\rChecking the config file...')

    config_content = toml.loads(conf_file)
    exclude = config_content.get("ignore", {})

    # Check for invalid listed ignored directories (entries under known
    # skip roots are tolerated even if absent).
    exclude_dirs = exclude.get("directories", [])
    skip_dirs = ["./target", "./tests"]
    invalid_dirs = [
        d for d in exclude_dirs
        if not os.path.isdir(d) and not any(s in d for s in skip_dirs)
    ]

    # Check for invalid listed ignored files
    invalid_files = [
        f for f in exclude.get("files", []) if not os.path.exists(f)
    ]

    current_table = ""
    for idx, line in enumerate(lines):
        # Ignore comment lines
        if line.strip().startswith("#"):
            continue

        # Check for invalid tables (raw strings avoid invalid-escape
        # warnings on the regex patterns)
        if re.match(r"\[(.*?)\]", line.strip()):
            table_name = re.findall(r"\[(.*?)\]", line)[0].strip()
            if table_name not in ("configs", "blocked-packages", "ignore",
                                  "check_ext"):
                yield config_file, idx + 1, "invalid config table [%s]" % table_name
            current_table = table_name
            continue

        # Print invalid listed ignored directories
        if current_table == "ignore" and invalid_dirs:
            for d in invalid_dirs:
                if line.strip().strip('\'",') == d:
                    yield config_file, idx + 1, "ignored directory '%s' doesn't exist" % d
                    invalid_dirs.remove(d)
                    break

        # Print invalid listed ignored files
        if current_table == "ignore" and invalid_files:
            for f in invalid_files:
                if line.strip().strip('\'",') == f:
                    yield config_file, idx + 1, "ignored file '%s' doesn't exist" % f
                    invalid_files.remove(f)
                    break

        # Skip if there is no equal sign in line, assuming it's not a key
        if "=" not in line:
            continue

        key = line.split("=")[0].strip()

        # Check for invalid keys inside [configs] and [ignore] table
        if (current_table == "configs" and key not in config
                or current_table == "ignore" and key not in config["ignore"] or
                # Any key outside of tables
                current_table == ""):
            yield config_file, idx + 1, "invalid config key '%s'" % key

    # Parse config file
    parse_config(config_content)
Beispiel #58
0
def check_lock(file_name, contents):
    """Check a Cargo lockfile for duplicate and blocked packages.

    Yields ``(1, message)`` tuples for each violation. Files not ending in
    ``.lock`` are skipped.
    """
    def find_reverse_dependencies(name, content):
        # Yield (package_name, dependency_string) for every package —
        # including the optional [root] package — that depends on `name`.
        for package in itertools.chain([content.get("root", {})],
                                       content["package"]):
            for dependency in package.get("dependencies", []):
                if dependency.startswith("{} ".format(name)):
                    yield package["name"], dependency

    if not file_name.endswith(".lock"):
        # Fixed: `raise StopIteration` inside a generator is a RuntimeError
        # on Python 3.7+ (PEP 479); a bare return ends iteration cleanly.
        return

    # Package names to be neglected (as named by cargo)
    exceptions = config["ignore"]["packages"]

    content = toml.loads(contents)

    # Map each package name to its list of (version, source) pairs,
    # skipping [replace]d packages and normalizing the crates.io source.
    packages_by_name = {}
    for package in content.get("package", []):
        if "replace" in package:
            continue
        source = package.get("source", "")
        if source == r"registry+https://github.com/rust-lang/crates.io-index":
            source = "crates.io"
        packages_by_name.setdefault(package["name"], []).append(
            (package["version"], source))

    # Fixed: dict.iteritems() does not exist on Python 3; items() works.
    for (name, packages) in packages_by_name.items():
        has_duplicates = len(packages) > 1
        duplicates_allowed = name in exceptions

        # Report both unexpected duplicates and stale duplicate exceptions.
        if has_duplicates == duplicates_allowed:
            continue

        if duplicates_allowed:
            message = 'duplicates for `{}` are allowed, but only single version found'.format(
                name)
        else:
            message = "duplicate versions for package `{}`".format(name)

        packages.sort()
        packages_dependencies = list(find_reverse_dependencies(name, content))
        for version, source in packages:
            short_source = source.split("#")[0].replace("git+", "")
            message += "\n\t\033[93mThe following packages depend on version {} from '{}':\033[0m" \
                       .format(version, short_source)
            for name, dependency in packages_dependencies:
                if version in dependency and short_source in dependency:
                    message += "\n\t\t" + name
        yield (1, message)

    # Check to see if we are transitively using any blocked packages
    for package in content.get("package", []):
        package_name = package.get("name")
        package_version = package.get("version")
        for dependency in package.get("dependencies", []):
            dependency = dependency.split()
            dependency_name = dependency[0]
            whitelist = config['blocked-packages'].get(dependency_name)
            if whitelist is not None:
                if package_name not in whitelist:
                    fmt = "Package {} {} depends on blocked package {}."
                    message = fmt.format(package_name, package_version,
                                         dependency_name)
                    yield (1, message)
import random
from random import randint
import os
import time
from pymongo import MongoClient
import commands
import json
import re
import pdb
import requests, json
import sys
import urllib
# NOTE(review): `global` at module scope is a no-op — these names are
# already module-level.
global reg, a, b

# Development config supplies the MongoDB connection string.
with open("/var/chandni-chowk/configs/app.development.toml") as conffile:
    config = toml.loads(conffile.read())
mongo_client = MongoClient(config['app']['mongo_conn_str'])
db = mongo_client.dsp

# Test config supplies the region list used by the loop below.
with open("/var/chandni-chowk/configs/app.test.toml") as conffile1:
    config1 = toml.loads(conffile1.read())
reg = config1["app"]["regions"]
#print reg

for i in range(0, 1000):

    def mul_random_regs():
        regions = [
            'Delhi NCR', 'Hyderabad', 'Bangalore', 'TN/Pondicherry', 'Kerala',
            'Pun/Har/Cha/HP/J%26K', 'Uttar Pradesh', 'West Bengal',
            'North East', 'Orissa', 'Jharkhand', 'Bihar', 'Maharashtra/Goa',
Beispiel #60
0
 def from_content(cls, content: str) -> Any:
     """Load toml from string."""
     cls._check_toml()
     return toml.loads(content)