def test_cmdline_vars(self, monkeypatch):
     monkeypatch.setattr(loader, "get_tasks", mock_get_tasks)
     mock_run = Mock()
     monkeypatch.setattr(doit_cmd, "doit_run", mock_run)
     doit_cmd.cmd_main(['x=1', 'y=abc'])
     assert '1' == get_var('x')
     assert 'abc' == get_var('y')
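
Note: get_var reads key=value pairs passed on the doit command line; the second argument is the default returned when the variable is absent, and values supplied on the command line always arrive as strings.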
Example #2
def get_k8s_config():
    global _k8s_config

    if _k8s_config is not None:
        return dict(_k8s_config)

    tier = get_var('tier', None)
    storage_driver = get_var('storage_driver', 'overlay2')
    if tier is None:
        return {
            'error':
            "Please set the tier on the command-line, for "
            "example `tier=dev` or `tier=prod`"
        }

    if os.path.exists('config.yml'):
        import yaml

        with open('config.yml', encoding='utf-8') as fp:
            config = yaml.safe_load(fp)

        if tier not in config:
            return {
                'error': "config.yml doesn't have an entry for tier=%s" % tier
            }
        _k8s_config = config[tier]
    else:
        sys.stderr.write("config.yml doesn't exist, using default values\n")
        _k8s_config = {}
    _k8s_config['tier'] = tier
    _k8s_config['storage_driver'] = storage_driver
    if TAG:
        _k8s_config['tag'] = TAG
    return dict(_k8s_config)
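
A task consuming this helper would typically check the 'error' key before using the values; a minimal sketch under that assumption (task_deploy and its action are illustrative, not part of the original file):

def task_deploy():
    def check_config():
        # Hypothetical consumer of get_k8s_config(), shown for context only
        cfg = get_k8s_config()
        if 'error' in cfg:
            print(cfg['error'])
            return False  # returning False marks a doit python-action as failed
        return True
    return {'actions': [check_config]}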
Example #3
def task_config():
    '''
    write config.yml -> .config.yml
    '''
    log_level = 'WARNING'
    filename = PROJDIR + '/LOG_LEVEL'
    if os.path.isfile(filename):
        log_level = open(filename).read().strip()
    log_level = get_var('LOG_LEVEL', log_level)
    if log_level not in LOG_LEVELS:
        raise UnknownLogLevelError(log_level)
    punch = f'''
    logging:
        loggers:
            api:
                level: {log_level}
        handlers:
            console:
                level: {log_level}
    '''
    return {
        'actions': [
            f'echo "cp {CONFIG_YML}\n-> {DOT_CONFIG_YML}"',
            f'echo "setting LOG_LEVEL={log_level}"',
            f'cp {CONFIG_YML} {DOT_CONFIG_YML}',
            lambda: _update_config(DOT_CONFIG_YML, yaml.safe_load(punch)),
        ]
    }
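
_update_config is referenced but not defined in this snippet. A plausible minimal implementation (an assumption, not the project's actual helper) deep-merges the punch dict into the target YAML file:

import yaml

def _update_config(path, patch):
    """Hypothetical helper: deep-merge patch into the YAML file at path."""
    with open(path, encoding='utf-8') as fp:
        data = yaml.safe_load(fp) or {}

    def merge(dst, src):
        # Recurse into nested dicts; scalar values in patch win
        for key, value in src.items():
            if isinstance(value, dict) and isinstance(dst.get(key), dict):
                merge(dst[key], value)
            else:
                dst[key] = value

    merge(data, patch)
    with open(path, 'w', encoding='utf-8') as fp:
        yaml.safe_dump(data, fp, default_flow_style=False)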
Example #4
def task_config():
    '''
    write config.yml -> .config.yml
    '''
    log_level = 'WARNING'
    filename = '{0}/LOG_LEVEL'.format(os.path.dirname(__file__))
    if os.path.isfile(filename):
        log_level = open(filename).read().strip()
    log_level = get_var('LOG_LEVEL', log_level)
    if log_level not in LOG_LEVELS:
        raise UnknownLogLevelError(log_level)
    punch = '''
    logging:
        loggers:
            api:
                level: {log_level}
        handlers:
            console:
                level: {log_level}
    '''.format(**locals())
    return {
        'actions': [
            'echo "cp {CONFIG_YML}\n-> {DOT_CONFIG_YML}"'.format(**globals()),
            'echo "setting LOG_LEVEL={log_level}"'.format(**locals()),
            'cp {CONFIG_YML} {DOT_CONFIG_YML}'.format(**globals()),
            lambda: _update_config(DOT_CONFIG_YML, yaml.safe_load(punch)),
        ]
    }
Example #5
def task_couchapp_push():
    """
    Push a couchapp to a CouchDB server
    """

    config = {"app": doit.get_var('app', '')}
    return {
        'actions': ['couchapp push simpleshelf/ %s' % config.get('app')]
    }
Example #6
def task_push():
    for name in ['web', 'builder', 'runner']:
        args = dict(prefix=PREFIX,
                    image=name,
                    registry=get_var('registry', 'unset.example.org'),
                    tag=TAG)
        yield {
            'name': name,
            'actions': [
                'docker tag {prefix}{image} {registry}/{image}{tag}'.format(
                    **args),
                'docker push {registry}/{image}{tag}'.format(**args),
            ],
            'task_dep': ['build:{0}'.format(name)],
        }
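
Each yielded dict becomes a doit subtask, so a single image can be pushed with, for example, doit push:web registry=registry.example.org (the registry value is illustrative).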
Example #7
def compile_mql(module):
    mt4_compiler = get_var('mt4_compiler', None)
    if mt4_compiler and os.path.exists(mt4_compiler):
        logging.info('mt4 compile with %s' % mt4_compiler)
    else:
        # No usable compiler path given; fall back to a downloaded copy
        download_mt4_compiler_if_not_exists('mql.exe')
        mt4_compiler = 'mql.exe'
    p = subprocess.Popen([mt4_compiler, module + '.mq4'])
    p.wait()
    if p.returncode == 0:  # `is 0` relies on int interning; compare with ==
        logging.info('%s compile success' % module)
    else:
        logging.error('%s compile error' % module)
Example #8
MODEL = model_name
SDIR = os.path.dirname(cocopod.__file__)
print(SDIR)
print(os.getcwd())

DOIT_CONFIG = {
    'default_tasks': ['make_homology_models'],
    'num_process': multiprocessing.cpu_count(),
    'par_type': 'thread',
}

from doit.tools import create_folder
from doit import get_var

N_fold = int(get_var('N_fold', N_fold))
N_homology = int(get_var('N_homology', N_homology))


def task_make_config():
    return {
        'file_dep': ['make_config.py'],
        'targets': [MODEL + ".json"],
        'actions': ["python %(dependencies)s"],
        'clean': True
    }


def task_make_helix():
    return {
        'file_dep': [MODEL + ".json"],
Example #9
 def test_cmdline_vars(self, monkeypatch):
     mock_run = Mock()
     monkeypatch.setattr(Run, "execute", mock_run)
     cmd_main(['x=1', 'y=abc'])
     assert '1' == get_var('x')
     assert 'abc' == get_var('y')
Example #10
import matplotlib.pyplot as plt
import seaborn
seaborn.set(style="white")
seaborn.set_context(rc={'lines.markeredgewidth': 0.1})
from matplotlib.markers import MarkerStyle
import subprocess
import shutil
import re
import time
from doit import get_var

from utils import write_to_csv

# Modify these by supplying key=val at the end of a doit-command
params = {
    'n': get_var('n', ''),
    't': get_var('t', 1.0),
    'seed': get_var('seed', 10),
    'n_copies': get_var('n_copies', 1),
    'max_procs': get_var('max_procs', 8)}
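# Example invocation (hypothetical values):
#     doit n=mynet t=0.5 seed=42 n_copies=2 max_procs=4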


network = params['n']
t = float(params['t'])
seed = int(params['seed'])
n_copies = int(params['n_copies'])
max_procs = int(params['max_procs'])

bench_loc = os.getenv('NENGO_MPI_BENCH_HOME')
nengo_mpi_loc = os.path.join(os.getenv("HOME"), "nengo_mpi")
print("NENGO_MPI_BENCH_HOME: %s" % bench_loc)
Example #11
with open('logging.yml') as cfg:
    logging.config.dictConfig(yaml.safe_load(cfg))

logger = logging.getLogger()

from doit import get_var
from roald import Roald
from rdflib.graph import Graph, URIRef
import csv
import time
import json
import data_ub_tasks

config = {
    'dumps_dir': get_var('dumps_dir', '/opt/data.ub/www/default/dumps'),
    'dumps_dir_url': get_var('dumps_dir_url', 'http://data.ub.uio.no/dumps'),
    'graph': 'http://data.ub.uio.no/realfagstermer',
    'fuseki': 'http://*****:*****@gmail.com',
    'es_index': 'authority'
}

DOIT_CONFIG = {
    'default_tasks': [
        'fetch_core:src/sonja_todo.json',
        'git-push',
        'build-solr-json',
        'publish-dumps',
Example #12
 def test_cmdline_vars_not_opts(self, monkeypatch):
     monkeypatch.setattr(loader, "get_tasks", mock_get_tasks)
     mock_run = Mock()
     monkeypatch.setattr(doit_cmd, "doit_run", mock_run)
     doit_cmd.cmd_main(['--z=5'])
     assert None == get_var('--z')
Example #13
# Doit configuration
#
DOIT_CONFIG = {
    # 0 := don't print anything
    # 1 := print stderr only
    # 2 := print stderr and stdout
    'verbosity': 1,

    # Use multi-processing / parallel execution of tasks
    # Better to let Phrasal pipeline run sequentially so that
    # each task can use all cores.
    'num_process': 1
}

# Get conf file from command-line arguments
ARGS = {"conf": get_var('conf', None)}
err = lambda x: sys.stderr.write(x + os.linesep)
if not ARGS['conf']:
    err('Usage: %s conf=<file>' % (basename(sys.argv[2])))
    sys.exit(-1)

conf_file = ARGS['conf']
if not os.path.exists(conf_file):
    # Relative path
    conf_file = os.path.join(doit.get_initial_workdir(), conf_file)
    if not os.path.exists(conf_file):
        err('Configuration file not found: ' + conf_file)
        sys.exit(-1)

# Conf file format is YAML. Parse it.
with open(conf_file) as fd:
Example #14
from doit import get_var

config = {"abc": get_var('abc', 'NO')}

def task_echo():
    return {'actions': ['echo hi %s' % config],
            'verbosity': 2,
            }
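
Running this dodo file as doit abc=xyz substitutes {'abc': 'xyz'} into the echo action; without the command-line argument the default 'NO' is used.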
Example #15
from doit import get_var

conf = configparser.ConfigParser()
""" The path to the config file is usually read from
the application argument "conf=<file>" - this is done in the Try block.

When we are in a subprocess (especially in windows) this argument is not available
and get_var does not work properly. So when get_var fails we are (usually) in a
subprocess and the parent process has stored the conf path in a file ".<parent_pid>.txt"
so we use that to determine the conf path.

We use the parent_pid as the identifier for the file to allow multiple instances of
uristmaps to be running at the same time.

TODO: Delete the pidfile when done!

"""

try:
    conf_path = get_var("conf", "config.cfg")
except:
    # Reading conf path from pid file
    with open(".{}.txt".format(os.getppid())) as pidfile:
        conf_path = pidfile.read()

if not os.path.exists(conf_path):
    print("Config file '{}' not found!".format(conf_path))
    sys.exit(1)

conf.read(conf_path)
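
The parent side of this handshake appears in the render_layer example below (Example #19): before spawning workers it writes the conf path to a file named after its own pid. A minimal sketch of that writer side, assuming the same naming convention:

import os
from doit import get_var

# Parent process: persist the conf path so that child processes, where
# get_var() is unavailable, can read it back from ".<pid>.txt"
with open(".{}.txt".format(os.getpid()), "w") as pidfile:
    pidfile.write(get_var("conf", "config.cfg"))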
Example #16
import json
import os
from pathlib import Path

from doit import get_var
from doit.tools import config_changed

BASE_DIR = Path(__file__).parent
BUILD_DIR = BASE_DIR / "build"

VILLES_FICHIER = get_var("villes", BASE_DIR / "villes.json")
SVG_FILE = get_var("svg", BASE_DIR / "carte.svg")
WIDTH = get_var("width", 1000)
HEIGHT = get_var("height", 1000)

FILTER_IDF = (
    '["75", "77", "78", "91", "92", "93", "94", "95"].includes({0}.properties.code)'
)
FILTER_CORSE = '["2A", "2B"].includes({0}.properties.code)'
FILTER_RESTE = f"!({FILTER_IDF} || {FILTER_CORSE})"

os.environ["PATH"] = (str(BASE_DIR / "node_modules" / ".bin") + os.pathsep +
                      os.environ["PATH"])

with open(VILLES_FICHIER) as f:
    VILLES = json.load(f)

PROJECTION = "d3.geoConicConformal().parallels([44, 49]).rotate([-3, 0])"


def ensure_dir(dir):
Example #17
#Action = Union[str, Callable[[List[Type[P]], List[FPath]], None]]
Action = Union[str, Callable[[List[FPath], List[FPath]], ActionReturn]]
ActionReturn = Union[Failure, None, Dict[str, bool]]

# could use 'old'-style formatting; f"" strings will also work
# TODO: the below doesn't work
DOIT_CONFIG = {'action_string_formatting': 'both'}

Args = TypedDict('Args', 
        { 'config' : str,
          'outdir' : str,
          'ref'    : Optional[str],
          'R1'     : Optional[str],
          'R2'     : Optional[str],
          'sample' : Optional[str]})
args: Args = {"config": get_var('config', 'config.yaml'),
          "outdir": get_var('outdir', './re-out'),
          "ref"   : get_var('ref', None),
          "R1"    : get_var('r1', None),
          'R2'    : get_var('r2', None),
          'sample': get_var('sample', None)}

# these file names could be dynamically named after the sample by adding a "sample" value to
# the `Args` dict and using that within each relevant task's closure! 
[trimmed_r1, trimmed_r2, up_trimmed1, up_trimmed2] = \
        [ FPath('trimmed').fastq, FPath('Trimmed.R2').fastq, \
        Fastq('unpaired_fw.fq'), Fastq('unpaired_rev.fq')]
ref = Fasta(args['ref']) # type: ignore
index_files: List[FPath] = [ref.add_suffix(suf) for suf in  { '.amb', '.sa', '.ann', '.bwt', '.pac'} ]
unsorted_bam = Bam('bwa.bam')
sorted_bam = Bam("sorted.bam")
Example #18
def _get_passphrase():
    # get_var('passphrase') is already None when unset; `or None` also
    # normalizes an empty passphrase= argument to None
    return get_var('passphrase') or None
Example #19
def render_layer(level):
    """ Render all image tiles for the specified level.
    """
    biomes = load_biomes_map()
    structures = load_structures_map()

    # Determine which will be the first zoom level to use graphic tiles
    # bigger than 1px:
    zoom_offset = 0
    mapsize = 256
    while mapsize < biomes["worldsize"]:
        mapsize *= 2
        zoom_offset += 1

    # Zoom level 'zoom_offset' will be the first in which the world can
    # be rendered onto the map using 1px sized tiles.

    tile_amount = int(math.pow(2,level))

    graphic_size = int(math.pow(2, level - zoom_offset))

    # Don't render this layer if the world would not fit even with 1px tiles
    # TODO: Find a way to render this: only draw every <n> world tiles, or render big and scale down
    if graphic_size == 0:
        return

    # Read max number of processes
    process_count = conf.getint("Performance", "processes")

    # Split the number of tiles to render into equal parts, one per
    # process. This would be the ideal chunk size to keep processes from
    # coming back to the pool for more work, which apparently just costs
    # time.
    chunk = tile_amount ** 2

    # Limiting the chunk size gives more frequent progress-bar updates,
    # though it slows the operation down a bit (about 1.5s for zoom level 6)
    chunk = min(chunk, 2048)
    
    # Use at most as many processes as there are chunks so we don't have
    # more processes than there is work available.
    process_count = min(process_count, chunk)
    chunk //= process_count

    # Setup multiprocessing pool
    pool = Pool(process_count)

    # Load the tilesheet
    TILES = tilesets.get_tileset(graphic_size)

    # Save the path to the config file in a pid file for this process' children
    with open(".{}.txt".format(os.getpid()), "w") as pidfile:
        pidfile.write(get_var("conf", "config.cfg"))

    # Send the tile render jobs to the pool. Generates the parameters for each tile
    # with the get_tasks function.
    a = pool.imap_unordered(render_tile_mp, get_tasks(tile_amount, level, zoom_offset, biomes, structures, TILES), chunksize=chunk)

    counter = 0
    total = tile_amount**2

    # Show a nice progress bar with integrated ETA estimation
    with progress.Bar(label="Using {}px sized tiles ".format(graphic_size), expected_size=total) as bar:
        for b in a:
            counter += 1
            bar.show(counter)

    pool.close()
    pool.join()

    # Remove the pidfile containing the config path
    if os.path.exists(".{}.txt".format(os.getpid())):
        os.remove(".{}.txt".format(os.getpid()))
Example #20

import sys
if sys.version_info >= (3, 6):
    pass
else:
    sys.stdout.write("Sorry, requires Python 3.6+\n")
    sys.exit(1)

import configparser
from pathlib import Path

## read in parameters

from doit import get_var
sample = get_var('sample', None)
print("sample = {}".format(sample))

if sample == 'lowz':
    boxsize = 720.0
    working_directory = Path('./AbacusCosmos/AbacusCosmos_720box')
    redshift = 'z0.300'
    rmax = 110.0
    rmin = 0.01
    nbins = 80
    param_dir = str(Path('./Params/LOWZ_HOD'))

elif sample == 'lowz-phases':
    boxsize = 720.0
    working_directory = Path('./AbacusCosmos/AbacusCosmos_phases')
    redshift = 'z0.300'
Example #21
 def test_cmdline_vars_not_opts(self, monkeypatch):
     mock_run = Mock()
     monkeypatch.setattr(Run, "execute", mock_run)
     cmd_main(['--z=5'])
     assert None == get_var('--z')
Example #22
from ngs_doit.tools import FqPair
from typing import Optional, List, Tuple, Union, Any, Dict, Callable
from typing_extensions import TypedDict, Literal
from mypy_extensions import (Arg, DefaultArg, NamedArg, DefaultNamedArg,
                             VarArg, KwArg)
from pathlib import PurePosixPath
from ngs_doit import plotting
from ngs_doit import tools
from ngs_doit.custom_types import (Args, Job, IdxBamJob, TaskDepJob, Targets,
                                   FileDeps, Actions, Failure, PyAction,
                                   PathLike, Args)

# TODO: lofreq is python 3.6, so it won't install properly; it's also not in the conda requirements
# also lofreq comes with a suggested snakemake pipeline
args: Args = {
    "config": get_var('config', ''),
    "outdir": get_var('outdir', ''),
    "ref": get_var('ref', ''),
    "r1": get_var('r1', ''),
    "r2": get_var('r2', ''),
    "sample": get_var('sample', '')
}

DOIT_CONFIG = {'action_string_formatting': 'old'}

ref = args['ref']
inputR1_fq, inputR2_fq = args['r1'], args['r2']
# pipeline output
filtered1_fq, filtered2_fq = 'filtered.r1.fq', 'filtered.r2.fq'
ref_fai = 'ref.fasta.fai'
up1_fq, up2_fq, p1_fq, p2_fq = ('up1.fq', 'up2.fq', 'p1.fq', 'p2.fq')
"""
from pathlib import Path
from src.movie import make_movie

from doit import get_var

from ruamel import yaml

DOIT_CONFIG = {
    'verbosity': 2,
    'backend': 'json',
    'dep_file': '.doit_analyse.db',
}

config_file = get_var('config_file', 'configure.yaml')

with open(config_file, 'r') as f:
    conf = yaml.safe_load(f)


sequence_index = int(get_var('sequence_index', 0))
sequence_dir = Path(conf['run_dir']) / f"{sequence_index:04d}"

filenames = list(sequence_dir.iterdir())


def task_movie():
    for filename in filenames:
        run_number = int(filename.stem[-4:])
        yield {
Example #24
    'default_tasks': [
        'phase_one',
    ],
    'verbosity': 2,
}

#
# CONFIG (END)
###

###
# VARIABLES (START)
#

# CLI
config = {'issue': get_var('issue', '2021_02')}
issue = config['issue']

# Directories
# (1) Base
assets = 'assets'
# (2) Per-issue
home_dir = 'issues/' + issue
meta_dir = home_dir + '/meta'
conf_dir = home_dir + '/config'
src_dir = home_dir + '/src'
dist_dir = home_dir + '/dist'

# Time
now = datetime.datetime.now()
Example #25
from doit import get_var
from doit.exceptions import TaskFailed
from doit.tools import config_changed
import json
import os
import subprocess
import sys

DOIT_CONFIG = {
    'default_tasks': ['build', 'pull'],
    'continue': True,
}

PREFIX = get_var('prefix', 'reproserver-')
TAG = get_var('tag', '')
if TAG:
    TAG = ':%s' % TAG


def merge(*args):
    ret = {}
    for dct in args:
        ret.update(dct)
    return ret
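# e.g. merge({'a': 1}, {'b': 2}) == {'a': 1, 'b': 2}; on duplicate keys the
# last dict wins, because later update() calls overwrite earlier entries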


def exists(object, type):
    def wrapped():
        proc = subprocess.Popen(
            ['docker', 'inspect', '--type={0}'.format(type), '--', object],
            stdout=subprocess.PIPE,
Example #26
 def test_cmdline_vars(self, monkeypatch):
     mock_run = Mock()
     monkeypatch.setattr(Run, "execute", mock_run)
     cmd_main(['x=1', 'y=abc'])
     assert '1' == get_var('x')
     assert 'abc' == get_var('y')
Example #27
import os
pjoin = os.path.join
from shutil import rmtree
from doit import get_var

from scripts.git_helper import save_changed_images, save_git_info
from scripts.docker_builder import run_build
from scripts.docker_tester import run_test
from scripts.docker_pusher import run_push
from scripts.manifests import run_manifests

DOIT_CONFIG = dict(verbosity=2)

# get_var(<key>, <default_val>)
USE_STACK = get_var('stack_dir', 'images')


def task_prep():
    """Prep directory for logs and artifacts"""
    def _prep():
        # Create each output directory once, with an .empty placeholder file
        for d in ('artifacts', 'logs', 'manifests'):
            if not os.path.exists(d):
                os.mkdir(d)
                open(pjoin(d, '.empty'), 'a').close()

    return {
Example #28
BUILD_CONFIG_NAME = os.path.join(os.curdir, "build_config.yml")
SKIP_PACKAGES = [
    "cmake_installer",
]

# this should be configurable in build_spec
if platform.system().startswith("Darwin"):
    SKIP_PACKAGES.append("cuda_dev_config")
    SKIP_PACKAGES.append("nvpipe")

#
# These are commandline variables that are specified as follows:
# doit varname=value varname=value ...
#
global_config = {
    "build_spec": get_var("build_spec", "default_build.yml"),
    "build_folder": get_var("build_folder", "build"),
    "upload": get_var("upload", "false").lower() == "true",
    "profile_name": get_var("profile_name", "default"),
    "workspace": get_var("workspace", "false").lower() == "true",
    "deps_build_filter": get_var("deps_build_filter", "*"),
}
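# Example invocation (hypothetical values):
#     doit build_spec=my_build.yml upload=true workspace=true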


class Git(ConanGit):
    def update(self):
        if os.path.exists(self.folder):
            output = self.run("pull")
            return output
        else:
            raise ConanException(
Example #29
# Doit configuration
#
DOIT_CONFIG = {
    # 0 := don't print anything
    # 1 := print stderr only
    # 2 := print stderr and stdout
    'verbosity': 2,

    # Use multi-processing / parallel execution of tasks
    # Better to let Phrasal pipeline run sequentially so that
    # each task can use all cores.
    'num_process': 1
}

# Get conf file from command-line arguments
ARGS = {"conf": get_var('conf', None)}
err = lambda x: sys.stderr.write(x + os.linesep)
if not ARGS['conf']:
    err('Usage: %s conf=<file>' % (basename(sys.argv[2])))
    sys.exit(-1)

conf_file = ARGS['conf']
if not os.path.exists(conf_file):
    # Relative path
    conf_file = os.path.join(doit.get_initial_workdir(), conf_file)
    if not os.path.exists(conf_file):
        err('Configuration file not found: ' + conf_file)
        sys.exit(-1)

# Conf file format is YAML. Parse it.
with open(conf_file) as fd:
Example #30
from doit import get_var
from doit.action import CmdAction

VERSION = get_var('version', 'test')


def title(name):
    def r(f):
        def call():
            d = f()
            d['title'] = lambda _: name
            return d

        return call

    return r


@title('[git] update')
def task_pull():
    return {'actions': ['cd view; git pull', 'cd server; git pull']}


@title('[view] dependencies')
def task_install():
    return {
        'actions': ['cd view; npm install'],
        'targets': ['view/node_modules'],
        'file_dep': ['view/package.json'],
        'task_dep': ['pull']
    }
Example #31
import numpy as np
import h5py

from ruamel import yaml
from copy import deepcopy

DOIT_CONFIG = {
    'verbosity': 2,
    'backend': 'json',
    # 'default_tasks': [
    #     'merge',
    # ],
    'dep_file': '.doit_continue.db',
}

config_file = get_var('config_file', 'configure.yaml')

with open(config_file, 'r') as f:
    conf = yaml.safe_load(f)

build_dir = Path(conf['build_dir'])
build_dir.mkdir(parents=True, exist_ok=True)

run_dir = Path(conf['run_dir'])

# get_var returns None here unless sequence_index is given on the command
# line; int(None) then raises a TypeError, forcing you to specify it
sequence_index = int(get_var('sequence_index', None))
run_number = int(get_var('run_number', 0))

h5filepath = run_dir / \
Example #32
#
INCLUDE_DIR = os.path.join('include', PKG)
SOURCE_DIR = os.path.join('src', PKG)

HEADER_FILES = walk_ext(INCLUDE_DIR, '.hh')
HEADER_NAMES = [header for d, header in HEADER_FILES]

SOURCE_FILES = [(d, f) for d, f in walk_ext(SOURCE_DIR, '.cc') \
                if not d.endswith('benchmarks')]
SOURCE_NAMES = [source for d, source in SOURCE_FILES]

LIB_VERSION = '1'
LIBBUILDDIR = '_libbuild'
#LIBDIR = os.path.join('src', PKG)

CXX = get_var('CXX', os.environ.get('CXX', 'c++'))
INCLUDES = [
    '-I' + os.path.abspath('include/'), '-I.',
    '-I' + os.path.abspath('include/sourmash/')
]

# for anaconda
PREFIX = get_var('PREFIX', sysconfig.get_config_var('prefix'))
if PREFIX is not None:
    INCLUDES += ['-I' + os.path.join(PREFIX, 'include')]

if sys.platform == 'darwin':
    SHARED_EXT = 'dylib'
    SONAME = 'lib{0}.{1}.{2}'.format(PKG, SHARED_EXT, LIB_VERSION)
    SONAME_FLAGS = [
        '-install_name',
Example #33
from doit import get_var

config = {"abc": get_var('abc', 'NO')}


def task_echo():
    return {
        'actions': ['echo hi %s' % config],
        'verbosity': 2,
    }
Example #34
from doit import get_var

import os
import logging
import logging.config

logging.config.fileConfig('logging.cfg')
logger = logging.getLogger(__name__)
logging.getLogger('requests').setLevel(logging.WARNING)
logging.getLogger('rdflib').setLevel(logging.WARNING)
logging.getLogger('urllib3').setLevel(logging.WARNING)

import data_ub_tasks

config = {
    'dumps_dir': get_var('dumps_dir', '/opt/data.ub/www/default/dumps'),
    'dumps_dir_url': get_var('dumps_dir_url', 'http://data.ub.uio.no/dumps'),
    'graph': 'http://data.ub.uio.no/acm-ccs-ubo',
    'fuseki': 'http://*****:*****@gmail.com',
}


def task_txt2ttl():
    return {
        'doc': 'Convert txt files to ttl',
        'actions': [
            'mkdir -p dist',
Example #35
 def test_cmdline_vars_not_opts(self, monkeypatch):
     mock_run = Mock()
     monkeypatch.setattr(Run, "execute", mock_run)
     cmd_main(['--z=5'])
     assert None == get_var('--z')
Example #36
import copy
import os
import itertools
from doit import get_var
from doit.tools import run_once

import run
import plot
from mytools.bootstrap import Bootstrapper
from mytools import git
import numpy as np

DOIT_CONFIG = {'verbosity': 2}

if __name__ != "__main__":
    DOIT_CONFIG['datetime'] = get_var('datetime', '')

num_subtasks = 4

test_names = ['jump', 'hierarchical', 'deep']

date_time_string = DOIT_CONFIG.get('datetime', '')

if not date_time_string:
    date_time_string = str(datetime.datetime.now()).split('.')[0]
    # replace characters that are awkward in file names
    for ch in (":", " ", "-"):
        date_time_string = date_time_string.replace(ch, "_")

# experiment_directory = '/home/e2crawfo/hrr-scaling/experiments'
#experiment_directory = '/data/e2crawfo/hrr-scaling/experiments'
Example #37
#!/usr/bin/env doit

import sys
import configparser
from doit import get_var
from doit.action import CmdAction

config = dict(
    cfgfile=get_var("cfgfile", "cowbells.cfg"),
    geosec=get_var("geosec", "geo"),
    simsec=get_var("simsec", "sim"),
    geofile=get_var("geofile", "geo.json"),
    simfile=get_var("simfile", "sim.root"),
)
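# Example invocation (hypothetical values):
#     doit cfgfile=mydetector.cfg geofile=mygeo.json simfile=mysim.root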


def geometry(cfgfile, section, targets):
    from cowbells import geom, default

    default.all()

    cfg = configparser.ConfigParser()
    cfg.read(cfgfile)

    bmodname = cfg.get(section, "builder")
    bmodoptsec = cfg.get(section, "builder_options")
    bmodargs = dict(cfg.items(bmodoptsec))

    exec ("import " + bmodname)
    bmod = sys.modules[bmodname]
Example #38
DOIT_CONFIG = {
    "action_string_formatting": "old",
}

#
# CONFIG (END)
###

###
# VARIABLES (START)
#

# CLI
config = {
    'cat': get_var('cat', 'Name'),
    'acc': get_var('acc', 90),
}
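# Example invocation (hypothetical values): doit cat=Email acc=75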

category = str(config['cat'])
accuracy = int(config['acc'])

# Time
now = datetime.now()
timestamp = str(now.timestamp()).replace(".", "")

# Directories
backup_dir = "backups/"
src = "src/"
customers_dir = src + "customers/"
invoices_dir = src + "invoices/"
Example #39
from doit import get_var

from ruamel import yaml

DOIT_CONFIG = {
    'verbosity': 2,
    'backend': 'json',
    # 'default_tasks': [
    #     'groundstate',
    #     'realtime',
    #     'collect'
    # ],
    'dep_file': '.doit.db',
}

config_file = get_var('config_file', 'configure.yaml')

with open(config_file, 'r') as f:
    conf = yaml.safe_load(f)

build_dir = Path(conf['build_dir'])
build_dir.mkdir(parents=True, exist_ok=True)

run_dir = Path(conf['run_dir'])

sequence_index = int(get_var('sequence_index', h5tools.autosequence(run_dir)))
run_number = int(get_var('run_number', 0))

h5filepath = run_dir / \
    conf['h5filepath'].format(
        sequence_index=sequence_index, run_number=run_number)
Example #40
else:  # Darwin and Linux
    default['qt-static-qmake'] = os.path.join(default['qt-static-dir'], "bin",
                                              "qmake")
    default['sip-static-url'] = "https://sourceforge.net/projects/pyqt/files/sip/sip-4.18.1/sip-4.18.1.tar.gz"
    default['pyqt5-static-url'] = "https://sourceforge.net/projects/pyqt/files/PyQt5/PyQt-5.7/PyQt5_gpl-5.7.tar.gz"
    if default['target-system'] == "Darwin":
        default['pyqtdeploy-target'] = 'osx-64'
    else:  # Linux
        default['pyqtdeploy-target'] = 'linux-{}'.format(
            default['target-arch'])

config = {
    'pandoc': get_var('pandoc', default['pandoc']),
    'pylupdate5': get_var('pylupdate5', default['pylupdate5']),
    'lrelease': get_var('lrelease', default['lrelease']),
    'pyrcc5': get_var('pyrcc5', default['pyrcc5']),
    'pyuic5': get_var('pyuic5', default['pyuic5']),
    'pip': get_var('pip', default['pip']),
    'pyenv': get_var('pyenv', default['pyenv']),
    'pyqtdeploycli': get_var('pyqtdeploycli', default['pyqtdeploycli']),
    'gist':