Example #1
def postgres(schema, tables, data_directory, psql_path, **params):
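    # local.get tries 'psql' on PATH first and falls back to the supplied psql_path.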
    psql = local.get('psql', psql_path)
    data_directory = Path(data_directory)
    logger.info('Initializing PostgreSQL...')
    engine = init_database(
        'postgresql', params, schema, isolation_level='AUTOCOMMIT'
    )

    query = "COPY {} FROM STDIN WITH (FORMAT CSV, HEADER TRUE, DELIMITER ',')"
    database = params['database']
    for table in tables:
        src = data_directory / '{}.csv'.format(table)
        load = psql[
            '--host',
            params['host'],
            '--port',
            params['port'],
            '--username',
            params['user'],
            '--dbname',
            database,
            '--command',
            query.format(table),
        ]
        with local.env(PGPASSWORD=params['password']):
            with src.open('r') as f:
                load(stdin=f)

    engine.execute('VACUUM FULL ANALYZE')
Example #2
def get_gpus():
    gpu_count = local["lspci"]().count(
        "VGA"
    )  # BEWARE: this doesn't reflect the true GPU count; remember the ASPEED device in lspci
    gpu_array = []
    if gpu_count == 1:
        return [], [], []
    for gpu_num in range(0, gpu_count - 1):
        gpu_fields = {}
        arg_arr = [
            '-i',
            str(gpu_num),
            '--query-gpu=gpu_name,gpu_bus_id,vbios_version,serial,memory.total',
            '--format=csv,noheader,nounits'
        ]
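        # Query nvidia-smi for this GPU; run() returns (retcode, stdout, stderr).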
        gpu_info = local.get("nvidia-smi").run(arg_arr)[1]

        gpu_arr = gpu_info.strip().split(", ")
        gpu_fields["Name"] = gpu_arr[0]
        gpu_fields['PCIID'] = gpu_arr[1]
        gpu_fields['Bios Version'] = gpu_arr[2]
        gpu_fields['Serial'] = gpu_arr[3]
        gpu_fields['Memory'] = gpu_arr[4]

        gpu_array.append(gpu_fields)

    return {
        "GPUs": gpu_array
    }, gpu_array, {
        "Name": "GPUs",
        "children": add_tree_specific_fields(gpu_array, 'gpu')
    }
Example #3
    def test_get(self):
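        # local.get resolves the first existing command and raises CommandNotFound otherwise.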
        assert str(local['ls']) == str(local.get('ls'))
        assert str(local['ls']) == str(local.get('non_exist1N9', 'ls'))

        with pytest.raises(CommandNotFound):
            local.get("non_exist1N9")
        with pytest.raises(CommandNotFound):
            local.get("non_exist1N9", "non_exist1N8")
        with pytest.raises(CommandNotFound):
            local.get("non_exist1N9", "/tmp/non_exist1N8")
Example #4
 def __init__(self, fzf_path=None):
     '''
     A SystemError will be raised if fzf is not found.
     '''
     try:
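         # Use the explicit fzf_path when given; otherwise look up 'fzf' on PATH.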
         if fzf_path:
             self.fzf = local.get(fzf_path)
         else:
             self.fzf = local['fzf']
     except plumbum.CommandNotFound:
         raise SystemError("Cannot find 'fzf' ( {0} )".format(FZF_URL))
Example #5
    def run(self):
        from plumbum import local, FG
        with local.cwd("rust"):
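            # Look for an existing Rust toolchain; install it via rustup if missing.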
            try:
                cargo = local.get('cargo')
                rustc = local.get('rustc')
            except Exception as e:
                print("Installing Rust...")
                rustup = local['curl']['--proto']['=https']['--tlsv1.2'][
                    '-sSf']['https://sh.rustup.rs']
                sh = local['sh']
                if os.isatty(sys.stdout.fileno()):
                    (rustup | sh) & FG
                else:
                    (rustup | sh['-s']['--']['-y'])()
                cargo = local.get(
                    'cargo',
                    os.environ.get('HOME', 'root') + '/.cargo/bin/cargo')
            finally:
                if debug:
                    cargo["build"]["--features"][
                        "bindings cli null-plugins"] & FG
                else:
                    cargo["build"]["--release"]["--features"][
                        "bindings cli null-plugins"] & FG

        local['mkdir']("-p", py_target_dir)
        sys.path.append("python/tools")
        import add_swig_directives
        add_swig_directives.run(include_dir + "/dqcsim-py.h",
                                py_target_dir + "/dqcsim.i")

        local["swig"]["-v"]["-python"]["-py3"]["-outdir"][py_target_dir]["-o"][
            py_target_dir + "/dqcsim.c"][py_target_dir + "/dqcsim.i"] & FG

        _build.run(self)
Example #6
def postgres(schema, tables, data_directory, psql_path, **params):
    psql = local.get('psql', psql_path)
    data_directory = Path(data_directory)
    logger.info('Initializing PostgreSQL...')
    engine = init_database('postgresql', params, schema,
                           isolation_level='AUTOCOMMIT')

    query = "COPY {} FROM STDIN WITH (FORMAT CSV, HEADER TRUE, DELIMITER ',')"
    database = params['database']
    for table in tables:
        src = data_directory / '{}.csv'.format(table)
        load = psql['--host', params['host'], '--port', params['port'],
                    '--username', params['user'], '--dbname', database,
                    '--command', query.format(table)]
        with local.env(PGPASSWORD=params['password']):
            with src.open('r') as f:
                load(stdin=f)

    engine.execute('VACUUM FULL ANALYZE')
Example #7
 def main(self, *svc_names):
     logs = [
         self.parent.logs_dir / svc.name / 'current'
         for svc in self.parent.svc_map(svc_names or self.parent.svcs)
     ]
     if self.debug:
         logs.append(self.parent.logs_dir / '.s6-svscan' / 'current')
     if self.follow:
         with suppress(KeyboardInterrupt):
             try:
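                 # Prefer lnav, then multitail; fall back to plain tail if neither is installed.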
                 mtail = local.get('lnav', 'multitail')
             except CommandNotFound:
                 tail[['-n', self.lines, '-F'] + logs].run_fg()
             else:
                 mtail[logs].run_fg()
     else:
         for log in logs:
             if log.is_file():
                 tail['-vn', self.lines, log].run_fg()
                 print('\n')
Example #8
 def test_get(self):
     self.assertEqual(str(local['ls']), str(local.get('ls')))
     self.assertEqual(str(local['ls']), str(local.get('non_exist1N9', 'ls')))
     self.assertRaises(CommandNotFound, lambda: local.get("non_exist1N9"))
     self.assertRaises(CommandNotFound, lambda: local.get("non_exist1N9", "non_exist1N8"))
     self.assertRaises(CommandNotFound, lambda: local.get("non_exist1N9", "/tmp/non_exist1N8"))
Example #9
DATA = {
    "py3": True,
    "make_secret": lambda: sha1(os.urandom(48)).hexdigest(),
    "myvar": "awesome",
    "what": "world",
    "project_name": "Copier",
    "version": "2.0.0",
    "description": "A library for rendering projects templates",
}

COPIER_CMD = local.get(
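    # The first candidate that actually exists on this machine wins.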
    # Allow debugging in VSCode
    # HACK https://github.com/microsoft/vscode-python/issues/14222
    str(Path(sys.executable).parent / "copier.cmd"),
    str(Path(sys.executable).parent / "copier"),
    # Poetry installs the executable as copier.cmd in Windows
    "copier.cmd",
    "copier",
)

# Executing copier this way allows to debug subprocesses using debugpy
# See https://github.com/microsoft/debugpy/issues/596#issuecomment-824643237
COPIER_PATH = (sys.executable, "-m", "copier")

# Helpers to use with tests designed for old copier bracket envops defaults
BRACKET_ENVOPS = {
    "autoescape": False,
    "block_end_string": "%]",
    "block_start_string": "[%",
    "comment_end_string": "#]",
Example #10
DYN_O_MITE_DEFAULTS = dict(
    secure_server_option='datacenter',
    pem_key_file='conf/dynomite.pem',
    data_store=0,
    datastore_connections=1,
)
INTERNODE_LISTEN = 8101
CLIENT_LISTEN = 8102
REDIS_PORT = 1212
STATS_PORT = 22222
BASE_IPADDRESS = quad2int('127.0.1.1')
RING_SIZE = 2**32

SETTLE_TIME = 5

# Prefer the bundled test redis-server binary, falling back to one on PATH.
redis = local.get('./test/_binaries/redis-server', 'redis-server')
with local.cwd('src'):
    # Removed './test/_binaries/dynomite'
    dynomite = local.get('./dynomite')


@contextmanager
def launch_redis(ip):
    logfile = 'logs/redis_{}.log'.format(ip)
    f = (redis['--requirePass', 'testpass', '--masterauth', 'testpass',
               '--bind', ip, '--port', REDIS_PORT] > logfile) & BG(-9)
    try:
        #print("Redis future: {}".format(str(f)))
        #print("Redis started, pid: {}".format(f.proc.pid))
        yield RedisNode(ip, REDIS_PORT)
    finally:
        f.proc.kill()
        f.wait()
Example #11
DYN_O_MITE_DEFAULTS = dict(
    secure_server_option='datacenter',
    pem_key_file='conf/dynomite.pem',
    data_store=0,
    datastore_connections=1,
)
INTERNODE_LISTEN = 8101
CLIENT_LISTEN = 8102
REDIS_PORT = 1212
STATS_PORT = 22222
BASE_IPADDRESS = quad2int('127.0.1.1')
RING_SIZE = 2**32

SETTLE_TIME = 5

redis = local.get('./test/_binaries/redis-server', 'redis-server')
with local.cwd('src'):
    # Removed './test/_binaries/dynomite'
    #dynomite = local.get('./dynomite')
    pass

@contextmanager
def launch_redis(ip):
    logfile = 'logs/redis_{}.log'.format(ip)
    f = (redis['--requirePass', 'testpass', '--masterauth', 'testpass', '--bind', ip, '--port', REDIS_PORT] > logfile) & BG(-9)
    try:
        yield RedisNode(ip, REDIS_PORT)
    finally:
        f.proc.kill()
        f.wait()

def pick_tokens(count, start_offset):
Example #12
# pansearch_full_path = "{{ pansearch_full_path }}"
pansearch_full_path="/var/log"

catalogs_list = pansearch_full_path.split(' ')

#working_dir="/wrk/pansearch"
working_dir="./"

mask_finded_results = False  
scanning_only_gz = False #"{{ PANSEARCH_SCANNING_ONLY_GZ }}" 
unzip_before = False
send_email_with_results = True 
write_pan_to_log = False 
write_cvv_to_log = True 
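# Resolve the hostname command and run it immediately to record where the scan runs.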
current_hostname = local.get("hostname")()

stoplist = ["pareq","pares","audispd","panHash","INT_REF","</MD>","a3=","irn"]
cc_validator = r"""4[0-9]{12}([0-9]{3})|[25][1-7][0-9]{14}|6(011|5[0-9][0-9])[0-9]{12}|3[47][0-9]{13}|3(0[0-5]|[68][0-9])[0-9]{11}|(2131|1800|35\d{3})\d{11}"""

scan_failed = { 1: "not running at this time", 2: "completed"}
scan_status = 0

overall_before_status_lock = join(working_dir, "pansearch.lock")

log_file = join(working_dir, "pansearch_scanning.log")
timestamp_file = join(working_dir, "pansearch_timestamp.conf")
files_to_current_scan = join(working_dir, "pansearch_files_to_scan.db")
pansearch_files_list = join(working_dir, "pansearch.db")

log_file_gz = join(working_dir, "pansearch_scanning.log.gz")
Example #13
def haup(name):
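    # Run "haup <name>" in the foreground so its output streams to the terminal.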
    local.get('haup')[name] & FG
Example #14
def hadown(name):
    local.get('hadown')[name] & FG
Example #15
        INTERFACE64='1',
        BINARY='64'),
    Job(TOOLCHAIN='mingw',
        TARGET='NEHALEM',
        ARCH='x86',
        INTERFACE64='0',
        BINARY='32'),
    Job(TOOLCHAIN='mingw',
        TARGET='HASWELL',
        ARCH='x86-64',
        INTERFACE64='1',
        BINARY='64')
]

with local.cwd(local.cwd / 'OpenBLAS'):
    for job in jobs:
        with local.env(PATH=(mingw64 if job.ARCH == 'x86-64' else mingw32) +
                       ';C:/msys64/usr/bin/'):
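            # make is resolved against the toolchain-specific PATH set just above.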
            make = local.get('make')

            make['TARGET={0}'.format(job.TARGET), 'USE_THREAD=0',
                 'NO_LAPACK=1', 'NO_LAPACKE=1', 'ONLY_CBLAS=1',
                 'INTERFACE64={0}'.format(job.INTERFACE64),
                 'BINARY={0}'.format(job.BINARY)] & FG

            make['PREFIX=../opt/{0}/{1}/{2}/{3}'.format(
                platform.system().lower(), job.TOOLCHAIN, job.TARGET,
                job.ARCH), 'install'] & FG

            make['clean'] & FG