Example #1
def write_stats(stats, filename):
    """Write stats file."""
    try:
        with open(filename, 'w') as fd:
            json.dump(stats, fd)
    except Exception:  # not a bare except, so SystemExit/KeyboardInterrupt still propagate
        fatal('Cannot write stats file: {}'.format(filename))
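Note: most of these snippets rely on a project-specific fatal() or log.fatal() helper that is not shown here. A minimal sketch of what such a helper typically looks like, with the name and signature assumed rather than taken from any of the projects above:

import logging
import sys

log = logging.getLogger(__name__)


def fatal(*messages):
    """Hypothetical helper: log an error message and abort the program."""
    # Accept print-style arguments, since several snippets pass more than one.
    text = ' '.join(str(msg) for msg in messages)
    log.error(text)
    sys.exit(text)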
Example #2
def hits(json_file):
    """Extract the blast hits from the blast json output file."""
    hits_list = []

    with open(json_file) as blast_file:
        raw = blast_file.read()

        # Allow empty results
        if not raw:
            return []

        # Do not allow bad json
        try:
            obj = json.loads(raw)
        except json.decoder.JSONDecodeError:
            err = ('Blast output is not in JSON format. '
                   'You may need to upgrade blast.')
            log.fatal(err)

    raw_hits = obj['BlastOutput2'][0]['report']['results']['search'].get(
        'hits', [])

    for raw in raw_hits:
        for i, desc in enumerate(raw['description']):
            hit = dict(desc)
            hit['len'] = raw['len']
            hit.update(raw['hsps'][i])
            hits_list.append(hit)

    return hits_list
Example #3
    def set_ssh_keys(self, ssh_keys):
        """Load ssh public keys from file if needed."""
        self.ssh_keys = {}
        self.ssh_keys_private = {}
        for user_name in ssh_keys:
            key = ssh_keys[user_name]
            if key.startswith('file:'):
                public_key_file = key.split('file:')[1]
                with open(public_key_file) as fd:
                    key = fd.read()
                # try to open private key
                private_key_file = public_key_file.split('.pub')[0]
                try:
                    with open(private_key_file) as fd:
                        self.ssh_keys_private[user_name] = private_key_file
                except FileNotFoundError:
                    pass

            self.ssh_keys[user_name] = key.strip()
            if user_name == 'root':
                # check if the private key is available:
                # (1) check ssh-agent
                # (2) check for private key file
                command = "echo {} | ssh-keygen -l -f - | awk '{{ print $2 }}'"
                finger = check_output(command.format(self.ssh_keys[user_name]),
                                      shell=True,
                                      encoding='ascii')
                try:
                    command = 'ssh-add -l | grep -q {}'
                    check_call(command.format(finger), shell=True)
                    return
                except CalledProcessError:
                    if user_name not in self.ssh_keys_private:
                        fatal('Could not find matching ssh key for root -',
                              'neither in ssh-agent nor on disk.')
Example #4
    def set_parameters(self):
        """Set parameters for templating."""
        self.name = self.parameters['deployment_name']
        self.license_file = self.parameters['license_file']
        if not os.path.isfile(self.license_file):
            fatal('License file "{}" could not be found.'.format(
                self.license_file))
        self.domain_name = self.parameters['domain_name']
        self.high_available = self.parameters.get('high_available', False)
Example #5
def all_shard_paths(blast_db):
    """Get all of the BLAST shard names built by the preprocessor."""
    pattern = '{}.*.blast.nhr'.format(blast_db)

    files = glob.glob(pattern)
    if not files:
        err = ('No blast shards found. Looking for "{}"\n'
               'Verify the --work-dir and --file-prefix options.').format(
                   pattern[:-4])
        log.fatal(err)

    return sorted(f[:-4] for f in files)
Example #6
def all_shard_paths(blast_db):
    """Get all of the BLAST shard names built by the preprocessor."""
    pattern = '{}.*.blast.nhr'.format(blast_db)

    files = glob.glob(pattern)
    if not files:
        err = ('No blast shards found. Looking for "{}"\n'
               'Verify the --work-dir and --file-prefix options.').format(
                   pattern[:-4])
        log.fatal(err)

    return sorted(f[:-4] for f in files)
Example #7
    def get(self, location):
        """Get data per REST API."""
        headers = {
            'Content-Type': 'application/json',
            'Authorization': 'Bearer {}'.format(self.api_key),
        }
        url = 'https://{}/api/v1{}'.format(self.host, location)
        try:
            response = requests.get(url, headers=headers, verify=False)
            response.raise_for_status()
        except requests.exceptions.HTTPError:
            fatal('REST API credentials are invalid.')
        return response
Example #8
def default_cov_cutoff(cov_cutoff):
    """Calculate default coverage cutoff argument."""
    if cov_cutoff not in ['off', 'auto']:
        err = ('Read coverage cutoff value. Must be a positive '
               'float value, or "auto", or "off"')
        try:
            value = float(cov_cutoff)
        except ValueError:
            log.fatal(err)
        if value < 0:
            log.fatal(err)

    return cov_cutoff
Example #9
    def init_api(self):
        node = ''
        if ':' in self.host:
            self.host, node = self.host.split(':')
        self.proxmox = ProxmoxAPI(host=self.host,
                                  user=self.config['username'],
                                  password=self.config['password'],
                                  verify_ssl=self.config.get(
                                      'verify_ssl', True))
        nodes = [d['node'] for d in self.proxmox.nodes.get()]
        if node:
            if node not in nodes:
                fatal('Specified node {} not configured on host {}'.format(
                    node, self.host))
            self.default_node = node
        else:
            self.default_node = nodes[0]
Example #10
def get_raw_hits(json_file):
    """Extract the raw blast hits from the blast json output file."""
    with open(json_file) as blast_file:
        raw = blast_file.read()

        # Allow empty results
        if not raw:
            return []

        # Do not allow bad json
        try:
            obj = json.loads(raw)
        except json.decoder.JSONDecodeError:
            err = ('Blast output is not in JSON format. '
                   'You may need to upgrade blast.')
            log.fatal(err)

    return obj['BlastOutput2'][0]['report']['results']['search'].get(
        'hits', [])
Example #11
def get_raw_hits(json_file):
    """Extract the raw blast hits from the blast json output file."""
    with open(json_file) as blast_file:
        raw = blast_file.read()

        # Allow empty results
        if not raw:
            return []

        # Do not allow bad json
        try:
            obj = json.loads(raw)
        except json.decoder.JSONDecodeError:
            err = ('Blast output is not in JSON format. '
                   'You may need to upgrade blast.')
            log.fatal(err)

    return obj['BlastOutput2'][0]['report']['results']['search'].get(
        'hits', [])
Example #12
def collect_stats(config, node_info):
    """Collect stats and update files."""
    router = node_info['router']
    node = node_info['node']
    interface = node_info['interface']

    api_key = config.get('api_key')
    if not api_key:
        fatal('No api_key has been specified in config file.')

    # create stats_dir if missing
    stats_dir = config.get('stats_dir',
                           '/var/lib/128technology/lte_quota_info')
    if stats_dir and not os.path.isdir(stats_dir):
        os.mkdir(stats_dir)
    else:
        cleanup_stats(stats_dir, config.get('retention_days', 60))

    conductor = config.get('conductor', 'localhost')
    stats = [int(time.time())]
    # Get current kpi for received/sent via GraphQL
    # (values during last 10 seconds interval)
    query = '''{ metrics { interface { %(kpi)s {
        bytes(router: "%(router)s", node: "%(node)s", port: "%(interface)s") {
          timeseries(startTime: "now-10") { timestamp value } } } } } }'''
    graphql = GraphQL(api_key, host=conductor)
    for kpi in ('received', 'sent'):
        result = graphql.query(query % locals())
        try:
            value = int(extract(result, 'value'))
        except TypeError:
            return None
        stats.append(value)

    stats_file = os.path.join(
        stats_dir,
        'lte_quota_info_{}_{}_{}_{}.stats'.format(router, node, interface,
                                                  time.strftime('%Y-%m')))
    stats_log = read_stats(stats_file)
    stats_log.append(stats)
    write_stats(stats_log, stats_file)
    return stats_log
Example #13
def extractFilesToHome():
    from shutil import copyfile, copytree
    from sys import exit
    from lib import log

    # Copy .vimrc into root directory
    try:
        copyfile(
            VIM_DIR + '.vimrc',
            HOME_DIR + '.vimrc',
        )
    except FileNotFoundError:
        log.fatal('.vimrc file not found in repository\'s files directory')

    # Copy .vim directory into root directory
    try:
        copytree(
            VIM_DIR + '.vim',
            HOME_DIR + '.vim',
        )

    except FileNotFoundError:
        log.fatal('.vim directory not found in repository\'s files directory')

    except FileExistsError:
        proceed = input(
            log.getWarningMessage(
                'Directory ' + '.vim' +
                ' already exists, should I proceed? Yy/Nn: '))
        if proceed.lower() != 'y':
            return
        copytree(
            VIM_DIR + '.vim',
            HOME_DIR + '.vim',
            dirs_exist_ok=True,
        )

    try:
        copytree(
            VIM_DIR + '.config/nvim',
            HOME_DIR + '.config/nvim',
        )

    except FileNotFoundError:
        log.fatal(
            '.config/nvim directory not found in repository\'s files directory'
        )

    except FileExistsError:
        proceed = input(
            log.getWarningMessage(
                'Directory ' + '.config/nvim' +
                ' already exists, should I proceed? Yy/Nn: '))
        if proceed.lower() != 'y':
            return
        copytree(
            VIM_DIR + '.config/nvim',
            HOME_DIR + '.config/nvim',
            dirs_exist_ok=True,
        )
Example #14
    def download_one_video(self, vid, url):
        try:
            print("vid={} url={}".format(vid, url))
            vdir = "{}/{}".format(self.root_dir, vid)
            cmd = "mkdir -p {}".format(vdir)
            log.notice("vid={} cmd={}".format(vid, cmd))
            os.system(cmd)
            cmd = "wget -c {} -O {}/{}.mp4 -o {}/{}.wget.log".format(
                url, vdir, vid, vdir, vid)
            log.notice("vid={} cmd={}".format(vid, cmd))
            os.system(cmd)

            wavfile = "{}/{}.wav".format(vdir, vid)
            if os.path.exists(wavfile):
                os.system("rm -f {}".format(wavfile))
            cmd = "ffmpeg -i {}/{}.mp4 -f wav -ar 16000 {}/{}.wav".format(
                vdir, vid, vdir, vid)
            log.notice("vid={} cmd={}".format(vid, cmd))
            os.system(cmd)
            print("vid={} url={} success".format(vid, url))
        except Exception as e:
            print("vid={} e={}".format(vid, e))
            log.fatal("vid={} e={}".format(vid, e))
Example #15
    def retrieve_pci_addresses(self):
        """Retrieve pci addresses for network interfaces."""
        debug('Retrieve PCI addresses...')
        try:
            lshw_json = self.run_ssh('lshw -json').stdout
        except SSHError:
            fatal('Cannot connect to node:', self.ip_address)
        lshw = json.loads(lshw_json)
        pci_addresses = []
        for component in lshw["children"][0]["children"]:
            if component["class"] == "bridge":
                for subsystem in component["children"]:
                    if subsystem["class"] == "network":
                        index = int(subsystem["id"].split(':')[1])
                        pci_addresses.append((index, subsystem["businfo"]))
        pci_addresses = [v.strip('pci@') for k, v in sorted(pci_addresses)]
        # iterate over interfaces and set pci address
        i = 0
        for interface in self.interfaces:
            self.interfaces[interface]['pci_address'] = pci_addresses[i]
            i += 1
            if i >= len(pci_addresses):
                break
Example #16
def get_lte_nodes(config):
    """Get LTE routers connected to conductor."""
    INTERFACE_TYPE = 'lte'
    nodes = []
    api_key = config.get('api_key')
    if not api_key:
        fatal('No api_key has been specified in config file.')
    quotas = config.get('quotas', {})  # the quotas section may be absent from the config
    default_quota = human_to_size(config.get('default_quota', '5 GB'))
    conductor = config.get('conductor', 'localhost')

    api = RestApi(api_key, host=conductor)
    for router in api.get_routers():
        router = router['name']
        for node in api.get_nodes(router):
            node = node['name']
            if '-conductor' in node:
                continue
            if not api.node_is_deployed(router, node):
                continue

            for device_interface in api.get_device_interfaces(router, node):
                if device_interface.get('type') == INTERFACE_TYPE:
                    # avoid calling human_to_size() on a missing quota entry
                    quota = quotas.get(router)
                    quota = human_to_size(quota) if quota else None
                    if not quota:
                        quota = default_quota

                    # populate lte_node_config
                    node_config = {
                        'router': router,
                        'node': node,
                        'interface': device_interface.get('name'),
                        'quota': quota,
                    }
                    debug(node_config)
                    nodes.append(node_config)
    return nodes
Example #17
def addNvimToVimAlias():
    from lib import log
    source_file = None
    try:
        source_file = open(HOME_DIR + '.zshrc', mode='r+')
    except OSError:
        try:
            source_file = open(HOME_DIR + '.bashrc', mode='r+')
        except OSError:
            log.fatal("Could not find zshrc or bashrc file in home")

    found_vim_alias = False
    found_vi_alias = False
    lines = source_file.readlines()
    for line in lines:
        if line == "alias vim=nvim\n":
            found_vim_alias = True
        if line == "alias vi=nvim\n":
            found_vi_alias = True

    if not found_vi_alias:
        source_file.write('alias vi=nvim\n')
    if not found_vim_alias:
        source_file.write('alias vim=nvim\n')
    source_file.close()
Example #18
def main():
    """Call all functions needed to create a deployment."""
    args = parse_arguments()
    log_level = 'INFO'
    if args.debug:
        log_level = 'DEBUG'
    set_log_level(log_level)

    config = read_config(args.config_file)
    for index, host in enumerate(config.get('hypervisors')):
        node = ''
        if ':' in host:
            host, node = host.split(':')
        proxmox = ProxmoxAPI(
            host=host,
            user=config['username'],
            password=config['password'],
            verify_ssl=config.get('verify_ssl', True))
        nodes = [d['node'] for d in proxmox.nodes.get()]
        if node:
            if node not in nodes:
                fatal('Specified node {} not configured on host {}'.format(
                    node, host))
            default_node = node
        else:
            default_node = nodes[0]
        new_id = get_free_vmid(proxmox, default_node, (index+1)*1000)
        iso_image = get_iso_image(proxmox, default_node)
        version = iso_image.replace(
            'local:iso/128T-', '').replace(
            '-cloudinit.x86_64.iso', '')
        template_name = TEMPLATE_NAME.format(datetime.now(), version)
        info('Creating a new template:\n * ID: {}\n * Name: {}\n * Host: {}'.format(
             new_id, template_name, host))
        if not args.assumeyes:
            yn = input('Continue [yN]? ')
            if yn != 'y' and yn != 'Y':
                continue

        vm_options = DEFAULTS.copy()
        if 'vm_options' in config:
            vm_options.update(config['vm_options'])
        vm_options['vmid'] = new_id
        vm_options['name'] = template_name
        vm_options['ide2'] = iso_image + ',media=cdrom'
        proxmox.nodes(default_node).qemu.create(**vm_options)

        vm = proxmox.nodes(default_node).qemu(new_id)
        info('Waiting until vm is stopped.')
        running = True
        while running:
            print('.', end='', flush=True)
            time.sleep(30)
            status = vm.status.current().get().get('status')
            running = (status == 'running')
        print('')
        info('VM has been stopped.')
        info('Adding CloudInit.')
        vm.config.set(ide0='local:cloudinit')
        vm.config.set(delete='ide2')
        info('Migrating to template.')
        vm.template().post()
Example #19
from cmd.argParser import ARGS

destinationFile = ARGS.dest

from lib.log import fatal
if not destinationFile:
    fatal('''
    Empty destination,
    try defining a different destination
    with `--dest`\ndefault is `/home/<user>/.zshrc`
    ''')

import sys, os
from lib.os import sudoScript

# Get path (eg '/home/joe/something/quitup/quitup.py')
appScriptPath = sys.argv[0]
alias = '\'alias quitup=\"/usr/bin/python ' + appScriptPath + '\"\''
addAliasCmd = sudoScript('echo ' + alias + ' >> ' + destinationFile)
os.system(addAliasCmd)
Example #20
def check_query_args(args):
    """Validate the query arguments."""
    if not args['query'] and not args['query_split']:
        err = 'You must have at least one --query or --query-split argument.'
        log.fatal(err)
Example #21
def check_query_args(args):
    """Validate the query arguments."""
    if not args['query'] and not args['query_split']:
        err = 'You must have at least one --query or --query-split argument.'
        log.fatal(err)
Example #22
def parse_command_line(temp_dir_default):
    """Process command-line arguments."""
    description = """
        This is the aTRAM script. It takes a query sequence and a blast
        database built with the atram_preprocessor.py script and builds an
        assembly.

        If you specify more than one query sequence and/or more than one blast
        database then aTRAM will build one assembly for each query/blast
        DB pair.

        NOTE: You may use a text file to hold the command-line arguments
        like: @/path/to/args.txt. This is particularly useful when specifying
        multiple blast databases or multiple query sequences.
        """
    parser = argparse.ArgumentParser(
        fromfile_prefix_chars='@',
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description=textwrap.dedent(description))

    parser.add_argument('--version',
                        action='version',
                        version='%(prog)s {}'.format(db.ATRAM_VERSION))

    required_command_line_args(parser)
    optional_command_line_args(parser)
    filter_command_line_args(parser)
    blast.command_line_args(parser)
    assembly.command_line_args(parser)

    args = vars(parser.parse_args())

    # Check query arguments
    if not args['query'] and not args['query_split']:
        err = 'You must have at least one --query or --query-split argument.'
        log.fatal(err)

    # Set defaults and adjust arguments based on other arguments
    args['cov_cutoff'] = assembly.default_cov_cutoff(args['cov_cutoff'])
    args['blast_db'] = blast.touchup_blast_db_names(args['blast_db'])
    args['kmer'] = assembly.default_kmer(args['kmer'], args['assembler'])
    args['max_target_seqs'] = blast.default_max_target_seqs(
        args['max_target_seqs'], args['blast_db'], args['max_memory'])

    # Setup temp dir
    if not args['temp_dir']:
        args['temp_dir'] = temp_dir_default
    else:
        os.makedirs(args['temp_dir'], exist_ok=True)

    if args['no_filter']:
        args['bit_score'] = 0
        args['contig_length'] = 0

    if not args['protein'] and args['query']:
        args['protein'] = bio.fasta_file_has_protein(args['query'])

    # Prepend to PATH environment variable if requested
    if args['path']:
        os.environ['PATH'] = '{}:{}'.format(args['path'], os.environ['PATH'])

    find_programs(args['assembler'], args['no_long_reads'], args['bowtie2'])

    return args
Example #23
def parse_command_line():
    """Process command-line arguments."""
    description = """
        This program will find and stitch together exons from targeted
        assemblies using amino acid targets and DNA assemblies.
        """

    parser = argparse.ArgumentParser(
        fromfile_prefix_chars='@',
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description=textwrap.dedent(description))

    parser.add_argument('--version',
                        action='version',
                        version='%(prog)s {}'.format(db.ATRAM_VERSION))

    parser.add_argument('-T',
                        '--taxa',
                        metavar='TAXA',
                        required=True,
                        help="""A text file of all of your taxon names.""")

    parser.add_argument(
        '-r',
        '--reference-genes',
        '--refs',
        metavar='FASTA',
        required=True,
        help="""Reference amino acid sequences in a FASTA file.""")

    parser.add_argument('-a',
                        '--assemblies-dir',
                        metavar='PATH',
                        required=True,
                        help="""The path to the DNA contigs.""")

    parser.add_argument(
        '-O',
        '--overlap',
        type=int,
        default=10,
        help="""Contigs must overlap by this many codons before they are
            considered a real overlap.""")

    parser.add_argument(
        '-t',
        '--temp-dir',
        metavar='DIR',
        help="""Place temporary files in this directory. All files will be
            deleted after aTRAM completes. The directory must exist.""")

    parser.add_argument(
        '--keep-temp-dir',
        action='store_true',
        help="""This flag will keep the temporary files in the --temp-dir
        around for debugging.""")

    parser.add_argument('-l',
                        '--log-file',
                        help="""Log file (full path). The default is
            "atram_stitcher_<date>.log".""")
    parser.add_argument(
        '--log-level',
        choices=['debug', 'info', 'error'],
        default='info',
        help="""Log messages of the given level (or above). 'debug' shows the
            most messages and 'error' shows the least. The default is
            'info'""")

    parser.add_argument(
        '-i',
        '--iterations',
        type=int,
        default=2,
        metavar='N',
        help="""The number of times to run the main stitcher loop. This
            must be either 1 or 2, the default is 2.""")

    parser.add_argument(
        '-o',
        '--output-prefix',
        help="""This is the prefix of all of the output files. So you can
            identify different stitcher output file sets. You may include a
            directory as part of the prefix. The stitcher will add suffixes to
            differentiate output files.""")

    parser.add_argument(
        '-f',
        '--file-filter',
        default='*.fasta',
        help="""Use this to filter files in the assemblies directory. For
            example '*filtered*.fasta' will select all fasta files in the
            assemblies directory with the word filtered in them. The default
            is to select all fasta files in the assemblies directory
            '*.fasta'.""")

    parser.add_argument(
        '--reference-name',
        action='store_true',
        help="""Prepend the reference name to the final assembled gene name?
            If false, the gene name from the reference file will just be
            <taxon-name>. If you select this, then the assembled gene name
            will be <reference-name>.<taxon-name>.""")

    args = parser.parse_args()

    util.temp_dir_exists(args.temp_dir)

    if not args.output_prefix:
        args.output_prefix = join('.',
                                  'atram_stitcher_' + date.today().isoformat())

    # Only derive a log file name when the user did not supply one.
    if not args.log_file:
        if args.output_prefix[-1] == '/':
            args.log_file = join(
                args.output_prefix,
                'atram_stitcher_' + date.today().isoformat() + '.log')
        else:
            args.log_file = args.output_prefix + '.log'

    if not 1 <= args.iterations <= 2:
        log.fatal('The iterations must be either 1 or 2.')

    return args
Example #24
    def deploy_vm(self, instance, assume_yes):
        """Deploy a VM on the given hypervisor."""
        if not self.proxmox:
            self.init_api()
            #fatal('No proxmox connection')

        proxmox = self.proxmox
        node = self.default_node

        # find template id among all vms on the hypervisor node
        vm_ids = []
        template_id = None
        template = instance.template
        for vm in proxmox.nodes(node).qemu.get():
            if vm['name'] == template and vm['template']:
                template_id = vm['vmid']
            vm_ids.append(int(vm['vmid']))
        if not template_id:
            fatal('Template could not be found on: {}.'.format(self.host))

        # find next free vm id starting at 1001/2001
        new_id = self.index * 1000 + 1
        for id in sorted(vm_ids):
            if id == new_id:
                new_id += 1

        info('Creating a new VM:\n * ID: {}\n * Name: {}\n * Host: {}'.format(
            new_id, instance.name, self.host))
        if not assume_yes:
            yn = input('Continue [yN]? ')
            if yn != 'y' and yn != 'Y':
                return False
        t = proxmox.nodes(node).qemu(template_id)
        info('VM is being created. This may take some time...')
        debug('Cloning template...')
        c = t.clone.create(newid=new_id, name=instance.name)

        ssh_key = instance.ssh_keys['root']
        ssh_key_quoted = quote(ssh_key, safe='')

        adjustments = {
            # remove cdrom drive if any
            'delete': 'ide2',
            # set cloud-init options
            'cipassword': instance.passwords['root'],
            'ciuser': '******',
            'sshkeys': ssh_key_quoted,
        }
        # network parameters
        i = 0
        for interface, details in instance.interfaces.items():
            ipconfig = 'ip={ip_address}/{ip_prefix}'.format(**details)
            if 'gateway' in details:
                ipconfig += ',gw={}'.format(details['gateway'])
            adjustments['ipconfig{}'.format(i)] = ipconfig
            i += 1
        # remove mpls interface from conductors
        if instance.role == 'conductor':
            adjustments['delete'] += ',net2'

        proxmox.nodes(node).qemu(new_id).config.set(**adjustments)
        debug('Starting VM...')
        proxmox.nodes(node).qemu(new_id).status.start.post()
        instance.init_iso_instance()
        return True