Example 1
def update_test_files():
    shred.log.info("Updating list of test data files")
    global test_files
    filelist_cmd = ["hdfs", "dfs", "-ls", ospathjoin(test_file_path, test_file_dir)]
    filelist_iter = shred.run_shell_command(filelist_cmd)
    for line in filelist_iter:
        # Each `-ls` output line ends with the file path; keep paths under the test dir.
        splits = ssplit(line)
        if ospathjoin(test_file_path, test_file_dir) in splits[-1]:
            test_files.append(splits[-1])
    shred.log.info("Test files are now [{0}]".format(test_files))
Example 2
	def test_server(self, ip=None, port=None, domain=None):
		try:
			sleep(2)
			_ip = ip or self.ip
			_port = port or self.port
			_domain = domain or 'yahoo.com'
			# Exercise the DNS server with dig; the answer is raised below and
			# immediately swallowed by the broad except, so nothing is returned.
			proc = Popen(ssplit('dig -p {} @{} {} A +short'.format(_port, _ip, _domain)), stdout=PIPE)
			out, err = proc.communicate()
			raise ValueError(out)
		except Exception:
			pass
Example 3
    def do_analyze(self,line,silent=False):
        try:
            if silent:
                parsed_args = vars(self._analyze_parser.parse_args([]))
                parsed = Namespace(**{**parsed_args, **line})
                if parsed.uuid:
                    parsed.disk_dump_html = False
                    parsed.disk_dump_json = False
                    parsed.open = False
                    parsed.print = False
                    if not parsed.db_dump_json and not parsed.db_dump_html:
                        parsed.db_dump_json = True
                        parsed.db_dump_html = True
                else:
                    return
            else:
                parsed = self._analyze_parser.parse_args(ssplit(line))
                parsed.uuid = str(uuid4())

            # Clamp user-supplied timeouts to (0, 240); silently ignore bad values.
            try:
                if 0 < int(parsed.analyzer_timeout) < 240:
                    json_settings["analyzer_timeout"] = int(parsed.analyzer_timeout)
            except Exception:
                pass
            try:
                if 0 < int(parsed.function_timeout) < 240:
                    json_settings["function_timeout"] = int(parsed.function_timeout)
            except Exception:
                pass

            log_string("Default timeout {}s for the task, and {}s for each logic".format(json_settings["analyzer_timeout"], json_settings["function_timeout"]),"Yellow")
        except Exception:
            log_string("Parsing failed, something went wrong..","Red")
            return

        log_string("Task {} (Started)".format(parsed.uuid),"Yellow")

        if not parsed.output:
            parsed.output = gettempdir()
        if parsed.file or parsed.folder or parsed.buffer:
            try:
                setup_task_logger(parsed.uuid)
                if parsed.file:
                    self.analyze_file(parsed)
                elif parsed.folder:
                    self.analyze_folder(parsed)
                elif parsed.buffer:
                    self.analyze_buffer(parsed)
            finally:
                cancel_task_logger(parsed.uuid)
        else:
            log_string("File, Folder or Buffer is missing","Red")

        log_string("Task {} (Finished)".format(parsed.uuid),"Green")
Example 4
def runtest(test, jar):
    testurl = 'https://ipv6.he.net/certification/daily.php?test=' + test
    domain = get_domain()
    ip6 = socket.getaddrinfo(domain, None, socket.AF_INET6)[0][4][0]
    if test == 'aaaa':
        cmd = 'dig -t AAAA @ns1.galax.is ' + domain
    elif test == 'ptr':
        cmd = 'dig -x ' + ip6
    elif test == 'ping':
        cmd = 'ping -6 -c3 ' + domain
    # apparently those two tests don't need unique values every day..
    elif test == 'traceroute':
        cmd = 'traceroute -6 he.net'
    elif test == 'whois':
        cmd = 'whois 2001:470:0:76::2'
    else:
        # without this, an unknown test name would hit a NameError on `cmd` below
        raise ValueError('unknown test: {}'.format(test))
    print('running test "{}" with "{}"'.format(test, cmd))
    data = check_output(ssplit(cmd)).decode('utf-8')
    r = requests.post(testurl, data={'input': data}, cookies=jar)
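The if/elif chain maps each test name to a single shell command; an equivalent lookup-table sketch (not from the original script, commands copied from the branches above):

def build_cmd(test, domain, ip6):
    # Same commands as the branches above, keyed by test name.
    commands = {
        'aaaa': 'dig -t AAAA @ns1.galax.is ' + domain,
        'ptr': 'dig -x ' + ip6,
        'ping': 'ping -6 -c3 ' + domain,
        'traceroute': 'traceroute -6 he.net',
        'whois': 'whois 2001:470:0:76::2',
    }
    return commands[test]  # raises KeyError for an unknown test name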
Example 5
def run_blast(db, query, m8):
    '''Blast query file against db using default settings.'''

    if '*dbname*' in m8:
        m8 = m8.replace('*dbname*', db_name(db))
    command = 'blastn -task megablast -db '
    command += db
    command += ' -query '
    command += query
    command += ' -out '
    command += m8
    command += ' -outfmt 6 -num_threads 4 -evalue 0.01'
    command = ssplit(command)
    try:
        a = Popen(command)
        a.wait()
    except OSError:
        print('blastn not found!')
        raise
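Because the command string is split right back into an argv list, the list can also be built directly, which avoids quoting issues with paths containing spaces; a variant sketch (not the original function):

from subprocess import Popen

def run_blast_argv(db, query, m8):
    """Same blastn invocation as above, with the argv list built explicitly."""
    command = [
        'blastn', '-task', 'megablast',
        '-db', db, '-query', query, '-out', m8,
        '-outfmt', '6', '-num_threads', '4', '-evalue', '0.01',
    ]
    Popen(command).wait()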
Example 6
def graph_from_text(source: str, verbose: bool = True) -> MultiDiGraph:
    """Returns a graph from text.

    Parameters
    ----------
    source : str
        The text with which
        the graph will be created.

    verbose : bool
        If true, a progress bar will be displayed.

    Examples
    --------
    >>> import cfpq_data
    >>> g = cfpq_data.graph_from_text("1 A 2", verbose=False)
    >>> g.number_of_nodes()
    2
    >>> g.number_of_edges()
    1

    Returns
    -------
    g : MultiDiGraph
        Loaded graph.
    """
    g = MultiDiGraph()

    for edge in tqdm(source.splitlines(), disable=not verbose, desc="Loading..."):
        splitted_edge = ssplit(edge)
        if len(splitted_edge) == 1:
            g.add_node(splitted_edge[0])
        elif len(splitted_edge) == 2:
            u, v = splitted_edge
            g.add_edge(u, v)
        elif len(splitted_edge) == 3:
            u, label, v = splitted_edge
            g.add_edge(u, v, label=label)
        else:
            raise ValueError("only 1, 2, or 3 values per line are allowed")

    return g
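A short usage sketch exercising the three accepted line shapes (1, 2, and 3 tokens per line):

text = "1\n1 2\n1 A 2"
g = graph_from_text(text, verbose=False)
assert g.number_of_nodes() == 2  # nodes "1" and "2"
assert g.number_of_edges() == 2  # the 2- and 3-token lines each add an edge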
Example 7
def run_formatdb(fasta_file, out_file, out_path, log_file='/dev/null'):
    '''Blast query file against db using default settings.'''
    
    fasta_file = os.path.abspath(fasta_file)
    log_file = os.path.abspath(log_file)
    old_dir = os.getcwd()
    os.chdir(out_path)
    command = 'formatdb -p F -i '
    command += fasta_file
    command += ' -l ' 
    command += log_file
    if out_file:
        command += ' -n  '
        command += out_file
    command = ssplit(command)
    try:
        a = Popen(command)
        a.wait()
    except Exception:
        print('Something went wrong with formatdb!')
        raise
    finally:
        os.chdir(old_dir)
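The chdir-and-restore dance can be avoided by running formatdb with Popen's cwd argument; a variant sketch (not the original function, ssplit assumed to be shlex.split):

import os
from shlex import split as ssplit
from subprocess import Popen

def run_formatdb_cwd(fasta_file, out_file, out_path, log_file='/dev/null'):
    """Same formatdb call as above, executed in out_path without chdir."""
    command = 'formatdb -p F -i {} -l {}'.format(
        os.path.abspath(fasta_file), os.path.abspath(log_file))
    if out_file:
        command += ' -n ' + out_file
    Popen(ssplit(command), cwd=out_path).wait()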
Example 8
def test():
    command = 'python alfie.py -i test/homo_y.fasta test/pan_y.fasta -g test/y.gtf -o tmp/'
    command = ssplit(command)
    a = Popen(command)
    a.wait()
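An equivalent smoke test that fails loudly on a non-zero exit status, sketched with subprocess.check_call (not the original; ssplit assumed to be shlex.split):

from shlex import split as ssplit
from subprocess import check_call

def test_check():
    check_call(ssplit('python alfie.py -i test/homo_y.fasta test/pan_y.fasta '
                      '-g test/y.gtf -o tmp/'))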