def deep_matches_vids(exec_name, vid_frames_path, extra_params=""):
    all_matches = []
    # get a temporary filename
    tmp_vid_fname = next(tempfile._get_candidate_names()) + ".avi"
    tmp_fname = next(tempfile._get_candidate_names())
    ffmpeg_cmd = "ffmpeg -i %s -c:v huffyuv -pix_fmt rgb24 %s" % (vid_frames_path, tmp_vid_fname)
    cmd = '%s -i "%s" %s -b -out %s' % (exec_name, tmp_vid_fname, extra_params, tmp_fname)
    try:
        proc = subprocess.Popen(ffmpeg_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
        (out, err) = proc.communicate()
        ret = proc.wait()
        t1 = datetime.now()
        ret = os.system(cmd)
        t2 = datetime.now()
        delta = t2 - t1
        #sys.stdout.write("%s - %.2fs | " % (ret, delta.seconds + delta.microseconds/1E6))
        all_matches = read_output_vid_matches_binary(tmp_fname)
    finally:
        # delete the output file if it exists
        try:
            os.remove(tmp_vid_fname)
            os.remove(tmp_fname)
        except OSError:
            pass

    return all_matches
Example #2
    def setUp(self):
        if not os.path.exists(self.fixture_dir):
            os.mkdir(self.fixture_dir)

        self.tmp_file_path = os.path.join(
            self.fixture_dir, next(tempfile._get_candidate_names()))
        while os.path.exists(self.tmp_file_path):
            self.tmp_file_path = os.path.join(
                self.fixture_dir, next(tempfile._get_candidate_names()))
Example #3
def save_flask_file_to_temp_from_req(filename, flask_files):
    if filename in flask_files and flask_files[filename].filename:
        uploaded_file = flask_files[filename]
        temp_file = "/tmp/" + next(tempfile._get_candidate_names()) + FileUtil.get_ext_with_dot(
            uploaded_file.filename)
        uploaded_file.save(temp_file)
        return temp_file
Example #4
    def run(self, musicbrainzid, fpath):
        temp_name = next(tempfile._get_candidate_names())
        tmpfile = "/tmp/%s.ly" % temp_name

        server_name = socket.gethostname()
        call(["/mnt/compmusic/%s/lilypond/usr/bin/musicxml2ly" % server_name, "--no-page-layout", fpath, "-o", tmpfile])

        tmp_dir = tempfile.mkdtemp()
        call(["lilypond", '-dpaper-size=\"junior-legal\"', "-dbackend=svg", "-o" "%s" % (tmp_dir), tmpfile])

        ret = {'score': []}

        os.unlink(tmpfile)

        regex = re.compile(r'.*<a style="(.*)" xlink:href="textedit:\/\/\/.*:([0-9]+):([0-9]+):([0-9]+)">.*')
        files = [os.path.join(tmp_dir, f) for f in os.listdir(tmp_dir)]
        files = sorted(filter(os.path.isfile, files), key=os.path.getmtime)

        for f in files:
            if f.endswith('.svg'):
                svg_file = open(f)
                score = svg_file.read()
                ret['score'].append(regex.sub(r'<a style="\1" id="l\2-f\3-t\4" from="\3" to="\4">', score))
                svg_file.close()
                os.remove(f)
        os.rmdir(tmp_dir)
        return ret
Example #5
def insert_qr(pdf, x, y, code=1234567):
    if len(str(code)) > 8:
        return
    qr = qrcode.QRCode(
        version=2,
        error_correction=qrcode.constants.ERROR_CORRECT_M,
        box_size=10,
        border=2,
    )
    qr.add_data(HERBURL % code)
    qr.make(fit=True)
    img = qr.make_image()
    temp_name = os.path.join(BASE_URL, './tmp',
                             next(tempfile._get_candidate_names()))
    temp_name += '.png'
    try:
        with open(temp_name, 'wb') as tmpfile:
            img.save(tmpfile)
            tmpfile.flush()
            pdf.set_xy(x + LABEL_WIDTH - QR_SIZE - 2, y + LABEL_HEIGHT - QR_SIZE - 4)
            pdf.image(temp_name, w=QR_SIZE, h=QR_SIZE)
    finally:
        try:
            os.remove(temp_name)
        except OSError:
            pass
Example #6
    def generateTemporaryName(self):
        foundFileName = False
        while not foundFileName:
            temporaryFileName = next(tempfile._get_candidate_names()) + ".txt"
            if not os.path.isfile(temporaryFileName):
                foundFileName = True
        return temporaryFileName
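Note that the check-then-use loop above is racy: another process can create the
file between the os.path.isfile() check and the moment the caller opens it. A
minimal race-free sketch using the public tempfile API instead (the ".txt"
suffix is kept only for parity with the method above):

    def generate_temporary_name_safe():
        import tempfile
        # NamedTemporaryFile creates and opens the file atomically, so the
        # name is guaranteed unused; delete=False keeps it on disk.
        f = tempfile.NamedTemporaryFile(suffix=".txt", delete=False)
        f.close()
        return f.name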
Example #7
    def mkdtemp(self):
        # TODO: Write up a more proper implementation that allows a function or
        # generator of some sort to be used to generate names instead, and see
        # if it can be written in such a way as to be used with copy_to to
        # allow conflicting files to be renamed on request.
        names = tempfile._get_candidate_names()
        for _ in range(20):
            name = "tmp" + next(names) + next(names)
            f = self.child(name)
            try:
                # TODO: Create this by default with permissions such that only
                # we have access to it, like tempfile.mkdtemp does
                f.mkdir()
                return f
            except exceptions.FileExistsError:
                continue
            except IOError:
                # Really ugly hack: SFTP v3 (and hence Paramiko) doesn't have
                # any support for indicating that a mkdir failed because the
                # directory in question already exists, so we assume that any
                # failure is because the directory already exists. There's not
                # much wrong with this approach other than taking an inordinate
                # amount of time to fail when the real issue is, say, related
                # to permissions, and annoyingly breaking encapsulation in
                # quite a glaring manner.
                from fileutils.ssh import SSHFile
                if isinstance(self, SSHFile):
                    continue
                else:
                    raise
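The TODO above sketches a pluggable name source. One way that could look,
assuming the same self.child()/f.mkdir() interface and exceptions module used
by the method above (an illustrative sketch, not the library's actual API):

    def mkdtemp_with(self, names=None, attempts=20):
        # names: any iterator of candidate basenames; defaults to the
        # stdlib's private random-name sequence, as mkdtemp above does.
        if names is None:
            names = tempfile._get_candidate_names()
        for _ in range(attempts):
            f = self.child("tmp" + next(names))
            try:
                f.mkdir()
                return f
            except exceptions.FileExistsError:
                continue
        raise IOError("could not create a unique temporary directory")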
Example #8
def gen_args(args, infile_path, outfile):
    """
    Return the argument list generated from 'args' and the infile path
    requested.

    Arguments :
        args  ( string )
            Keyword or arguments to use in the call of Consense, excluding
            infile and outfile arguments.
        infile_path  ( string )
            Input alignment file path.
        outfile  ( string )
            Consensus tree output file.

    Returns :
        list
            List of arguments (excluding binary file) to call Consense.
    """
    if outfile:
        outfile_path = get_abspath(outfile)
    else:
        # Output files will be saved in temporary files to retrieve the
        # consensus tree
        outfile_path = os.path.join(tempfile.gettempdir(),
                                    tempfile.gettempprefix() + \
                                        next(tempfile._get_candidate_names()))
    # Create full command line list
    argument_list = [infile_path, outfile_path]
    return argument_list
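For illustration, a call without an outfile falls through to the temporary
path branch (the random part of the name varies per run):

    argument_list = gen_args('', '/data/infile.phy', None)
    # e.g. ['/data/infile.phy', '/tmp/tmpk3j9x2_1']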
Example #9
def mode_pre(session_dir, args):
    """
    Read from Session file and write to session.pre file
    """
    global gtmpfilename

    endtime_to_update = int(time.time()) - get_changelog_rollover_time(
        args.volume)
    status_file = os.path.join(session_dir, args.volume, "status")
    status_file_pre = status_file + ".pre"

    mkdirp(os.path.dirname(args.outfile), exit_on_err=True, logger=logger)

    # If Pre status file exists and running pre command again
    if os.path.exists(status_file_pre) and not args.regenerate_outfile:
        fail("Post command is not run after last pre, "
             "use --regenerate-outfile")

    start = 0
    try:
        with open(status_file) as f:
            start = int(f.read().strip())
    except ValueError:
        pass
    except (OSError, IOError) as e:
        fail("Error Opening Session file %s: %s"
             % (status_file, e), logger=logger)

    logger.debug("Pre is called - Session: %s, Volume: %s, "
                 "Start time: %s, End time: %s"
                 % (args.session, args.volume, start, endtime_to_update))

    prefix = datetime.now().strftime("%Y%m%d-%H%M%S-%f-")
    gtmpfilename = prefix + next(tempfile._get_candidate_names())

    run_cmd_nodes("pre", args, start=start, end=-1, tmpfilename=gtmpfilename)

    # Merger
    if args.full:
        cmd = ["sort", "-u"] + node_outfiles + ["-o", args.outfile]
        execute(cmd,
                exit_msg="Failed to merge output files "
                "collected from nodes", logger=logger)
    else:
        # Read each Changelogs db and generate finaldb
        create_file(args.outfile, exit_on_err=True, logger=logger)
        outfilemerger = OutputMerger(args.outfile + ".db", node_outfiles)
        write_output(args.outfile, outfilemerger, args.field_separator)

    try:
        os.remove(args.outfile + ".db")
    except (IOError, OSError):
        pass

    run_cmd_nodes("cleanup", args, tmpfilename=gtmpfilename)

    with open(status_file_pre, "w", buffering=0) as f:
        f.write(str(endtime_to_update))

    sys.stdout.write("Generated output file %s\n" % args.outfile)
Example #10
	def do_upload_cap(s):
		cl = int(s.headers['Content-Length'])
		tmp_cap = "/tmp/" + next(tempfile._get_candidate_names()) + ".cap"
		with open(tmp_cap + ".gz", "wb") as fid:
			fid.write(s.rfile.read(cl))

		decompress(tmp_cap)

		# Check file is valid
		output = subprocess.check_output(['wpaclean', tmp_cap + ".tmp", tmp_cap])
		try:
			os.remove(tmp_cap + ".tmp")
		except OSError:
			pass

		output_split = output.splitlines()
		if len(output_split) > 2:
			# We got more than 2 lines, which means there is a network
			#  in there with a WPA/2 PSK handshake
			os.rename(tmp_cap + ".gz", "dcrack.cap.gz")
			os.rename(tmp_cap, "dcrack.cap")
		else:
			# If nothing in the file, just delete it
			os.remove(tmp_cap)
			os.remove(tmp_cap + ".gz")
Example #11
def report_worker(sid):
    try:
        job = get_job(sid) 

        log.info("=============  STARTING WORKER  ==============")
        log.debug(job)
        from ast import literal_eval
        job['series'] = literal_eval(job['series'])  # From string
        # Expand paths to full location on filesystem 
        output_filename = os.path.join(
            app.config['UPLOAD_FOLDER'], 
            next(tempfile._get_candidate_names()) + '.pdf')

        # Make list of input datafiles
        input_datafiles = [
            os.path.join(app.config['UPLOAD_FOLDER'], f['temporary_name'])
            for f in get_files(sid)
        ]

        report.report(input_datafiles, output_filename, 
                      **{**job, 'pdf': True, 'htm': False})

        log.info("=============  WORKER FINISHED  ==============")

        # Update finished job 
        upd_job(sid, 'generated_pdf', output_filename)
        upd_job(sid, 'status', 'done')

    except Exception:
        log.exception("Exception occurred in worker thread")

        upd_job(sid, 'status', 'error')
        upd_job(sid, 'generated_pdf', None)
        raise
Example #12
def patch_repo(options):
    """
    Patches repository.
    """
    if not options['patchfile'] or options['no_patch']:
        return
    print_status('Patching {0}/{1} with {2}'.format(options['target'],
                                                    options['repo'],
                                                    options['patchfile']),
                 options)
    patchfile = os.path.join(options['target'],
                             next(tempfile._get_candidate_names()))  # pylint: disable=protected-access
    try:
        execute_command(['cp', options['patchfile'], patchfile], options)
        modify_file(patchfile, options, 'patchfile')
        os.chdir(os.path.join(options['target'], options['repo']))
        result = execute_command(['patch', '-Np1', '-i', patchfile], options)
    except IOError:
        result = False
    finally:
        if os.path.isfile(patchfile):
            os.remove(patchfile)
    if not result:
        print_error('Failed patching {0}/{1} with {2}'.
                    format(options['target'], options['repo'],
                           options['patchfile']), options)
def prepare_queue(options):
    """
    Prepares a queue file which holds all hosts to scan.
    """
    expanded = False
    if not options['inputfile']:
        expanded = next(tempfile._get_candidate_names())
        with open(expanded, 'a') as inputfile:
            inputfile.write(options['target'])
        options['inputfile'] = expanded
    with open(options['inputfile'], 'r') as inputfile:
        hosts = inputfile.read().splitlines()
        targets = []
        for host in hosts:
            if ('/' in host) or ('-' in host):
                if not options['nmap']:
                    print_error('nmap is necessary for IP ranges', True)
                arguments = '-nsL'
                scanner = nmap.PortScanner()
                scanner.scan(hosts='{0}'.format(host), arguments=arguments)
                targets += sorted(scanner.all_hosts(),
                                  key=lambda x: tuple(map(int, x.split('.'))))
            else:
                targets.append(host)
        with open(options['queuefile'], 'a') as queuefile:
            for target in targets:
                queuefile.write(target + '\n')
    if expanded:
        os.remove(expanded)
Example #14
    def _copy_outside_keys(self):
        """Copy key from out of the workspace into one"""
        paths_map = {}
        real_inv = os.path.join(self.path, os.readlink(self.inventory))

        for line in fileinput.input(real_inv, inplace=True):
            key_defs = re.findall(r"ansible_ssh_private_key_file=\/\S+", line)
            for key_def in key_defs:
                path = key_def.split("=")[-1]
                paths_map.setdefault(path, path)

            new_line = line.strip()
            for mapped_orig, mapped_new in paths_map.items():
                if mapped_orig == mapped_new:
                    keyfilename = os.path.basename(mapped_orig)
                    rand_part = next(tempfile._get_candidate_names())
                    new_fname = "{}-{}".format(keyfilename, rand_part)

                    shutil.copy2(mapped_orig, os.path.join(
                                 self.path, new_fname))
                    paths_map[mapped_orig] = os.path.join(
                        self.path_placeholder, new_fname)
                    new_fname = paths_map[mapped_orig]
                else:
                    new_fname = mapped_new

                new_line = re.sub(mapped_orig, new_fname, new_line)

            print(new_line)
Example #15
    def download_atomically(self,
                            url,
                            file_name,
                            headers=None,
                            show_progress=False):

        '''download stream atomically will stream to a temporary file, and
        rename only upon successful completion. This is to ensure that
        errored downloads are not found as complete in the cache
        :param file_name: the file name to stream to
        :param url: the url to stream from
        :param headers: additional headers to add to the get (default None)
        '''
        try:
            tmp_file = "%s.%s" % (file_name,
                                  next(tempfile._get_candidate_names()))
            response = self.stream(url,
                                   file_name=tmp_file,
                                   headers=headers,
                                   show_progress=show_progress)
            os.rename(tmp_file, file_name)

        except Exception:

            download_folder = os.path.dirname(os.path.abspath(file_name))
            msg = "Error downloading %s. " % (url)
            msg += "Do you have permission to write to %s?" % (download_folder)
            bot.error(msg)
            try:
                os.remove(tmp_file)
            except Exception:
                pass
            sys.exit(1)

        return file_name
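The same stage-then-rename idea in isolation, as a minimal sketch; os.replace
is atomic on POSIX when source and destination share a filesystem:

    import os
    import tempfile

    def write_atomically(path, data):
        # stage the bytes in a sibling temp file, then swap it into place
        fd, tmp_path = tempfile.mkstemp(dir=os.path.dirname(path) or ".")
        try:
            with os.fdopen(fd, "wb") as handle:
                handle.write(data)
            os.replace(tmp_path, path)
        except Exception:
            os.remove(tmp_path)
            raise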
Example #16
def crossmatchtwofiles(img1, img2, radius=3):
    ''' This module crossmatches two images:
        it runs SExtractor, transforms the pixel positions of the sources into
        coordinates, and crossmatches them.
        The output is a dictionary with the objects in common.
    '''

    from numpy import array, argmin, min, sqrt
    import agnkey

    hd1 = pyfits.getheader(img1)
    hd2 = pyfits.getheader(img2)
    wcs1 = pywcs.WCS(hd1)
    wcs2 = pywcs.WCS(hd2)

    xpix1, ypix1, fw1, cl1, cm1, ell1, bkg1, fl1 = agnkey.agnastrodef.sextractor(img1)
    xpix2, ypix2, fw2, cl2, cm2, ell2, bkg2, fl2 = agnkey.agnastrodef.sextractor(img2)
    xpix1, ypix1, xpix2, ypix2 = array(xpix1, float), array(ypix1, float), array(xpix2, float), array(ypix2, float)

    bb = wcs1.wcs_pix2sky(list(zip(xpix1, ypix1)), 1)  # transform pixels into coordinates
    xra1, xdec1 = zip(*bb)
    bb = wcs2.wcs_pix2sky(list(zip(xpix2, ypix2)), 1)  # transform pixels into coordinates
    xra2, xdec2 = zip(*bb)

    xra1, xdec1, xra2, xdec2 = array(xra1, float), array(xdec1, float), array(xra2, float), array(xdec2, float)
    distvec, pos1, pos2 = agnkey.agnastrodef.crossmatch(xra1, xdec1, xra2, xdec2, radius)
    matches = {'ra1': xra1[pos1], 'dec1': xdec1[pos1], 'ra2': xra2[pos2], 'dec2': xdec2[pos2],
               'xpix1': xpix1[pos1], 'ypix1': ypix1[pos1], 'xpix2': xpix2[pos2], 'ypix2': ypix2[pos2]}

    out = next(tempfile._get_candidate_names()) + '.list'
    np.savetxt(out, list(zip(xpix1[pos1], ypix1[pos1])), fmt='%10.10s\t%10.10s')
    return out, matches
Example #17
def getTempfileName(ext=""):
    """Returns full path to temp file with with `ext` extension"""

    return os.path.join(
        tempfile.gettempdir(),
        os.path.basename(sys.argv[0]) + next(tempfile._get_candidate_names()) + ext
    )
Example #18
def create_appliance(args):
    """Convert disk to another format."""
    input_ = op.abspath(to_unicode(args.input))
    output = op.abspath(to_unicode(args.output))
    temp_filename = to_unicode(next(tempfile._get_candidate_names()))
    temp_file = op.abspath(to_unicode(".%s" % temp_filename))
    output_fmt = args.format.lower()
    output_filename = "%s.%s" % (output, output_fmt)

    os.environ['LIBGUESTFS_CACHEDIR'] = os.getcwd()
    if args.verbose:
        os.environ['LIBGUESTFS_DEBUG'] = '1'

    create_disk(input_,
                temp_file,
                args.format,
                args.size,
                args.filesystem,
                args.verbose)
    logger.info("Installing bootloader")
    uuid, _, _ = install_bootloader(temp_file,
                                    args.extlinux_mbr,
                                    args.append)
    generate_fstab(temp_file, uuid, args.filesystem)

    logger.info("Exporting appliance to %s" % output_filename)
    if output_fmt == "qcow2":
        shutil.move(temp_file, output_filename)
    else:
        qemu_convert(temp_file, output_fmt, output_filename)
        if os.path.exists(temp_file):
            os.remove(temp_file)
Example #19
def create_model(x_train, y_train, x_test, y_test):
    """
    Create your model...
    """
    layer_1_size = {{quniform(12, 256, 4)}}
    l1_dropout = {{uniform(0.001, 0.7)}}
    params = {
        'l1_size': layer_1_size,
        'l1_dropout': l1_dropout
    }
    num_classes = 10
    model = Sequential()
    model.add(Dense(int(layer_1_size), activation='relu'))
    model.add(Dropout(l1_dropout))
    model.add(Dense(num_classes, activation='softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer=RMSprop(),
                  metrics=['accuracy'])
    model.fit(x_train, y_train, batch_size=128, epochs=10, validation_data=(x_test, y_test))
    score, acc = model.evaluate(x_test, y_test, verbose=0)
    out = {
        'loss': -acc,
        'score': score,
        'status': STATUS_OK,
        'model_params': params,
    }
    # optionally store a dump of your model here so you can get it from the database later
    temp_name = tempfile.gettempdir()+'/'+next(tempfile._get_candidate_names()) + '.h5'
    model.save(temp_name)
    with open(temp_name, 'rb') as infile:
        model_bytes = infile.read()
    out['model_serial'] = model_bytes
    return out
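Restoring the dump later reverses those steps; a sketch assuming
keras.models.load_model and the same temp-name idiom (restore_model is a
hypothetical helper, not part of the example above):

    from keras.models import load_model

    def restore_model(model_bytes):
        # write the serialized bytes to a temporary .h5 file and load it back
        temp_name = tempfile.gettempdir() + '/' + next(tempfile._get_candidate_names()) + '.h5'
        with open(temp_name, 'wb') as outfile:
            outfile.write(model_bytes)
        try:
            return load_model(temp_name)
        finally:
            os.remove(temp_name)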
Example #20
def abstracts(queryString):
    # First call pubmed to pull out abstracts - one line per abstract
    temp_name = "./" + next(tempfile._get_candidate_names())
    efetchCommand = "esearch -db pubmed -query \"" + queryString + "\" | efetch -mode xml -format abstract | xtract -pattern PubmedArticle -block Abstract -element AbstractText>" + temp_name
    os.system(efetchCommand)

    # Now call Normalizr (Python 3 - oi vey) to normalise text.
    # Normalised text will overwrite the original text.
    normalizeCommand = "/Users/upac004/Python/GOFind-master/normText.py " + temp_name
    os.system(normalizeCommand)

    # Now read in the file to get each abstract and return a list
    theAbstracts = []
    with open(temp_name) as fd:
        for line in fd:
            # Remove any special non-ASCII characters
            line = ''.join([i if ord(i) < 128 else '' for i in line])
            theAbstracts.append(line)
    os.remove(temp_name)

    return theAbstracts
Example #21
def get_tempname():
    """
    Gets a random string for use as a temporary filename.

    :return: A filename that can be used.
    """
    return next(tempfile._get_candidate_names())
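Since _get_candidate_names() is private and only yields a name (not a file),
two processes can pick the same string. When an actual file is wanted, the
public API is race-free — a minimal sketch:

    import os
    import tempfile

    def get_temp_path():
        # mkstemp creates the file and returns an open descriptor, so the
        # name cannot be claimed by anyone else
        fd, path = tempfile.mkstemp()
        os.close(fd)
        return path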
Example #22
def random_names(prefix, suffix=""):
    """Use the same technique that tempfile uses."""

    names = tempfile._get_candidate_names()

    for _ in range(TMP_MAX):
        yield prefix + next(names) + suffix
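Illustrative use, assuming the module-level TMP_MAX constant the generator
references is defined (tempfile.TMP_MAX serves):

    import itertools
    for name in itertools.islice(random_names("job-", ".tmp"), 3):
        print(name)  # e.g. job-k3j9x2_1.tmp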
Example #23
def prepare_queue(options):
    """Prepare a file which holds all hosts (targets) to scan."""
    expanded = False
    try:
        if not options['inputfile']:
            expanded = next(tempfile._get_candidate_names())  # pylint: disable=protected-access
            with open(expanded, 'a') as inputfile:
                inputfile.write(options['target'])
                options['inputfile'] = expanded
        with open(options['inputfile'], 'r') as inputfile:
            targets = []
            for host in [line for line in inputfile.read().splitlines() if line.strip()]:
                if options['dry_run'] or not re.match(r'.*[\.:].*[-/][0-9]+', host):
                    targets.append(host)
                else:
                    arguments = '-nsL'
                    scanner = nmap.PortScanner()
                    scanner.scan(hosts='{0}'.format(host), arguments=arguments)
                    if '.' in scanner.all_hosts():
                        targets += sorted(scanner.all_hosts(),
                                          key=lambda x: tuple(map(int, x.split('.'))))
                    else:
                        targets += scanner.all_hosts()
            with open(options['queuefile'], 'a') as queuefile:
                for target in targets:
                    queuefile.write(target + '\n')
        if expanded:
            os.remove(expanded)
    except IOError as exception:
        abort_program('Could not read/write file: {0}'.format(exception))
Example #24
def get_temp_aln(aln):
    tfname = os.path.join(tempfile._get_default_tempdir(),
                          next(tempfile._get_candidate_names()))
    aln.write(tfname, alignment_format='PIR')
    seqs = get_seqs_from_pir(tfname)
    os.unlink(tfname)
    return seqs
Example #25
    def test(self):
        temp_name = next(tempfile._get_candidate_names())
        default_tmp_dir = tempfile._get_default_tempdir()

        filename = os.path.join(default_tmp_dir, temp_name)

        # Wfuzz results
        with wfuzz.FuzzSession(url=url, **params) as s:
            s.export_to_file(filename)

            if payloads is None:
                fuzzed = s.fuzz()
            else:
                fuzzed = s.get_payloads(payloads).fuzz()

            ret_list = [(x.code, x.history.urlparse.path) for x in fuzzed]

        # repeat test with recipe as only parameter
        with wfuzz.FuzzSession(recipe=[filename]) as s:
            if payloads is None:
                same_list = [(x.code, x.history.urlparse.path) for x in s.fuzz()]
            else:
                same_list = [(x.code, x.history.urlparse.path) for x in s.get_payloads(payloads).fuzz()]

        self.assertEqual(sorted(ret_list), sorted(same_list))
Example #26
def generate_temp_filename(dirname=None, prefix='tmp', suffix=''):
    """Generate a temporary file name with specified suffix and prefix.

    >>> from stonemason.util.tempfn import generate_temp_filename
    >>> generate_temp_filename('/tmp', prefix='hello-', suffix='.tmp') #doctest: +ELLIPSIS
    '/tmp/hello-....tmp'

    :param dirname: Base temp directory, default is system temp dir.
    :type dirname: str
    :param prefix: Prefix of the temporary file name, default is ``tmp``
    :type prefix: str
    :param suffix: Suffix of the temporary file name, default is empty string.
    :type suffix: str
    :return: Generated temporary file name.
    :rtype: str
    :raises: :class:`IOError`
    """
    assert isinstance(suffix, six.string_types)
    assert isinstance(prefix, six.string_types)

    if not dirname:
        dirname = os.path.join(tempfile.gettempdir(), STONEMASON_TEMP_ROOT)
        if not os.path.exists(dirname):
            os.mkdir(dirname)

    for n, temp in enumerate(tempfile._get_candidate_names()):
        if n >= tempfile.TMP_MAX:
            break
        basename = '%s%s%s' % (prefix, temp, suffix)
        fullname = os.path.join(dirname, basename)
        if not os.path.exists(fullname):
            return fullname

    raise IOError(errno.EEXIST, 'Exhausted temporary file names.')
Example #27
def processtable(raster_obj, zone_file, w, out_folder, out_file):
  temp_name = "/" + next(tempfile._get_candidate_names()) + ".dbf"
  arcpy.sa.ZonalStatisticsAsTable(zone_file, 'VALUE', raster_obj, out_folder + temp_name, "DATA", "SUM")
  arcpy.AddField_management(out_folder + temp_name, "elev", 'TEXT')
  arcpy.CalculateField_management(out_folder + temp_name, "elev", "'" + str(w*0.1) + "'", "PYTHON")
  arcpy.TableToTable_conversion(out_folder + temp_name, out_folder, out_file)
  arcpy.Delete_management(out_folder + temp_name)
Example #28
def auto_resume(cmd, logdir=os.curdir, name=None, logger=None, debug=True):
    if name is None:
        name = next(tempfile._get_candidate_names()) + '.log'

    logpath = os.path.join(logdir, name)

    if logger is None:
        logger = _init_default_logger(logpath, debug)

    if isinstance(cmd, list):
        cmd = ' '.join(cmd)

    while True:
        is_first_line = True
        for line in CommandRunner.run(cmd):
            if is_first_line:
                is_first_line = False
                logger.info('start `%s`'%cmd)

            if line.extra_attrs['tag'] is CommandRunner.Status.STDOUT:
                logger.debug(line)
            else:
                logger.warning(line)

        logger.error('found %s exit...'%cmd)
Example #29
    def test(self):
        if not expected_list:
            return
        temp_name = next(tempfile._get_candidate_names())
        default_tmp_dir = tempfile._get_default_tempdir()

        filename = os.path.join(default_tmp_dir, temp_name)

        # Wfuzz results
        with wfuzz.FuzzSession(url=url, **dict(list(params.items()) + list(dict(save=filename).items()))) as s:
            if payloads is None:
                fuzzed = s.fuzz()
            else:
                fuzzed = s.get_payloads(payloads).fuzz()

            ret_list = [(x.code, x.history.urlparse.path) for x in fuzzed]

        # repeat test with performing the same saved request
        with wfuzz.FuzzSession(payloads=[("wfuzzp", dict(fn=filename))], url="FUZZ") as s:
            same_list = [(x.code, x.history.urlparse.path) for x in s.fuzz()]

        self.assertEqual(sorted(ret_list), sorted(same_list))

        # repeat test with performing a FUZZ[url] saved request
        with wfuzz.FuzzSession(payloads=[("wfuzzp", dict(fn=filename))], url="FUZZ[url]") as s:
            same_list = [(x.code, x.history.urlparse.path) for x in s.fuzz()]

        self.assertEqual(sorted(ret_list), sorted(same_list))
Example #30
def deep_matches_ims(exec_name, im1_fp, im2_fp, binary_out=False, has_num_matches=True, extra_params=""):
    matches = []
    # get a temporary filename
    tmp_fname = next(tempfile._get_candidate_names())
    if binary_out:
        extra_params += " -b"
    cmd = '%s "%s" "%s" %s -out %s' % (exec_name, im1_fp, im2_fp, extra_params, tmp_fname)
    try:
        t1 = datetime.now()
        ret = os.system(cmd)
        t2 = datetime.now()
        delta = t2 - t1
        #sys.stdout.write("%s - %.2fs | " % (ret, delta.seconds + delta.microseconds/1E6))
        if binary_out:
            matches = read_output_matches_binary(tmp_fname)
        else:
            matches = read_output_matches(tmp_fname, has_num_matches)
    finally:
        # delete the output file if it exists
        try:
            os.remove(tmp_fname)
        except OSError:
            pass

    return matches
Example #31
    def test_same_thing(self):
        # _get_candidate_names always returns the same object
        a = tempfile._get_candidate_names()
        b = tempfile._get_candidate_names()

        self.assert_(a is b)
Example #32
    def test_retval(self):
        # _get_candidate_names returns a _RandomNameSequence object
        obj = tempfile._get_candidate_names()
        self.assert_(isinstance(obj, tempfile._RandomNameSequence))
Example #33
def main():
    args_parser = argparse.ArgumentParser()

    args_parser.add_argument('Target',
                             help='INPUT  (target) file, in CSV format')
    args_parser.add_argument('--output_dir',
                             '-o',
                             help='Where to put the output tree (newick)',
                             required=True)
    args_parser.add_argument('--email',
                             '-e',
                             help="Email for use with NCBI",
                             required=True)
    args_parser.add_argument('--cm',
                             '-c',
                             help='Covariance matrix for cmalign',
                             required=True)
    args_parser.add_argument('--threads',
                             '-t',
                             help='Number of threads',
                             default=1)

    args = args_parser.parse_args()

    threads = str(int(args.threads))
    cm_file = args.cm

    output_dir = args.output_dir

    # Load data
    target_df = pd.read_csv(args.Target)

    temp_fasta_fh = tempfile.NamedTemporaryFile(mode='w', delete=False)

    # Some accessions may be duplicated. Use a set to transfer them only once.
    transferred_accessions = set()
    # If our file is missing, put it here so we can grab them
    missing_accessions = set()
    # For each row, get the file and then sequence, put in into the temp fasta file
    for idx, row in target_df.iterrows():
        try:
            source_file_reads = SeqIO.parse(row.source_file, 'fasta')
            for sr in source_file_reads:
                if sr.id == row.sequence_id:
                    if sr.id not in transferred_accessions:
                        print("Transferring", sr.id)
                        SeqIO.write(sr, temp_fasta_fh, 'fasta')
                        transferred_accessions.add(sr.id)
                    # Now we can stop our loop and go to the next row
                    break
        except IOError:
            sys.stderr.write("Could not find " + str(row.source_file) + "\n")
            missing_accessions.add(row.sequence_id)
            # No sense in searching in an empty file

    # OK, now try to grab the missing accessions from the NCBI nt database.

    for missing_acc in list(missing_accessions):

        try:
            h = Entrez.efetch(db='nucleotide',
                              id=missing_acc,
                              retmode='fasta',
                              rettype='fasta')
        except Exception:
            # accessions are frequently in the format acc.start.end; try that now
            acc_list = missing_acc.split('.')
            try:
                start = int(acc_list[1])
            except IndexError:
                start = None
            try:
                end = int(acc_list[2])
            except IndexError:
                end = None

            try:
                h = Entrez.efetch(db='nucleotide',
                                  id=acc_list[0],
                                  retmode='fasta',
                                  rettype='fasta')
            except Exception:
                # Still no dice
                h = None
            if h:
                try:
                    sr = SeqIO.read(h, 'fasta')
                    # If we have a start and end, slice the recovered sequence to them
                    if start and end:
                        sr.seq = sr.seq[start:end]
                    missing_accessions.remove(missing_acc)
                    SeqIO.write(sr, temp_fasta_fh, 'fasta')
                except Exception:
                    pass

    if len(missing_accessions) > 0:
        sys.stderr.write("Still missing accessions " +
                         ", ".join(list(missing_accessions)) + "\n")

    # Be sure we flush everything to disk
    temp_fasta_fh.close()
    # Great. Now we need to align these sequences into phylip format. We will use cmalign

    # RUN
    # cmalign --cpu=4 --dnaout --noprob -o temp.sto /fh/fast/fredricks_d/pipeline_group/communities/data/RRNA_16S_BACTERIA.cm seqs.fasta

    aln_temp_name = next(tempfile._get_candidate_names())
    aln_temp_path = os.path.join(tempfile.gettempdir(), aln_temp_name)

    subprocess.call([
        'cmalign',
        '--cpu=' + threads,
        '--dnaout',
        '--noprob',
        '-o' + aln_temp_path,
        cm_file,
        temp_fasta_fh.name,
    ])

    # Output is in stockholm. Convert to 'phylip-relaxed' with alignIO
    aln_phyl_h = tempfile.NamedTemporaryFile(mode='w',
                                             suffix='.phl',
                                             delete=False)
    aln = AlignIO.read(aln_temp_path, 'stockholm')
    AlignIO.write(aln, aln_phyl_h, 'phylip-relaxed')
    aln_phyl_h.close()

    # Now make a tree with the alignment

    # raxmlHPC-PTHREADS-SSE3 -m GTRGAMMA -p 12345 -s align.phylip -n source.tre -T 4

    subprocess.call([
        'raxmlHPC', '-mGTRGAMMA', '-p12345', '-T' + threads,
        '-s' + aln_phyl_h.name, '-w' + os.path.abspath(output_dir),
        '-ntarget.tre'
    ])
Example #34
    def generate(self, out_path, aux, idx_in, idx_out):
        if self.scheme.version != 1:
            raise RuntimeError(
                'This model requires a "VERSION: STEJSKALTANNER" scheme.')

        scheme_high = amico.lut.create_high_resolution_scheme(self.scheme,
                                                              b_scale=1E6)
        filename_scheme = pjoin(out_path, 'scheme.txt')
        np.savetxt(filename_scheme,
                   scheme_high.raw,
                   fmt='%15.8e',
                   delimiter=' ',
                   header='VERSION: STEJSKALTANNER',
                   comments='')

        # temporary file where to store "datasynth" output
        filename_signal = pjoin(
            tempfile._get_default_tempdir(),
            next(tempfile._get_candidate_names()) + '.Bfloat')

        nATOMS = len(self.Rs) + len(self.ICVFs) + len(self.d_ISOs)
        progress = ProgressBar(n=nATOMS, prefix="   ", erase=True)

        # Cylinder(s)
        for R in self.Rs:
            CMD = 'datasynth -synthmodel compartment 1 CYLINDERGPD %E 0 0 %E -schemefile %s -voxels 1 -outputfile %s 2> /dev/null' % (
                self.d_par * 1E-6, R, filename_scheme, filename_signal)
            subprocess.call(CMD, shell=True)
            if not exists(filename_signal):
                raise RuntimeError(
                    'Problems generating the signal with "datasynth"')
            signal = np.fromfile(filename_signal, dtype='>f4')
            if exists(filename_signal):
                remove(filename_signal)

            lm = amico.lut.rotate_kernel(signal, aux, idx_in, idx_out, False)
            np.save(pjoin(out_path, 'A_%03d.npy' % progress.i), lm)
            progress.update()

        # Zeppelin(s)
        for d in [self.d_par * (1.0 - ICVF) for ICVF in self.ICVFs]:
            CMD = 'datasynth -synthmodel compartment 1 ZEPPELIN %E 0 0 %E -schemefile %s -voxels 1 -outputfile %s 2> /dev/null' % (
                self.d_par * 1E-6, d * 1e-6, filename_scheme, filename_signal)
            subprocess.call(CMD, shell=True)
            if not exists(filename_signal):
                raise RuntimeError(
                    'Problems generating the signal with "datasynth"')
            signal = np.fromfile(filename_signal, dtype='>f4')
            if exists(filename_signal):
                remove(filename_signal)

            lm = amico.lut.rotate_kernel(signal, aux, idx_in, idx_out, False)
            np.save(pjoin(out_path, 'A_%03d.npy' % progress.i), lm)
            progress.update()

        # Ball(s)
        for d in self.d_ISOs:
            CMD = 'datasynth -synthmodel compartment 1 BALL %E -schemefile %s -voxels 1 -outputfile %s 2> /dev/null' % (
                d * 1e-6, filename_scheme, filename_signal)
            subprocess.call(CMD, shell=True)
            if not exists(filename_signal):
                raise RuntimeError(
                    'Problems generating the signal with "datasynth"')
            signal = np.fromfile(filename_signal, dtype='>f4')
            if exists(filename_signal):
                remove(filename_signal)

            lm = amico.lut.rotate_kernel(signal, aux, idx_in, idx_out, True)
            np.save(pjoin(out_path, 'A_%03d.npy' % progress.i), lm)
            progress.update()
Example #35
    def download_pack(self, url, pack, options):
        packxml = uecommunication.get_public_software_list(url, options, pack)
        try:
            root = etree.fromstring(packxml)
        except:
            print(self.xml)
            print('Error reading xml response in download_action')
            logging.exception('Error reading xml response in download_action')
            raise
        try:
            if not root.findall('Package'):
                print('Package ' + str(pack) + ' not found')
                logging.info('Package ' + str(pack) + ' not found')

            for pack in root.findall('Package'):
                try:
                    self.download_print_time()
                    print('Package: ' + pack.find('Name').text)
                    logging.info('Package: ' + pack.find('Name').text)
                    self.pid = pack.find('Pid').text
                    command = pack.find('Command').text
                    option_timeout = 'install_timeout_'
                    self.timeout = self.default_timeout
                    if command.find(option_timeout) != -1:
                        match = re.search(option_timeout + '(.+?)(\r?\n|$)',
                                          command)
                        try:
                            option_timeout += match.group(1)
                            command_value = int(match.group(1))
                            if command_value > 0:
                                self.timeout = command_value
                        except:
                            logmsg = 'Ignoring invalid option \'' + option_timeout + '\''
                            print(logmsg)
                            logging.warning(logmsg)
                    command = re.sub(
                        r"\n\s*\n*", " && ",
                        command)  # Remove blank lines and convert \n to &&
                    command = command.replace(' && download_no_restart', '')
                    command = command.replace(' && no_break_on_error', '')
                    command = command.replace(' && section_end',
                                              '')  # for retro compatibility
                    command = command.replace(' && ' + option_timeout, '')
                    url = pack.find('Url').text
                    packagesum = pack.find('Packagesum').text
                except:
                    print('Error in package xml format')
                    logging.exception('Error in package xml format')
                    raise

                logging.info('Ready to download and execute (manually)')

                if packagesum != 'nofile':
                    try:
                        self.tmpdir = (tempfile.gettempdir() + '/updatengine/' +
                                       next(tempfile._get_candidate_names()) + "/")
                        if not os.path.exists(self.tmpdir):
                            os.makedirs(self.tmpdir)
                        file_name = self.tmpdir + url.split('/')[-1]
                        self.download_tmp(url, file_name, packagesum)
                    except:
                        self.download_print_time()
                        print('Error when downloading: ' + file_name)
                        logging.exception('Error when downloading: ' +
                                          file_name)
                        raise
                    else:
                        print('Install in progress')
                        logging.info('Install in progress')

                        try:
                            os.chdir(self.tmpdir)
                            p = subprocess.Popen(command,
                                                 stderr=subprocess.PIPE,
                                                 shell=True)
                            retcode = p.wait(timeout=self.timeout)
                            if retcode != 0:
                                raise Exception(retcode)
                        except subprocess.TimeoutExpired:
                            p.kill()
                            err = "Timeout expired"
                            print('Error launching action: ' + err)
                            logging.exception('Error launching action: ' + err)
                            raise
                        except Exception as e:
                            import locale
                            console_encoding = locale.getpreferredencoding()
                            if console_encoding == 'cp1252':
                                console_encoding = 'cp850'
                            err = [
                                s.strip().decode(console_encoding)
                                for s in p.stderr.readlines()
                            ]
                            err = ' '.join(err)
                            if len(err):
                                err = err[:450] + (
                                    '...' if len(err) > 450 else
                                    '') + " | Exit code " + str(e)
                            else:
                                err = "Exit code " + str(e)
                            print('Error launching action: ' + str(err))
                            logging.exception('Error launching action: ' + err)
                            raise
                        finally:
                            try:
                                os.chdir(tempfile.gettempdir())
                                shutil.rmtree(tempfile.gettempdir() +
                                              '/updatengine/')
                            except:
                                print('Can\'t delete temp file')
                                logging.info('Can\'t delete temp file')
                else:
                    print('Install in progress')
                    logging.info('Install in progress')

                    try:
                        p = subprocess.Popen(command,
                                             stderr=subprocess.PIPE,
                                             shell=True)
                        retcode = p.wait(timeout=self.timeout)
                        if retcode != 0:
                            raise Exception(retcode)
                    except subprocess.TimeoutExpired:
                        p.kill()
                        err = "Timeout expired"
                        print('Error launching action: ' + err)
                        logging.exception('Error launching action: ' + err)
                        raise
                    except Exception as e:
                        import locale
                        console_encoding = locale.getpreferredencoding()
                        if console_encoding == 'cp1252':
                            console_encoding = 'cp850'
                        err = [
                            s.strip().decode(console_encoding)
                            for s in p.stderr.readlines()
                        ]
                        err = ' '.join(err)
                        if len(err):
                            err = err[:450] + ('...' if len(err) > 450 else
                                               '') + " | Exit code " + str(e)
                        else:
                            err = "Exit code " + str(e)
                        print('Error launching action: ' + str(err))
                        logging.exception('Error launching action: ' + err)
                        raise

                self.download_print_time()
                print('Operation completed')
                logging.info('Operation completed')
        except:
            print('Error detected when launching download_action')
            logging.info('Error detected when launching download_action')
            raise
Example #36
def calc_vmaf(ref,
              dist,
              model_path,
              scaling_algorithm="bicubic",
              phone_model=False,
              dry_run=False,
              verbose=False):
    vmaf_data = []

    if scaling_algorithm not in ALLOWED_SCALERS:
        print_stderr(f"Allowed scaling algorithms: {ALLOWED_SCALERS}")

    try:
        temp_dir = tempfile.gettempdir()

        temp_file_name_vmaf = os.path.join(
            temp_dir,
            next(tempfile._get_candidate_names()) + "-vmaf.txt")

        if verbose:
            print_stderr(
                f"Writing temporary SSIM information to: {temp_file_name_vmaf}"
            )

        vmaf_opts = {
            "model_path": model_path,
            "phone_model": "1" if phone_model else "0",
            "log_path": win_path_check(temp_file_name_vmaf),
            "log_fmt": "json",
            "psnr": "1",
            "ssim": "1",
            "ms_ssim": "1"
        }

        vmaf_opts_string = ":".join(f"{k}={v}" for k, v in vmaf_opts.items())

        filter_chains = [
            f"[1][0]scale2ref=flags={scaling_algorithm}[dist][ref]",
            f"[dist][ref]libvmaf={vmaf_opts_string}"
        ]

        cmd = [
            "ffmpeg", "-nostdin", "-y", "-threads", "1", "-i", ref, "-i", dist,
            "-filter_complex", ";".join(filter_chains), "-an", "-f", "null",
            NUL
        ]

        run_command(cmd, dry_run, verbose)

        if not dry_run:
            with open(temp_file_name_vmaf, "r") as in_vmaf:
                vmaf_log = json.load(in_vmaf)
                for frame_data in vmaf_log["frames"]:
                    # append frame number, increase +1
                    frame_data["metrics"]["n"] = int(
                        frame_data["frameNum"]) + 1
                    vmaf_data.append(frame_data["metrics"])

    finally:
        if os.path.isfile(temp_file_name_vmaf):
            os.remove(temp_file_name_vmaf)

    return vmaf_data
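A hypothetical invocation (the model path here is an assumption; actual model
files ship with libvmaf):

    frames = calc_vmaf("reference.mp4", "distorted.mp4",
                       model_path="vmaf_v0.6.1.pkl", verbose=True)
    print(frames[0]["n"], frames[0]["vmaf"])  # per-frame metrics, 1-indexed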
Example #37
def vet_targets(config, dbc, logger):
    ''' Get new targets and gather facts about them '''
    logger.debug("Starting Target Vetting process")
    while True:
        logger.debug("{0} Items in discovery queue".format(
            len(dbc.discovery_queue())))
        for host in dbc.discovery_queue():
            lookup = dbc.get_target(ip=host)
            facts_by_plugin = {}
            if lookup:
                logger.debug("Target {0} already found as {1}".format(
                    lookup['ip'], lookup['hostname']))
                dbc.pop_discovery(ip=host)
            else:
                logger.debug(
                    "Attempting to gather facts on host {0}".format(host))
                plugins = {
                    'remote':
                    os.listdir("{0}/vetting/remote".format(
                        config['plugin_path'])),
                    'ontarget':
                    os.listdir("{0}/vetting/ontarget".format(
                        config['plugin_path']))
                }
                for local in plugins['remote']:
                    logger.debug(
                        "Executing vetting plugin (local): {0}".format(local))
                    local_cmd = "{0}/vetting/remote/{1} {2}".format(
                        config['plugin_path'], local, host)
                    with fabric.api.hide('output', 'running', 'warnings'):
                        try:
                            results = fabric.api.local(local_cmd, capture=True)
                            if results.succeeded:
                                try:
                                    facts_by_plugin[local] = json.loads(
                                        results)
                                    logger.debug("Found facts: {0}".format(
                                        len(facts_by_plugin[local])))
                                except Exception as e:
                                    logger.debug("Could not parse output" + \
                                                 " from vetting plugin {0}".format(local))
                        except Exception as e:
                            logger.debug("Could not execute local vetting" + \
                                         " plugin {0} against host {1}: {2}".format(
                                             local_cmd, host, e.message))
                for ontarget in plugins['ontarget']:
                    logger.debug(
                        "Executing vetting plugin (ontarget): {0}".format(
                            ontarget))
                    fabric.api.env = core.fab.set_env(config, fabric.api.env)
                    fabric.api.env.host_string = host
                    dest_name = next(tempfile._get_candidate_names())
                    destination = "{0}/{1}".format(
                        config['discovery']['upload_path'], dest_name)
                    ontarget_plugin = "{0}/vetting/ontarget/{1}".format(
                        config['plugin_path'], ontarget)
                    with fabric.api.hide('output', 'running', 'warnings'):
                        try:
                            logger.debug(
                                "Uploading vetting plugin on target: {0}".
                                format(destination))
                            upload_results = fabric.api.put(
                                ontarget_plugin, destination)
                            if upload_results.succeeded:
                                logger.debug("Executing {0} on target".format(
                                    destination))
                                results = fabric.api.run(
                                    "chmod 700 {0} && {0}".format(destination))
                                if results.succeeded:
                                    try:
                                        facts_by_plugin[ontarget] = json.loads(
                                            results)
                                        logger.debug("Found facts: {0}".format(
                                            len(facts_by_plugin[ontarget])))
                                    except Exception as e:
                                        logger.debug("Could not parse output" + \
                                                     " from vetting plugin {0}".format(ontarget))
                        except Exception as e:
                            logger.debug(
                                "Could not login to discovered host {0} - {1}".
                                format(host, e))
                # Save gathered facts
                system_info = {'facts': {}}
                for item in facts_by_plugin.keys():
                    logger.debug("Appending facts: {0}".format(
                        facts_by_plugin[item]))
                    system_info['facts'].update(facts_by_plugin[item])
                system_info['ip'] = host
                if "hostname" not in system_info:
                    try:
                        system_info["hostname"] = socket.gethostbyaddr(host)[0]
                    except Exception as e:
                        logger.debug(
                            "Exception while looking up target hostname: {0}".
                            format(e))
                        system_info["hostname"] = host
                if dbc.save_target(target=system_info):
                    dbc.pop_discovery(ip=host)
        if "unit-testing" in config.keys():
            break
        time.sleep(config['discovery']['vetting_interval'])
Example #38
    def src(self):
        if self._values['src'] is not None:
            return self._values['src']
        result = next(tempfile._get_candidate_names()) + '.ucs'
        self._values['src'] = result
        return result
Example #39
def get_temp_file_name():
    return next(tempfile._get_candidate_names())
Example #40
def get_temp_filename():
    return os.path.join(tempfile.gettempdir(), next(tempfile._get_candidate_names()))
Example #41
    def disable_logger(self):
        temp_name = path.join(gettempdir(), next(_get_candidate_names()))
        tlog.startLogging(open(temp_name, "w"), setStdout=False)
Example #42
    def _generate_tmp_file_path(self):
        # pylint: disable=protected-access
        return os.path.join(self._tmp_dir,
                            next(tempfile._get_candidate_names()))
Example #43
def screenshot(img):
    dirname = tempfile._get_default_tempdir()
    filename = next(tempfile._get_candidate_names()) + '.png'
    filename = os.path.join(dirname, filename)
    print('Screenshot %s' % filename)
    cv2.imwrite(filename, img)
Example #44
def do_portscan(host, options, logfile, stop_event, host_results):
    """Perform a portscan.

    Args:
        host:         Target host.
        options:      Dictionary object containing options.
        logfile:      Filename where logfile will be written to.
        stop_event:   Event handler for stop event
        host_results: Host results dictionary

    Returns:
        A list with tuples of open ports and the protocol.
    """
    ports = []
    open_ports = []
    if not options["nmap"]:
        if options["port"]:
            ports = [
                int(port) for port in options["port"].split(",")
                if port.isdigit()
            ]
            return list(zip(ports, ["unknown"] * len(ports)))
        return ALLPORTS
    if ":" in host:
        options["nmap_arguments"] += " -6"
    logging.info("%s Starting nmap", host)
    logging.log(COMMAND, "nmap %s %s", options["nmap_arguments"], host)
    if options["dry_run"]:
        return ALLPORTS
    try:
        temp_file = "nmap-{0}-{1}".format(host,
                                          next(
                                              tempfile._get_candidate_names()))  # pylint: disable=protected-access
        scanner = nmap.PortScanner()
        scanner.scan(
            hosts=host,
            arguments="{0} -oN {1}".format(options["nmap_arguments"],
                                           temp_file),
        )
        for ip_address in [
                x for x in scanner.all_hosts()
                if scanner[x] and scanner[x].state() == "up"
        ]:
            ports = [
                port for port in scanner[ip_address].all_tcp()
                if scanner[ip_address]["tcp"][port]["state"] == "open"
            ]
            for port in ports:
                open_ports.append(
                    [port, scanner[ip_address]["tcp"][port]["name"]])
        check_nmap_log_for_alerts(temp_file, host_results, host, options)
        append_file(logfile, options, temp_file)
        if open_ports:
            logging.info("%s Found open TCP ports %s", host, open_ports)
        else:
            # Format logmessage as info message, so that it ends up in logfile
            logging.log(LOGS, "[*] %s No open ports found", host)
    except (AssertionError, nmap.PortScannerError) as exception:
        if stop_event.isSet():
            logging.debug("%s nmap interrupted", host)
        else:
            logging.log(
                STATUS,
                "%s Issue with nmap %s: %s",
                host,
                options["nmap_arguments"],
                exception,
            )
        open_ports = [UNKNOWN]
    finally:
        if os.path.isfile(temp_file):
            os.remove(temp_file)
    host_results["ports"] = ports
    return open_ports
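Aside on the early return above: in Python 3 a bare zip() is a one-shot iterator, so a caller that first membership-tests it (as process_host below does with UNKNOWN in open_ports) silently drains it before iterating. A small self-contained demonstration:

ports = [22, 80]
pairs = zip(ports, ["unknown"] * len(ports))
print(("x", 0) in pairs)   # False -- but the membership test consumed the iterator
print(list(pairs))         # [] -- the pairs are gone

pairs = list(zip(ports, ["unknown"] * len(ports)))
print(("x", 0) in pairs)   # False
print(pairs)               # [(22, 'unknown'), (80, 'unknown')] -- still intact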
Example #45
def process_host(options, host_queue, output_queue, finished_queue, stop_event,
                 results):
    """
    Worker thread: Process each host atomic, add output files to output_queue,
    and finished hosts to finished_queue.
    """
    while host_queue.qsize() and not stop_event.wait(0.01):
        try:
            host = host_queue.get()
            host_logfile = (host + "-" + next(tempfile._get_candidate_names()))  # pylint: disable=protected-access
            logging.debug("%s Processing (%s items left in host queue)", host,
                          host_queue.qsize())
            host_results = {}
            open_ports = do_portscan(host, options, host_logfile, stop_event,
                                     host_results)
            expected_ports = ALLOWED_OPEN_PORTS
            if ("targets" in options["settings"]
                    and host in options["settings"]["targets"] and
                    "allowed_ports" in options["settings"]["targets"][host]):
                expected_ports = options["settings"]["targets"][host][
                    "allowed_ports"]
            if open_ports:
                if UNKNOWN in open_ports:
                    logging.info("%s Scan interrupted ?", host)
                else:
                    for port, protocol in open_ports:
                        if stop_event.isSet():
                            logging.info("%s Scan interrupted ?", host)
                            break
                        # Sometimes nmap detects webserver as 'ssl/ssl'
                        if port not in expected_ports:
                            add_item(
                                host_results,
                                host,
                                port,
                                options,
                                "Unexpected open port found",
                                ALERT,
                            )
                        if "http" in protocol or "ssl" in protocol:
                            http_checks(
                                host,
                                port,
                                protocol,
                                options,
                                host_logfile,
                                host_results,
                            )
                        if ("ssl" in protocol or port in SSL_PORTS
                                or options["force_ssl"]):
                            tls_checks(
                                host,
                                port,
                                protocol,
                                options,
                                host_logfile,
                                host_results,
                            )
            if os.path.isfile(host_logfile):
                if os.stat(host_logfile).st_size:
                    with open(host_logfile, "r") as read_file:
                        output_queue.put(read_file.read())
                os.remove(host_logfile)
            if not stop_event.isSet():  # Do not flag host as being done
                results["results"][host] = host_results
                finished_queue.put(host)
            host_queue.task_done()
        except queue.Empty:
            break
    logging.debug("Exiting process_host thread, queue contains %s items",
                  host_queue.qsize())
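The drain-until-empty loop above follows a common queue/worker shape. A runnable skeleton of just that pattern, with a stand-in body instead of the real scan logic (all names here are mine, not from the source):

import queue
import threading

def worker(host_queue, finished_queue, stop_event):
    # Same shape as process_host: poll the stop event, drain the queue.
    while host_queue.qsize() and not stop_event.wait(0.01):
        try:
            host = host_queue.get()
            finished_queue.put(host)  # stand-in for the real per-host work
            host_queue.task_done()
        except queue.Empty:
            break

host_queue, finished_queue = queue.Queue(), queue.Queue()
stop_event = threading.Event()
for host in ["192.0.2.1", "192.0.2.2"]:  # RFC 5737 test addresses
    host_queue.put(host)

threads = [threading.Thread(target=worker,
                            args=(host_queue, finished_queue, stop_event))
           for _ in range(2)]
for t in threads:
    t.start()
for t in threads:
    t.join()
print(list(finished_queue.queue))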
Example #46
def calc_ssim_psnr(ref,
                   dist,
                   scaling_algorithm="bicubic",
                   dry_run=False,
                   verbose=False):
    psnr_data = []
    ssim_data = []

    if scaling_algorithm not in ALLOWED_SCALERS:
        print_stderr(f"Allowed scaling algorithms: {ALLOWED_SCALERS}")

    try:
        temp_dir = tempfile.gettempdir()

        temp_file_name_ssim = os.path.join(
            temp_dir,
            next(tempfile._get_candidate_names()) + "-ssim.txt")
        temp_file_name_psnr = os.path.join(
            temp_dir,
            next(tempfile._get_candidate_names()) + "-psnr.txt")

        if verbose:
            print_stderr(
                f"Writing temporary SSIM information to: {temp_file_name_ssim}"
            )
            print_stderr(
                f"Writing temporary PSNR information to: {temp_file_name_psnr}"
            )

        filter_chains = [
            f"[1][0]scale2ref=flags={scaling_algorithm}[dist][ref]",
            "[dist]split[dist1][dist2]", "[ref]split[ref1][ref2]",
            f"[dist1][ref1]psnr={win_path_check(temp_file_name_psnr)}",
            f"[dist2][ref2]ssim={win_path_check(temp_file_name_ssim)}"
        ]

        cmd = [
            "ffmpeg", "-nostdin", "-y", "-threads", "1", "-i", ref, "-i", dist,
            "-filter_complex", ";".join(filter_chains), "-an", "-f", "null",
            NUL
        ]

        run_command(cmd, dry_run, verbose)

        if not dry_run:
            with open(temp_file_name_psnr, "r") as in_psnr:
                # n:1 mse_avg:529.52 mse_y:887.00 mse_u:233.33 mse_v:468.25 psnr_avg:20.89 psnr_y:18.65 psnr_u:24.45 psnr_v:21.43
                lines = in_psnr.readlines()
                for line in lines:
                    line = line.strip()
                    fields = line.split(" ")
                    frame_data = {}
                    for field in fields:
                        k, v = field.split(":")
                        frame_data[k] = round(float(v),
                                              3) if k != "n" else int(v)
                    psnr_data.append(frame_data)

            with open(temp_file_name_ssim, "r") as in_ssim:
                # n:1 Y:0.937213 U:0.961733 V:0.945788 All:0.948245 (12.860441)\n
                lines = in_ssim.readlines()
                for line in lines:
                    line = line.strip().split(" (")[0]  # remove excess
                    fields = line.split(" ")
                    frame_data = {}
                    for field in fields:
                        k, v = field.split(":")
                        if k != "n":
                            # make psnr and ssim keys the same
                            k = "ssim_" + k.lower()
                            k = k.replace("all", "avg")
                        frame_data[k] = round(float(v),
                                              3) if k != "n" else int(v)
                    ssim_data.append(frame_data)

    finally:
        if os.path.isfile(temp_file_name_psnr):
            os.remove(temp_file_name_psnr)
        if os.path.isfile(temp_file_name_ssim):
            os.remove(temp_file_name_ssim)

    return {'ssim': ssim_data, 'psnr': psnr_data}
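A hypothetical invocation (file names are placeholders; the dictionary keys match what the parsers above build):

scores = calc_ssim_psnr("reference.mp4", "distorted.mp4",
                        scaling_algorithm="bicubic", verbose=True)
print(scores["psnr"][0])  # e.g. {'n': 1, ..., 'psnr_avg': 20.89, ...}
print(scores["ssim"][0])  # e.g. {'n': 1, 'ssim_y': 0.937, ..., 'ssim_avg': 0.948}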
Example #47
def downloadFirmware():
    url = "https://github.com/rogerclarkmelbourne/OpenGD77/releases/latest"
    urlBase = "http://github.com"
    httpPool = urllib3.PoolManager()
    pattern = ""
    urlFW = ""
    webContent = ""
        
    print(" - " + "Try to download the firmware for your {} from the project page".format(outputModes[int(outputFormat)]))
    print(" - " + "Retrieve firmware location");

    try:
        response = httpPool.request('GET', url)
    except urllib3.exceptions.HTTPError as e:
        print(str(e))
        sys.exit(-7)
        
    webContent = response.data.decode('utf-8')
    
    if (outputFormat == SGLFormatOutput.GD_77):
        pattern = r'/rogerclarkmelbourne/OpenGD77/releases/download/R([0-9\.]+)/OpenGD77\.sgl'
    elif (outputFormat == SGLFormatOutput.GD_77S):
        pattern = r'/rogerclarkmelbourne/OpenGD77/releases/download/R([0-9\.]+)/OpenGD77S_HS\.sgl'
    elif (outputFormat == SGLFormatOutput.DM_1801):
        pattern = r'/rogerclarkmelbourne/OpenGD77/releases/download/R([0-9\.]+)/OpenDM1801\.sgl'
    elif (outputFormat == SGLFormatOutput.DM_5R):
        pattern = r'/rogerclarkmelbourne/OpenGD77/releases/download/R([0-9\.]+)/OpenDM5R\.sgl'
    
    contentArray = webContent.split("\n")    
    
    for line in contentArray:
        m = re.search(pattern, line)
        if m is not None:
            urlFW = urlBase + m.group(0)
            break
    
    if (len(urlFW)):
        global downloadedFW
        downloadedFW = os.path.join(tempfile.gettempdir(), next(tempfile._get_candidate_names()) + '.sgl')
        
        print(" - " + "Downloading the firmware, please wait");
        
        try:
            response = httpPool.request('GET', urlFW, preload_content=False)
        except urllib3.URLError as e:
            print("".format(e.reason))
            sys.exit(-8)

        length = response.getheader('content-length')
        
        if (length != None):
            length = int(length)
            blocksize = max(4096, (length//100))
        else:
            blocksize = 4096
        
        # Download data        
        with open(downloadedFW, "w+b") as f:
            while True:
                data = response.read(blocksize)
                
                if not data:
                    break
                
                f.write(data)
        return True
    
    return False
Example #48
def gen_tempfile_name(prefix='', suffix=''):
    temp_name = next(tempfile._get_candidate_names())
    default_tmp_dir = tempfile._get_default_tempdir()
    return os.path.join(default_tmp_dir, prefix+temp_name+suffix)
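Usage is straightforward; prefix and suffix are simply concatenated around the generated name (the path shown is illustrative):

tmp_avi = gen_tempfile_name(prefix='capture-', suffix='.avi')
# e.g. /tmp/capture-kf2l3m9x.avi -- note the file itself is not created yet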
Example #49
def get_temp_file():
    temp_name = next(tempfile._get_candidate_names())
    default_tmp_dir = tempfile._get_default_tempdir()

    return os.path.join(default_tmp_dir, temp_name)
Example #50
def modify_model_backprop(model, backprop_modifier):
    """Creates a copy of model by modifying all activations to use a custom op to modify the backprop behavior.

    Args:
        model:  The `keras.models.Model` instance.
        backprop_modifier: One of `{'guided', 'rectified'}`

    Returns:
        A copy of model with modified activations for backwards pass.
    """
    # The general strategy is as follows:
    # - Clone original model via save/load so that upstream callers don't see unexpected results with their models.
    # - Modify all activations in the model as ReLU.
    # - Save modified model so that it can be loaded with custom context modifying backprop behavior.
    # - Call backend specific function that registers the custom op and loads the model under modified context manager.
    # - Maintain cache to save this expensive process on subsequent calls.
    #
    # The reason for this roundabout way is that the graph needs to be rebuilt when any of its layer builder
    # functions change. This is very complicated to do in Keras and makes the implementation very tightly bound
    # with keras internals. By saving and loading models, we don't have to worry about future compatibility.
    #
    # The only exception to this is the way advanced activations are handled which makes use of some keras internal
    # knowledge and might break in the future.

    # 0. Retrieve from cache if previously computed.
    modified_model = _MODIFIED_MODEL_CACHE.get((model, backprop_modifier))
    if modified_model is not None:
        return modified_model

    model_path = os.path.join(tempfile.gettempdir(),
                              next(tempfile._get_candidate_names()) + '.h5')
    try:
        # 1. Clone original model via save and load.
        model.save(model_path)
        modified_model = load_model(model_path)

        # 2. Replace all possible activations with ReLU.
        for i, layer in reverse_enumerate(modified_model.layers):
            if hasattr(layer, 'activation'):
                layer.activation = tf.nn.relu
            if isinstance(layer, _ADVANCED_ACTIVATIONS):
                # NOTE: This code is brittle as it makes use of Keras internal serialization knowledge and might
                # break in the future.
                modified_layer = Activation('relu')
                modified_layer.inbound_nodes = layer.inbound_nodes
                modified_layer.name = layer.name
                modified_model.layers[i] = modified_layer

        # 3. Save model with modifications.
        modified_model.save(model_path)

        # 4. Register modifier and load modified model under custom context.
        modifier_fn = _BACKPROP_MODIFIERS.get(backprop_modifier)
        if modifier_fn is None:
            raise ValueError(
                "'{}' modifier is not supported".format(backprop_modifier))
        modifier_fn(backprop_modifier)

        # 5. Create graph under custom context manager.
        with tf.get_default_graph().gradient_override_map(
            {'Relu': backprop_modifier}):
            #  This should rebuild graph with modifications.
            modified_model = load_model(model_path)

            # Cache to improve subsequent call performance.
            _MODIFIED_MODEL_CACHE[(model, backprop_modifier)] = modified_model
            return modified_model
    finally:
        os.remove(model_path)
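A sketch of a call site, assuming a built Keras model and the 'guided' key registered in _BACKPROP_MODIFIERS (the docstring above names 'guided' and 'rectified'):

# Hypothetical usage; `model` is any keras.models.Model instance.
guided_model = modify_model_backprop(model, 'guided')
# Gradients taken through guided_model now flow through the overridden
# ReLU op; a second call hits the cache and returns the same object.
guided_model_again = modify_model_backprop(model, 'guided')
assert guided_model_again is guided_model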
Example #51
 def setUp(self):
     self.tf = tempfile.NamedTemporaryFile()
     self.tmp_file_prefix = next(tempfile._get_candidate_names())
Example #52
import os
import sys
import dirutils
import tempfile
import platform
import shutil
from pathlib import Path

temp_path = os.path.abspath(sys.argv[1])
directory = os.path.abspath(sys.argv[2])
csv = os.path.abspath(sys.argv[3])
exe = sys.argv[4]
opts = sys.argv[5]

# create temporary dir to run the analyzer
tmpdir_path = os.path.join(str(Path.home()), "tmp",
                           "clang-" + next(tempfile._get_candidate_names()))
shutil.copytree(directory, tmpdir_path)

print("\n======[CLANG]=======")
print("[CWD]:", tmpdir_path)
print("[CSV]:", csv)
print("[EXE]:", exe)
print("[EXE OPTIONS]:", opts)

source_files = dirutils.list_files(tmpdir_path, '.c')
sys_opts = "" if (
    platform.system() != 'Linux'
) else " -I /usr/include -I /usr/include/x86_64-linux-gnu/ -I /usr/lib/clang/6.0/include"

dirutils.file_line_error_header(csv)
dirutils.reset_file(temp_path)
Example #53
    for test_file in test_file_list:
        if len(test_file) > 0:
            parts = test_file.split(',')
            if len(parts) == 2:
                filename = parts[0]
                print(
                    "|---------------------------------------------------------------------------- "
                    + get_time_str())
                print("| checking " + filename + " (" + str(counter) + "/" +
                      str(len(test_file_list)) + ")",
                      end=',')
                counter += 1
                text1 = parts[1].strip().lower()

                input_sound_file = os.path.join(base_path, filename)
                temp_name = next(tempfile._get_candidate_names())
                text_output_tuple = deep_speech_tt(conf["DeepSpeech"],
                                                   temp_name, input_sound_file,
                                                   60)
                text2 = ''.join([item for item, _ in text_output_tuple])

                wer, errs, count = calc_wer(text1, text2)
                wer_list.append(wer)
                total_errors += errs
                total_words += count

                c_wer = total_errors / total_words
                print(" WER:" + str(round(wer, 5)) + ", cumulative WER:" +
                      str(round(c_wer, 5)))
                print("| " + text1)
                print("| " + text2)
Example #54
    def annotate_hypothetical(self, genomes_list):
        '''
        Sort proteins coded by each genome into homologous clusters.

        Inputs
        ------
        genomes_list - list. list of Genome objects

        '''
        output_directory_path = path.join(self.output_directory,
                                          self.GENOME_HYPOTHETICAL)
        mkdir(output_directory_path)

        renamed_genomes = list()
        for genome in genomes_list:
            renamed_genome = next(tempfile._get_candidate_names())
            cmd = f"sed 's/>/>{genome.name}~/g' {genome.path} > {renamed_genome}"
            run_command(cmd)
            renamed_genomes.append(renamed_genome)

        tmp_dir = tempfile.mkdtemp()

        db_path = path.join(output_directory_path, "db")
        clu_path = path.join(output_directory_path, "clu")
        align_path = path.join(output_directory_path, "alignDb")
        blast_output_path = path.join(output_directory_path, "alignDb.m8")
        formatted_blast_output_path = path.join(output_directory_path,
                                                "alignDb.formatted.m8")

        clu_tsv_path = path.join(output_directory_path,
                                 "hypothetical_clusters.tsv")

        logging.info('    - Generating MMSeqs2 database')
        cmd = f"mmseqs createdb {' '.join(renamed_genomes)} {db_path}"
        run_command(cmd)
        for renamed_genome in renamed_genomes:
            os.remove(renamed_genome)

        logging.info('    - Clustering genome proteins')
        cmd = f"mmseqs cluster \
                    {db_path} \
                    {clu_path} \
                    {tmp_dir} \
                    --threads {self.threads} \
                    --min-seq-id {self.percent_id_cutoff} \
                    -c {self.fraction_aligned} \
                    -v 0"

        run_command(cmd)

        logging.info('    - Extracting clusters')
        cmd = f'mmseqs createtsv \
                    {db_path} \
                    {db_path} \
                    {clu_path} \
                    {clu_tsv_path} \
                    --threads {self.threads} \
                    -v 0'

        run_command(cmd)

        if self.annotate_ortholog:

            logging.info(
                '    - Computing Smith-Waterman alignments for clustering results'
            )
            cmd = f"mmseqs alignall \
                        {db_path} \
                        {clu_path} \
                        {align_path} \
                        --alignment-mode 3 \
                        --threads {self.threads} \
                        -v 0"

            run_command(cmd)

            logging.info('    - Converting to BLAST-like output')
            cmd = f"mmseqs createtsv \
                        {db_path} \
                        {db_path} \
                        {align_path} \
                        {blast_output_path} \
                        --threads {self.threads} \
                        -v 0"

            # --format-output query,target,bits
            run_command(cmd)

            logging.info('    - Reformatting BLAST output')
            cmd = "OFS=\"\t\" awk 'FNR==NR{a[$1]=$2;next}{$3=a[$3]; \
                                            $1=\"\"; for(i=2;i<NF;i++){printf(\"%s\t\",$i)} \
                                            printf(\"\\n\")}' %s %s | cut -f1,2,5 > %s" \
                % ("%s", db_path + '.lookup', blast_output_path, formatted_blast_output_path)
            run_command(cmd)

            ortholog_dict = self.run_mcl(formatted_blast_output_path,
                                         output_directory_path)
            ortholog_ids = ortholog_dict.keys()
        else:
            ortholog_dict = dict()
            ortholog_ids = list()
        cluster_ids = self.parse_cluster_results(clu_tsv_path, genomes_list,
                                                 ortholog_dict,
                                                 output_directory_path)
        return cluster_ids, ortholog_ids
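For clarity, what the sed 's/>/>{genome.name}~/g' step above does, re-expressed in pure Python (a sketch only; the real code shells out to sed):

def prefix_fasta_headers(genome_name, in_path, out_path):
    # Rewrite each FASTA header '>seq1' as '>genome_name~seq1' so that
    # cluster members can later be traced back to their source genome.
    with open(in_path) as src, open(out_path, 'w') as dst:
        for line in src:
            if line.startswith('>'):
                line = '>' + genome_name + '~' + line[1:]
            dst.write(line)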
Example #55
def fix_date(csv_file,
             out_file=None,
             offset_range=None,
             write_header=True,
             wid=0,
             verbose=True,
             progress: ValueProxy = None):
    """

    :param csv_file: Input csv file
    :param out_file: Output csv file (Optional)
    :param offset_range: Range considered in file
    :param write_header: Write out header
    :param wid: Process ID
    :param verbose: Print log
    :param progress: Tracking progress
    :return:
    """
    temp_name = next(tempfile._get_candidate_names())
    pdir = os.path.dirname(csv_file)
    temp_path = os.path.join(pdir, temp_name)
    fwrite = open(temp_path, "w")

    fsize = get_file_size(csv_file)
    fobj = open(csv_file, 'r')

    if offset_range is None:
        offset_range = [0, fsize]
    fobj.seek(offset_range[0])
    count = 0
    header = head(csv_file, 1)
    prev_offset = fobj.tell()
    patch_lines = []

    start_time = time.time()
    if write_header: fwrite.write(header)
    line = fobj.readline()
    while line:
        line = line.strip()
        if fobj.tell() > (offset_range[1] + 1):
            break
        if fobj.tell() == offset_range[1] or fobj.tell() == fsize:
            if len(line): patch_lines.append(_fix_sample(line))
            break
        if len(line) == 0:
            line = fobj.readline()
            count += 1
            continue
        if count == 0 and offset_range[0] == 0:
            line = fobj.readline()
            count += 1
            continue
        else:
            patch_lines.append(_fix_sample(line))
            if len(patch_lines) > 10000:
                _check_start_and_write(fwrite, patch_lines)
                patch_lines = []
                processed = fobj.tell() - prev_offset
                per = float(processed) / fsize
                prev_offset = fobj.tell()
                if progress is not None:
                    progress.value += per
        count += 1
        line = fobj.readline()
    _check_start_and_write(fwrite, patch_lines)
    fwrite.close()
    end_time = time.time()
    if verbose:
        logger.info(
            "wid%d/split(): Splitting process took %0.4f seconds, number of lines: %d"
            % (wid, end_time - start_time, count))

    processed = fobj.tell() - prev_offset
    per = float(processed) / fsize
    if progress is not None:
        progress.value += per
    fobj.close()
    if out_file:
        shutil.move(temp_path, out_file)
        return out_file
    shutil.move(temp_path, csv_file)
    return csv_file
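A hypothetical single-process call (paths are placeholders; the multiprocessing path would additionally pass offset_range, wid, and a shared progress value):

fixed_path = fix_date("events.csv", out_file="events_fixed.csv")
print(fixed_path)  # "events_fixed.csv" -- fix_date returns the path it wrote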
Example #56
def File_Get_TempName(Extension):
    if 'tempnam' in dir(os):  # sys.version_info.major < 3: # ArcGIS Desktop
        return os.tempnam(None, 'arc_saga_') + '.' + Extension
    import tempfile
    return tempfile._get_default_tempdir() + os.sep + 'arcsaga_' + next(
        tempfile._get_candidate_names()) + '.' + Extension
Example #57
# get level values
regions = {}
with nc(aggfile) as f:
    for lev in levels:
        regions[lev] = f.variables['lev_' + lev][:]

# make temporary directory
tempdir = os.getcwd() + sep + 'temp.%s.%04d' % (variable, chunk)
if not exists(tempdir):
    makedirs(tempdir)

nco = Nco()

# extract variable
tempinput = tempdir + sep + next(tempfile._get_candidate_names())
run_nco(nco, 'ncks', input=inputfile, output=tempinput, options='-h -v %s %s' % (variable, chunkselect))

# append weight and agg files to input
for f in [weightfile, aggfile]:
    options = '-h -d lat,%f,%f -d lon,%f,%f' % (minlat, maxlat, minlon, maxlon)
    with nc(f) as ncf:
        if chunkdim in ncf.dimensions:
            if len(ncf.dimensions[chunkdim]) != ntasks:
                raise Exception('Length of %s in file %s not consistent with input file' % (chunkdim, f))
            options += ' ' + chunkselect
    tempf = tempdir + sep + next(tempfile._get_candidate_names())
    run_nco(nco, 'ncks', input=f, output=tempf, options=options)
    run_nco(nco, 'ncks', input=tempf, output=tempinput, options='-h -A')
    remove(tempf)
Example #58
    def download_action(self, url, xml, options=None):
        self.urlinv = url
        self.xml = xml
        self.options = options
        try:
            root = etree.fromstring(self.xml)
        except:
            print(self.xml)
            print('Error reading xml response in download_action')
            logging.info('Error reading xml response in download_action')
            raise
        # download_launch is used to know whether a download action happened
        download_launch = None
        self.max_download_action -= 1
        try:
            # Install packages
            for pack in root.findall('Package'):
                try:
                    command = pack.find('Command').text
                    if command.find('download_no_restart'
                                    ) != -1 and self.max_download_action < 4:
                        continue
                    self.download_print_time()
                    print('Package: ' + pack.find('Name').text)
                    logging.info('Package: ' + pack.find('Name').text)
                    self.mid = pack.find('Id').text
                    self.pid = pack.find('Pid').text
                    no_break_on_error = None
                    if command.find('no_break_on_error') != -1:
                        no_break_on_error = True
                    option_timeout = 'install_timeout_'
                    self.timeout = self.default_timeout
                    if command.find(option_timeout) != -1:
                        match = re.search(option_timeout + '(.+?)(\r?\n|$)',
                                          command)
                        try:
                            option_timeout += match.group(1)
                            command_value = int(match.group(1))
                            if command_value > 0:
                                self.timeout = command_value
                        except:
                            logmsg = 'Ignoring invalid option \'' + option_timeout + '\''
                            print(logmsg)
                            self.download_send_status(logmsg)
                            logging.warning(logmsg)
                    command = re.sub(
                        r"\n\s*\n*", " && ",
                        command)  # Remove blank lines and convert \n to &&
                    command = command.replace(' && download_no_restart', '')
                    command = command.replace(' && no_break_on_error', '')
                    command = command.replace(' && section_end',
                                              '')  # for retro compatibility
                    command = command.replace(' && ' + option_timeout, '')
                    url = pack.find('Url').text
                    packagesum = pack.find('Packagesum').text
                    download_launch = True
                    status_msg = True
                except:
                    print('Error in package xml format')
                    logging.exception('Error in package xml format')
                    raise

                self.download_send_status('Ready to download and execute')
                logging.info('Ready to download and execute')

                if packagesum != 'nofile':
                    try:
                        self.tmpdir = tempfile.gettempdir(
                        ) + '/updatengine/' + next(
                            tempfile._get_candidate_names()) + "/"
                        if not os.path.exists(self.tmpdir):
                            os.makedirs(self.tmpdir)
                        file_name = self.tmpdir + url.split('/')[-1]
                        self.download_tmp(url, file_name, packagesum)
                    except:
                        self.download_print_time()
                        print('Error when downloading: ' + file_name)
                        self.download_send_status('Error downloading file ' +
                                                  file_name)
                        logging.exception('Error when downloading: ' +
                                          file_name)
                        raise
                    else:
                        print('Install in progress')
                        logging.info('Install in progress')
                        self.download_send_status('Install in progress')

                        try:
                            os.chdir(self.tmpdir)
                            p = subprocess.Popen(command,
                                                 stderr=subprocess.PIPE,
                                                 shell=True)
                            retcode = p.wait(timeout=self.timeout)
                            if retcode != 0:
                                raise Exception(retcode)
                        except subprocess.TimeoutExpired:
                            p.kill()
                            err = "Timeout expired"
                            print('Error launching action: ' + err)
                            self.download_send_status(
                                'Error launching action: ' + err)
                            logging.exception('Error launching action: ' + err)
                            if no_break_on_error is True:
                                status_msg = None
                            else:
                                raise
                        except Exception as e:
                            import locale
                            console_encoding = locale.getpreferredencoding()
                            if console_encoding == 'cp1252':
                                console_encoding = 'cp850'
                            err = [
                                s.strip().decode(console_encoding)
                                for s in p.stderr.readlines()
                            ]
                            err = ' '.join(err)
                            if len(err):
                                err = err[:450] + (
                                    '...' if len(err) > 450 else
                                    '') + " | Exit code " + str(e)
                            else:
                                err = "Exit code " + str(e)
                            print('Error launching action: ' + str(err))
                            self.download_send_status(
                                'Error launching action: ' + err)
                            logging.exception('Error launching action: ' + err)
                            if no_break_on_error is True:
                                status_msg = None
                            else:
                                raise
                        finally:
                            try:
                                os.chdir(tempfile.gettempdir())
                                shutil.rmtree(tempfile.gettempdir() +
                                              '/updatengine/')
                            except:
                                print('Can\'t delete temp file')
                                logging.info('Can\'t delete temp file')
                else:
                    print('Install in progress')
                    logging.info('Install in progress')
                    self.download_send_status('Install in progress')

                    try:
                        p = subprocess.Popen(command,
                                             stderr=subprocess.PIPE,
                                             shell=True)
                        retcode = p.wait(timeout=self.timeout)
                        if retcode != 0:
                            raise Exception(retcode)
                    except subprocess.TimeoutExpired:
                        p.kill()
                        err = "Timeout expired"
                        print('Error launching action: ' + err)
                        self.download_send_status('Error launching action: ' +
                                                  err)
                        logging.exception('Error launching action: ' + err)
                        if no_break_on_error is True:
                            status_msg = None
                        else:
                            raise
                    except Exception as e:
                        import locale
                        console_encoding = locale.getpreferredencoding()
                        if console_encoding == 'cp1252':
                            console_encoding = 'cp850'
                        err = [
                            s.strip().decode(console_encoding)
                            for s in p.stderr.readlines()
                        ]
                        err = ' '.join(err)
                        if len(err):
                            err = err[:450] + ('...' if len(err) > 450 else
                                               '') + " | Exit code " + str(e)
                        else:
                            err = "Exit code " + str(e)
                        print('Error launching action: ' + str(err))
                        self.download_send_status('Error launching action: ' +
                                                  err)
                        logging.exception('Error launching action: ' + err)
                        if no_break_on_error is True:
                            status_msg = None
                        else:
                            raise

                if status_msg is True:
                    self.download_print_time()
                    print('Operation completed')
                    self.download_send_status('Operation completed')
                    logging.info('Operation completed')

            if not root.findall('Package'):
                print('Done, no package to install')
                logging.info('Done, no package to install')
        except:
            print('Error detected when launching download_action')
            logging.exception('Error detected when launching download_action')
            raise
        else:
            # Loop download action
            if download_launch:
                try:
                    self.download_print_time()
                    if self.max_download_action == 0:
                        print('End of download and install')
                    else:
                        print('Perform a new check')
                    time.sleep(5)
                    inventory = ueinventory.build_inventory()
                    response_inventory = uecommunication.send_inventory(
                        self.urlinv, inventory[0], options)
                    extended_inventory = ueinventory.build_extended_inventory(
                        response_inventory)
                    if extended_inventory:
                        response_inventory = uecommunication.send_extended_inventory(
                            self.urlinv, extended_inventory, options)
                    if self.max_download_action > 0:
                        self.download_action(self.urlinv, response_inventory,
                                             options)
                except:
                    print('Error in loop download action')
                    logging.exception('Error in loop download action')
Example #59
 def test_download(self):
     temp_file = os.path.join(tempfile.gettempdir(), next(tempfile._get_candidate_names()))
     self.s3.download(key='path/to/object2', filename=temp_file)
     with open(temp_file, 'r') as f:
         self.assertEqual(f.read(), 'object2')
Example #60
def write_named_entities_config():

	# Solr host
	solr_url = 'http://localhost:8983/solr/'
	if os.getenv('OPEN_SEMANTIC_ETL_SOLR'):
		solr_url = os.getenv('OPEN_SEMANTIC_ETL_SOLR')

	wordlist_configfilename = "/etc/opensemanticsearch/ocr/dictionary.txt"
	
	tmp_wordlist_configfilename = tempfile.gettempdir() + os.path.sep +  next(tempfile._get_candidate_names()) + '_ocr_dictionary.txt'

	facets = []

	# create named entities configs for all ontologies
	for ontology in Ontologies.objects.all():
		
		print ("Importing Ontology or List {} (ID: {})".format( ontology, ontology.id ) )
	
		# Download, if URI
		is_tempfile, filename = get_ontology_file(ontology)
		
		facet = get_facetname(ontology)
	
		# analyse content type & encoding
		contenttype, encoding = get_contenttype_and_encoding(filename)
		print ( "Detected content type: {}".format(contenttype) )
		print ( "Detected encoding: {}".format(encoding) )


		#
		# export entries to entities index
		#
		
		if contenttype=='application/rdf+xml':

			#
			# write labels, words and synonyms config files
			#

			ontology_tagger = OntologyTagger()

			# load graph from RDF file
			ontology_tagger.parse(filename)

			# add the labels to entities index for normalization and entity linking
			ontology_tagger.solr_entities = solr_url
			ontology_tagger.solr_core_entities = 'opensemanticsearch-entities'
			
			# append synonyms to Solr managed synonyms resource "skos"
			ontology_tagger.solr = solr_url
			ontology_tagger.solr_core = 'opensemanticsearch'
			ontology_tagger.synonyms_resourceid = 'skos'

			# append single words of concept labels to wordlist for OCR word dictionary
			ontology_tagger.wordlist_configfile = tmp_wordlist_configfilename
			
			# additional all labels fields for language dependent / additional analyzers/stemmers
			if ontology.stemming:
				for stemmer in ontology.stemming.split(','):
					ontology_tagger.additional_all_labels_fields.append('all_labels_stemming_' + stemmer + '_ss')

			if ontology.stemming_force:
				for stemmer in ontology.stemming_force.split(','):
					ontology_tagger.additional_all_labels_fields.append('all_labels_stemming_force_' + stemmer + '_ss')

			if ontology.stemming_hunspell:
				for stemmer in ontology.stemming_hunspell.split(','):
					ontology_tagger.additional_all_labels_fields.append('all_labels_stemming_hunspell_' + stemmer + '_ss')

			if ontology.stemming_force_hunspell:
				for stemmer in ontology.stemming_force_hunspell.split(','):
					ontology_tagger.additional_all_labels_fields.append('all_labels_stemming_force_hunspell_' + stemmer + '_ss')

			# setup synonyms config and entities index
			ontology_tagger.apply(target_facet=facet)

		elif contenttype.startswith('text/plain'):
			dictionary2wordlist(sourcefilename=filename, encoding=encoding, wordlist_configfilename=tmp_wordlist_configfilename)
			importer = Entity_Importer_List()
			importer.import_entities(filename=filename, types=[facet], encoding=encoding)

		else:
			print ( "Unknown format {}".format(contenttype) )
		
		# remember each new facet for which a list has been created, so we can later write all these facets to the schema.xml config part
		if not facet in facets:
			facets.append(facet)
		
		# Delete if downloaded ontology by URL to tempfile
		if is_tempfile:
			os.remove(filename)

	# Write thesaurus entries to facet entities list(s) / dictionaries, entities index and synonyms
	thesaurus_facets = thesaurus.views.export_entities(wordlist_configfilename=tmp_wordlist_configfilename)

	# add facets used in thesaurus but not yet in an ontology to facet config
	for thesaurus_facet in thesaurus_facets:
		if not thesaurus_facet in facets:
			facets.append(thesaurus_facet)

	# Move temp OCR words config file to destination
	if os.path.isfile(tmp_wordlist_configfilename):
		shutil.move(tmp_wordlist_configfilename, wordlist_configfilename)
	
	# Create config for UI
	write_facet_config()
	
	# Create config for ETL / entity extraction
	setup.views.generate_etl_configfile()
	
	# Reload/restart Solr core with new synonyms config
	# Todo: Use the Solr URI from config
	urlopen(solr_url + 'admin/cores?action=RELOAD&core=opensemanticsearch')