def pdf_workup(request, pk):

    wu = get_object_or_404(models.Workup, pk=pk)
    active_provider_type = get_object_or_404(ProviderType,
                                             pk=request.session['clintype_pk'])

    if active_provider_type.staff_view:
        data = {'workup': wu}

        template = get_template('workup/workup_body.html')
        html = template.render(data)

        file = TemporaryFile(mode="w+b")
        pisa.CreatePDF(html.encode('utf-8'), dest=file,
                encoding='utf-8')

        file.seek(0)
        pdf = file.read()
        file.close()

        initials = ''.join(name[0].upper() for name in wu.patient.name(reverse=False, middle_short=False).split())
        formatdate = '.'.join([str(wu.clinic_day.clinic_date.month).zfill(2), str(wu.clinic_day.clinic_date.day).zfill(2), str(wu.clinic_day.clinic_date.year)])
        filename = ''.join([initials, ' (', formatdate, ')'])

        response = HttpResponse(pdf, 'application/pdf')
        response["Content-Disposition"] = "attachment; filename=%s.pdf" % (filename,)
        return response

    else:
        return HttpResponseRedirect(reverse('workup',
                                            args=(wu.id,)))
Example #2
File: tar.py Project: abg/holland
    def backup(self):
        if self.dry_run:
            return
        if not os.path.exists(self.config['tar']['directory']) \
         or not os.path.isdir(self.config['tar']['directory']):
            raise BackupError('{0} is not a directory!'.format(self.config['tar']['directory']))
        out_name = "{0}.tar".format(
            self.config['tar']['directory'].lstrip('/').replace('/', '_'))
        outfile = os.path.join(self.target_directory, out_name)
        args = ['tar', 'c', self.config['tar']['directory']]
        errlog = TemporaryFile()
        stream = self._open_stream(outfile, 'w')
        LOG.info("Executing: %s", list2cmdline(args))
        pid = Popen(
            args,
            stdout=stream.fileno(),
            stderr=errlog.fileno(),
            close_fds=True)
        status = pid.wait()
        try:
            errlog.flush()
            errlog.seek(0)
            for line in errlog:
                LOG.error("%s[%d]: %s", list2cmdline(args), pid.pid, line.rstrip())
        finally:
            errlog.close()

        if status != 0:
            raise BackupError('tar failed (status={0})'.format(status))
Example #3
def megablast_seqs(seqs, params):
    seq_temp = NamedTemporaryFile(bufsize=0)
    seq_temp.write(LoadSeqs(data=seqs, moltype=DNA, aligned=False).toFasta())
    seq_temp.flush()
    out_temp = TemporaryFile()

    print seq_temp.name

    params['-i'] = seq_temp.name
    params['-m'] = 9
    print params
    param_list = reduce((lambda x, y: x + y),
        [[str(k), str(v)] for (k, v) in params.items()])

    print param_list
    proc_handle = subprocess.Popen(["megablast"] + param_list, stdout=out_temp)
    proc_handle.wait()

    # megablast wrote through the shared file descriptor, so rewind
    # before reading; without this the file appears empty.
    out_temp.seek(0)
    lines = [line for line in out_temp]
    print lines
    blast_result = BlastResult(lines)

    seq_temp.close()
    out_temp.close()

    return blast_result
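Several examples on this page rely on the same subtlety: a child process writes through the TemporaryFile's file descriptor, which shares its offset with the parent, so the parent must seek(0) before reading. A minimal sketch of the pattern (Python 3; the echo command is just an illustration):

from subprocess import Popen
from tempfile import TemporaryFile

out = TemporaryFile()
Popen(["echo", "hello"], stdout=out).wait()
out.seek(0)        # rewind the shared offset; without this, read() returns b""
print(out.read())  # b'hello\n'
out.close()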
Example #4
    def backup(self, config, **flags):
        # check if this is a config file
        config_f = open(config, "rb")
        mode = ("w:%s" % flags['compress']) if flags.get('compress') else "w"
        buff = TemporaryFile()
        with config_f:
            cfg = ConfigParser()
            cfg.readfp(config_f)
            project = cfg.get("general", "project")
            databases = cfg.get("general", "databases").split()
            tarname = "%s.tar" % project
            tar = tarfile.open(fileobj=buff, mode=mode, name=tarname)
            to_close = self.__add_database_to_tar(tar, cfg, databases)
            tar.close()
        for f in to_close:
            f.close()
        buff.seek(0)
        name = project + ".tar"
        if flags.get('compress'):
            name = project + ".t%s" % flags['compress']
        if flags.get("upload"):
            buff.flush()
            timestamp = datetime.now().isoformat()
            self.client.backup(project, name, buff, timestamp)
        buff.close()
Example #5
def graph(request, type, show_name):
    if not has_matplotlib:
        return HttpResponse("matplotlib missing")
    graph = None  # TODO: get cached graph
    if not graph:
        graph_lock.acquire()
        tmpfile = TemporaryFile()
        figure = pyplot.figure(1, figsize=(4, 3))

        if type == "weekday":
            _weekday_graph(show_name)
        elif type == "hours":
            _hours_graph(show_name)
        elif type == "weekday_hours":
            _weekday_hours_graph(show_name)
        elif type == "time_per_episode":
            _time_per_episode_graph(show_name, figure)

        pyplot.savefig(tmpfile, format="png")
        pyplot.close(figure)
        pyplot.clf()
        tmpfile.seek(0)
        graph = tmpfile.read()
        tmpfile.close()
        graph_lock.release()
        return HttpResponse(graph, content_type="image/png")
Example #6
    def import_lang(self, cr, uid, ids, context=None):
        if context is None:
            context = {}
        this = self.browse(cr, uid, ids[0])
        if this.overwrite:
            context = dict(context, overwrite=True)
        fileobj = TemporaryFile('w+')
        try:
            fileobj.write(base64.decodestring(this.data))

            # now we determine the file format
            fileobj.seek(0)
            first_line = fileobj.readline().strip().replace('"', '').replace(
                ' ', '')
            fileformat = first_line.endswith(
                "type,name,res_id,src,value") and 'csv' or 'po'
            fileobj.seek(0)

            tools.trans_load_data(cr,
                                  fileobj,
                                  fileformat,
                                  this.code,
                                  lang_name=this.name,
                                  context=context)
        finally:
            fileobj.close()
        return True
Example #7
    def import_lang(self, cr, uid, ids, context):
        """
            Import Language
            @param cr: the current row, from the database cursor.
            @param uid: the current user’s ID for security checks.
            @param ids: the ID or list of IDs
            @param context: A standard dictionary
        """

        import_data = self.browse(cr, uid, ids)[0]
        fileobj = TemporaryFile('w+')
        fileobj.write(base64.decodestring(import_data.data))

        # now we determine the file format
        fileobj.seek(0)
        first_line = fileobj.readline().strip().replace('"',
                                                        '').replace(' ', '')
        fileformat = first_line.endswith(
            "type,name,res_id,src,value") and 'csv' or 'po'
        fileobj.seek(0)

        tools.trans_load_data(cr,
                              fileobj,
                              fileformat,
                              import_data.code,
                              lang_name=import_data.name)
        tools.trans_update_res_ids(cr)
        fileobj.close()
        return {}
Example #8
def htar(*args):
    """Run :command:`htar` with arguments.

    Parameters
    ----------
    args : :func:`tuple`
        Arguments to be passed to :command:`htar`.

    Returns
    -------
    :func:`tuple`
        The standard output and standard error from :command:`htar`.

    Raises
    ------
    KeyError
        If the :envvar:`HPSS_DIR` environment variable has not been set.
    """
    outfile = TemporaryFile()
    errfile = TemporaryFile()
    path = get_hpss_dir()
    command = [os.path.join(path, 'htar')] + list(args)
    status = call(command, stdout=outfile, stderr=errfile)
    outfile.seek(0)
    out = outfile.read()
    errfile.seek(0)
    err = errfile.read()
    outfile.close()
    errfile.close()
    return (out.decode('utf8'), err.decode('utf8'))
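A hypothetical call, for illustration only (the archive name and member path are made up; the flags follow the usual tar-style conventions):

out, err = htar('-cvf', 'backup.tar', 'data_dir')
if err:
    print('htar reported:', err)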
Example #9
class SeedOutput(object):
    def __init__(self, fd, inv, label, resp_dict):
        self.__fd = fd
        self.__inv = inv
        self.__label = label
        self.__resp_dict = resp_dict
        self.__mseed_fd = TemporaryFile()

    def write(self, data):
        self.__mseed_fd.write(data)

    def close(self):
        try:
            try:
                seed_volume = SEEDVolume(self.__inv, ORGANIZATION, self.__label,
                    self.__resp_dict)

                self.__mseed_fd.seek(0)
                for rec in MSeedInput(self.__mseed_fd):
                    seed_volume.add_data(rec)

                seed_volume.output(self.__fd)

            except (MSeedError, SEEDError, DBError), e:
                logs.error("error creating SEED volume: " + str(e))

        finally:
            self.__mseed_fd.close()
            self.__fd.close()
Example #10
    def dataentry(self):
        self.toaster.msgblockbegin("writing to temporary file")
        f_tmp = TemporaryFile()
        try:
            total_padding = self.data.write(f_tmp)
            # comparing the files will usually be different because blocks may
            # have been written back in a different order, so cheaply just compare
            # file sizes
            self.toaster.msg("comparing file sizes")
            self.stream.seek(0, 2)
            f_tmp.seek(0, 2)
            if self.stream.tell() != f_tmp.tell():
                self.toaster.msg("original size: %i" % self.stream.tell())
                self.toaster.msg("written size:  %i" % f_tmp.tell())
                self.toaster.msg("padding:       %i" % total_padding)
                if self.stream.tell() > f_tmp.tell() or self.stream.tell() + total_padding < f_tmp.tell():
                    f_tmp.seek(0)
                    f_debug = open("debug.cgf", "wb")
                    f_debug.write(f_tmp.read(-1))
                    f_debug.close()
                    raise Exception('write check failed: file sizes differ by more than padding')
        finally:
            f_tmp.close()
        self.toaster.msgblockend()

        # spell is finished: prevent recursing into the tree
        return False
Example #11
def test_load_sift():
    f = TemporaryFile()
    f.write('''2 128
133.92 135.88 14.38 -2.732
3 12 23 38 10 15 78 20 39 67 42 8 12 8 39 35 118 43 17 0
0 1 12 109 9 2 6 0 0 21 46 22 14 18 51 19 5 9 41 52
65 30 3 21 55 49 26 30 118 118 25 12 8 3 2 60 53 56 72 20
7 10 16 7 88 23 13 15 12 11 11 71 45 7 4 49 82 38 38 91
118 15 2 16 33 3 5 118 98 38 6 19 36 1 0 15 64 22 1 2
6 11 18 61 31 3 0 6 15 23 118 118 13 0 0 35 38 18 40 96
24 1 0 13 17 3 24 98
132.36 99.75 11.45 -2.910
94 32 7 2 13 7 5 23 121 94 13 5 0 0 4 59 13 30 71 32
0 6 32 11 25 32 13 0 0 16 51 5 44 50 0 3 33 55 11 9
121 121 12 9 6 3 0 18 55 60 48 44 44 9 0 2 106 117 13 2
1 0 1 1 37 1 1 25 80 35 15 41 121 3 0 2 14 3 2 121
51 11 0 20 93 6 0 20 109 57 3 4 5 0 0 28 21 2 0 5
13 12 75 119 35 0 0 13 28 14 37 121 12 0 0 21 46 5 11 93
29 0 0 3 14 4 11 99''')
    f.seek(0)
    features = load_sift(f)
    f.close()

    assert_equal(len(features), 2)
    assert_equal(len(features['data'][0]), 128)
    assert_equal(features['row'][0], 133.92)
    assert_equal(features['column'][1], 99.75)
Example #12
def _add_hostname_to_hosts():
    """Adds the hostname to /etc/hosts, if it is not being assigned by DNS -- required for rabbitmq
    """
    with open('/etc/hostname', 'r') as hostname_file:
        hostname = hostname_file.readline().rstrip()
    private_ip = _get_self_private_ip()
    host_found = _check_name_resolves(hostname)
    if not host_found:
        stdout = TemporaryFile()
        stderr = TemporaryFile()
        err = subprocess.call( ('/usr/bin/sudo','/usr/bin/perl', '-i.orig',  '-n','-e', r"""BEGIN {($h,$ip)=@ARGV;
        $ip='127.0.1.1' unless $ip
        }
        next if/\Q$h\E/;
        s/^(127\.0\.0\.1\s+localhost)$/$1\n$ip $h/;
        print""", hostname, private_ip,  '/etc/hosts'),
            stdout=stdout, stderr=stderr)
        if err:
            stdout.seek(0)
            out='\n'.join(stdout.readlines())
            stderr.seek(0)
            err_text='\n'.join(stderr.readlines())
            raise OSError('Error updating /etc/hosts. Result code: {0}\n{1}\n{2}'.format( err,out,err_text ))
        stdout.close()
        stderr.close()
Example #13
    def _process_data_1(self, cr, uid, ids, data, context):
        #try:
            fileobj = TemporaryFile('w+')
            fileobj.write(data)
            fileobj.seek(0)
            lines = []
            for line in fileobj.readlines():
                #log.info('++++++++++++++++\r\nline=%s' % line)
                lines = line.split(',')
                #if len(lines) == 0: break
                if self._isnumeric(lines[0]) == True:
                    id = int(lines[0])
                    date_from = datetime.strptime(lines[1], '%m/%d/%Y %H:%M:%S').strftime('%Y-%m-%d  %H:%M:%S')
                    date_to = datetime.strptime(lines[2].replace("\n", ""), '%m/%d/%Y %H:%M:%S').strftime('%Y-%m-%d  %H:%M:%S')
                    #log.info('id=%s,df=%s,dt=%s' % (id, date_from, date_to))
                    #check existing
                    day = datetime.strptime(date_from, '%Y-%m-%d  %H:%M:%S').strftime('%Y-%m-%d')

                    attds = self.pool.get('hr.attendance')
                    attd_ids = attds.search(cr, uid, [('employee_id', '=', id), ('day', '=', day)], context=context)

                    #log.info(attd_ids)
                    log.info('employee_id=%d,attd_ids=%s,len=%d,day=%s' % (id, attd_ids, len(attd_ids), day))
                    if len(attd_ids) == 0:
                        attds.create(cr, uid, {'employee_id': id, 'name': date_from, 'action': 'sign_in', 'source': 'import'})
                        attds.create(cr, uid, {'employee_id': id, 'name': date_to, 'action': 'sign_out', 'source': 'import'})
                #log.info('+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++')
            fileobj.close()
Example #14
File: build.py Project: acli/xiphos
class BuildTests(unittest.TestCase):
    def setUp(self):
        self.fnull = TemporaryFile()

    def tearDown(self):
        self._run(["./waf","distclean"])
        self.fnull.close()

    def _run(self, args):
        sub = subprocess.Popen(args, stderr=self.fnull, stdout=self.fnull)
        sub.communicate()
        self.fnull.seek(0)
        tail = ''.join(self.fnull.readlines()[-10:])
        return sub, tail

    def _configure(self, gtk, backend, delint):
        arglist = ["./waf","configure","--gtk", gtk, "--backend", backend]
        if delint:
            arglist.append("--enable-delint")
        sub, tail = self._run(arglist)
        self.assertEqual(0, sub.returncode, msg=tail)

    def _build(self):
        sub, tail = self._run(["./waf","build"])
        self.assertEqual(0, sub.returncode, msg=tail)

    def _configure_and_build(self, gtk, backend, delint):
        self._configure(gtk, backend, delint)
        self._build()
Example #15
class MSeed4KOutput(object):
    def __init__(self, fd):
        self.__fd = fd
        self.__mseed_fd = TemporaryFile()

    def write(self, data):
        self.__mseed_fd.write(data)

    def close(self):
        try:
            try:
                wfd = _WaveformData()

                self.__mseed_fd.seek(0)
                for rec in MSeedInput(self.__mseed_fd):
                    wfd.add_data(rec)

                wfd.output_data(self.__fd, 0)

            except (MSeedError, SEEDError, DBError), e:
                logs.error("error reblocking Mini-SEED data: " + str(e))

        finally:
            self.__mseed_fd.close()
            self.__fd.close()
Example #16
def prepare_restore(args, backup_dir):
    url = args.factory_image
    if not url:
        # factory image is not set and standard NAND restore is used
        if not backup_dir:
            print('Backup cannot be found!')
            print(
                'Please use factory image or provide correct path to directory or tarball with previous backup.'
            )
            raise PlatformStop
        yield
        return

    if os.path.isfile(url):
        stream = open(url, 'rb')
    else:
        # download remote image to temporary file
        stream = TemporaryFile()
        remote = urlopen(Request(url, headers={'User-Agent': 'Mozilla/5.0'}))
        print('Downloading factory image...')
        shutil.copyfileobj(remote, stream)
        stream.seek(0)

    image_md5 = md5fo(stream)
    if image_md5 not in SUPPORTED_IMAGES:
        stream.close()
        print(
            'Unsupported factory image with MD5 digest: {}'.format(image_md5))
        raise PlatformStop

    args.factory_stream = stream
    args.factory_stream.seek(0)
    yield
    args.factory_stream.close()
Example #17
def local_ds_files(ds):
    """
    Gets the list of files corresponding to a published dataset
    stored on cms_dbs_ph_analysis_02.

    Args:
        ds: the path to the published dataset, ending in /USER

    Returns:
        A list of the LFN-s of the dataset.
    """
    tf = TemporaryFile()
    stdout = sys.stdout
    stdout.flush()
    sys.stdout = tf
    print "Query"
    ret = call_das_cli('--query=file dataset=%s instance=cms_dbs_ph_analysis_02' % ds, '--limit=0')
    print ret
    tf.flush()
    tf.seek(0)
    sys.stdout = stdout
    fl = []
    for li in tf.readlines():
        if "/store/" in li:
            fl.append(li.strip())
    tf.close()
    return fl
Example #18
    def run_import(self):
        self.ensure_one()
        fileobj = TemporaryFile('w+')
        filestr = self.file_to_import.decode('base64')
        fileobj.write(filestr)
        fileobj.seek(0)  # We must start reading from the beginning!
        pivot = self.file2pivot(fileobj, filestr)
        fileobj.close()
        logger.debug('pivot before update: %s', pivot)
        self.update_pivot(pivot)
        moves = self.create_moves_from_pivot(pivot, post=self.post_move)
        action = {
            'name': _('Imported Journal Entries'),
            'res_model': 'account.move',
            'type': 'ir.actions.act_window',
            'nodestroy': False,
            'target': 'current',
        }

        if len(moves) == 1:
            action.update({
                'view_mode': 'form,tree',
                'res_id': moves[0].id,
            })
        else:
            action.update({
                'view_mode': 'tree,form',
                'domain': [('id', 'in', moves.ids)],
            })
        return action
Example #19
    def _ftpGet(self):
        """ Applies FTP commands to get the file. """
        _, path = self._url.split("://")
        _split = path.split("/")

        host = _split[0]
        path = "/".join(_split[1:-1])
        file = _split[-1]

        try:
            ftp = FTP(host, timeout=60)
            ftp.login()
            ftp.cwd(path)

            tmpfile = TemporaryFile()
            ftp.retrbinary("RETR " + file, tmpfile.write)

            tmpfile.seek(0)
            data = tmpfile.read()
            tmpfile.close()

            return data

        except TimeoutError:
            print("Timeout while fetching {}".format(self._url))
            return None
Example #20
    def test_read_subprocess_output(self):
        output_file = TemporaryFile('w+')
        cmd = ['ls', '/']
        output, return_code = _read_subprocess_output(cmd, output_file)
        output_file.close()
        self.assertTrue('tmp' in output, '{} not in {}'.format('tmp', output))
        self.assertEqual(0, return_code)
Example #21
    def test_save_load_meta_parameter(self):
        """Test saving and loading a device with custom parameters."""
        # Create the device and the array.
        rpu_config = SingleRPUConfig(
            forward=IOParameters(inp_noise=0.321),
            backward=IOParameters(inp_noise=0.456),
            update=UpdateParameters(desired_bl=78),
            device=ConstantStepDevice(w_max=0.987)
        )

        model = self.get_layer(rpu_config=rpu_config)

        # Save the model to a file.
        file = TemporaryFile()
        save(model, file)

        # Load the model.
        file.seek(0)
        new_model = load(file)
        file.close()

        # Assert over the new model tile parameters.
        parameters = new_model.analog_tile.tile.get_parameters()
        self.assertAlmostEqual(parameters.forward_io.inp_noise, 0.321)
        self.assertAlmostEqual(parameters.backward_io.inp_noise, 0.456)
        self.assertAlmostEqual(parameters.update.desired_bl, 78)
Example #22
def create_tarball(tar_paths):
    """
    Context Manger that creates the tarball of the Docker Context to use for building the image

    Parameters
    ----------
    tar_paths dict(str, str)
        Key representing a full path to the file or directory and the Value representing the path within the tarball

    Yields
    ------
        The tarball file
    """
    tarballfile = TemporaryFile()

    with tarfile.open(fileobj=tarballfile, mode='w') as archive:
        for path_on_system, path_in_tarball in tar_paths.items():
            archive.add(path_on_system, arcname=path_in_tarball)

    # Flush and seek to the beginning of the file
    tarballfile.flush()
    tarballfile.seek(0)

    try:
        yield tarballfile
    finally:
        tarballfile.close()
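A usage sketch, assuming create_tarball is wrapped with contextlib.contextmanager (the decorator is not shown in this excerpt) and that the source path exists:

with create_tarball({'/src/myapp': 'myapp'}) as tarball:
    context_bytes = tarball.read()  # the Docker build context as bytes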
Example #23
    def process_warcs(self, id_, iterator):
        s3pattern = re.compile('^s3://([^/]+)/(.+)')
        base_dir = os.path.abspath(os.path.dirname(__file__))

        # S3 client (not thread-safe, initialize outside parallelized loop)
        no_sign_request = botocore.client.Config(
            signature_version=botocore.UNSIGNED)
        s3client = boto3.client('s3', config=no_sign_request)

        for uri in iterator:
            self.warc_input_processed.add(1)
            if uri.startswith('s3://'):
                self.get_logger().info('Reading from S3 {}'.format(uri))
                s3match = s3pattern.match(uri)
                if s3match is None:
                    self.get_logger().error("Invalid S3 URI: " + uri)
                    continue
                bucketname = s3match.group(1)
                path = s3match.group(2)
                warctemp = TemporaryFile(mode='w+b',
                                         dir=self.args.local_temp_dir)
                try:
                    s3client.download_fileobj(bucketname, path, warctemp)
                except botocore.client.ClientError as exception:
                    self.get_logger().error('Failed to download {}: {}'.format(
                        uri, exception))
                    self.warc_input_failed.add(1)
                    warctemp.close()
                    continue
                warctemp.seek(0)
                stream = warctemp
            elif uri.startswith('hdfs://'):
                self.get_logger().error("HDFS input not implemented: " + uri)
                continue
            else:
                self.get_logger().info('Reading local stream {}'.format(uri))
                if uri.startswith('file:'):
                    uri = uri[5:]
                uri = os.path.join(base_dir, uri)
                try:
                    stream = open(uri, 'rb')
                except IOError as exception:
                    self.get_logger().error('Failed to open {}: {}'.format(
                        uri, exception))
                    self.warc_input_failed.add(1)
                    continue

            no_parse = (not self.warc_parse_http_header)
            try:
                for record in ArchiveIterator(stream,
                                              no_record_parse=no_parse):
                    for res in self.process_record(record):
                        yield res
                    self.records_processed.add(1)
            except ArchiveLoadFailed as exception:
                self.warc_input_failed.add(1)
                self.get_logger().error('Invalid WARC: {} - {}'.format(
                    uri, exception))
            finally:
                stream.close()
Example #24
    def test_save_load_model(self):
        """Test saving and loading a model directly."""
        model = self.get_layer()

        # Keep track of the current weights and biases for comparing.
        (model_weights, model_biases,
         tile_weights, tile_biases) = self.get_layer_and_tile_weights(model)
        assert_array_almost_equal(model_weights, tile_weights)
        if self.bias:
            assert_array_almost_equal(model_biases, tile_biases)

        # Save the model to a file.
        file = TemporaryFile()
        save(model, file)

        # Load the model.
        file.seek(0)
        new_model = load(file)
        file.close()

        # Compare the new model weights and biases.
        (new_model_weights, new_model_biases,
         new_tile_weights, new_tile_biases) = self.get_layer_and_tile_weights(new_model)

        assert_array_almost_equal(model_weights, new_model_weights)
        assert_array_almost_equal(tile_weights, new_tile_weights)
        if self.bias:
            assert_array_almost_equal(model_biases, new_model_biases)
            assert_array_almost_equal(tile_biases, new_tile_biases)
Example #25
def close_chunk(chunk: TemporaryFile, batch: models.ImportBatch, key):
    """
    Write a chunk to the database.

    To close a chunk properly it must have the envelope closing tag added before
    being read into the db.
    """
    chunk.write(
        render_to_string(
            template_name="common/taric/end_envelope.xml").encode(), )
    chunk.seek(0)
    if isinstance(key, tuple):
        record_code, chapter_heading = key
    else:
        record_code = key
        chapter_heading = None

    models.ImporterXMLChunk.objects.create(
        batch=batch,
        record_code=record_code,
        chapter=chapter_heading,
        chunk_number=batch.chunks.filter(
            record_code=record_code,
            chapter=chapter_heading,
        ).count(),
        chunk_text=chunk.read().decode(),
    )
    chunk.close()

    logger.info(
        "closed chunk with code %s and chapter %s",
        record_code,
        chapter_heading,
    )
Example #26
    def check_commit_id(path):
        cur_dir = os.getcwd()
        commit = None
        stderr = TemporaryFile()
        try:
            os.chdir(path)
            git_cmd = 'git log -1 --format="%H" | cut -c1-32'
            git_out = Popen(git_cmd,
                            shell=True,
                            stdin=PIPE,
                            stdout=PIPE,
                            stderr=stderr,
                            close_fds=True)
            git_out.wait()
            # the child shares the file offset, so rewind before reading
            stderr.seek(0)
            errmsg = stderr.readlines()
            if errmsg:
                logging.debug("git error message (in %s): %s" % (path, '\n'.join(errmsg)))

            try:
                commit = git_out.stdout.readlines()[0].strip()
            except IndexError:
                pass
        finally:
            os.chdir(cur_dir)
            stderr.close()
        return commit
Example #27
    def generate_pdf_ticket(registration=None, context=None, encoding='utf-8'):
        import ho.pisa as pisa
        import cStringIO as StringIO
        from django.utils.six import BytesIO

        if not registration and not context:
            raise Http404(_("Invalid arguments"))

        if not context:
            d = ConfirmationEmailView.get_extra_context(registration)
            context = Context(d)
        template = loader.get_template('registration/ticket.html')
        html = template.render(context)

        if not registration:
            registration = context['r']

        result = StringIO.StringIO()
        pdf = pisa.pisaDocument(StringIO.StringIO(html.encode("ISO-8859-1")), result)
        result = result.getvalue()

        try:
            file = TemporaryFile()
            file.write(result)
            registration.ticket_file = File(file)
            registration.save()
            file.close()
        except Exception, e:
            charge = registration.charge
            if charge:
                charge.save_server_message(
                    ['Failed while saving ticket file'], exception=e)
Example #28
def collate(bundles):
    # TODO: do this properly instead of __file__
    stripePath = os.path.join(os.path.dirname(__file__), 'stripe.pdf')
    wpdf = PdfFileReader(open(stripePath, "rb"))
    endmark = wpdf.getPage(0)

    output = PdfFileMerger()
    for bundle in bundles:
        subpdf = PdfFileWriter()
        numLeft = len(bundle["files"])
        for fileToInsert in bundle["files"]:
            # fileToInsert is passed as a file-like object from Flask
            numLeft -= 1
            ipdf = PdfFileReader(fileToInsert)
            length = ipdf.getNumPages()
            for i in range(length):
                page = ipdf.getPage(i)
                if numLeft == 0 and i == length - 1:
                    page.mergePage(endmark)
                subpdf.addPage(page)
        # had trouble merging PdfFileMerger-s, so save this and reopen
        subpdfFile = TemporaryFile()
        subpdf.write(subpdfFile)
        subpdfFile.seek(0)
        subpdf = PdfFileReader(subpdfFile)
        length = subpdf.getNumPages()
        for i in range(bundle["copies"]):
            output.append(subpdf)
        subpdfFile.close()
    outfile = TemporaryFile()
    output.write(outfile)
    outfile.seek(0)
    return outfile
Example #29
def image_register(request):
    params = request.GET if request.method == "GET" else request.POST

    if not ("url" in params):
        content = {"message": u"パラメータ`url`が指定されていません"}
        return Response(content, status=status.HTTP_400_BAD_REQUEST)

    try:
        image = Images.objects.get(url=params["url"])
    except ObjectDoesNotExist:
        image = Images(url=params["url"], adult_flag=False, grotesque_flag=False)

    descriptor = factory.descriptor(filepath=image.local_path)
    if descriptor is None:
        content = {"message": u"ローカルに画像が見つかりません"}  # "image not found locally"
        return Response(content, status=status.HTTP_412_PRECONDITION_FAILED)
    else:
        tmp = TemporaryFile()
        try:
            np.save(tmp, descriptor)
            tmp.seek(0)

            image.description = tmp.read()
            image.save()
        finally:
            tmp.close()

        return Response(ImageMapper(image).as_dict())
Example #30
    def _import(self):
        model_item_name = self._get_obj_item_name()
        if not model_item_name:
            return False
        obj_item = self.env[model_item_name]
        fileobj = TemporaryFile("w+")
        fileobj.write(base64.decodestring(self.data))
        fileobj.seek(0)

        reader = csv.reader(fileobj, delimiter=",")
        reader.next()  # noqa: B305

        for row in reader:
            name = row[0]
            item_name = row[3]
            value = row[4]

            criteria = [
                ("measurement_id.name", "=", name),
                ("item_type_id.name", "=", item_name),
            ]
            item_id = obj_item.search(criteria)
            if item_id:
                state = item_id.measurement_id.state
                if state == "open":
                    item_type_id = item_id.item_type_id
                    if item_type_id.question_type == "quantitative":
                        item_id.write({"quantitative_value": value})
            else:
                fileobj.close()
                return False

        fileobj.close()

        return True
Example #31
def run_cmd(options, jenkins):
    """Run the jshint command using options.

    Run the jshint command using options and return the output.

    :param options: Options received by the code_analysis_jshint function.
    :param jenkins: It is true when the jenkins output is turned on.

    """
    # cmd is a sequence of program arguments
    # first argument is child program
    paths = options['directory'].split('\n')
    cmd = [
        options['jshint-bin'],
        '--verbose',
        '--exclude={0}'.format(options['jshint-exclude'] or ' ')] + paths
    try:
        if jenkins:
            cmd.append('--reporter=jslint')
            output_file_name = os.path.join(options['location'], 'jshint.xml')
            output_file = open(output_file_name, 'w+')
        else:
            output_file = TemporaryFile('w+')

        # Wrapper to subprocess.Popen
        try:
            # Return code is not used for jshint.
            output = read_subprocess_output(cmd, output_file)[0]
            return output
        except OSError:
            log('skip')
            message = 'Command: {0}. Outputfile: {1}'.format(cmd, output_file)
            raise CmdError(message)
    finally:
        output_file.close()
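A hypothetical invocation, assuming an options mapping shaped like the one code_analysis_jshint would supply (all paths here are made up):

options = {
    'directory': 'static/js',
    'jshint-bin': 'jshint',
    'jshint-exclude': 'static/js/vendor',
    'location': '/tmp/build',
}
print(run_cmd(options, jenkins=False))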
Example #32
    def import_lang(self, cr, uid, ids, context=None):
        """
            Import Language
            @param cr: the current row, from the database cursor.
            @param uid: the current user’s ID for security checks.
            @param ids: the ID or list of IDs
            @param context: A standard dictionary
        """
        if context is None:
            context = {}
        import_data = self.browse(cr, uid, ids)[0]
        if import_data.overwrite:
            context.update(overwrite=True)
        fileobj = TemporaryFile('w+')
        fileobj.write(base64.decodestring(import_data.data))

        # now we determine the file format
        fileobj.seek(0)
        first_line = fileobj.readline().strip().replace('"', '').replace(' ', '')
        fileformat = first_line.endswith("type,name,res_id,src,value") and 'csv' or 'po'
        fileobj.seek(0)

        tools.trans_load_data(cr, fileobj, fileformat, import_data.code, lang_name=import_data.name, context=context)
        fileobj.close()
        return {}
Example #33
def latest_dataset():
    """Retrive the latest CLDR dataset and provide a ZipFile interface, handling cleanup automatically.
	
	This streams the dataset into a temporary file before wrapping it in the ZipFile interface.
	"""
    spool = TemporaryFile(prefix='cldr', suffix='.zip')
    version, latest = get_latest_version_url()

    with Session() as http:
        response = http.get(latest, stream=True)

        for chunk in response.iter_content(chunk_size=4096):
            if chunk: spool.write(chunk)

    # Write out any uncommitted data, then return to the beginning.
    spool.flush()
    spool.seek(0)

    zipfile = ZipFile(spool, 'r')
    zipfile.version = version  # Expose the version number through to consumers of the ZipFile.

    yield zipfile

    zipfile.close()
    spool.close()
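A usage sketch, assuming latest_dataset is decorated with contextlib.contextmanager (not shown in the excerpt):

with latest_dataset() as cldr:
    print(cldr.version)         # exposed alongside the ZipFile interface
    print(cldr.namelist()[:5])  # first few members of the CLDR archive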
Example #34
    def append(self, seg, crossfade=100):
        seg1, seg2 = AudioSegment._sync(self, seg)

        if not crossfade:
            return seg1._spawn(seg1._data + seg2._data)
        elif crossfade > len(self):
            raise ValueError(
                "Crossfade is longer than the original AudioSegment ({}ms > {}ms)"
                .format(crossfade, len(self)))
        elif crossfade > len(seg):
            raise ValueError(
                "Crossfade is longer than the appended AudioSegment ({}ms > {}ms)"
                .format(crossfade, len(seg)))

        xf = seg1[-crossfade:].fade(to_gain=-120, start=0, end=float('inf'))
        xf *= seg2[:crossfade].fade(from_gain=-120, start=0, end=float('inf'))

        output = TemporaryFile()

        output.write(seg1[:-crossfade]._data)
        output.write(xf._data)
        output.write(seg2[crossfade:]._data)

        output.seek(0)
        obj = seg1._spawn(data=output)
        output.close()
        return obj
Example #35
def test_conversion_highlevel(path):
    """
    Test whether the high-level GenBank interface can properly read
    the locus, annotation and sequence from GenBank file and write
    these properties to a file, without data changing.
    """
    suffix = path[-2:]
    gb_file = gb.GenBankFile.read(path)
    ref_locus = gb.get_locus(gb_file)
    ref_annot_seq = gb.get_annotated_sequence(gb_file, format=suffix)

    gb_file = gb.GenBankFile()
    gb.set_locus(gb_file, *ref_locus)
    gb.set_annotated_sequence(gb_file, ref_annot_seq)
    temp = TemporaryFile("w+")
    gb_file.write(temp)
    
    temp.seek(0)
    gb_file = gb.GenBankFile.read(temp)
    temp.close()
    test_locus = gb.get_locus(gb_file)
    test_annot_seq = gb.get_annotated_sequence(gb_file, format=suffix)
    assert test_locus == ref_locus
    assert test_annot_seq.sequence       == ref_annot_seq.sequence
    assert test_annot_seq.annotation     == ref_annot_seq.annotation
    assert test_annot_seq.sequence_start == ref_annot_seq.sequence_start
Example #37
File: views.py Project: wwick/osler
def pdf_workup(request, pk):

    wu = get_object_or_404(models.Workup, pk=pk)
    active_provider_type = get_object_or_404(ProviderType,
                                             pk=request.session['clintype_pk'])

    if active_provider_type.staff_view:
        data = {'workup': wu}

        template = get_template('workup/workup_body.html')
        html = template.render(data)

        file = TemporaryFile(mode="w+b")
        pisa.CreatePDF(html.encode('utf-8'), dest=file, encoding='utf-8')

        file.seek(0)
        pdf = file.read()
        file.close()

        initials = ''.join(name[0].upper() for name in wu.patient.name(
            reverse=False, middle_short=False).split())
        formatdate = '.'.join([
            str(wu.clinic_day.clinic_date.month).zfill(2),
            str(wu.clinic_day.clinic_date.day).zfill(2),
            str(wu.clinic_day.clinic_date.year)
        ])
        filename = ''.join([initials, ' (', formatdate, ')'])

        response = HttpResponse(pdf, 'application/pdf')
        response["Content-Disposition"] = "attachment; filename=%s.pdf" % (
            filename, )
        return response

    else:
        return HttpResponseRedirect(reverse('workup', args=(wu.id, )))
Example #38
def test_array_conversion(path):
    pdbx_file = pdbx.PDBxFile.read(path)
    ref_structure = pdbx.get_structure(pdbx_file,
                                       model=1,
                                       extra_fields=["charge"])
    ref_structure.bonds = struc.connect_via_residue_names(ref_structure)

    pdbqt_file = pdbqt.PDBQTFile()
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        # Ignore warnings about atoms not parametrized
        mask = pdbqt.set_structure(pdbqt_file, ref_structure)
    ref_structure = ref_structure[mask]
    temp = TemporaryFile("r+")
    pdbqt_file.write(temp)

    temp.seek(0)
    pdbqt_file = pdbqt.PDBQTFile.read(temp)
    test_structure = pdbqt.get_structure(pdbqt_file, model=1)
    temp.close()

    assert np.allclose(test_structure.coord, ref_structure.coord)
    for category in test_structure.get_annotation_categories():
        if category == "element":
            # PDBQT uses special atom types, which replace the usual
            # elements
            # -> there cannot be equality of the 'element' annotation
            continue
        try:
            assert np.array_equal(test_structure.get_annotation(category),
                                  ref_structure.get_annotation(category))
        except AssertionError:
            print(f"Inequality in '{category}' category")
            raise
Example #39
class SeedOutput(object):
    def __init__(self, fd, inv, resp_dict):
        self.__fd = fd
        self.__inv = inv
        self.__resp_dict = resp_dict
        self.__mseed_fd = TemporaryFile()

    def write(self, data):
        self.__mseed_fd.write(data)

    def close(self):
        try:
            try:
                seed_volume = SEEDVolume(self.__inv, ORGANIZATION, LABEL,
                                         self.__resp_dict)

                self.__mseed_fd.seek(0)
                for rec in MSeedInput(self.__mseed_fd):
                    seed_volume.add_data(rec)

                seed_volume.output(self.__fd)

            except (MSeedError, SEEDError, DBError), e:
                logs.error("error creating SEED volume: " + str(e))

        finally:
            self.__mseed_fd.close()
            self.__fd.close()
Example #40
def test_get_xml_iter():
    #1 file object
    #2 stream (file-like)
    #3 string
    #4 zipfile
    from openpyxl2.reader.worksheet import _get_xml_iter
    from tempfile import TemporaryFile

    FUT = _get_xml_iter
    s = b""
    stream = FUT(s)
    assert isinstance(stream, BytesIO), type(stream)

    u = unicode(s)
    stream = FUT(u)
    assert isinstance(stream, BytesIO), type(stream)

    f = TemporaryFile(mode='rb+', prefix='openpyxl.', suffix='.unpack.temp')
    stream = FUT(f)
    assert stream == f
    f.close()

    t = TemporaryFile()
    z = ZipFile(t, mode="w")
    z.writestr("test", "whatever")
    stream = FUT(z.open("test"))
    assert hasattr(stream, "read")

    try:
        z.close()
    except IOError:
        # you can't just close zipfiles in Windows
        z.close()  # python 2.7
Example #41
    def sandbox_helper(sandbox: Sandbox, command, privileged=False):
        stdout, stderr = TemporaryFile("wb+"), TemporaryFile("wb+")
        sandbox.execute(command=command,
                        stdin_fd=None,
                        stdout_fd=stdout,
                        stderr_fd=stderr,
                        privileged=privileged)

        stdout.flush()
        stdout.seek(0)
        stdout_text = stdout.read().decode().strip()
        stdout.close()
        stderr.flush()
        stderr.seek(0)
        stderr_text = stderr.read().decode().strip()
        stderr.close()

        # If running java, javac, or jar, the JVM prints an annoying message:
        # "Picked up JAVA_TOOL_OPTIONS: <actual options set by sandbox environment>"
        # Remove it from stdout and stderr if it is there
        if any(java in command for java in ["java", "javac", "jar"]):
            stdout_text = "\n".join([
                line for line in stdout_text.splitlines()
                if not line.startswith("Picked up JAVA_TOOL_OPTIONS")
            ])
            stderr_text = "\n".join([
                line for line in stderr_text.splitlines()
                if not line.startswith("Picked up JAVA_TOOL_OPTIONS")
            ])
        return stdout_text, stderr_text
Example #42
def shell_command(cmd, directory):
    """ Execute a command in a shell
    cmd : the command as a string
    dir : the directory where the cmd is executed
    Output : status
    """
    from subprocess import Popen, STDOUT, PIPE

    output_stream = TemporaryFile()
    if platform.system() == 'Darwin':
        p = Popen(cmd,
                  shell=True,
                  cwd=directory,
                  stdin=PIPE,
                  stdout=output_stream,
                  stderr=PIPE)
        status = p.communicate()
    else:
        p = Popen(cmd,
                  shell=True,
                  cwd=directory,
                  stdin=PIPE,
                  stdout=output_stream,
                  stderr=STDOUT)
        status = p.wait()

    output_stream.seek(0)
    s = output_stream.read()
    output_stream.close()
    return status, s
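A quick usage sketch (the command and directory are placeholders):

status, out = shell_command('ls -la', '/tmp')
print(status)
print(out)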
Example #44
    def send_form(self, ):
        import csv
        product = self[0]
        #_logger.warning('data %s b64 %s ' % (account.data,base64.decodestring(account.data)))
        if product.data is not None:
            fileobj = TemporaryFile('w+')
            fileobj.write(base64.decodestring(product.data))
            fileobj.seek(0)

            try:
                for row in csv.DictReader(fileobj):
                    pass
            finally:
                fileobj.close()
            return True
        #product.write({'state': 'get', 'name': '%s.xml' % account.model.model.replace('.','_'),'data': base64.b64encode(account._export_xml()) })
        return {
            'type': 'ir.actions.act_window',
            'res_model': 'account.export',
            'view_mode': 'form',
            'view_type': 'form',
            'res_id': product.id,
            'views': [(False, 'form')],
            'target': 'new',
        }
Example #45
    def _parse(self):
        rels = dict()
        with self._zf.open('xl/_rels/workbook.bin.rels', 'r') as zf:
            for el in ET.parse(zf).getroot():
                rels[el.attrib['Id']] = el.attrib['Target']

        with TemporaryFile() as temp:
            with self._zf.open('xl/workbook.bin', 'r') as zf:
                temp.write(zf.read())
                temp.seek(0, os.SEEK_SET)
            reader = BIFF12Reader(fp=temp, debug=self._debug)
            for item in reader:
                if item[0] == biff12.SHEET:
                    self._sheets.append((item[1].name, rels[item[1].rId]))
                elif item[0] == biff12.SHEETS_END:
                    break

        try:
            temp = TemporaryFile()
            with self._zf.open('xl/sharedStrings.bin', 'r') as zf:
                temp.write(zf.read())
                temp.seek(0, os.SEEK_SET)
            self.stringtable = StringTable(fp=temp)
        except KeyError:
            temp.close()
        except Exception:
            temp.close()
            raise
Example #46
    def get_upload_results(self, job_id, batch_id, callback = dump_results, batch_size=0, logger=None):
        job_id = job_id or self.lookup_job_id(batch_id)

        if not self.is_batch_done(job_id, batch_id):
            return False
        http = Http()
        uri = self.endpoint + "/services/async/29.0/job/%s/batch/%s/result" % (job_id, batch_id)
        resp, content = http.request(uri, method="GET", headers=self.headers())

        tf = TemporaryFile()
        tf.write(content)

        total_remaining = self.count_file_lines(tf)
        if logger:
            logger("Total records: %d" % total_remaining)
        tf.seek(0)

        records = []
        line_number = 0
        col_names = []
        reader = csv.reader(tf, delimiter=",", quotechar='"')
        for row in reader:
            line_number += 1
            records.append(UploadResult(*row))
            if len(records) == 1:
                col_names = records[0]
            if batch_size > 0 and len(records) >= (batch_size+1):
                callback(records, total_remaining, line_number)
                total_remaining -= (len(records)-1)
                records = [col_names]
        callback(records, total_remaining, line_number)

        tf.close()

        return True
Example #47
    def backup(self):
        """
        Create backup
        """
        if self.dry_run:
            return
        if not os.path.exists(self.config["tar"]["directory"]) or not os.path.isdir(
            self.config["tar"]["directory"]
        ):
            raise BackupError("{0} is not a directory!".format(self.config["tar"]["directory"]))
        out_name = "{0}.tar".format(self.config["tar"]["directory"].lstrip("/").replace("/", "_"))
        outfile = os.path.join(self.target_directory, out_name)
        args = ["tar", "c", self.config["tar"]["directory"]]
        errlog = TemporaryFile()
        stream = open_stream(outfile, "w", **self.config["compression"])
        LOG.info("Executing: %s", list2cmdline(args))
        pid = Popen(args, stdout=stream.fileno(), stderr=errlog.fileno(), close_fds=True)
        status = pid.wait()
        try:
            errlog.flush()
            errlog.seek(0)
            for line in errlog:
                LOG.error("%s[%d]: %s", list2cmdline(args), pid.pid, line.rstrip())
        finally:
            errlog.close()

        if status != 0:
            raise BackupError("tar failed (status={0})".format(status))
Example #48
def save_res_file(zip_path, result_obj, confirm_overwrite=False):
    """
    save results of a pose error metric (pe_metric) to a zip file
    :param zip_path: path to zip file
    :param result_obj: evo.core.result.Result instance
    :param confirm_overwrite: whether to require user interaction
           to overwrite existing files
    """
    from tempfile import TemporaryFile
    logger.debug("Saving results to " + zip_path + "...")
    if confirm_overwrite and not user.check_and_confirm_overwrite(zip_path):
        return
    with zipfile.ZipFile(zip_path, 'w') as archive:
        archive.writestr("info.json", json.dumps(result_obj.info))
        archive.writestr("stats.json", json.dumps(result_obj.stats))
        for name, array in result_obj.np_arrays.items():
            tmp_file = TemporaryFile()
            np.save(tmp_file, array)
            tmp_file.seek(0)
            archive.writestr("{}.npz".format(name), tmp_file.read())
            tmp_file.close()
        for name, traj in result_obj.trajectories.items():
            tmp_file = TemporaryFile()
            if type(traj) is PosePath3D:
                fmt_suffix = ".kitti"
                write_kitti_poses_file(tmp_file, traj)
            elif type(traj) is PoseTrajectory3D:
                fmt_suffix = ".tum"
                write_tum_trajectory_file(tmp_file, traj)
            else:
                raise FileInterfaceException(
                    "unknown format of trajectory {}".format(name))
            tmp_file.seek(0)
            archive.writestr("{}{}".format(name, fmt_suffix), tmp_file.read())
            tmp_file.close()
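A hypothetical call, assuming result is an evo.core.result.Result instance produced elsewhere (e.g. by an APE/RPE metric):

save_res_file('ape_results.zip', result, confirm_overwrite=True)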
Example #49
def render_handler(event=None, context=None):
    if context is None: context = {'Error': 'no context data'}
    if event is None: event = {'Error': 'no event data'}

    bucket = 'bpc-serverless'
    path = event['path'] if 'path' in event else 'test/no_event_path'
    key = '/'.join(('pages', path, 'index.html'))

    # Construct payload and render page markup
    source = MayaEngine.render(
        payload={
            'event': event,
            'context': context,
            'environ': os.environ,
            'word': 'AWS Lambda'
        })

    # Write page markup to AWS S3
    aws_s3 = boto3.client('s3')
    tmp_file = TemporaryFile()

    try:
        tmp_file.write(source)
        aws_s3.upload_fileobj(tmp_file,
                              bucket,
                              key,
                              ExtraArgs={
                                  'ContentType': "text/html",
                                  'ACL': "public-read"
                              })
    finally:
        tmp_file.close()

    return 'rendered: ' + '/'.join((bucket, key))
Example #50
def test_get_xml_iter():
    #1 file object
    #2 stream (file-like)
    #3 string
    #4 zipfile
    from openpyxl.reader.worksheet import _get_xml_iter
    from tempfile import TemporaryFile
    FUT = _get_xml_iter
    s = ""
    stream = FUT(s)
    assert isinstance(stream, BytesIO), type(stream)

    u = unicode(s)
    stream = FUT(u)
    assert isinstance(stream, BytesIO), type(stream)

    f = TemporaryFile(mode='rb+', prefix='openpyxl.', suffix='.unpack.temp')
    stream = FUT(f)
    assert stream == f, type(stream)
    f.close()

    from zipfile import ZipFile
    t = TemporaryFile()
    z = ZipFile(t, mode="w")
    z.writestr("test", "whatever")
    stream = FUT(z.open("test"))
    assert hasattr(stream, "read")
    z.close()
Example #51
class CandidateUploadFile(BaseHandler):
    def initialize(self):
        self.tempfile = TemporaryFile()

    @tornado.web.authenticated
    @granted()
    @tornado.web.asynchronous
    def post(self):
        fp_url = self.get_argument("url")
        mime_type = self.get_argument("data[type]")
        size = int(self.get_argument("data[size]"))
        candidate_id = self.get_argument("id")
        self.candidate = self.db.query(Candidate).get(int(candidate_id))
        logging.info("type: %s, size: %r", mime_type, size)
        if mime_type == "image/jpeg" and size < MAX_UPLOAD_SIZE:
            http_client = tornado.httpclient.AsyncHTTPClient()
            request = tornado.httpclient.HTTPRequest(url=fp_url, streaming_callback=self.streaming_callback)
            http_client.fetch(request, self.on_download)
        else:
            self.finish(dict(status=0))

    def streaming_callback(self, data):
        self.tempfile.write(data)
        logging.info("This is the streaming_callback file tell function: %r", self.tempfile.tell())

    def on_download(self, response):
        img_path = os.path.join(os.path.dirname(__file__), "static/profiles/img/" + str(self.candidate.backup_id) + '.jpg')
        self.tempfile.seek(0)
        ptr = open(img_path, 'wb')
        ptr.write(self.tempfile.read())
        ptr.close()
        self.tempfile.close()
        self.finish(dict(src="/static/profiles/img/" + str(self.candidate.backup_id) + '.jpg', status=1))
Example #52
def test_conversion(chars_per_line):
    path = os.path.join(data_dir("sequence"), "random.fastq")
    fasta_file = fastq.FastqFile.read(
        path, offset=33, chars_per_line=chars_per_line
    )
    ref_content = dict(fasta_file.items())

    fasta_file = fastq.FastqFile(offset=33, chars_per_line=chars_per_line)
    for identifier, (sequence, scores) in ref_content.items():
        fasta_file[identifier] = sequence, scores
    temp = TemporaryFile("w+")
    fasta_file.write(temp)

    temp.seek(0)
    fasta_file = fastq.FastqFile.read(
        temp, offset=33, chars_per_line=chars_per_line
    )
    content = dict(fasta_file.items())
    temp.close()
    
    for identifier in ref_content:
        ref_sequence, ref_scores = ref_content[identifier]
        test_sequence, test_scores = content[identifier]
        assert test_sequence == ref_sequence
        assert np.array_equal(test_scores, ref_scores)
Example #53
    def dataentry(self):
        self.toaster.msgblockbegin("writing to temporary file")
        f_tmp = TemporaryFile()
        try:
            total_padding = self.data.write(f_tmp)
            # comparing the files will usually be different because blocks may
            # have been written back in a different order, so cheaply just compare
            # file sizes
            self.toaster.msg("comparing file sizes")
            self.stream.seek(0, 2)
            f_tmp.seek(0, 2)
            if self.stream.tell() != f_tmp.tell():
                self.toaster.msg("original size: %i" % self.stream.tell())
                self.toaster.msg("written size:  %i" % f_tmp.tell())
                self.toaster.msg("padding:       %i" % total_padding)
                if self.stream.tell() > f_tmp.tell(
                ) or self.stream.tell() + total_padding < f_tmp.tell():
                    f_tmp.seek(0)
                    f_debug = open("debug.cgf", "wb")
                    f_debug.write(f_tmp.read(-1))
                    f_debug.close()
                    raise Exception(
                        'write check failed: file sizes differ by more than padding'
                    )
        finally:
            f_tmp.close()
        self.toaster.msgblockend()

        # spell is finished: prevent recursing into the tree
        return False
Example #54
def sendKey(id, ipfshash, address):
    temp = TemporaryFile('w+b')
    bytes_key = ipfsclient.cat(str(ipfshash))
    temp.write(bytes_key)
    # "Dear copyright purchaser, here is the copyright with id %s that you
    # purchased; please check it."
    content = '尊敬的版权购买者,这是您购买的id为%s版权,请查收。' % (id)
    textApart = MIMEText(content)

    temp.seek(0)
    zipApart = MIMEApplication(temp.read())
    zipApart.add_header('Content-Disposition',
                        'attachment',
                        filename='%s_%s.key' % (id, ipfshash))

    m = MIMEMultipart()
    m.attach(textApart)
    m.attach(zipApart)
    # "Your copyright key" (from the blockchain music copyright platform)
    m['Subject'] = Header('您的版权密钥——区块链音乐版权平台', 'utf-8')
    m['From'] = EMAIL_HOST_USER
    m['To'] = address
    try:
        server = smtplib.SMTP()
        server.connect(EMAIL_HOST, 25)
        server.login(EMAIL_HOST_USER, EMAIL_HOST_PASSWORD)
        server.sendmail(EMAIL_HOST_USER, address, m.as_string())
        server.quit()
        return 'email send success'
    except smtplib.SMTPException as e:
        return 'email send error: %s' % e
    finally:
        temp.close()
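
A hypothetical call, assuming the module-level ipfsclient and the EMAIL_* settings are already configured; the id, hash, and address below are made-up values:

result = sendKey(42, 'QmYwAPJzv5CZsnAzt8auVZRn1pfejJ7C1fvQPKtooWWtiV',
                 'buyer@example.com')
print(result)  # 'email send success' or 'email send error: ...'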
Example #55
from contextlib import contextmanager  # required for the yield-based context manager below


@contextmanager
def create_tarball(tar_paths):
    """
    Context manager that creates the tarball of the Docker context to use for building the image

    Parameters
    ----------
    tar_paths dict(str, str)
        Key representing a full path to the file or directory and the Value representing the path within the tarball

    Yields
    ------
        The tarball file
    """
    tarballfile = TemporaryFile()

    with tarfile.open(fileobj=tarballfile, mode="w") as archive:
        for path_on_system, path_in_tarball in tar_paths.items():
            archive.add(path_on_system, arcname=path_in_tarball)

    # Flush and seek to the beginning of the file
    tarballfile.flush()
    tarballfile.seek(0)

    try:
        yield tarballfile
    finally:
        tarballfile.close()
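
With the @contextmanager decorator in place, the tarball can be fed straight into a Docker build. A sketch using the Docker SDK for Python; the context mapping and tag are assumptions:

import docker

client = docker.from_env()
# hypothetical context: map the current directory to the tarball root
with create_tarball({".": "."}) as tar:
    # custom_context tells the SDK that fileobj is a complete build context
    image, logs = client.images.build(fileobj=tar, custom_context=True,
                                      tag="demo:latest")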
Example #56
    def index_html (self, icon=0, preview=0, width=None, height=None,
                    REQUEST=None):
        """ Return the file with it's corresponding MIME-type """

        if REQUEST is not None:
            if self._if_modified_since_request_handler(REQUEST):
                self.ZCacheable_set(None)
                return ''

            if self._redirect_default_view_request_handler(icon, preview, REQUEST):
                return ''

        filename, content_type, icon, preview = self._get_file_to_serve(icon, preview)
        filename = self._get_fsname(filename)

        if _debug > 1: logger.info('serving %s, %s, %s, %s' %(filename, content_type, icon, preview))

        if filename:
            size = os.stat(filename)[6]
        else:
            filename = self._get_icon_file(broken=True)
            size = os.stat(filename)[6]
            content_type = 'image/gif'
            icon = 1

        if icon == 0 and width is not None and height is not None:
            data = TemporaryFile() # hold resized image
            try:
                from PIL import Image
                im = Image.open(filename)
                if im.mode != 'RGB':
                    im = im.convert('RGB')
                resample = Image.BICUBIC
                if hasattr(Image, 'ANTIALIAS'): # PIL 1.1.3 and later
                    resample = Image.ANTIALIAS
                im = im.resize((int(width), int(height)), resample)
                im.save(data, 'JPEG', quality=85)
            except Exception:
                # resizing failed; fall back to serving the original file
                data = open(filename, 'rb')
            else:
                data.seek(0, 2)
                size = data.tell()
                data.seek(0)
                content_type = 'image/jpeg'
        else:
            data = open(filename, 'rb')

        if REQUEST is not None:
            last_mod = rfc1123_date(self._p_mtime)
            REQUEST.RESPONSE.setHeader('Last-Modified', last_mod)
            REQUEST.RESPONSE.setHeader('Content-Type', content_type)
            REQUEST.RESPONSE.setHeader('Content-Length', size)
            self.ZCacheable_set(None)
            return stream_iterator(data)

        try:
            return data.read()
        finally:
            data.close()
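
Note that Image.ANTIALIAS was removed in Pillow 10, so the hasattr fallback above silently drops back to BICUBIC on current Pillow. A sketch of how the filter could be chosen today, independent of the original code:

from PIL import Image

try:
    resample = Image.Resampling.LANCZOS  # Pillow >= 9.1
except AttributeError:
    # older PIL/Pillow: ANTIALIAS if available, else bicubic
    resample = getattr(Image, 'ANTIALIAS', Image.BICUBIC)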
Example #57
def upload(filepath):
    if not dropbox.is_authenticated:
        return redirect(dropbox.login_url)
    client = dropbox.client
    try:
        # checks for Google Drive authorization
        # checks for Google Drive authorization
        if 'credentials' not in session:
            session['credentials'] = None
        credentials = session['credentials']
        if credentials is None:
            return redirect(url_for('login_google'))

        if filepath[0] != '/':
            filepath = '/' + filepath  # normalize to an absolute Dropbox path
        http = httplib2.Http()
        http = credentials.authorize(http)
        service = build('drive', 'v2', http=http)
        req = service.files().get(fileId=fileInfo[str(filepath)]).execute()

        if 'downloadUrl' in req:
            url = req['downloadUrl']
        elif 'webContentLink' in req:
            url = req['webContentLink']
        else: # handle different file types
            if 'document' in req['mimeType']:
                if '.docx' in req['title']:
                    url = req['exportLinks']['application/vnd.openxmlformats-officedocument.wordprocessingml.document']
                elif '.odt' in req['title']:
                    url = req['exportLinks']['application/vnd.oasis.opendocument.text']
                elif '.txt' in req['title'] or '.md' in req['title']:
                    url = req['exportLinks']['text/plain']
                elif '.rtf' in req['title']:
                    url = req['exportLinks']['application/rtf']
                elif '.html' in req['title'] or '.htm' in req['title']:
                    url = req['exportLinks']['text/html']
            elif 'spreadsheet' in req['mimeType']:
                if 'xlsx' in req['title']:
                    url = req['exportLinks']['application/vnd.openxmlformats-officedocument.spreadsheetml.sheet']
                elif 'ods' in req['title']:
                    url = req['exportLinks']['application/x-vnd.oasis.opendocument.spreadsheet']
            elif 'presentation' in req['mimeType']:
                url = req['exportLinks']['application/vnd.openxmlformats-officedocument.presentationml.presentation']
            else: # user edited a filetype not supported by Google Drive for export
                url = req['exportLinks']['application/pdf']
                print str(req['title']) + ' converted to PDF'
        
        response, content = http.request(url)
        tf = TemporaryFile()
        tf.write(content)
        tf.seek(0)

        # uploads to dropbox
        client.put_file(filepath, tf, overwrite=True)
        tf.close()
        service.files().delete(fileId=fileInfo.pop(str(filepath))).execute()
        return redirect(url_for('dropboxStart', uploaded='success'))
    except Exception:
        if str(filepath) in fileInfo:
            fileInfo.pop(str(filepath)) # removing the file record
        return redirect(url_for('dropboxStart', uploaded='failed'))
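
The chain of title checks above just maps file extensions to Google Drive export MIME types; it could be table-driven. A sketch of a hypothetical helper, with the extension table copied from the branches above:

# extension -> export MIME type, taken from the branches above
DOC_EXPORT_TYPES = [
    ('.docx', 'application/vnd.openxmlformats-officedocument.wordprocessingml.document'),
    ('.odt', 'application/vnd.oasis.opendocument.text'),
    ('.txt', 'text/plain'),
    ('.md', 'text/plain'),
    ('.rtf', 'application/rtf'),
    ('.html', 'text/html'),
    ('.htm', 'text/html'),
]

def document_export_url(req):
    # hypothetical helper: pick the first extension found in the title
    for ext, mime in DOC_EXPORT_TYPES:
        if ext in req['title']:
            return req['exportLinks'][mime]
    # no known extension: fall back to PDF (the original code would leave
    # url unset in this case, a latent bug)
    return req['exportLinks']['application/pdf']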
Example #58
import urllib.request
from tempfile import TemporaryFile

from lxml import html


def url_to_doc(url):
    content = urllib.request.urlopen(url).read()
    tmp = TemporaryFile()
    tmp.write(content)
    tmp.seek(0)
    tree = html.parse(tmp)
    tmp.close()
    return tree
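
The temporary file exists only to hand lxml a file-like object; the bytes can be parsed directly in memory instead. A sketch of an equivalent variant (the function name is hypothetical):

import urllib.request
from lxml import etree, html

def url_to_doc_direct(url):
    # parse the downloaded bytes without the TemporaryFile round trip;
    # wrapping the root keeps the ElementTree-like return type
    content = urllib.request.urlopen(url).read()
    root = html.document_fromstring(content)
    return etree.ElementTree(root)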
Example #59
    def test_open_in_binary(self):
        outfile = TemporaryFile('wb+')
        g = self.getGClass()(outfile=outfile, print_lines=False)
        g.move(10, 10)
        outfile.seek(0)
        lines = outfile.readlines()
        # a file opened in binary mode must yield bytes, not str
        assert isinstance(lines[0], bytes)
        outfile.close()