Example #1
def get_data_file(cr, uid, data):
    filedata = base64.decodestring(data)
    buf = cStringIO.StringIO(filedata)

    try:
        zipf = zipfile.ZipFile(buf, 'r')
        file_name = zipf.namelist()
        if not file_name or len(file_name) > 1:
            raise osv.except_osv(_('Error'), _('The Zip file should contain only one file'))
        filedata = zipf.read(file_name[0])
        zipf.close()
    except zipfile.BadZipfile:
        pass
    buf.close()

    try:
        s_xml = SpreadsheetXML(xmlstring=filedata)
    except osv.except_osv:
        fileobj = TemporaryFile('w+b')
        fileobj.write(filedata)
        fileformat = 'csv'
    else:
        fileobj = TemporaryFile('w+')
        s_xml.to_csv(to_file=fileobj)
        fileformat = 'xml'
    fileobj.seek(0)

    return (fileobj, fileformat)
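The zip-or-plain detection above leans on catching BadZipfile; the standard library's zipfile.is_zipfile can probe a file-like object directly. A minimal, self-contained sketch of the same unwrapping pattern (Python 3; the error type is simplified to ValueError):

import io
import zipfile

def unwrap_single_zip(data):
    """Return the sole member of a zip archive, or the data unchanged."""
    buf = io.BytesIO(data)
    if not zipfile.is_zipfile(buf):
        return data
    with zipfile.ZipFile(buf) as zf:
        names = zf.namelist()
        if len(names) != 1:
            raise ValueError('The Zip file should contain only one file')
        return zf.read(names[0])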
Example #2
def testFile():
    """Creates and returns a test file that you
    can muck around with"""
    file_ = TemporaryFile()
    file_.write(TEST_TEXT)
    file_.seek(0)
    return file_
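Because the helper rewinds the handle before returning it, callers can read TEST_TEXT straight back; a minimal usage sketch (TEST_TEXT is assumed to be defined alongside the helper):

f = testFile()
assert f.read() == TEST_TEXT
f.close()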
Example #3
    def test_read_several(self):
        """Read several stanzas from file"""
        tmpf = TemporaryFile()
        tmpf.write("""\
version_header: 1

name: foo
val: 123

name: quoted
address:   "Willowglen"
\t  42 Wallaby Way
\t  Sydney

name: bar
val: 129319
""")
        tmpf.seek(0)
        s = read_stanza(tmpf)
        self.assertEquals(s, Stanza(version_header='1'))
        s = read_stanza(tmpf)
        self.assertEquals(s, Stanza(name="foo", val='123'))
        s = read_stanza(tmpf)
        self.assertEqualDiff(s.get('name'), 'quoted')
        self.assertEqualDiff(s.get('address'), '  "Willowglen"\n  42 Wallaby Way\n  Sydney')
        s = read_stanza(tmpf)
        self.assertEquals(s, Stanza(name="bar", val='129319'))
        s = read_stanza(tmpf)
        self.assertEquals(s, None)
        self.check_rio_file(tmpf)
Example #4
    def build(self):
        data = []
        datapath = self.home.joinpath('data.xml')
        dom = minidom.parse(datapath.absolute().as_posix())
        index = 0
        for page in dom.getElementsByTagName('page'):
            page_data = self.parse_page(page)
            page_data['page.index'] = index
            data.append(page_data)
            index += 1

        data_loader = """
(function initData(w){{
w.Sectioner = new Object();
w.Sectioner.pages = {};
Object.freeze(w.Sectioner.pages);
}})(window);
        """.format(json.dumps(data, indent=2)).encode('UTF-8')

        data_js = TemporaryFile()
        data_js.write(data_loader)
        self.compiler.add_file(data_js, 'data.js')

        for asset in dom.getElementsByTagName('asset'):
            self.parse_asset(asset)

        return data
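The doubled braces in data_loader escape literal { and } for str.format, leaving the single {} as the placeholder for the JSON payload; a tiny illustration of the same escaping:

template = '(function(w){{ w.pages = {}; }})(window);'
print(template.format('[1, 2]'))
# -> (function(w){ w.pages = [1, 2]; })(window);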
Example #5
 def parse(self, loc, conf_tree):
     """Set loc.scheme, loc.loc_type, loc.paths."""
     loc.scheme = "rsync"
     # Attempt to obtain the checksum(s) via "ssh"
     host, path = loc.name.split(":", 1)
     cmd = self.manager.popen.get_cmd("ssh", host, "bash")
     f = TemporaryFile()
     f.write("""
     set -eu
     if [[ -d %(path)s ]]; then
         echo '%(tree)s'
         cd %(path)s
         find . -type d | sed '/^\.$/d; /\/\./d; s/^\.\/*/- /'
         md5sum $(find . -type f | sed '/\/\./d; s/^\.\///')
     elif [[ -f %(path)s ]]; then
         echo '%(blob)s'
         md5sum %(path)s
     fi
     """ % {"path": path, "blob": loc.TYPE_BLOB, "tree": loc.TYPE_TREE})
     f.seek(0)
     out, err = self.manager.popen(*cmd, stdin=f)
     lines = out.splitlines()
     if not lines or lines[0] not in [loc.TYPE_BLOB, loc.TYPE_TREE]:
         raise ValueError(loc.name)
     loc.loc_type = lines.pop(0)
     if loc.loc_type == loc.TYPE_BLOB:
         line = lines.pop(0)
         checksum, name = line.split(None, 1)
         loc.add_path(loc.BLOB, checksum)
     for line in lines:
         checksum, name = line.split(None, 1)
         if checksum == "-":
             checksum = None
         loc.add_path(name, checksum)
Example #6
class CandidateUploadFile(BaseHandler):
    def initialize(self):
        self.tempfile = TemporaryFile()

    @tornado.web.authenticated
    @granted()
    @tornado.web.asynchronous
    def post(self):
        fp_url = self.get_argument("url")
        mime_type = self.get_argument("data[type]")
        size = int(self.get_argument("data[size]"))
        candidate_id = self.get_argument("id")
        self.candidate = self.db.query(Candidate).get(int(candidate_id))
        logging.info("type: %s, size: %r", mime_type, size)
        if mime_type == "image/jpeg" and size < MAX_UPLOAD_SIZE:
            http_client = tornado.httpclient.AsyncHTTPClient()
            request = tornado.httpclient.HTTPRequest(url=fp_url, streaming_callback=self.streaming_callback)
            http_client.fetch(request, self.on_download)
        else:
            self.finish(dict(status=0))

    def streaming_callback(self, data):
        self.tempfile.write(data)
        logging.info("This is the streaming_callback file tell function: %r", self.tempfile.tell())

    def on_download(self, response):
        img_path = os.path.join(os.path.dirname(__file__), "static/profiles/img/" + str(self.candidate.backup_id) + '.jpg')
        self.tempfile.seek(0)
        ptr = open(img_path, 'wb')
        ptr.write(self.tempfile.read())
        ptr.close()
        self.tempfile.close()
        self.finish(dict(src="/static/profiles/img/" + str(self.candidate.backup_id) + '.jpg', status=1))
Example #7
class MSeed4KOutput(object):
    def __init__(self, fd):
        self.__fd = fd
        self.__mseed_fd = TemporaryFile()

    def write(self, data):
        self.__mseed_fd.write(data)

    def close(self):
        try:
            try:
                wfd = _WaveformData()

                self.__mseed_fd.seek(0)
                for rec in MSeedInput(self.__mseed_fd):
                    wfd.add_data(rec)

                wfd.output_data(self.__fd, 0)

            except (MSeedError, SEEDError, DBError), e:
                logs.error("error reblocking Mini-SEED data: " + str(e))

        finally:
            self.__mseed_fd.close()
            self.__fd.close()
Example #8
def main(argv):
    args = docopt(__doc__, argv=argv)

    headers = get_args_dict(args['--header'])
    if args['--size-hint']:
        headers['x-archive-size-hint'] = args['--size-hint']

    # Upload keyword arguments.
    upload_kwargs = dict(
        metadata=get_args_dict(args['--metadata']),
        headers=headers,
        debug=args['--debug'],
        queue_derive=not args['--no-derive'],
        ignore_preexisting_bucket=args['--ignore-bucket'],
        checksum=args['--checksum'],
        verbose=not args['--quiet'],
        retries=int(args['--retries']) if args['--retries'] else 0,
        retries_sleep=int(args['--sleep']),
        delete=args['--delete'],
    )

    if args['<file>'] == ['-'] and not args['-']:
        sys.stderr.write('--remote-name is required when uploading from stdin.\n')
        call(['ia', 'upload', '--help'])
        sys.exit(1)

    # Upload from stdin.
    if args['-']:
        local_file = TemporaryFile()
        local_file.write(sys.stdin.read())
        local_file.seek(0)
        _upload_files(args, args['<identifier>'], local_file, upload_kwargs)

    # Bulk upload using spreadsheet.
    elif args['--spreadsheet']:
        # Use the same session for each upload request.
        session = ArchiveSession()

        spreadsheet = csv.DictReader(open(args['--spreadsheet'], 'rU'))
        prev_identifier = None
        for row in spreadsheet:
            local_file = row['file']
            identifier = row['identifier']
            del row['file']
            del row['identifier']
            if (not identifier) and (prev_identifier):
                identifier = prev_identifier
            # TODO: Clean up how indexed metadata items are coerced
            # into metadata.
            md_args = ['{0}:{1}'.format(k.lower(), v) for (k, v) in row.items() if v]
            metadata = get_args_dict(md_args)
            upload_kwargs['metadata'].update(metadata)
            _upload_files(args, identifier, local_file, upload_kwargs, prev_identifier,
                          session)
            prev_identifier = identifier

    # Upload files.
    else:
        local_file = args['<file>']
        _upload_files(args, args['<identifier>'], local_file, upload_kwargs)
Example #9
class SeedOutput(object):
    def __init__(self, fd, inv, label, resp_dict):
        self.__fd = fd
        self.__inv = inv
        self.__label = label
        self.__resp_dict = resp_dict
        self.__mseed_fd = TemporaryFile()

    def write(self, data):
        self.__mseed_fd.write(data)

    def close(self):
        try:
            try:
                seed_volume = SEEDVolume(self.__inv, ORGANIZATION, self.__label,
                    self.__resp_dict)

                self.__mseed_fd.seek(0)
                for rec in MSeedInput(self.__mseed_fd):
                    seed_volume.add_data(rec)

                seed_volume.output(self.__fd)

            except (MSeedError, SEEDError, DBError), e:
                logs.error("error creating SEED volume: " + str(e))

        finally:
            self.__mseed_fd.close()
            self.__fd.close()
Example #10
def test_clip_to_date_one(pcic_data_portal):
    base_url = '/data/pcds/agg/?'
    sdate = datetime(2007, 1, 1)
    params = {'from-date': sdate.strftime('%Y/%m/%d'),
              'network-name': 'RTA', 'data-format': 'csv',
              'cliptodate': 'cliptodate',
              }
    req = Request.blank(base_url + urlencode(params))

    resp = req.get_response(pcic_data_portal)
    print resp.status
    assert resp.status == '200 OK'
    t = TemporaryFile()
    t.write(resp.body)
    z = ZipFile(t, 'r')
    assert 'RTA/pondosy.csv' in z.namelist()
    f = z.open("RTA/pondosy.csv")
    [f.readline() for _ in range(10)]
    # Read through the file and ensure the no data outside of the date
    # range was returned
    reader = csv.reader(f)
    for row in reader:
        if len(row) > 0:
            d = datetime.strptime(row[0], '%Y-%m-%d %H:%M:%S')
            assert d >= sdate
    # Check values on the first 5 just to make sure
    expected = ['2007-01-09 00:00:00',
                '2007-01-10 00:00:00',
                '2007-01-11 00:00:00',
                '2007-01-12 00:00:00',
                '2007-01-13 00:00:00']
    for exp, actual in izip(expected, reader):
        assert exp == actual[0]
Example #11
def test_load_sift():
    f = TemporaryFile()
    fname = f.name
    f.close()
    f = open(fname, 'wb')
    f.write(b'''2 128
133.92 135.88 14.38 -2.732
3 12 23 38 10 15 78 20 39 67 42 8 12 8 39 35 118 43 17 0
0 1 12 109 9 2 6 0 0 21 46 22 14 18 51 19 5 9 41 52
65 30 3 21 55 49 26 30 118 118 25 12 8 3 2 60 53 56 72 20
7 10 16 7 88 23 13 15 12 11 11 71 45 7 4 49 82 38 38 91
118 15 2 16 33 3 5 118 98 38 6 19 36 1 0 15 64 22 1 2
6 11 18 61 31 3 0 6 15 23 118 118 13 0 0 35 38 18 40 96
24 1 0 13 17 3 24 98
132.36 99.75 11.45 -2.910
94 32 7 2 13 7 5 23 121 94 13 5 0 0 4 59 13 30 71 32
0 6 32 11 25 32 13 0 0 16 51 5 44 50 0 3 33 55 11 9
121 121 12 9 6 3 0 18 55 60 48 44 44 9 0 2 106 117 13 2
1 0 1 1 37 1 1 25 80 35 15 41 121 3 0 2 14 3 2 121
51 11 0 20 93 6 0 20 109 57 3 4 5 0 0 28 21 2 0 5
13 12 75 119 35 0 0 13 28 14 37 121 12 0 0 21 46 5 11 93
29 0 0 3 14 4 11 99''')
    f.close()
    f = open(fname, 'rb')
    features = load_sift(f)
    f.close()

    assert_equal(len(features), 2)
    assert_equal(len(features['data'][0]), 128)
    assert_equal(features['row'][0], 133.92)
    assert_equal(features['column'][1], 99.75)
Example #12
 def _process_data_1(self, cr, uid, ids, data, context):
     #try:
         fileobj = TemporaryFile('w+')        
         fileobj.write(data)
         fileobj.seek(0) 
         lines = []
         for line in fileobj.readlines():
             #log.info('++++++++++++++++\r\nline=%s' % line)
             lines = line.split(',')            
             #if len(lines) == 0: break
              if self._isnumeric(lines[0]):
                 id = int(lines[0])
                 date_from = datetime.strptime(lines[1], '%m/%d/%Y %H:%M:%S').strftime('%Y-%m-%d  %H:%M:%S')
                 date_to = datetime.strptime(lines[2].replace("\n",""), '%m/%d/%Y %H:%M:%S').strftime('%Y-%m-%d  %H:%M:%S')               
                 #log.info('id=%s,df=%s,dt=%s' % (id, date_from, date_to))
                 #check existing
                 day = datetime.strptime(date_from, '%Y-%m-%d  %H:%M:%S').strftime('%Y-%m-%d')
                 
                 attds = self.pool.get('hr.attendance')
                 attd_ids = attds.search(cr, uid, [('employee_id','=',id),('day','=',day)], context=context)
                 
                 #log.info(attd_ids)
                 log.info('employee_id=%d,attd_ids=%s,len=%d,day=%s' % (id,attd_ids,len(attd_ids), day))                                           
                 if len(attd_ids) == 0:                        
                     attds.create(cr, uid, {'employee_id':id,'name':date_from,'action':'sign_in','source':'import'})
                     attds.create(cr, uid, {'employee_id':id,'name':date_to,'action':'sign_out','source':'import'})
             #log.info('+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++')
         fileobj.close()
Example #13
    def import_lang(self, cr, uid, ids, context=None):
        """
            Import Language
            @param cr: the current row, from the database cursor.
            @param uid: the current user’s ID for security checks.
            @param ids: the ID or list of IDs
            @param context: A standard dictionary
        """
        if context is None:
            context = {}
        import_data = self.browse(cr, uid, ids)[0]
        if import_data.overwrite:
            context.update(overwrite=True)
        fileobj = TemporaryFile('w+')
        fileobj.write(base64.decodestring(import_data.data))

        # now we determine the file format
        fileobj.seek(0)
        first_line = fileobj.readline().strip().replace('"', '').replace(' ', '')
        fileformat = first_line.endswith("type,name,res_id,src,value") and 'csv' or 'po'
        fileobj.seek(0)

        tools.trans_load_data(cr, fileobj, fileformat, import_data.code, lang_name=import_data.name, context=context)
        fileobj.close()
        return {}
Example #14
def temp_image():
    media_dir = settings.MEDIA_ROOT
    if not os.path.exists(media_dir):
        os.makedirs(media_dir)
    temp = TemporaryFile(suffix='.jpeg', dir=media_dir)
    temp.write(base64_gif_image())
    yield File(temp)
Example #15
    def _create_temp_file(edid_binary):
        edid_file = TemporaryFile()
        edid_file.write(edid_binary)
        edid_file.flush()
        edid_file.seek(0)

        return edid_file
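A short usage sketch for the helper above; the bytes are a hypothetical EDID header, but any bytes object works since the helper flushes and rewinds before returning:

edid_file = _create_temp_file(b'\x00\xff\xff\xff\xff\xff\xff\x00')
assert edid_file.read(2) == b'\x00\xff'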
Example #16
def main(number):
    """
    Generates 100 temporary files containing random numbers
    """
    for x in range(100):
        f = TemporaryFile(mode="w")
        f.write(str(randint(100, 10000)))  # write() expects a str, not an int
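Note that each TemporaryFile above is deleted as soon as its handle is garbage-collected at the end of the loop iteration, so the generated numbers are lost. A sketch of a variant that keeps the data readable (names are illustrative):

from random import randint
from tempfile import TemporaryFile

def make_temp_files(count=100):
    files = []
    for _ in range(count):
        f = TemporaryFile(mode='w+')   # '+' so the number can be read back
        f.write(str(randint(100, 10000)))
        f.seek(0)
        files.append(f)                # keep a reference, or the file vanishes
    return files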
Example #17
    def to_xml(self, f=None):
        """Get this domain as an XML DOM Document
        :param f: Optional File to dump directly to
        :type f: File or Stream

        :return: File object where the XML has been dumped to
        :rtype: file
        """
        if not f:
            from tempfile import TemporaryFile
            f = TemporaryFile()
        print('<?xml version="1.0" encoding="UTF-8"?>', file=f)
        print('<Domain id="%s">' % self.name, file=f)
        for item in self:
            print('\t<Item id="%s">' % item.name, file=f)
            for k in item:
                print('\t\t<attribute id="%s">' % k, file=f)
                values = item[k]
                if not isinstance(values, list):
                    values = [values]
                for value in values:
                    print('\t\t\t<value><![CDATA[', end=' ', file=f)
                    if isinstance(value, unicode):
                        value = value.encode('utf-8', 'replace')
                    else:
                        value = unicode(value, errors='replace').encode('utf-8', 'replace')
                    f.write(value)
                    print(']]></value>', file=f)
                print('\t\t</attribute>', file=f)
            print('\t</Item>', file=f)
        print('</Domain>', file=f)
        f.flush()
        f.seek(0)
        return f
Example #18
    def process_response(self, response):
        # Parse the metadata zip file from the response
        zipstr = parseString(response.content).getElementsByTagName('zipFile')
        if zipstr:
            zipstr = zipstr[0].firstChild.nodeValue
        else:
            return self.packages
        zipfp = TemporaryFile()
        zipfp.write(base64.b64decode(zipstr))
        zipfile = ZipFile(zipfp, 'r')
    
        packages = {}
    
        # Loop through all files in the zip skipping anything other than InstalledPackages
        for path in zipfile.namelist():
            if not path.endswith('.installedPackage'):
                continue
            namespace = path.split('/')[-1].split('.')[0]
            version = parseString(zipfile.open(path).read()).getElementsByTagName('versionNumber')
            if version:
                version = version[0].firstChild.nodeValue
    
            packages[namespace] = version

        self.packages = packages
        return self.packages
Example #19
def fix_img_gff_errors(gff_file):
    ''' GFF files in the IMG directory can contain errors. This fixes them and returns a file handle to the fixed file

        transforms literal semicolon (;) characters in field 9 to their proper percent encoding (%3B)
        fixes the CRISPR lines
    '''
    new_gff = TemporaryFile()
    with open(gff_file) as fp:
        for ln, line in enumerate(fp):
            if line[0] == '#':
                new_gff.write(line)
            else:
                fields = line.split('\t')
                if fields[2] == 'CRISPR':
                    fields[5] = '.'
                    fields[6] = '?'
                    fields[7] = '.'
                else:
                    attributes = fields[8]
                    attribute_kv = attributes.split(';')
                    new_attributes = []
                    for i in range(len(attribute_kv)):
                        if ',' in attribute_kv[i]:
                            attribute_kv[i] = re.sub(',', '%2C', attribute_kv[i])

                        if '=' not in attribute_kv[i]:
                            new_attributes[-1] += '%3B' + attribute_kv[i]
                        else:
                            new_attributes.append(attribute_kv[i])
                    fields[8] = ';'.join(new_attributes)

                new_gff.write('\t'.join(fields))

    new_gff.seek(0)
    return new_gff
Example #20
    def send_form(self):
        import csv
        product = self[0]
        #_logger.warning('data %s b64 %s ' % (account.data,base64.decodestring(account.data)))
        if product.data is not None:
            fileobj = TemporaryFile('w+')
            fileobj.write(base64.decodestring(product.data))
            fileobj.seek(0)
 
            try:
                for row in csv.DictReader(fileobj):
                    pass                
            finally:
                fileobj.close()
            return True
        #product.write({'state': 'get', 'name': '%s.xml' % account.model.model.replace('.','_'),'data': base64.b64encode(account._export_xml()) })
        return {
            'type': 'ir.actions.act_window',
            'res_model': 'account.export',
            'view_mode': 'form',
            'view_type': 'form',
            'res_id': product.id,
            'views': [(False, 'form')],
            'target': 'new',
        }
Example #21
    def test_read_SIFT(self):
        f = TemporaryFile()
        f.write(
            """2 128
133.92 135.88 14.38 -2.732
 3 12 23 38 10 15 78 20 39 67 42 8 12 8 39 35 118 43 17 0
 0 1 12 109 9 2 6 0 0 21 46 22 14 18 51 19 5 9 41 52
 65 30 3 21 55 49 26 30 118 118 25 12 8 3 2 60 53 56 72 20
 7 10 16 7 88 23 13 15 12 11 11 71 45 7 4 49 82 38 38 91
 118 15 2 16 33 3 5 118 98 38 6 19 36 1 0 15 64 22 1 2
 6 11 18 61 31 3 0 6 15 23 118 118 13 0 0 35 38 18 40 96
 24 1 0 13 17 3 24 98
132.36 99.75 11.45 -2.910
 94 32 7 2 13 7 5 23 121 94 13 5 0 0 4 59 13 30 71 32
 0 6 32 11 25 32 13 0 0 16 51 5 44 50 0 3 33 55 11 9
 121 121 12 9 6 3 0 18 55 60 48 44 44 9 0 2 106 117 13 2
 1 0 1 1 37 1 1 25 80 35 15 41 121 3 0 2 14 3 2 121
 51 11 0 20 93 6 0 20 109 57 3 4 5 0 0 28 21 2 0 5
 13 12 75 119 35 0 0 13 28 14 37 121 12 0 0 21 46 5 11 93
 29 0 0 3 14 4 11 99"""
        )
        f.seek(0)
        features = feature.SIFT.fromfile(f)
        f.close()

        assert_equal(len(features), 2)
        assert_equal(len(features["data"][0]), 128)
        assert_equal(features["row"][0], 133.92)
        assert_equal(features["column"][1], 99.75)
Example #22
    def generate_pdf_ticket(registration=None, context=None, encoding='utf-8'):
        import ho.pisa as pisa
        import cStringIO as StringIO
        from django.utils.six import BytesIO

        if not registration and not context:
            raise Http404(_("Invalid arguments"))

        if not context:
            d = ConfirmationEmailView.get_extra_context(registration)
            context = Context(d)
        template = loader.get_template('registration/ticket.html')
        html = template.render(context)

        if not registration:
            registration = context['r']

        result = StringIO.StringIO()
        pdf = pisa.pisaDocument(StringIO.StringIO(html.encode("ISO-8859-1")), result)
        result = result.getvalue()

        try:
            file = TemporaryFile()
            file.write(result)
            registration.ticket_file = File(file)
            registration.save()
            file.close()
        except Exception, e:
            charge = registration.charge
            if charge:
                charge.save_server_message(
                    ['Failed while saving ticket file'], exception=e)
Example #23
def handle_request(request, object_id, size=None):
    book = get_object_or_404(Book, pk=object_id)
    
    if size is None:
        size = request.REQUEST.get('size',None)
    
    handle_number = book.identifier.split(':')[2].replace('/', '_')
    file_name_complete = 'var/media/' + handle_number + '_' + size + '.pdf'
    file_name_inprogress = 'var/media/' + handle_number + '_' + size
    
    if os.path.exists(file_name_complete):
        print "found complete PDF"
        PDF_file = open(file_name_complete, 'r')
        return HttpResponse(File(PDF_file).readlines(), mimetype='application/pdf')
    elif os.path.exists(file_name_inprogress):
        print "found PDF in progress"
        tempfile = TemporaryFile()
        tempfile.write('PDF compilation in progress, please check back later...')
        tempfile.seek(0)
        return HttpResponse(tempfile.readlines(), mimetype='text/plain')
    else:
        # Fire the missiles
        f = open(file_name_inprogress, 'w')
        PDF_file = File(f)
        t = threading.Thread(target=compile_PDF,
                             args=[request, book, PDF_file, size])
        t.setDaemon(True)
        t.start()
        tempfile = TemporaryFile()
        tempfile.write('PDF compilation initiated, please check back later...')
        tempfile.seek(0)
        return HttpResponse(tempfile.readlines(), mimetype='text/plain')
Example #24
	def string_to_numpy(string):
		"""Convert human-readable string into numpy array.
		
		Note:
			loads as floats even if stored as ints. 
			
			human-readable string example:
				1 2 3
				4 5 6
			is a string for the following array:
				[[1,2,3]
				 [4,5,6]]
		
		Args:
			string (string): human-readable string to convert to numpy array

		Returns:
			numpy array

		"""
		f = TemporaryFile()
		f.write(string)
		f.seek(0)
		array = np.loadtxt(f)
		return array
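For reference, the intended behaviour on the docstring's own example (note that on Python 3 the TemporaryFile would need mode='w+' to accept a str, so this matches the Python 2 original):

>>> string_to_numpy('1 2 3\n4 5 6')
array([[1., 2., 3.],
       [4., 5., 6.]])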
Example #25
 def exportFile(self, oid, f=None):
     if f is None:
         f = TemporaryFile()
     elif isinstance(f, str):
         f = open(f,'w+b')
     f.write('ZEXP')
     oids = [oid]
     done_oids = {}
     done=done_oids.has_key
     load=self._storage.load
     while oids:
         oid = oids.pop(0)
         if oid in done_oids:
             continue
         done_oids[oid] = True
         try:
             p, serial = load(oid, self._version)
         except:
             logger.debug("broken reference for oid %s", repr(oid),
                          exc_info=True)
         else:
             referencesf(p, oids)
             f.writelines([oid, p64(len(p)), p])
     f.write(export_end_marker)
     return f
Example #26
 def _open(self):
     tmp = TemporaryFile()
     resp = requests.get(self.metadata['url'], stream=True)
     for chunk in resp.iter_content(256*1024):
         tmp.write(chunk)
     tmp.seek(0)
     return tmp
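The same download-and-rewind pattern can be written with shutil.copyfileobj, replacing the explicit chunk loop; a sketch under the same assumption that the requests library is available (url stands in for self.metadata['url']):

import shutil
from tempfile import TemporaryFile

import requests

def fetch_to_tempfile(url):
    tmp = TemporaryFile()
    with requests.get(url, stream=True) as resp:
        resp.raise_for_status()
        shutil.copyfileobj(resp.raw, tmp, length=256 * 1024)
    tmp.seek(0)
    return tmp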
Example #27
def dmenu_fetch(inputstr):
  t = TemporaryFile()
  t.write(bytes(inputstr, 'UTF-8'))
  t.seek(0)
  dmenu_run = subprocess.Popen(["dmenu","-b","-fn","'DejaVu Sans Mono-13'"], stdout=subprocess.PIPE, stdin=t)
  output = (dmenu_run.communicate()[0]).decode().strip()
  return output
Example #28
    def get_upload_results(self, job_id, batch_id, callback = dump_results, batch_size=0, logger=None):
        job_id = job_id or self.lookup_job_id(batch_id)

        if not self.is_batch_done(job_id, batch_id):
            return False
        http = Http()
        uri = self.endpoint + "/services/async/29.0/job/%s/batch/%s/result" % (job_id, batch_id)
        resp, content = http.request(uri, method="GET", headers=self.headers())

        tf = TemporaryFile()
        tf.write(content)

        total_remaining = self.count_file_lines(tf)
        if logger:
            logger("Total records: %d" % total_remaining)
        tf.seek(0)

        records = []
        line_number = 0
        col_names = []
        reader = csv.reader(tf, delimiter=",", quotechar='"')
        for row in reader:
            line_number += 1
            records.append(UploadResult(*row))
            if len(records) == 1:
                col_names = records[0]
            if batch_size > 0 and len(records) >= (batch_size+1):
                callback(records, total_remaining, line_number)
                total_remaining -= (len(records)-1)
                records = [col_names]
        callback(records, total_remaining, line_number)

        tf.close()

        return True
Example #29
def install_package_version(request, namespace, number):
    oauth = request.session.get('oauth_response', None)
    if not oauth:
        return HttpResponse('Unauthorized', status=401)

    version = get_object_or_404(PackageVersion, package__namespace = namespace, number = number)

    # Log the install
    install = PackageInstallation(
        package = version.package, 
        version = version, 
        action = 'install', 
        username = oauth['username'], 
        org_id = oauth['org_id'],
        org_type = oauth['org_type'],
        status = 'Starting',
    )
    install.save()

    request.session['mpinstaller_current_install'] = install.id

    endpoint = build_endpoint_url(oauth)

    # If we have a version number, install via a custom built metadata package using InstalledPackage
    if version.number:
        # Build a zip for the install package
        package_zip = PackageZipBuilder(namespace, number).install_package() 
    else:
        try:
            zip_resp = requests.get(version.zip_url)
            zipfp = TemporaryFile()
            zipfp.write(zip_resp.content)
            zipfile = ZipFile(zipfp, 'r')
            zipfile.close()
            zipfp.seek(0)
            package_zip = base64.b64encode(zipfp.read())
            # FIXME: Implement handling of the subdir field   
        except:
            raise ValueError('Failed to fetch zip from %s' % version.zip_url)

    # Construct the SOAP envelope message
    message = SOAP_DEPLOY % {'package_zip': package_zip}
    message = message.encode('utf-8')
    
    headers = {
        'Content-Type': "text/xml; charset=UTF-8",
        'Content-Length': len(message),
        'SOAPAction': 'deploy',
    }

    response = call_mdapi(request, url=endpoint, headers=headers, data=message)

    id = parseString(response.content).getElementsByTagName('id')[0].firstChild.nodeValue

    # Delete the cached org package versions
    if request.session.get('mpinstaller_org_packages', None) is not None:
        del request.session['mpinstaller_org_packages']

    return HttpResponse(json.dumps({'process_id': id}), content_type='application/json')
Example #30
def upload(filepath):
    if not dropbox.is_authenticated:
        return redirect(dropbox.login_url)
    client = dropbox.client
    try:
        # checks for Google Drive authorization
        if 'credentials' not in session:
            session['credentials'] = None
        credentials = session['credentials']
        if credentials is None:
            return redirect(url_for('login_google'))

        if filepath[0] != '/': filepath = '/' + filepath # fixes path if broken
        http = httplib2.Http()
        http = credentials.authorize(http)
        service = build('drive', 'v2', http=http)
        req = service.files().get(fileId=fileInfo[str(filepath)]).execute()

        if 'downloadUrl' in req:
            url = req['downloadUrl']
        elif 'webContentLink' in req:
            url = req['webContentLink']
        else: # handle different file types
            if 'document' in req['mimeType']:
                if '.docx' in req['title']:
                    url = req['exportLinks']['application/vnd.openxmlformats-officedocument.wordprocessingml.document']
                elif '.odt' in req['title']:
                    url = req['exportLinks']['application/vnd.oasis.opendocument.text']
                elif '.txt' in req['title'] or '.md' in req['title']:
                    url = req['exportLinks']['text/plain']
                elif '.rtf' in req['title']:
                    url = req['exportLinks']['application/rtf']
                elif '.html' in req['title'] or '.htm' in req['title']:
                    url = req['exportLinks']['text/html']
            elif 'spreadsheet' in req['mimeType']:
                if 'xlsx' in req['title']:
                    url = req['exportLinks']['application/vnd.openxmlformats-officedocument.spreadsheetml.sheet']
                elif 'ods' in req['title']:
                    url = req['exportLinks']['application/x-vnd.oasis.opendocument.spreadsheet']
            elif 'presentation' in req['mimeType']:
                url = req['exportLinks']['application/vnd.openxmlformats-officedocument.presentationml.presentation']
            else: # user edited a filetype not supported by Google Drive for export
                url = req['exportLinks']['application/pdf']
                print str(req['title']) + ' converted to PDF'
        
        response, content = http.request(url)
        tf = TemporaryFile()
        tf.write(content)
        tf.seek(0)

        # uploads to dropbox
        client.put_file(filepath, tf, overwrite=True)
        tf.close()
        service.files().delete(fileId=fileInfo.pop(str(filepath))).execute()
        return redirect(url_for('dropboxStart', uploaded='success'))
    except:
        if str(filepath) in fileInfo:
            fileInfo.pop(str(filepath)) # removing the file record
        return redirect(url_for('dropboxStart', uploaded='failed'))
Example #31
 def __call__(self):
     """Return File/Image Raw Data"""
     context = removeSecurityProxy(self.context)
     #response.setHeader("Content-Type", "application/octect-stream")
     if len(self.traverse_subpath) != 1:
         return
     fname = self.traverse_subpath[0]
     tempfile = TemporaryFile()
     data = getattr(context, fname, None)
     if data is not None:
         tempfile.write(data)
         tempfile.flush()
         tempfile.seek(0)
         return tempfile
     else:
         raise NotFound(self.context, fname, self.request)
Example #32
 def test_run_command_with_stdin_from_handles(self):
     """Test STDIN from multiple file handles"""
     handles = []
     for txt in ['catches mice.\n', 'eat fish.\n']:
         handle = TemporaryFile()
         handle.write(txt.encode('UTF-8'))
         handle.seek(0)
         handles.append(handle)
     ctx = SubProcContext('meow', ['cat'], stdin_files=handles)
     SubProcPool.run_command(ctx)
     self.assertEqual(ctx.err, '')
     self.assertEqual(ctx.out, 'catches mice.\neat fish.\n')
     self.assertEqual(ctx.ret_code, 0)
     for handle in handles:
         handle.close()
Example #33
 def get(self, filepath, filename):
     if not filepath: filepath = ''
     if self.is_('ftp') or self.is_('sftp'):
         outfile = TemporaryFile('w+b')
         if self.is_('ftp'):
             self.connection.cwd(filepath)
             self.connection.retrbinary("RETR " + filename, outfile.write)
         elif self.is_('sftp'):
             self.connection.chdir(filepath)
             remote_file = self.connection.file(filename)
             outfile.write(remote_file.read())
         outfile.seek(0)
         return outfile
     elif self.is_('filestore'):
         return open(os.path.join(filepath, filename), 'r+b')
Example #34
class tee:
    """
    Inspired by: http://shallowsky.com/blog/programming/python-tee.html
    """
    def __init__(self, stream):
        self.temp_file = TemporaryFile(mode="w+")
        self.stream = stream

    def write(self, text):
        self.temp_file.write(text)
        self.stream.write(text)

    def flush(self):
        self.temp_file.flush()
        self.stream.flush()
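A brief usage sketch for the tee class above: route sys.stdout through it, then rewind the temporary file to inspect everything that was printed:

import sys

capture = tee(sys.stdout)
sys.stdout = capture
print('hello')                 # reaches both the terminal and the temp file
sys.stdout = capture.stream    # restore the original stream
capture.temp_file.seek(0)
assert capture.temp_file.read() == 'hello\n'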
Example #35
 def write_binary(string):
     stream, total_length, on_disk = _closure
     if on_disk:
         stream.write(string)
     else:
         length = len(string)
         if length + _closure[1] <= threshold:
             stream.write(string)
         else:
             new_stream = TemporaryFile('wb+')
             new_stream.write(stream.getvalue())
             new_stream.write(string)
             _closure[0] = new_stream
             _closure[2] = True
         _closure[1] = total_length + length
Example #36
 def popen(self, args=(), **kwargs):
     if "stdin" in kwargs and kwargs["stdin"] != PIPE:
         raise RedirectionError("stdin is already redirected")
     data = self.data
     if isinstance(data,
                   six.unicode_type) and self._get_encoding() is not None:
         data = data.encode(self._get_encoding())
     f = TemporaryFile()
     while data:
         chunk = data[:self.CHUNK_SIZE]
         f.write(chunk)
         data = data[self.CHUNK_SIZE:]
     f.seek(0)
     # try:
     return self.cmd.popen(args, stdin=f, **kwargs)
Example #37
    def from_text(self):

        tempfile = TemporaryFile(mode="w+")
        tempfile.write(self.text.get("1.0", 'end-1c'))
        tempfile.seek(0)
        try:
            if not self.mode.get():
                self.graph = Graph(parse=tempfile)
            else:
                self.graph = HGraph(parse=tempfile)
        except Exception as e:
            messagebox.showerror("Error", e)
            return False
        tempfile.close()
        return True
Example #38
 def __call__(self):
     context = proxy.removeSecurityProxy(self.context)
     mimetype = getattr(context, 'file_mimetype', None)
     if mimetype is None:
         mimetype = 'application/octet-stream'
     filename = getattr(context, 'file_name', None)
     if filename is None:
         filename = getattr(context, 'file_title', None)
     tempfile = TemporaryFile()
     data = getattr(context, 'file_data', None)
     if type(data) == buffer:
         tempfile.write(data)
         tempfile.seek(0)
         self.request.response.setHeader('Content-type', mimetype)
         self.request.response.setHeader('Content-disposition', 'attachment;filename="%s"' % filename)
         return tempfile
Example #39
def _run_command(ctx):
    """Execute a shell command and capture its output and exit status."""

    if cylc.flags.debug:
        if ctx.cmd_kwargs.get('shell'):
            sys.stdout.write("%s\n" % ctx.cmd)
        else:
            sys.stdout.write(
                "%s\n" % ' '.join([quote(cmd_str) for cmd_str in ctx.cmd]))

    if (SuiteProcPool.STOP_JOB_SUBMISSION.value and
            ctx.cmd_key == SuiteProcPool.JOBS_SUBMIT):
        ctx.err = "job submission skipped (suite stopping)"
        ctx.ret_code = SuiteProcPool.JOB_SKIPPED_FLAG
        ctx.timestamp = get_current_time_string()
        return ctx

    try:
        stdin_file = None
        if ctx.cmd_kwargs.get('stdin_file_paths'):
            stdin_file = TemporaryFile()
            for file_path in ctx.cmd_kwargs['stdin_file_paths']:
                for line in open(file_path):
                    stdin_file.write(line)
            stdin_file.seek(0)
        elif ctx.cmd_kwargs.get('stdin_str'):
            stdin_file = PIPE
        proc = Popen(
            ctx.cmd, stdin=stdin_file, stdout=PIPE, stderr=PIPE,
            env=ctx.cmd_kwargs.get('env'), shell=ctx.cmd_kwargs.get('shell'))
    except IOError as exc:
        if cylc.flags.debug:
            traceback.print_exc()
        ctx.ret_code = 1
        ctx.err = str(exc)
    except OSError as exc:
        if exc.filename is None:
            exc.filename = ctx.cmd[0]
        if cylc.flags.debug:
            traceback.print_exc()
        ctx.ret_code = 1
        ctx.err = str(exc)
    else:
        ctx.out, ctx.err = proc.communicate(ctx.cmd_kwargs.get('stdin_str'))
        ctx.ret_code = proc.wait()

    ctx.timestamp = get_current_time_string()
    return ctx
Example #40
    def do_POST(self):
        try:
            request = loads(
                self.rfile.read(int(self.headers['Content-Length'])))

            if 'stdin' in request:
                stdin = TemporaryFile()
                stdin.write(request['stdin'])
                stdin.seek(0)
            else:
                stdin = None

            stdout = TemporaryFile()
            stderr = TemporaryFile()

            shell = request['shell'] if 'shell' in request else False

            returncode = call(request['args'],
                              stdin=stdin,
                              stdout=stdout,
                              stderr=stderr,
                              shell=shell)

            if stdin is not None:
                stdin.close()

            stdout.seek(0)
            stderr.seek(0)

            response_code = 200
            response_body = {
                'returncode': returncode,
                'stdout': stdout.read(),
                'stderr': stderr.read()
            }

            stdout.close()
            stderr.close()
        except:
            response_code = 500
            response_body = {'error': format_exc()}

        self.send_response(response_code)
        self.send_header('Content-Type', 'application/json')
        self.end_headers()

        self.wfile.write(dumps(response_body))
        self.wfile.write('\n')
Example #41
    def convert_itk_file(headers: Mapping[str, Union[str, None]],
                         filename: Path) -> Tuple[Image, Sequence[ImageFile]]:
        try:
            simple_itk_image = sitk.ReadImage(str(filename.absolute()))
            simple_itk_image: sitk.Image
        except RuntimeError:
            raise ValueError("SimpleITK cannot open file")

        color_space = simple_itk_image.GetNumberOfComponentsPerPixel()
        color_space = {
            1: Image.COLOR_SPACE_GRAY,
            3: Image.COLOR_SPACE_RGB,
            4: Image.COLOR_SPACE_RGBA,
        }.get(color_space, None)
        if color_space is None:
            raise ValueError("Unknown color space for MetaIO image.")

        with TemporaryDirectory() as work_dir:
            work_dir = Path(work_dir)

            sitk.WriteImage(simple_itk_image, str(work_dir / "out.mhd"), True)

            depth = simple_itk_image.GetDepth()
            db_image = Image(
                name=filename.name,
                width=simple_itk_image.GetWidth(),
                height=simple_itk_image.GetHeight(),
                depth=depth if depth else None,
                resolution_levels=None,
                color_space=color_space,
            )
            db_image_files = []
            for _file in work_dir.iterdir():
                temp_file = TemporaryFile()
                with open(_file, "rb") as open_file:
                    buffer = True
                    while buffer:
                        buffer = open_file.read(1024)
                        temp_file.write(buffer)

                db_image_file = ImageFile(
                    image=db_image,
                    image_type=ImageFile.IMAGE_TYPE_MHD,
                    file=File(temp_file, name=_file.name),
                )
                db_image_files.append(db_image_file)

        return db_image, db_image_files
Example #42
def remove_legacy_buckets(event, context):
    """removes AllUsers or AllAuthenticatedUsers permissions from Google Cloud Buckets"""
    alert = False
    bucket_dict = {}

    # open tempfile
    findings = TemporaryFile()
    opener = 'Hello, \n\nBelow are Google Cloud Storage Legacy Permission Issues:\n\n'
    findings.write(bytes(opener, 'UTF-8'))

    logging.info('-----Checking for legacy bucket permissions-----')
    for project_name in get_projects():
        storage_client = storage.Client(project=project_name)
        buckets = storage_client.list_buckets()

        try:
            for bucket in buckets:
                policy = bucket.get_iam_policy()
                for role in policy:
                    members = policy[role]

                    for member in members:
                        if role == 'roles/storage.legacyBucketOwner' or role == 'roles/storage.legacyBucketReader':
                            alert = True
                            logging.warning(
                                '"{0}" permissions were removed from Bucket "{1}" in project "{2}"'
                                .format(member, bucket.name, project_name))
                            data = '"{0}" permissions were removed from Bucket "{1}" in project "{2}"' \
                                   '\n\n'.format(member, bucket.name, project_name)
                            findings.write(bytes(data, 'UTF-8'))

                            bucket_dict[bucket.name] = project_name
                            policy = bucket.get_iam_policy()
                            policy[role].discard(member)
                            bucket.set_iam_policy(policy)

        except Exception as err:
            logging.error(err)

    if alert is False:
        logging.info('No Legacy Bucket permissions found')

    else:
        # write tempfile to email body and delete
        findings.seek(0)
        email_body = findings.read().decode()
        send_email(email_body)
        findings.close()
Example #43
    def __init__(self, file):

        file = open(file, "r")

        rule = file.read()

        file.close()

        graph_re = r"\n(    |\t)*\d+( \d+)*(\n(    |\t)*\d+ \d+ [A-Z]+)*"
        rule_re = r"(\n(    |\t)*rule\d+:\n(    |\t)*I:{}\n(    |\t)*L:{}\n(    |\t)*R:{}\n(    |\t)*morphL:\n(    |\t)*V:(\n(    |\t)*\d+->\d+)+\n(    |\t)*E:(\n(    |\t)*\d+ \d+ [A-Z]+->\d+ \d+ [A-Z]+)*\n(    |\t)*morphR:\n(    |\t)*V:(\n(    |\t)*\d+->\d+)+\n(    |\t)*E:(\n(    |\t)*\d+ \d+ [A-Z]+->\d+ \d+ [A-Z]+)*)+".format(graph_re, graph_re, graph_re)

        regex = re.fullmatch(r"T:(?P<T>{})\nrules:(?P<rules>{})".format(graph_re, rule_re), rule, flags=re.DOTALL)

        if regex is None:
            if len(sys.argv) > 1:
                print("There has been an Error parsing the rule.")
                print("The content of the input file does not match the required syntax.")
                exit()
            else:
                messagebox.showerror("", "There has been an Error parsing the rule.")
                raise

        t = regex.group("T").lstrip("\n")
        t = re.compile("(    |\t)").sub("", t)
        t_file = TemporaryFile(mode="w+")
        t_file.write(t)
        t_file.seek(0)

        try:
            t = cores.Graph(parse=t_file)
        except Exception as e:
            if len(sys.argv) > 1:
                print("There has been an Error parsing graph T.")
                print(e)
                exit()
            else:
                messagebox.showerror("", "There has been an Error parsing graph T.\n"+str(e))
                raise

        t.solve()

        self.core_t = t

        self.rules = {}

        for n in re.split(r"\n(?:    |\t)rule", regex.group("rules"))[1:]:
            rule = re.fullmatch(r"(\d+):(\n.*)", n, flags=re.DOTALL)
            self.rules[rule[1]] = Rule(rule[2], rule[1])
Example #44
    def ImportChartAccount(self):
        account_type_obj = self.env['account.account.type']
        account_obj = self.env['account.account']
        
         
        if self.select_file and self.data_file:
            if self.select_file == 'csv':
                fileobj = TemporaryFile('w+')
                fileobj.write(base64.decodestring(self.data_file))
                fileobj.seek(0)
                reader = csv.reader(fileobj, delimiter=',', quotechar="'")
                next(reader)
                file_data = reader
            elif self.select_file == 'xls':
                file_datas = base64.decodestring(self.data_file)
                workbook = xlrd.open_workbook(file_contents=file_datas)
                sheet = workbook.sheet_by_index(0)
                result = []
                data = [[sheet.cell_value(r, c) for c in range(sheet.ncols)] for r in range(sheet.nrows)]
                data.pop(0)
                file_data = data
        else:
            raise exceptions.Warning(_('Please select file and type of file'))

        for row in file_data:
            accounts = account_obj.search([('name','=',row[0])], limit=1)
            accounts_type = account_type_obj.search([('name','=',row[1])], limit=1)
            account_id = 0
            account_type_id = 0
            if not accounts: 
                if not accounts_type:
                    acc_type = account_type_obj.create({'name':row[1]})
                    account_type_id = acc_type.id
                else: 
                    account_type_id = accounts_type.id
                    
                account_ids = account_obj.create({
                                                    'name':row[0], 
                                                    'user_type_id': account_type_id,
                                                    'code':row[2],
                                                    })
                
            else:
                
                accounts.write({
                                'code':row[2],
                                #'user_type_id': account_type_id,
                                       })
Example #45
    def read_featureframe(self, spark):
        """
        Reads a training dataset in hdf5 format from HopsFS

        Args:
            :spark: the spark session

        Returns:
            dataframe with the data of the training dataset

        Raises:
              :TrainingDatasetNotFound: if the requested training dataset could not be found
              :CouldNotConvertDataframe: if the hdf5 dataset could not be converted to a spark dataframe
              :HDF5DatasetFormatNotSupportedForExternalTrainingDatasets: if the user tries to read an
                                                                          external training dataset in the .hdf5 format.
        """
        if self.training_dataset.training_dataset_type == constants.REST_CONFIG.JSON_TRAINING_DATASET_EXTERNAL_TYPE:
            raise HDF5DatasetFormatNotSupportedForExternalTrainingDatasets("The .hdf5 dataset format is not "
                                                                            "supported for external training datasets.")
        if not hdfs.exists(self.path + constants.FEATURE_STORE.TRAINING_DATASET_HDF5_SUFFIX):
            raise TrainingDatasetNotFound("Could not find a training dataset in file {}".format(
                self.path + constants.FEATURE_STORE.TRAINING_DATASET_HDF5_SUFFIX))
        tf = TemporaryFile()
        data = hdfs.load(self.path + constants.FEATURE_STORE.TRAINING_DATASET_HDF5_SUFFIX)
        tf.write(data)
        tf.seek(0)
        hdf5_file = h5py.File(tf)
        np_array = hdf5_file[self.training_dataset.name][()]
        if self.dataframe_type == constants.FEATURE_STORE.DATAFRAME_TYPE_NUMPY:
            return np_array
        if self.dataframe_type == constants.FEATURE_STORE.DATAFRAME_TYPE_PYTHON:
            return np_array.tolist()
        if self.dataframe_type == constants.FEATURE_STORE.DATAFRAME_TYPE_SPARK \
                or self.dataframe_type == constants.FEATURE_STORE.DATAFRAME_TYPE_PANDAS:
            if np_array.ndim != 2:
                raise CouldNotConvertDataframe(
                    "Cannot convert numpy array that do not have two dimensions to a dataframe. "
                    "The number of dimensions are: {}".format(
                        np_array.ndim))
            num_cols = np_array.shape[1]
            dataframe_dict = {}
            for n_col in list(range(num_cols)):
                col_name = "col_" + str(n_col)
                dataframe_dict[col_name] = np_array[:, n_col]
            pandas_df = pd.DataFrame(dataframe_dict)
            sc = spark.sparkContext
            sql_context = SQLContext(sc)
            return fs_utils._return_dataframe_type(sql_context.createDataFrame(pandas_df), self.dataframe_type)
Example #46
class Temporary:
    """Temporary file"""
    def __init__(self, filename):
        log.debug("Start uploading file: %s", filename)
        self.uploaded = 0
        self.__hash = sha256()
        # pylint: disable=consider-using-with
        self.__file = TemporaryFile('wb+')

    def write(self, data):
        """Only count uploaded data size."""
        size = self.__file.write(data)
        self.__hash.update(data)
        self.uploaded += size
        return size

    def seek(self, size):
        """Proxy to internal file object seek method."""
        return self.__file.seek(size)

    def read(self, size):
        """Proxy to internal file object read method."""
        return self.__file.read(size)

    def close(self):
        """Proxy to internal file object close method."""
        return self.__file.close()

    def hexdigest(self):
        """Return sha256 hexdigest of file."""
        return self.__hash.hexdigest()
Example #47
    def _check(self):
        if self._is_file:
            return

        pos = self._stream.tell()
        if pos <= self._threshold:
            return

        stream = TemporaryFile('wb+')
        stream.write(self._stream.getvalue())
        stream.flush()
        stream.seek(pos)

        self._stream.close()
        self._stream = stream
        self._is_file = True
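The overflow-to-disk logic above mirrors what the standard library ships as tempfile.SpooledTemporaryFile, which rolls an in-memory buffer over to a real temporary file once max_size is exceeded; a minimal illustration (_rolled is a CPython internal, peeked at only to show the rollover):

from tempfile import SpooledTemporaryFile

with SpooledTemporaryFile(max_size=1024, mode='w+b') as spool:
    spool.write(b'x' * 2048)   # crosses the threshold, data moves to disk
    print(spool._rolled)       # True once backed by a real file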
Example #48
    def _append_content_to_body(_content_disposition: str, _content_type: str,
                                _content_data):
        _content_disposition = parse_header(_content_disposition[20:])
        if "filename" in _content_disposition[1]:
            tmp_file = TemporaryFile()
            tmp_file.write(_content_data)
            tmp_file.seek(0)

            field = FormFileField(_content_disposition[1]["name"], tmp_file,
                                  _content_type[14:].lower(),
                                  _content_disposition[1]["filename"])
        else:
            field = FormField(_content_disposition[1]["name"],
                              _content_data.decode(encoding))

        body.append(field)
Example #49
    def testSetStreamAsInput(self, size, getter, code):
        expected = '*' * 1024 * size + '!'
        expectedLength = 1024 * size + 1

        stream = TemporaryFile()
        stream.write(expected)
        stream.seek(0)

        proc = Process("python -c '{0}'".format(code))
        proc.setInput(stream)
        proc.run()

        stream.close()

        self.assertEqual(len(getattr(proc, getter)()), expectedLength)
        self.assertEqual(proc.getExitCode(), 0)
Example #50
 def process_bind_param(self, file, dialect):
     if file:
         filename = get_unique_filename(file.filename, self.upload_to)
         # https://github.com/boto/boto3/issues/929
         # https://github.com/matthewwithanm/django-imagekit/issues/391
         temp_file = TemporaryFile()
         temp_file.write(file.read())
         temp_file.seek(0)
         self.storage.write(temp_file, filename)
         print(self.variations)
         if self.variations:
             [
                 i for i in process_thumbnail(file, filename,
                                              self.variations, self.storage)
             ]
         return filename
Example #51
    def __iter__(self):
        if self._filelist.__len__() == 0:
            raise AssertionError("TarStream can not find target file.")

        # 3. Append two null blocks (end-of-archive marker)
        for i in range(0, 2):
            nullBlockFile = TemporaryFile()
            nullBlockFile.write(bytes(TarStream.BLOCK_SIZE))
            nullBlockFile.seek(0)
            self._filelist.append(nullBlockFile)

        self._iter_file = self._filelist.__iter__()
        self._currentFile = self._iter_file.__next__()
        self._EOS = False

        return self
Example #52
    def barcode_upload(self):
        print "fffffffffffffffffffffffff"
        # print self
        dataa = base64.b64decode(self.data)
        xls_filelike = io.BytesIO(dataa)
        workbook = openpyxl.load_workbook(xls_filelike)
        print "ddddddddd", xls_filelike
        split_rec = dataa.split('\n')
        print split_rec
        for line in split_rec[1:]:
            field_name = line.split(',')[0]
            print field_name

        fileobj = TemporaryFile('w+')
        fileobj.write(base64.decodestring(self.data))
        return
Example #53
 def _prepare_file(self, filename='test.bin', type_='text/plain'):
     fp = TemporaryFile('w+b')
     fp.write('\x00' + 'x' * (1 << 19))
     fp.seek(0)
     env = {'REQUEST_METHOD': 'POST'}
     headers = {
         'Content-Type': type_,
         'content-length': 1 << 19,
         'content-disposition': 'attachment; filename=%s' % filename
     }
     fs = FieldStorage(fp=fp, environ=env, headers=headers)
     fu = FileUpload(fs)
     fu.read = fp.read
     fu.seek = fp.seek
     fu.tell = fp.tell
     return fu
Example #54
 def aplay(self, file_=BC.OUTPUT_MP3_NAME, is_buffer=False):
     if is_buffer:
         temp = TemporaryFile()
         temp.write(file_)
         temp.seek(0)
         p = Popen(['play', '-'], stdin=temp)
         temp.close()
     else:
         if platform.system() == 'Darwin':
             player = 'play'
         elif platform.system() == 'Linux':
             player = 'mpg321'
         cmd = '%s %s 1>/dev/null 2>/dev/null &' % (player, file_)
         p = Popen(cmd, shell=True, stdout=PIPE)
         # self.active_process.put({'play': p})
     p.wait()
Example #55
 def _get_stdin(self, stdin):
     if not is_string(stdin):
         return stdin
     if stdin.upper() == 'NONE':
         return None
     if stdin == 'PIPE':
         return subprocess.PIPE
     path = os.path.normpath(os.path.join(self.cwd, stdin))
     if os.path.isfile(path):
         return open(path)
     stdin_file = TemporaryFile()
     if is_string(stdin):
         stdin = console_encode(stdin, self.output_encoding, force=True)
     stdin_file.write(stdin)
     stdin_file.seek(0)
     return stdin_file
Example #56
class PassphraseFile(object):
    def __init__(self, passphrase):
        self.passphrase = passphrase.encode(
            'utf-8') if type(passphrase) != bytes else passphrase
        self.file = TemporaryFile()

    def __enter__(self):
        self.file.write(self.passphrase)
        self.file.flush()
        return self.name()

    def __exit__(self, type, value, traceback):
        self.file.close()

    def name(self):
        return '/proc/%d/fd/%d' % (getpid(), self.file.fileno())
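A usage sketch for PassphraseFile: the context manager yields a /proc path that a child process can read, without the secret ever landing on a named file on disk (Linux-specific; the cat call is purely illustrative):

import subprocess

with PassphraseFile('correct horse battery staple') as path:
    subprocess.run(['cat', path], check=True)   # child reads the secret via /proc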
Example #57
    def test_new_media_object(self):
        file_ = TemporaryFile()
        file_.write('My test content')
        file_.seek(0)
        media = {'name': 'zinnia_test_file.txt',
                 'type': 'text/plain',
                 'bits': Binary(file_.read())}
        file_.close()

        self.assertRaises(Fault, self.server.metaWeblog.newMediaObject,
                          1, 'contributor', 'password', media)
        new_media = self.server.metaWeblog.newMediaObject(
            1, 'webmaster', 'password', media)
        self.assertTrue('/zinnia_test_file' in new_media['url'])
        default_storage.delete('/'.join([
            UPLOAD_TO, new_media['url'].split('/')[-1]]))
Example #58
    def _import_gnucash(self, cr, uid, data, context):
        form = data['form']
        fileobj = TemporaryFile('w+')
        fileobj.write(base64.decodestring(form['data']))

        # now we determine the file format
        fileobj.seek(0)
        try:
            gch = GCHandler(cr, uid)
            gch.def_book = form['account']
            handler = gnccontent.GCContent(gch)
            sax.parse(fileobj, handler)
        except sax._exceptions.SAXParseException, exc:
            raise Exception(
                "Parse exception: %s at %d,%d\n" %
                (exc.getMessage(), exc.getLineNumber(), exc.getColumnNumber()))
Example #59
 def _append_content_to_body(raw_content_disposition: str,
                             _content_type: str, _content_data) -> None:
     parsed_content_disposition: Tuple[str, Dict[str, str]] = parse_header(
         raw_content_disposition[20:])
     if "filename" in parsed_content_disposition[1]:
         tmp_file = TemporaryFile()
         tmp_file.write(_content_data)
         tmp_file.seek(0)
         body[parsed_content_disposition[1]["name"]] = UploadedFile(
             tmp_file,
             _content_type[14:].lower(),
             parsed_content_disposition[1]["filename"],
         )
     else:
         body[parsed_content_disposition[1]["name"]] = _content_data.decode(
             encoding)
Example #60
def main(argv):
    args = docopt(__doc__, argv=argv)

    if args['--verbose'] and not args['--debug']:
        stdout.write('getting item: {0}\n'.format(args['<identifier>']))

    upload_kwargs = dict(metadata=get_args_dict(args['--metadata']),
                         headers=get_args_dict(args['--header']),
                         debug=args['--debug'],
                         queue_derive=not args['--no-derive'],
                         ignore_bucket=args['--ignore-bucket'],
                         verbose=args['--verbose'])

    # Upload stdin.
    if args['<file>'] == ['-'] and not args['-']:
        stderr.write('--remote-name is required when uploading from stdin.\n')
        call(['ia', 'upload', '--help'])
        exit(1)
    if args['-']:
        local_file = TemporaryFile()
        local_file.write(stdin.read())
        local_file.seek(0)
        upload_kwargs['remote_name'] = args['--remote-name']
    # Upload files.
    else:
        local_file = args['<file>']

    response = upload(args['<identifier>'], local_file, **upload_kwargs)

    if args['--debug']:
        for i, r in enumerate(response):
            if i != 0:
                stdout.write('---\n')
            headers = '\n'.join(
                [' {0}: {1}'.format(k, v) for (k, v) in r.headers.items()])
            stdout.write('Endpoint:\n {0}\n\n'.format(r.url))
            stdout.write('HTTP Headers:\n{0}\n'.format(headers))
    else:
        for resp in response:
            if resp.status_code == 200:
                continue
            error = parseString(resp.content)
            code = get_xml_text(error.getElementsByTagName('Code'))
            msg = get_xml_text(error.getElementsByTagName('Message'))
            stderr.write('error "{0}" ({1}): {2}\n'.format(
                code, resp.status_code, msg))
            exit(1)