def test_dump(self):
        """Round-trip CitiBike data through dump()/load() using both an
        in-memory buffer and a named temporary file."""
        import os

        cbd1 = CitiBikeData(source_url=self.test_data_url)
        self.assert_data_loaded(cbd1)

        # Dump to an in-memory buffer; dumping must not unload the source.
        js = StringIO()
        cbd1.dump(js)
        self.assert_data_loaded(cbd1)
        self.assertGreater(len(js.getvalue()), 0)
        js.seek(0)  # rewind so load() reads from the start (reset() is Py2-only)

        cbd2 = CitiBikeData(load_on_init=False)
        self.assert_data_not_loaded(cbd2)
        cbd2.load(js)
        self.assert_data_loaded(cbd2)
        self.assertDictEqual(cbd1.json, cbd2.json)

        # Dump to a real file and reload it through a file: URL.
        ntf = NamedTemporaryFile(delete=False)
        cbd1.dump(ntf)
        self.assert_data_loaded(cbd1)
        ntf.close()
        # BUG FIX: previously re-asserted on the in-memory buffer here;
        # check that the temp file actually received data instead.
        self.assertGreater(os.path.getsize(ntf.name), 0)

        cbd3 = CitiBikeData(source_url="file:" + ntf.name)
        self.assert_data_loaded(cbd3)
        self.assertDictEqual(cbd1.json, cbd3.json)
        # BUG FIX: NamedTemporaryFile exposes no public .unlink; use os.unlink.
        os.unlink(ntf.name)  # delete file
Example #2
0
def gen_logic(M, gen_2X=True, taps_per_phase=taps_per_phase, sample_width=18):
    """Generate channelizer HDL artifacts for an M-channel design.

    Builds a Channelizer and its tap set, reports filter MSB positions,
    writes an XML buffer to ../verilog/file.xml, and generates a fixed-point
    half-band filter .coe file.

    NOTE(review): the default ``taps_per_phase=taps_per_phase`` captures a
    module-level global at definition time, and ``sample_width`` is never
    used -- confirm both are intentional.
    NOTE(review): ``c_str`` is never written to before being copied out, so
    file.xml comes out empty -- looks like a generation step is missing.
    """

    from cStringIO import StringIO
    import shutil

    plt.close('all')

    # pfb_msb, qvec and qvec_coef are module-level globals (not visible here).
    chan = Channelizer(M=M,
                       gen_2X=gen_2X,
                       taps_per_phase=taps_per_phase,
                       desired_msb=pfb_msb,
                       qvec=qvec,
                       qvec_coef=qvec_coef)
    gen_taps(M, gen_2X, taps_per_phase)

    print("Filter MSB = {}".format(chan.filter.msb))

    c_str = StringIO()
    c_str.reset()
    # store c_str to file.
    with open('../verilog/file.xml', 'w') as fh:
        c_str.seek(0)
        shutil.copyfileobj(c_str, fh)

    # generate half-band filter
    fil_obj = LPFilter(num_taps=40, half_band=True)
    fil_obj.gen_fixed_filter(coe_file=ip_path + '/hb_fil/hb_fil.coe')

    print("HB Filter MSB = {}".format(fil_obj.msb))

    fil_obj.plot_psd()
Example #3
0
def writetodb(route_table, x_true, x_est, flow):
    """(Re)create the visualization_routes table and bulk-load it.

    route_table yields (origin, destination, od_route_index) triples; the
    three value sequences are flattened and zipped alongside them, then
    streamed into PostgreSQL with COPY via an in-memory buffer.
    """
    cursor = connection.cursor()
    x_true = flatten(x_true)
    x_est = flatten(x_est)
    flow = flatten(flow)

    sql_query = '''
    DROP TABLE IF EXISTS visualization_routes;
    CREATE TABLE visualization_routes
    (
    o int,
    d int,
    od_route_index int,
    x_true float,
    x_est float,
    flow float
    )
    '''

    cursor.execute(sql_query)

    # Build tab-separated rows in memory for a single COPY round-trip.
    stringIO = StringIO()
    for (o,d, index), x_t, x_e, f in zip(route_table, x_true, x_est, flow):
        stringIO.write('\t'.join(map(str, [o, d, index, x_t, x_e, f])))
        stringIO.write('\n')
    stringIO.seek(0)  # rewind before COPY (reset() is Py2-only)
    cursor.copy_from(stringIO, 'visualization_routes')
Example #4
0
def generate_ldif_from_list(action, array):
    """generate ldif string by array
    Parameters:
        action: unused here; kept for interface compatibility.
        array: a list contains several dicts which contains user or group info.
    Returns the LDIF text, or None when ``array`` is not a list.
    """
    if not isinstance(array, list):
        return None

    output = StringIO()
    w = LDIFWriter(output)
    for a in array:
        if 'dn' in a:  # was a.has_key('dn') -- Py2-only API
            dn = a.pop('dn')
            # LDIFWriter expects every attribute value to be a list.
            for k, v in a.items():  # was a.iteritems() -- Py2-only API
                if not isinstance(v, list):
                    a[k] = [v]

            w.unparse(dn, a)
        else:
            logger.error('the element of ldif does not have "dn": %s', a)

    # was reset()+read(); getvalue() is position-independent and portable
    r = output.getvalue()
    output.close()

    return r
Example #5
0
class LogWrapper(object):
    """Test helper that captures crawlmi log output in an in-memory buffer."""

    def setUp(self, level=log.INFO, encoding='utf-8'):
        # Point a file log observer at an in-memory buffer.
        self.f = StringIO()
        self.flo = log.CrawlmiFileLogObserver(self.f, level, encoding)
        self.flo.start()

    def tearDown(self):
        self.flo.stop()

    def clear(self):
        """Discard everything captured so far."""
        self.f.seek(0)  # was reset() -- Py2-only alias for seek(0)
        self.f.truncate()

    def get_logged(self, clear=True):
        """Return everything logged so far, optionally clearing the buffer."""
        logged = self.f.getvalue()
        if clear:
            self.clear()
        return logged

    def get_lines(self, strip=True, clear=True):
        """Return captured lines; ``strip`` drops the 25-char timestamp prefix."""
        lines = self.get_logged(clear=clear).splitlines()
        if strip:
            # list comprehension: map() returns a lazy iterator on Py3,
            # which would break get_first_line's indexing
            lines = [line.strip()[25:] for line in lines]
        return lines

    def get_first_line(self, strip=True, clear=True):
        lines = self.get_lines(strip=strip, clear=clear)
        return lines[0] if lines else ''
Example #6
0
def test_devectorize_axes():
    """devectorize_axes() must rasterize without materially changing the
    rendered image: same shape, fewer than 10% differing samples."""
    np.random.seed(0)

    x, y = np.random.random((2, 1000))

    # save vectorized version
    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax.scatter(x, y)
    sio = StringIO()
    fig.savefig(sio)
    sio.seek(0)  # rewind for imread (reset() is Py2-only)
    im1 = image.imread(sio)
    plt.close()

    # save devectorized version
    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax.scatter(x, y)
    devectorize_axes(ax, dpi=200)
    sio = StringIO()
    fig.savefig(sio)
    sio.seek(0)  # rewind for imread (reset() is Py2-only)
    im2 = image.imread(sio)
    plt.close()

    assert_(im1.shape == im2.shape)
    assert_((im1 != im2).sum() < 0.1 * im1.size)
    def test_error_when_getinfo_false(self):
        """Command.process must exit with a descriptive error when the
        command is statically configured (supports_getinfo == False)."""

        # Command.process should complain if supports_getinfo == False
        # We support dynamic configuration, not static

        # The exception line number may change, so we're using a regex match instead
        expected_pattern = re.compile(
            '\r\n'
            'ERROR' \
            '\r\n' \
            '"NotImplementedError at ' \
            '\"\"' + \
            os.path.abspath(os.path.join(os.path.dirname(__file__),
                                         "../../splunklib/searchcommands/search_command.py")) + \
            '\"\"' \
            ', line \d\d\d : ' \
            'Command search appears to be statically configured and static configuration is unsupported by splunklib.searchcommands. Please ensure that default/commands.conf contains this stanza:\n\[search\]\nfilename = foo.py\nsupports_getinfo = true\nsupports_rawargs = true\noutputheader = true"' \
            '\r\n'
        )

        command = SearchCommand()
        result = StringIO()

        # process() must terminate the process; its output lands in `result`.
        self.assertRaises(
            SystemExit, command.process, ['foo.py'], output_file=result)

        # NOTE(review): reset() is the Python-2 StringIO API (== seek(0)).
        result.reset()
        observed = result.read()
        self.assertTrue(expected_pattern.match(observed))
Example #8
0
	def jardestroyer(self):
		"""
		disconnection helper function
		
		For all connections established in jarbuilder send the
		disconnect command to the server
		
		:returns: server response (empty string if successful)
		"""
		c = pycurl.Curl()
		buf = SIO()
		uri = uri = '{base}/JSESSION'.format(base=self.page)
		c.setopt(pycurl.URL, uri)
		body = ''
		for cookie in self.cj:
			print "closing cxn: %s"%cookie
			c.setopt(pycurl.COOKIE, "JSESSIONID=%s"%cookie)
			c.setopt(pycurl.SSLVERSION, pycurl.SSLVERSION_SSLv3)
			c.setopt(pycurl.CUSTOMREQUEST, 'DELETE')
			c.setopt(pycurl.WRITEDATA, buf)
			c.perform()
			buf.truncate()
			body+=buf.getvalue()
			buf.reset()
		for c in self.h:
			if c.buf is not None:
				c.buf.close()
				c.buf = None
			c.close()
		self.m.close()
		
		return body
Example #9
0
    def format(self, p):
        """Render the report through the XML template and write it to ``p``,
        along with per-result assets (logs and pcaps) under ``p + '_files'``."""
        # prepare an input buffer, containing the content of the template
        # (context manager guarantees the template file is closed)
        with open(path.join(XMLFormatter.TemplateDir, 'template.xml')) as f:
            i = StringIO(f.read())
        # prepare an output buffer to write the generated XML to
        o = StringIO()

        sgml = SGMLTemplate({ 'report': self.report, 'asset_path': path.basename(p + '_files') }, ouf=o)
        # use the SGMLTemplate to generate the formatted report
        sgml.xcopy(i)

        o.seek(0)  # rewind before reading back (reset() is Py2-only)
        # write the contents of the output buffer to file, using ui.write_file()
        # to allow the output path to be dynamically changed
        self.write_file(p, o.read())
        # create a directory to write the various assets and linked files to
        self.create_directory(p + '_files')
        # copy any static assets into the output directory
        for asset in XMLFormatter.Assets:
            self.copy_file(path.join(XMLFormatter.TemplateDir, asset), path.join(p + '_files', asset))
        # write the logs and pcap data for each result to file
        for suite in self.report.results():
            for result in suite.results():
                self.create_directory(path.join(p + '_files', str(id(result))))

                # write the test case log file
                self.write_file(path.join(p + '_files', str(id(result)), "veripy.log"), result.outcome.log)
                # write the network pcap data into a log file
                for (link, pcap) in result.outcome.network_dumps:
                    if len(pcap) > 0:
                        wrpcap(path.join(p + '_files', str(id(result)), "link-%s.pcap" % link), pcap)
Example #10
0
File: aemo.py Project: Bengt/windml
    def download(self, location, urlstr):
        """Download ``urlstr`` to the file ``location``, drawing a crude
        text progress bar on stdout while buffering in memory.

        NOTE(review): Python 2 code (print statements, urllib2, file()).
        """
        fileh = file(location, "w")

        # Number of cells in the progress bar / download chunks.
        num_units = 40

        fhandle = urllib2.urlopen(urlstr)

        total_size = int(fhandle.info().getheader('Content-Length').strip())
        # Py2 integer division: each chunk is ~1/40th of the payload.
        chunk_size = total_size / num_units

        print "Downloading %s" % urlstr
        nchunks = 0
        buf = StringIO()
        total_size_str = self.bytes_to_string(total_size)

        while True:
            next_chunk = fhandle.read(chunk_size)
            nchunks += 1

            if next_chunk:
                buf.write(next_chunk)
                # NOTE(review): bar body is num_units-1 wide and truncating
                # division can yield a 41st chunk -- cosmetic drift only.
                s = ('[' + nchunks * '='
                     + (num_units - 1 - nchunks) * ' '
                     + ']  %s / %s   \r' % (self.bytes_to_string(buf.tell()),
                                            total_size_str))
            else:
                sys.stdout.write('\n')
                break

            sys.stdout.write(s)
            sys.stdout.flush()

        # Py2 StringIO rewind (== seek(0)); getvalue() below ignores position.
        buf.reset()
        fileh.write(buf.getvalue())
        fileh.close()
Example #11
0
def sanitize_metadata(stream, content_type, strip_metadata):
    """Strip embedded file metadata from ``stream`` when requested.

    Returns a rewound StringIO holding the scrubbed content when metadata
    was found and removed; otherwise returns None (plain text and
    already-clean files are left untouched).
    """
    text_plain = content_type == 'text/plain'

    s = None

    if strip_metadata and not text_plain:
        # Spool the stream to a named temp file so the metadata handler
        # can operate on a real filesystem path.
        t = tempfile.NamedTemporaryFile(delete = False)
        copyfileobj(stream, t)
        t.flush()
        file_meta = metadata_handler(t.name)

        if not file_meta.is_clean():
            file_meta.remove_all()
            # Read the scrubbed file back into memory.
            with open(t.name) as f:
                s = StringIO()
                s.write(f.read())
            s.seek(0)  # rewind for the caller (reset() is Py2-only)
        # Common cleanup -- previously duplicated in both branches.
        secure_unlink(t.name, do_verify = False)
        t.close()

    return s
Example #12
0
def test_append_single_chunk():
    """Appending content of various sizes must grow nchunks as expected."""
    orig, new, dcmp = StringIO(), StringIO(), StringIO()
    create_array_fp(1, new)
    new_size = new.tell()
    new.reset()  # Py2 StringIO rewind (== seek(0))
    # pack 'new' into 'orig' as a single chunk
    chunking = calculate_nchunks(new_size, chunk_size=new_size)
    source = PlainFPSource(new)
    sink = CompressedFPSink(orig)
    pack(source, sink, *chunking)
    orig.reset()
    new.reset()

    # append a single chunk
    reset_append_fp(orig, new, new_size)
    bloscpack_header = reset_read_beginning(orig)[0]
    nt.assert_equal(bloscpack_header['nchunks'], 2)

    # append a large content, that amounts to two chunks
    new_content = new.read()
    new.reset()
    reset_append_fp(orig, StringIO(new_content * 2), new_size * 2)
    bloscpack_header = reset_read_beginning(orig)[0]
    nt.assert_equal(bloscpack_header['nchunks'], 4)

    # append half a chunk
    # NOTE(review): new_content[:len(new_content)] is the WHOLE content,
    # not half -- it disagrees with the new_size//2 size argument and the
    # comment above; confirm whether [:new_size//2] was intended.
    reset_append_fp(orig, StringIO(new_content[:len(new_content)]), new_size//2)
    bloscpack_header = reset_read_beginning(orig)[0]
    nt.assert_equal(bloscpack_header['nchunks'], 5)

    # append a few bytes
    reset_append_fp(orig, StringIO(new_content[:1023]), 1024)
    # make sure it is squashed into the lat chunk
    bloscpack_header = reset_read_beginning(orig)[0]
    nt.assert_equal(bloscpack_header['nchunks'], 5)
    def test_exit_error_on_parser_error(self):
        """Command.process must print an ERROR message and exit non-zero
        when it meets an unrecognized option."""
        # Command.process should produce an error message and exit on parser
        # errors, if invoked to execute. Same error message as expected_pattern
        # defined above

        expected_pattern = re.compile(
            '\r\n' \
            'ERROR\r\n' \
            '"ValueError at ""' + \
            os.path.abspath(os.path.join(os.path.dirname(__file__),
                                         "../../splunklib/searchcommands/search_command_internals.py")) + \
            '"", line \d\d\d : ' \
            'Unrecognized option: undefined_option = value"\r\n'
        )

        command = SearchCommand()
        result = StringIO()

        try:
            command.process(
                args=['foo.py', '__EXECUTE__', 'undefined_option=value'],
                input_file=StringIO('\r\n'), output_file=result)
        except SystemExit as e:
            # NOTE(review): reset() is the Python-2 StringIO API (== seek(0)).
            result.reset()
            observed = result.read()
            self.assertNotEqual(e.code, 0)
            self.assertTrue(expected_pattern.match(observed))
        except BaseException as e:
            self.fail("Expected SystemExit, but caught %s" % type(e))
        else:
            self.fail("Expected SystemExit, but no exception was raised")
def encode_multipart_formdata(fields={}, files=[]):
    """
    fields is a sequence of (name, value) elements for regular form fields.
    files is a sequence of (name, filename, value) elements for data to be
    uploaded as files Return (content_type, body) ready for pycurl instance.

    Note: the mutable default arguments are never mutated here, so they are
    safe, but callers should still prefer passing their own containers.
    """

    BOUNDARY = get_alpha_nonce(32)
    CRLF = "\r\n"
    buf = StringIO()

    # if it's dict like then use the items method to get the fields
    if hasattr(fields, "items"):
        fields = fields.items()

    for (key, value) in fields:
        buf.write("--%s%s" % (BOUNDARY, CRLF))
        buf.write('Content-Disposition: form-data; name="%s"%s' % (key, CRLF))
        buf.write(CRLF)
        buf.write("%s%s" % (value, CRLF))

    for (name, filename, value) in files:
        buf.write("--%s%s" % (BOUNDARY, CRLF))
        buf.write('Content-Disposition: form-data; name="%s"; filename="%s"%s' % (name, filename, CRLF))
        buf.write("Content-Type: %s%s" % (get_content_type(filename), CRLF))
        buf.write(CRLF)
        buf.write("%s%s" % (value, CRLF))

    buf.write("--%s--%s" % (BOUNDARY, CRLF))
    buf.write(CRLF)
    # (dropped the former buf.reset(): getvalue() ignores the file position,
    # so the rewind was a no-op, and reset() is Py2-only anyway)

    content_type = "multipart/form-data; boundary=%s" % BOUNDARY

    return content_type, buf.getvalue()
Example #15
0
def format_association_end(el):
    """
    Format association end.

    Returns a (name, multiplicity) tuple of rendered strings; either may
    be empty when the corresponding fields are unset.
    """
    name = ''
    n = StringIO()
    if el.name:
        n.write(vis_map[el.visibility])
        n.write(' ')
        if el.isDerived:
            n.write('/')
        # (removed a redundant inner ``if el.name`` re-check; the outer
        # branch already guarantees a non-empty name)
        n.write(el.name)
        # was reset()+read(); getvalue() is position-independent and portable
        name = n.getvalue()

    m = StringIO()
    if el.upperValue:
        if el.lowerValue:
            m.write('%s..%s' % (el.lowerValue, el.upperValue))
        else:
            m.write('%s' % el.upperValue)

    slots = []
    for slot in el.appliedStereotype[:].slot:
        if slot:
            slots.append('%s=%s' % (slot.definingFeature.name, slot.value))
    if slots:
        m.write(' { %s }' % ',\n'.join(slots))
    mult = m.getvalue()

    return name, mult
Example #16
0
  def make_thumb(self,width=0):
    "creates and stores a jpeg thumbnail of the image"
    # Load the source image from disk.
    im=open(self.file_loc())
    x=pim.open(im)
    # Target edge: explicit width, or the 'thumb_size' preference.
    size=width or safeint(self.get_pref('thumb_size'))
    try:
      # Py2 integer division preserves aspect ratio (truncated height).
      x.thumbnail((size,size * x.size[1] / x.size[0]),pim.ANTIALIAS)
    except Exception as e:
      print "ERROR MAKING THUMBNAIL for %s:%s" % (self.uid,e) 
    f = StringIO()
    # Thumbnails are always stored as JPEG (mode converted to RGB first).
    x.convert('RGB').save(f,'JPEG')
    f.reset()  # Py2 StringIO rewind (== seek(0)) before reading back
    self.save_file(f.read(),self.thumb_folder(),self.thumb_name())
    # update thumb width and height in stage 
    data=self.get_stage_data()
    data['thumb_width'],data['thumb_height']=x.size
    self.set_stage(data)
    self.flush()
Example #17
0
 def data_from_transient_file(self, filename):
     """Read ``filename`` from the member zip archive and parse its CSV
     payload, skipping 13 header lines and 2 trailer lines.

     Returns the transposed numpy array (one row per file column).
     """
     full_file = self.zfile.read(filename)
     # Lines 0-12 are a header and the final two lines are a footer.
     data_string = '\n'.join(full_file.split('\r\n')[13:-2])
     data_string_file = StringIO()
     data_string_file.write(data_string)
     data_string_file.seek(0)  # rewind for loadtxt (reset() is Py2-only)
     return np.loadtxt(data_string_file, delimiter=',').T
Example #18
0
    def __str__(self):
        """Serialize this termbase to TBX (martif) XML text."""
        root = ET.Element('martif',{'type':'TBX',
                                    'xml:lang': self.sourceLanguage})
        martifHeader = ET.SubElement(root,'martifHeader')
        fileDesc = ET.SubElement(martifHeader,'fileDesc')
        sourceDesc = ET.SubElement(fileDesc,'sourceDesc')
        sourceDescP = ET.SubElement(sourceDesc,'p')
        sourceDescP.text = self.sourceDesc

        encoding = ET.SubElement(martifHeader,'encodingDesc')
        encodingP = ET.SubElement(encoding,'p',{'type': 'XCSURI'})
        text = ET.SubElement(root,"text")
        body = ET.SubElement(text,"body")

        # Number the term entries sequentially from 1.
        eid = 1
        for unit in self.units:
            unit.element.set("id","term-entry-%d" % eid)
            body.append(unit.element)
            eid+=1
        xml = ET.ElementTree(root)
        f = StringIO()
        #xml.write(f,encoding='utf-8')
        xml.write(f)
        f.seek(0)  # rewind before reading back (reset() is Py2-only)
        return f.read()
Example #19
0
def get_record(fasta_file, n):
    """
    Process the n-th record from the fasta file.
    @param n: Number record to process from fasta. Starts with 1.
    @deprecated: Not based on N records, but N fasta files
    @raise Exception: always -- this code path has been disabled.
    """
    # Deliberately disabled; everything below is unreachable legacy code
    # kept only for reference.
    raise Exception("NO MORE")
    i = 0
    i = int(2*n)
    cmd = "head -%i %s" % (i,fasta_file)
    out,err = subprocess.Popen(cmd,shell=True,stdout=subprocess.PIPE).communicate()
    io = StringIO()
    #io.write(out)
    with open(fasta_file) as handle:
        for line in handle:
            i+=1
            if n*2==i or n*2-1==i:
                io.write(line)
            if i>n*2:
                break

    #cmd = "head -%i %s | tail -2" % (i,fasta_file)
    #out,err = subprocess.Popen(cmd,shell=True,stdout=subprocess.PIPE).communicate()
    #_debug(out)
    # Flush and point to beginning of handle for reading
    io.flush()
    io.seek(0)  # was reset() -- Py2-only alias for seek(0)
    _debug(io.getvalue())
    records = list(SeqIO.parse(io, "fasta"))
    assert len(records)==1
    return records[0]
Example #20
0
class TestWrappObserver(object):
    def setup(self):
        self.out = StringIO()
        start_logging(self.out)
        self.log = Logger()

    def test_debug(self):
        self.log.debug('Hello!')
        self.assert_output('DEBUG {"level": "debug", "msg": "Hello!", "namespace": "tests"}\n')

    def test_info(self):
        self.log.info('Hello!')
        self.assert_output('INFO {"level": "info", "msg": "Hello!", "namespace": "tests"}\n')

    def test_warning(self):
        self.log.warning('Hello!')
        self.assert_output('WARNING {"level": "warning", "msg": "Hello!", "namespace": "tests"}\n')

    def test_error(self):
        self.log.error('Hello!')
        self.assert_output('ERROR {"level": "error", "msg": "Hello!", "namespace": "tests"}\n')

    def get_output(self):
        self.out.reset()
        return self.out.read()

    def assert_output(self, expected):
        actual = self.get_output()
        assert actual == expected, repr(actual)
class TestPrefilterFrontEnd(PrefilterFrontEnd):
    """PrefilterFrontEnd wired for testing: silent prompts and banner, all
    output captured into an in-memory buffer."""

    input_prompt_template = string.Template('')
    output_prompt_template = string.Template('')
    banner = ''

    def __init__(self):
        ipython0 = get_ipython0().IP
        self.out = StringIO()
        PrefilterFrontEnd.__init__(self, ipython0=ipython0)
        # Clean up the namespace for isolation between tests
        user_ns = self.ipython0.user_ns
        # We need to keep references to things so that they don't
        # get garbage collected (this stinks).
        self.shadow_ns = dict()
        for i in self.ipython0.magic_who_ls():
            self.shadow_ns[i] = user_ns.pop(i)
        # Some more code for isolation (yeah, crazy)
        self._on_enter()
        # Discard any startup output: rewind (reset() is the Py2 StringIO
        # alias for seek(0)) and truncate the buffer at position 0.
        self.out.flush()
        self.out.reset()
        self.out.truncate()

    def write(self, string, *args, **kwargs):
       self.out.write(string) 

    def _on_enter(self):
        # Simulate the user pressing Enter in the input buffer.
        self.input_buffer += '\n'
        PrefilterFrontEnd._on_enter(self)
Example #22
0
def is_valid_image(supposed_image):
    """Verifies that the file (like) object is a valid image"""
    # We need to get a file object for PIL. We might have a path or we might
    # have to read the data into memory.
    if hasattr(supposed_image, 'temporary_file_path'):
        file = supposed_image.temporary_file_path()
    else:
        if hasattr(supposed_image, 'read'):
            file = StringIO(supposed_image.read())
        else:
            file = StringIO(supposed_image['content'])

    try:
        # load() is the only method that can spot a truncated JPEG,
        #  but it cannot be called sanely after verify()
        trial_image = Image.open(file)
        trial_image.load()

        # Since we're about to use the file again we have to reset the
        # file object if possible.  seek(0) exists on both Py2 StringIO and
        # io objects; the old ``reset()`` attribute was Py2-only (and the
        # guard is still needed because `file` may be a path string).
        if hasattr(file, 'seek'):
            file.seek(0)

        # verify() is the only method that can spot a corrupt PNG,
        #  but it must be called immediately after the constructor
        trial_image = Image.open(file)
        trial_image.verify()
    except Exception: # Python Imaging Library doesn't recognize it as an image
        return False
    if hasattr(supposed_image, 'seek') and callable(supposed_image.seek):
        supposed_image.seek(0)
    
    return True
Example #23
0
class LogWrapper(object):
    """Test helper that captures crawlmi log output in an in-memory buffer."""

    def setUp(self, level=log.INFO, encoding='utf-8'):
        # Point a file log observer at an in-memory buffer.
        self.f = StringIO()
        self.flo = log.CrawlmiFileLogObserver(self.f, level, encoding)
        self.flo.start()

    def tearDown(self):
        self.flo.stop()

    def clear(self):
        """Discard everything captured so far."""
        self.f.seek(0)  # was reset() -- Py2-only alias for seek(0)
        self.f.truncate()

    def get_logged(self, clear=True):
        """Return everything logged so far, optionally clearing the buffer."""
        logged = self.f.getvalue()
        if clear:
            self.clear()
        return logged

    def get_lines(self, strip=True, clear=True):
        """Return captured lines; ``strip`` drops the 25-char timestamp prefix."""
        lines = self.get_logged(clear=clear).splitlines()
        if strip:
            # list comprehension: map() returns a lazy iterator on Py3,
            # which would break get_first_line's indexing
            lines = [line.strip()[25:] for line in lines]
        return lines

    def get_first_line(self, strip=True, clear=True):
        lines = self.get_lines(strip=strip, clear=clear)
        return lines[0] if lines else ''
Example #24
0
 def do_test(to_write):
   """Round-trip a variable-length int through write_vint/read_vint
   and assert the value survives unchanged."""
   buf = StringIO()
   writable.write_vint(buf, to_write)
   buf.reset()  # Py2 StringIO rewind (== seek(0)) before reading back
   print "encoded ", to_write, ": ", repr(buf.getvalue())
   readback = writable.read_vint(buf)
   self.assertEqual(to_write, readback)
Example #25
0
    def clean(self, data, initial=None):
        """
        Checks that the file-upload field data contains a valid image (GIF, JPG,
        PNG, possibly others -- whatever the Python Imaging Library supports)
        and that it satisfies the extension / size / dimension / aspect-ratio
        constraints configured on this field.

        Returns the cleaned upload, ``initial`` when no new data was supplied,
        or None; raises forms.ValidationError on any violation.
        """
        f = super(ImageField, self).clean(data, initial)
        if f is None:
            return None
        elif not data and initial:
            return initial

        # Extension whitelist check (by filename, before touching content).
        if data.name.split(".")[-1].lower() not in self.valid_file_extensions:
            raise forms.ValidationError(
                self.error_messages['invalid_extension'],
                )

        if data.size > self.max_file_size:
            raise forms.ValidationError(self.error_messages['too_large_file'])

        from PIL import Image

        # We need to get a file object for PIL. We might have a path or we might
        # have to read the data into memory.
        if hasattr(data, 'temporary_file_path'):
            file = data.temporary_file_path()
        else:
            if hasattr(data, 'read'):
                file = StringIO(data.read())
            else:
                file = StringIO(data['content'])

        try:
            # load() is the only method that can spot a truncated JPEG,
            #  but it cannot be called sanely after verify()
            trial_image = Image.open(file)
            trial_image.load()
            # Since we're about to use the file again we have to reset the
            # file object if possible.  seek(0) exists on Py2 StringIO and io
            # objects alike; the old reset() attribute was Py2-only (guard
            # kept because `file` may be a path string).
            if hasattr(file, 'seek'):
                file.seek(0)

            # verify() is the only method that can spot a corrupt PNG,
            #  but it must be called immediately after the constructor
            trial_image = Image.open(file)
            trial_image.verify()
        # BUG FIX: was a bare ``except:`` which also swallowed SystemExit
        # and KeyboardInterrupt.
        except Exception:
            raise forms.ValidationError(self.error_messages['invalid_image'])
        else:
            width, height = trial_image.size
            if width < self.min_dimensions[0] or height < self.min_dimensions[1]:
                raise forms.ValidationError(
                    self.error_messages['too_small_dimensions'],
                    )
            if 1.0 * max(width, height) / min(width, height) > self.max_ratio:
                raise forms.ValidationError(
                    self.error_messages['too_large_ratio'],
                    )
        if hasattr(data, 'seek') and callable(data.seek):
            data.seek(0)
        return data
Example #26
0
def string2file(raw_string):
    """Return a rewound file-like object containing the given string."""
    filelike = StringIO()
    filelike.write(raw_string)
    filelike.seek(0)  # rewind for the reader (reset() is Py2-only)
    return filelike
Example #27
0
class DiscoZipFile(ZipFile, object):
    """In-memory zip archive with helpers for packaging files, modules,
    and whole directory trees."""

    def __init__(self):
        # Back the archive with an in-memory buffer.
        self.buffer = StringIO()
        super(DiscoZipFile, self).__init__(self.buffer, 'w', ZIP_DEFLATED)

    def writepath(self, pathname, exclude=()):
        """Add every file under ``pathname`` whose extension is not excluded."""
        for file in files(pathname):
            name, ext = os.path.splitext(file)
            if ext not in exclude:
                self.write(file, file)

    def writemodule(self, module, arcname=None):
        """Add a module's source file; accepts a module object or name."""
        if isinstance(module, basestring):
            module = __import__(module)
        self.write(getsourcefile(module), arcname=arcname)

    def writesource(self, object):
        """Add the source tree of the module that defines ``object``."""
        self.writepath(getsourcefile(getmodule(object)))

    def dump(self, handle):
        """Write the archive contents to an open file handle."""
        handle.write(self.dumps())

    def dumps(self):
        """Return the entire archive contents as a string."""
        self.buffer.seek(0)  # was reset() -- Py2-only alias for seek(0)
        return self.buffer.read()
    def run(self):
        """Export 'lines'-type zProperties for the device classes packed in
        the selected zenpack.

        NOTE(review): this snippet is truncated -- the final ``for`` loop
        has no body -- and contains a leftover pdb.set_trace() breakpoint;
        it cannot run as-is.
        """
        if self.options.zenpack:
            self.zenpack = self.dmd.ZenPackManager.packs._getOb(
                self.options.zenpack, None)
            if not self.zenpack:
                print "%s is not a valid Zenpack. Exiting...." % self.options.zenpack
                sys.exit(1)

        packables = eliminateDuplicates(self.zenpack.packables())
        for obj in packables:
            deviceClasses = []
            if isinstance(obj, DeviceClass):
                # Include the organizer itself plus all sub-organizers.
                deviceClasses = obj.getSubOrganizers()
                deviceClasses.insert(0, obj)

            for dc in deviceClasses:
                xml = StringIO()
                dc.exportXmlProperties(xml)
                xml.reset()  # Py2 StringIO rewind (== seek(0))
                xmldoc = "<obj>%s</obj>" % xml.read().strip()
                tree = etree.parse(StringIO(xmldoc))
                dc = IInfo(dc)
                print dc.uid
                import pdb
                pdb.set_trace()
                for obj in tree.xpath('//property[@type="lines"]')
Example #29
0
def package_project(name, spiders, repo=None, branch='master'):
    """Archive a project (its required files plus the given spiders' JSON
    specs) into an in-memory zip and return the rewound buffer."""
    zbuff = StringIO()
    spider_paths = ['spiders/%s.json' % spider for spider in spiders]
    files = list(set(spider_paths) | REQUIRED_FILES)
    _archive_project(name, zbuff, files, repo, branch, ignore_deleted=True)
    zbuff.seek(0)  # rewind for the caller (reset() is Py2-only)
    return zbuff
Example #30
0
def test_append_into_last_chunk():
    """Appending into a partial last chunk must merge into it rather than
    create a new chunk, and the data must round-trip through unpack."""
    # first create an array with a single chunk
    orig, new, dcmp = StringIO(), StringIO(), StringIO()
    create_array_fp(1, new)
    new_size = new.tell()
    new.reset()  # Py2 StringIO rewind (== seek(0))
    chunking = calculate_nchunks(new_size, chunk_size=new_size)
    source = PlainFPSource(new)
    sink = CompressedFPSink(orig)
    pack(source, sink, *chunking)
    orig.reset()
    new.reset()
    # append a few bytes, creating a new, smaller, last_chunk
    new_content = new.read()
    new.reset()
    nchunks = reset_append_fp(orig, StringIO(new_content[:1023]), 1023)
    bloscpack_header = reset_read_beginning(orig)[0]
    nt.assert_equal(nchunks, 1)
    nt.assert_equal(bloscpack_header['last_chunk'], 1023)
    # now append into that last chunk
    nchunks = reset_append_fp(orig, StringIO(new_content[:1023]), 1023)
    bloscpack_header = reset_read_beginning(orig)[0]
    nt.assert_equal(nchunks, 0)
    nt.assert_equal(bloscpack_header['last_chunk'], 2046)

    # now check by unpacking
    source = CompressedFPSource(orig)
    sink = PlainFPSink(dcmp)
    unpack(source, sink)
    dcmp.reset()
    new.reset()
    new_str = new.read()
    dcmp_str = dcmp.read()
    nt.assert_equal(len(dcmp_str), len(new_str) + 2046)
    nt.assert_equal(dcmp_str, new_str + new_str[:1023] * 2)
def test_write_words_underscore():
    """Spaces in option names must be written back as underscores."""
    cfg = conf.ceph.CephConf()
    cfg.add_section('foo')
    cfg.set('foo', 'bar thud quux', 'baz')
    f = StringIO()
    cfg.write(f)
    f.seek(0)  # rewind before readlines (reset() is Py2-only)
    assert f.readlines() == ['[foo]\n', 'bar_thud_quux = baz\n', '\n']
Example #32
0
def test_write_words_underscore():
    """Spaces in option names must be written back as underscores."""
    cfg = conf.CephConf()
    cfg.add_section("foo")
    cfg.set("foo", "bar thud quux", "baz")
    f = StringIO()
    cfg.write(f)
    f.seek(0)  # rewind before readlines (reset() is Py2-only)
    assert f.readlines() == ["[foo]\n", "bar_thud_quux = baz\n", "\n"]
Example #33
0
def test_write_words_underscore():
    """Spaces in option names must be written back as underscores."""
    cfg = conf.ceph.CephConf()
    cfg.add_section('foo')
    cfg.set('foo', 'bar thud quux', 'baz')
    f = StringIO()
    cfg.write(f)
    f.seek(0)  # rewind before readlines (reset() is Py2-only)
    assert f.readlines() == ['[foo]\n', 'bar_thud_quux = baz\n','\n']
Example #34
0
def render_element(element, ind=""):
    """
    call the render method of an element and return it as a string

    ``ind`` is the indentation prefix forwarded to element.render().
    """
    f = StringIO()
    element.render(f, ind)
    f.seek(0)  # rewind before reading back (reset() is Py2-only)
    output = f.read()
    return output
Example #35
0
def format_attribute(el, visibility=False, is_derived=False, type=False,
                           multiplicity=False, default=False, tags=False):
    """
    Create a OCL representation of the attribute,
    Returns the attribute as a string.
    If one or more of the parameters (visibility, is_derived, type,
    multiplicity, default and/or tags) is set, only that field is rendered.
    Note that the name of the attribute is always rendered, so a parseable
    string is returned.

    Note that, when some of those parameters are set, parsing the string
    will not give you the same result.
    """
    name = el.name
    if not name:
        name = ''

    if no_render_pat.match(name):
        name = ''

    # Render all fields if they all are set to False
    if not (visibility or is_derived or type or multiplicity or default):
        visibility = is_derived = type = multiplicity = default = True

    s = StringIO()

    if visibility:
        s.write(vis_map[el.visibility])
        s.write(' ')

    if is_derived:
        if el.isDerived: s.write('/')

    s.write(name)

    if type and el.typeValue:
        s.write(': %s' % el.typeValue)

    if multiplicity and el.upperValue:
        if el.lowerValue:
            s.write('[%s..%s]' % (el.lowerValue, el.upperValue))
        else:
            s.write('[%s]' % el.upperValue)

    if default and el.defaultValue:
        s.write(' = %s' % el.defaultValue)

    if tags:
        slots = []
        for slot in el.appliedStereotype[:].slot:
            if slot:
                slots.append('%s=%s' % (slot.definingFeature.name, slot.value))
        if slots:
            s.write(' { %s }' % ', '.join(slots))
    # was reset()+read(); getvalue() is position-independent and portable
    return s.getvalue()
Example #36
0
def download(request):
    """Return a spreadsheet of selected patents as a CSV attachment.

    Reads a search ``phrase`` and whitespace-separated patent ``ids`` from
    the request parameters; ids that match no known patent are skipped.
    """
    # Load desired patents
    params = request.params
    phrase = params.get('phrase', '')
    patentIDs = params.get('ids', '').split()
    patentByID = dict((str(x.id), x) for x in get_patents())
    # Prepare CSV
    stringIO = StringIO()
    csvWriter = csv.writer(stringIO)
    csvWriter.writerow([
        'Case',
        'Lead Inventor',
        'Status',
        'Type',
        'Date Filed',
        'Firm',
        'Firm Ref',
        'Country',
        'Title',
    ])
    for patentID in patentIDs:
        try:
            patent = patentByID[patentID]
        except KeyError:
            continue  # unknown id: skip silently
        contact = patent.lead_contact
        contactSummary = '%s\n%s\n%s' % (
            contact.name_last, contact.email, '\n'.join(
                '%s %s' % (phone.number, phone.type)
                for phone in contact.phones)) if contact else ''
        csvWriter.writerow([
            patent.technology.ref if patent.technology else '',
            contactSummary,
            patent.status.name if patent.status else '',
            patent.type.name if patent.type else '',
            patent.date_filed.strftime('%m/%d/%y')
            if patent.date_filed else '',
            patent.firm.name if patent.firm else '',
            patent.firm_ref,
            patent.country.name if patent.country else '',
            patent.name,
        ])
    # Sanitize filename
    alphabet = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_.,() '
    phrase = re.sub(r'[^%s]' % alphabet, ' ', phrase)  # Whitelist characters
    phrase = re.sub(r'\s+', ' ', phrase).strip()  # Remove excess whitespace
    filename = '%s patents%s.csv' % (datetime.datetime.utcnow().strftime(
        '%Y%m%d'), ' ' + phrase if phrase else '')
    # Generate: getvalue() returns the whole buffer, no rewind required.
    body = stringIO.getvalue()
    # Return
    return Response(body=body,
                    content_length=len(body),
                    content_disposition='attachment; filename="%s"' % filename)
Example #37
0
 def archive(self, spiders=None):
     """
     Zip the contents or a subset of the contents in this project together

     spiders -- optional subset forwarded to self._add_files()
     Returns an in-memory file object rewound to the start of the zip
     data, ready to be read or streamed by the caller.
     """
     zbuff = StringIO()
     self._archive = zipfile.ZipFile(zbuff, "w", zipfile.ZIP_DEFLATED)
     self._add_files(spiders)
     self._archive.close()
     # seek(0) is the portable spelling of cStringIO's reset(): rewind so
     # the caller reads the archive from the beginning.
     zbuff.seek(0)
     return zbuff
Example #38
0
def save_file_submission(sid, filename, stream):
    """
    Store an uploaded submission: zip the (sanitized) file in memory,
    then encrypt the archive to the journalist key at the source's path.
    """
    sanitized_filename = secure_filename(filename)

    s = StringIO()
    with zipfile.ZipFile(s, 'w') as zf:
        zf.writestr(sanitized_filename, stream.read())
    # Rewind so crypto_util.encrypt() reads the zip from the beginning
    # (seek(0) replaces the cStringIO-only reset()).
    s.seek(0)

    file_loc = path(sid, "%s_doc.zip.gpg" % uuid.uuid4())
    crypto_util.encrypt(config.JOURNALIST_KEY, s, file_loc)
Example #39
0
def serialize_csv_trace_obsels(graph, resource, bindings=None):
    """
    Generator yielding the obsels of a trace as CSV, one encoded line at
    a time, reusing a single in-memory buffer between rows.
    """
    sio = StringIO()
    csvw = csv_writer(sio)
    for row in iter_csv_rows(resource.trace.uri, graph):
        csvw.writerow([ i.encode('utf-8') for i in row ])
        # immediately yield each line
        yield sio.getvalue()
        # then empty sio before writing next line: rewind, then truncate
        # at position zero (seek(0) replaces cStringIO's reset()).
        sio.seek(0)
        sio.truncate()
Example #40
0
class TestWrappObserver(object):
    """Tests for the wrapp observer's JSON log record at each log level."""

    @patch('txwrapplog._timestamp')
    def setup(self, timestamp_mock):
        # Freeze the timestamp so expected and actual records compare equal.
        self.timestamp = datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")
        timestamp_mock.return_value = self.timestamp
        self.msg = 'Hello'
        self.service = "api"
        self.out = StringIO()
        self.log = Logger(observer=wrapp_observer(self.out, service=self.service))

    def _generate_output(self, level):
        """Build the expected one-line JSON record for *level*."""
        # OrderedDict: key order must match the observer's serialization.
        res = collections.OrderedDict()
        res['level'] = level
        res['msg'] = self.msg
        res['service'] = self.service
        res['timestamp'] = self.timestamp
        res['namespace'] = 'tests'
        return '%s\n' % (json.dumps(res))

    def test_debug(self):
        self.log.debug(self.msg)
        self.assert_output(self._generate_output('debug'))

    def test_info(self):
        self.log.info(self.msg)
        self.assert_output(self._generate_output('info'))

    def test_warn(self):
        self.log.warn(self.msg)
        self.assert_output(self._generate_output('warning'))

    def test_error(self):
        self.log.error(self.msg)
        self.assert_output(self._generate_output('error'))

    def test_critical(self):
        # Critical records are expected to serialize at the 'error' level.
        self.log.critical(self.msg)
        self.assert_output(self._generate_output('error'))

    def test_failure(self):
        try:
            1/0
        except Exception:
            self.log.failure(self.msg)
        # The traceback payload varies, so only the record prefix is checked.
        actual = self.get_output()
        expected_start = '{"level": "error", "msg": "%s"' % (self.msg)
        assert actual.startswith(expected_start)

    def get_output(self):
        # getvalue() returns everything written so far, with no need for
        # the cStringIO-only reset()+read() rewind.
        return self.out.getvalue()

    def assert_output(self, expected):
        actual = self.get_output()
        assert actual == expected, repr(actual)
Example #41
0
def read(fobj):
    """
    Load *fobj* into an in-memory stream and return it rewound.

    If the first line matches _FASTA_FIRST_LINE_RE, the remaining lines
    are stripped and concatenated into a new stream (the header line is
    dropped); otherwise the original content is returned unchanged.
    """
    s = StringIO(fobj.read())
    first = s.readline()
    if _FASTA_FIRST_LINE_RE.match(first):
        so = StringIO()
        for line in s:
            so.write(line.strip())
        so.seek(0)  # rewind (portable replacement for cStringIO reset())
        return so
    s.seek(0)
    return s
 def test_export_multiple_items(self):
     """Round-trip two items through the pickle exporter in order."""
     i1 = TestItem(name='hello', age='world')
     i2 = TestItem(name='bye', age='world')
     f = StringIO()
     ie = PickleItemExporter(f)
     ie.start_exporting()
     ie.export_item(i1)
     ie.export_item(i2)
     ie.finish_exporting()
     # Rewind before reading back (seek(0) replaces cStringIO reset()).
     f.seek(0)
     # Items unpickle in the order they were exported.
     self.assertEqual(pickle.load(f), i1)
     self.assertEqual(pickle.load(f), i2)
Example #43
0
 def grab_frame(self, **savefig_kwargs):
     """
     Capture the current figure as one animation frame.

     When embedding frames, render the figure into an in-memory buffer
     and store it base64-encoded; otherwise defer to the parent writer.
     """
     if self.embed_frames:
         f = StringIO()
         self.fig.savefig(f,
                          format=self.frame_format,
                          dpi=self.dpi,
                          **savefig_kwargs)
         # Rewind before reading the rendered image back (seek(0)
         # replaces the cStringIO-only reset()).
         f.seek(0)
         self._saved_frames.append(f.read().encode('base64'))
     else:
         return super(HTMLWriter, self).grab_frame(**savefig_kwargs)
Example #44
0
def _sniff_plist(pbx_file):
    """
    Pick a plist reader implementation by sniffing the first line.

    Returns (reader_class, buffer) where buffer holds the whole file
    content rewound to the start. The input file is closed.
    """
    buf = StringIO(pbx_file.read())
    pbx_file.close()
    first_line = buf.readline()
    # An "<?xml" declaration marks an XML plist; anything else is assumed
    # to be the old-style (NeXTSTEP) format.
    if "<?xml" in first_line:
        reader_impl = XMLPlistReader
    else:
        reader_impl = NSPlistReader

    # reset buffer so the chosen reader sees the content from the start
    buf.seek(0)
    return (reader_impl, buf)
Example #45
0
 def test_zshCode(self):
     """
     Generate a completion function, and test the textual output
     against a known correct output
     """
     cmd_name = 'testprog'
     opts = CodeTestOptions()
     f = StringIO()
     b = zshcomp.Builder(cmd_name, opts, f)
     b.write()
     # Rewind before comparing what the builder wrote (seek(0) replaces
     # the cStringIO-only reset()).
     f.seek(0)
     self.failUnlessEquals(f.read(), testOutput1)
Example #46
0
    def clean(self, data, initial=None):
        """
        Checks that the file-upload field data contains a valid image (GIF, JPG,
        PNG, possibly others -- whatever the Python Imaging Library supports).
        """
        f = super(ImageField, self).clean(data, initial)
        if f is None:
            return None
        elif not data and initial:
            # No new upload was provided: keep the existing value.
            return initial

        # Try to import PIL in either of the two ways it can end up installed.
        try:
            from PIL import Image
        except ImportError:
            import Image

        # We need to get a file object for PIL. We might have a path or we might
        # have to read the data into memory.
        if hasattr(data, 'temporary_file_path'):
            file = data.temporary_file_path()
        else:
            if hasattr(data, 'read'):
                file = StringIO(data.read())
            else:
                file = StringIO(data['content'])

        try:
            # load() is the only method that can spot a truncated JPEG,
            #  but it cannot be called sanely after verify()
            trial_image = Image.open(file)
            trial_image.load()

            # Since we're about to use the file again we have to reset the
            # file object if possible.
            if hasattr(file, 'reset'):
                file.reset()

            # verify() is the only method that can spot a corrupt PNG,
            #  but it must be called immediately after the constructor
            trial_image = Image.open(file)
            trial_image.verify()
        except ImportError:
            # Under PyPy, it is possible to import PIL. However, the underlying
            # _imaging C module isn't available, so an ImportError will be
            # raised. Catch and re-raise.
            raise
        except Exception:  # Python Imaging Library doesn't recognize it as an image
            raise ValidationError(self.error_messages['invalid_image'])
        # Rewind the upload so later consumers can read it from the start.
        if hasattr(f, 'seek') and callable(f.seek):
            f.seek(0)
        return f
Example #47
0
def parse_ldif(ldif):
    """convert ldif to list; the item of list is tuple,
    the first item is dn, the second is a dict contains ldap attributes.
    """
    # Seed the buffer directly from the string: the stream starts at
    # position zero, so the old write()+reset() dance is unnecessary
    # (and reset() only exists on cStringIO anyway).
    buf = StringIO(ldif)
    parser = _MDMiLDIFParser(buf)
    parser.parse()
    buf.close()

    return parser.all_records
Example #48
0
    def load(self):
        """
        Copy the first 50 experiment2 routes into the phidata_route table
        through an in-memory tab-separated buffer and COPY FROM.

        Returns self for chaining.
        """
        sql_query = '''select links from experiment2_routes where od_route_index < 50;'''
        cursor = connection.cursor()
        cursor.execute(sql_query)

        sio = StringIO()
        # row_id (not 'id') to avoid shadowing the builtin.
        for row_id, (links, ) in enumerate(cursor):
            sio.write('\t'.join([str(row_id), E2Routes._array_to_postgres(links)]))
            sio.write('\n')

        # Rewind so copy_from() reads from the beginning of the buffer
        # (seek(0) replaces the cStringIO-only reset()).
        sio.seek(0)
        cursor.copy_from(sio, 'phidata_route')
        return self
Example #49
0
class CaptureStdout(object):
    """
    Context manager that redirects sys.stdout into an in-memory buffer.

    After the with-block exits, the captured text is available on
    ``self.content``. The buffer is cleared on every __enter__, so one
    instance can be reused; each use captures only its own output.
    """

    def __init__(self):
        self.io = StringIO()

    def __enter__(self):
        # Discard output captured by any previous use: rewind, then
        # truncate at the (now zero) position. seek(0) is the portable
        # replacement for cStringIO's reset().
        self.io.seek(0)
        self.io.truncate()
        self.stdout = sys.stdout
        sys.stdout = self.io

    def __exit__(self, *args):
        sys.stdout = self.stdout
        self.content = self.io.getvalue()
Example #50
0
def render_page(page, filename):
    """
    render the tree of elements

    Renders *page* into memory, prints the result to stdout, and writes
    the same content to *filename*.
    """

    f = StringIO()
    page.render(f, "    ")

    # getvalue() returns the whole buffer without rewinding, replacing
    # the original double reset()+read().
    output = f.getvalue()

    print(output)

    # 'with' closes the output file (the original leaked the handle).
    with open(filename, 'w') as out:
        out.write(output)
Example #51
0
def _image_to_png(image_data):
    """
    Render *image_data* as a grayscale PNG, optionally magnified by the
    integer ``scale`` query argument of the current request, and return
    the PNG bytes.
    """
    png_buffer = StringIO()
    pyplot.imsave(png_buffer, image_data, cmap=cm.gray, format='png')
    # Rewind so PIL reads the rendered image from the start (seek(0)
    # replaces the cStringIO-only reset()).
    png_buffer.seek(0)
    image = Image.open(png_buffer)
    scale = int(request.args.get('scale', 1))
    if scale != 1:
        # NEAREST keeps hard pixel edges when magnifying.
        (width, height) = image.size
        image = image.resize((width * scale, height * scale), Image.NEAREST)
    png_buffer = StringIO()
    image.save(png_buffer, format="PNG")
    png = png_buffer.getvalue()
    png_buffer.close()
    return png
Example #52
0
def imprimir(temp, data, tittle):
    """
    Render template *temp* with *data* to a PDF and return it as an
    attachment response whose filename is *tittle*.
    """
    template = get_template(temp)
    pdf_data = template.render(Context(data))
    # Write PDF to file

    pdf = StringIO()
    try:
        pisa.CreatePDF(StringIO(pdf_data.encode('utf-8')), pdf)
    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt
        # still propagate; any PDF-generation failure reports 'Errors'.
        return HttpResponse('Errors')
    # getvalue() returns the whole buffer; no reset()+read() needed.
    response = HttpResponse(pdf.getvalue(), content_type='application/pdf')
    response['Content-Disposition'] = 'attachment; filename="' + tittle + '"'
    return response
Example #53
0
class ProtocolBuffer(object):
    """
    Accumulates raw received bytes and extracts complete protocol
    messages (header + payload) as they become available.
    """

    def __init__(self):
        self.buffer = StringIO()
        self.header_size = MessageHeaderSerializer.calcsize()

    def write(self, data):
        """Append raw received data to the internal buffer."""
        self.buffer.write(data)

    def receive_message(self):
        """This method will attempt to extract a header and message.
        It will return a tuple of (header, message) and set whichever
        can be set so far (None otherwise).
        """
        # Calculate the size of the buffer
        self.buffer.seek(0, os.SEEK_END)
        buffer_size = self.buffer.tell()

        # Check if a complete header is present
        if buffer_size < self.header_size:
            return (None, None)

        # Go to the beginning of the buffer (seek(0) is the portable
        # spelling of cStringIO's reset()).
        self.buffer.seek(0)

        message_model = None
        message_header_serial = MessageHeaderSerializer()
        message_header = message_header_serial.deserialize(self.buffer)
        total_length = self.header_size + message_header.length

        # Incomplete message: restore the append position and report the
        # header only.
        if buffer_size < total_length:
            self.buffer.seek(0, os.SEEK_END)
            return (message_header, None)

        payload = self.buffer.read(message_header.length)
        # Preserve any bytes beyond this message for the next extraction.
        remaining = self.buffer.read()
        self.buffer = StringIO()
        self.buffer.write(remaining)
        payload_checksum = MessageHeaderSerializer.calc_checksum(payload)

        # Check if the checksum is valid
        if payload_checksum != message_header.checksum:
            msg = "Bad checksum for command %s" % message_header.command
            raise InvalidMessageChecksum(msg)

        if message_header.command in MESSAGE_MAPPING:
            deserializer = MESSAGE_MAPPING[message_header.command]()
            message_model = deserializer.deserialize(StringIO(payload))

        return (message_header, message_model)
Example #54
0
    def test_conf_settings_during_getinfo(self):
        # Command.process should return configuration settings on Getinfo probe

        expected = \
            '\r\n' \
            'changes_colorder,clear_required_fields,enableheader,generating,local,maxinputs,needs_empty_results,outputheader,overrides_timeorder,passauth,perf_warn_limit,required_fields,requires_srinfo,retainsevents,run_in_preview,stderr_dest,streaming,supports_multivalues,supports_rawargs,__mv_changes_colorder,__mv_clear_required_fields,__mv_enableheader,__mv_generating,__mv_local,__mv_maxinputs,__mv_needs_empty_results,__mv_outputheader,__mv_overrides_timeorder,__mv_passauth,__mv_perf_warn_limit,__mv_required_fields,__mv_requires_srinfo,__mv_retainsevents,__mv_run_in_preview,__mv_stderr_dest,__mv_streaming,__mv_supports_multivalues,__mv_supports_rawargs\r\n' \
            '1,0,1,0,0,0,1,1,0,0,0,,0,1,1,log,1,1,1,,,,,,,,,,,,,,,,,,,\r\n'

        command = SearchCommand()
        result = StringIO()
        command.process(['foo.py', '__GETINFO__'], output_file=result)
        # getvalue() reads the whole buffer without a reset()/rewind.
        observed = result.getvalue()
        self.assertEqual(expected, observed)
Example #55
0
    def download_with_progress_bar(self, data_url, return_buffer=False):
        """Download a file, showing progress.

        Parameters
        ----------
        data_url : string
                   web address.
        return_buffer : boolean (optional)
                        if true, return a StringIO buffer rather than a string.

        Returns
        -------
        str
            Content of the file.
        """

        num_units = 40

        fhandle = urllib2.urlopen(data_url)
        total_size = int(fhandle.info().getheader('Content-Length').strip())
        # Integer division: each '=' in the bar represents one chunk.
        chunk_size = total_size / num_units

        print("Downloading %s" % data_url)
        nchunks = 0
        buf = StringIO()
        total_size_str = self.bytes_to_string(total_size)

        while True:
            next_chunk = fhandle.read(chunk_size)
            nchunks += 1

            if next_chunk:
                buf.write(next_chunk)
                # NOTE(review): the bar is num_units-1 columns wide and
                # can overflow if more than num_units chunks arrive;
                # cosmetic only -- confirm before changing.
                s = ('[' + nchunks * '=' + (num_units - 1 - nchunks) * ' ' +
                     ']  %s / %s   \r' %
                     (self.bytes_to_string(buf.tell()), total_size_str))
            else:
                sys.stdout.write('\n')
                break

            sys.stdout.write(s)
            sys.stdout.flush()

        # Rewind so a returned buffer reads from the start (seek(0)
        # replaces the cStringIO-only reset()).
        buf.seek(0)
        if return_buffer:
            return buf
        else:
            return buf.getvalue()
Example #56
0
def readTerm(data):
    """ helper file of readGO()

    Yields one in-memory stream per [Term] stanza, each rewound to the
    start. A fresh StringIO is created for every term so that a consumer
    may hold on to or lazily read a yielded stream: the old shared-buffer
    reset/truncate clobbered previously yielded terms, leaked the
    terminating blank line into the next term, and dropped a final term
    not followed by a blank line.
    """
    isTerm = False
    curItem = StringIO()
    for line in data:
        if isTerm:
            if not line or line == '\n':
                # A blank line ends the stanza: emit it and start fresh.
                isTerm = False
                curItem.seek(0)
                yield curItem
                curItem = StringIO()
            else:
                curItem.write(line)
        elif line == '[Term]\n':
            isTerm = True
    if isTerm and curItem.tell():
        # Emit a trailing term that was not followed by a blank line.
        curItem.seek(0)
        yield curItem
Example #57
0
    def test_finalize(self):
        """Run the 'finalize' step against a mock service and compare its
        tool notes and anomalies with the stored expected result."""
        myinput, outstream = StringIO(), StringIO()

        # Extract the app data
        tmpdir = tempfile.mkdtemp("tmp_test_finalize")
        try:
            app_zip = os.path.join(os.path.dirname(__file__), "data", "AwesomeApp.zip")
            decompress(app_zip, tmpdir)

            # Create a mock residue from run
            main_act_path = os.path.join(tmpdir,TestScript.JAVAFILE)
            input_res = {
                "residue": {
                    "compilation_infos" : [{"cwd" : "", "cmd" : "", "args" : "",
                                            "classpath" : [],
                                            "files": [main_act_path]}
                                           ]},
                "toolNotes": []
            }
            myinput.write(json.dumps(input_res))
            # Rewind so main() reads the mock residue from the start
            # (seek(0) replaces the cStringIO-only reset()).
            myinput.seek(0)

            # Start a mock service
            service = TestScript.TestSearchService()
            try:
                args = TestScript.get_args("finalize")
                args[1] = tmpdir # set the working directory

                api_res = main(args, myinput, outstream, biggroum_api_map)
                self.assertTrue(api_res == 0)

                out_json = json.loads(outstream.getvalue())

                res_path = os.path.abspath(os.path.dirname(fixrgraph.musedev.test.__file__))
                res_path = os.path.join(res_path, "data", "finalize_result.json")
                with open(res_path, 'r') as f:
                    expected_res = json.load(f)

                self.assertTrue(compare_json_obj(out_json["toolNotes"],
                                                 expected_res["toolNotes"]))

                self.assertTrue(compare_json_obj(out_json["residue"]["anomalies"],
                                                 expected_res["residue"]["anomalies"]))

            finally:
                service.stop()
        finally:
            # Always remove the scratch directory, even on failure.
            shutil.rmtree(tmpdir)
Example #58
0
class TextBufferingContentHandler(sax.handler.ContentHandler):
    """
    Keeps track of the text in the current XML element.
    """

    def __init__(self):
        self._buffer = StringIO()
        self._text = None

    @property
    def text(self):
        """
        Read-only access to the current element's text.
        """
        return self._text

    def startElementNS(self, name, qname, attrs):
        """
        A SAX callback indicating the start of an element. Includes namespace
        information.

        This implementation resets and truncates the buffer.
        """
        self._reset_truncate()

    def endElementNS(self, name, qname):
        """
        A SAX callback indicating the end of an element. Includes namespace
        information.

        This implementation saves the text from the buffer. Then it resets and
        truncates the buffer.
        """
        self._text = self._buffer.getvalue()
        self._reset_truncate()

    def characters(self, content):
        """
        A SAX callback indicating characters from the text portion of the
        current XML element.

        This implementation writes to the buffer.
        """
        self._buffer.write(content)

    def _reset_truncate(self):
        """Empty the buffer: rewind, then truncate at position zero.

        seek(0) is the portable replacement for cStringIO's reset().
        """
        self._buffer.seek(0)
        self._buffer.truncate()
Example #59
0
def test_reset():
    """
    Test that the reset method of cStringIO objects sets the position
    marker to the beginning of the stream.
    """
    # Output-mode stream: after write() the position is at the end, so a
    # read() yields nothing until reset() rewinds the stream.
    stream = StringIO()
    stream.write(data)
    assert stream.read() == ''
    stream.reset()
    assert stream.read() == data

    # Input-mode stream: the first read() consumes everything; reset()
    # makes the same content readable again.
    stream = StringIO(data)
    assert stream.read() == data
    assert stream.read() == ''
    stream.reset()
    assert stream.read() == data
Example #60
0
    def load(self, density):
        """
        Copy the waypoints belonging to *density* into the phidata_cell
        table through an in-memory tab-separated buffer and COPY FROM.

        Rows whose geometry is NULL are skipped. Returns self for chaining.
        """
        sql_query = '''select id, ST_AsEWKT(geom), ST_AsEWKT(location) from orm_waypoint where density_id = %s;'''
        cursor = connection.cursor()
        cursor.execute(sql_query, [density])

        sio = StringIO()
        # row_id (not 'id') to avoid shadowing the builtin.
        for row_id, geom, location in cursor:
            if geom is None:
                continue
            print(geom)  # debug trace of each geometry copied
            # NOTE(review): a NULL location would make join() raise a
            # TypeError -- presumably the schema guarantees location is
            # set whenever geom is; confirm against the table definition.
            sio.write('\t'.join([str(row_id), geom, location]))
            sio.write('\n')

        # Rewind so copy_from() reads from the beginning of the buffer
        # (seek(0) replaces the cStringIO-only reset()).
        sio.seek(0)
        cursor.copy_from(sio, 'phidata_cell')
        return self