Example #1
class SFTPStorageFile(File):

    def __init__(self, name, storage, mode):
        self._name = name
        self._storage = storage
        self._mode = mode
        self._is_dirty = False
        self.file = StringIO()
        self._is_read = False

    @property
    def size(self):
        if not hasattr(self, '_size'):
            self._size = self._storage.size(self._name)
        return self._size

    def read(self, num_bytes=None):
        if not self._is_read:
            self.file = self._storage._read(self._name)
            self._is_read = True

        return self.file.read(num_bytes)

    def write(self, content):
        if 'w' not in self._mode:
            raise AttributeError("File was opened for read-only access.")
        self.file = StringIO(content)
        self._is_dirty = True
        self._is_read = True

    def close(self):
        if self._is_dirty:
            self._storage._save(self._name, self.file.getvalue())
        self.file.close()
Example #2
 def create_thumbnail(self, size):
     try:
         orig = self.avatar.storage.open(self.avatar.name, 'rb').read()
         image = Image.open(StringIO(orig))
     except IOError:
         return # What should we do here?  Render a "sorry, didn't work" img?
     (w, h) = image.size
     if w != size or h != size:
         if w > h:
             diff = (w - h) / 2
             image = image.crop((diff, 0, w - diff, h))
         else:
             diff = (h - w) / 2
             image = image.crop((0, diff, w, h - diff))
         image = image.resize((size, size), AVATAR_RESIZE_METHOD)
         if image.mode != "RGB":
             image = image.convert("RGB")
         thumb = StringIO()
         image.save(thumb, "JPEG")
         thumb_file = ContentFile(thumb.getvalue())
     else:
         thumb_file = ContentFile(orig)
     thumb = self.avatar.storage.save(
             upload_avatar_file_path(instance=self, size=size),
             thumb_file)
Example #3
        def wrapped_func(*args, **kwargs):
            try:
                import cProfile
                profiler = cProfile.Profile()
                profiler.enable()
                log.debug("Agent profiling is enabled")
            except Exception:
                log.warn("Cannot enable profiler")

            # Catch any return value before disabling profiler
            ret_val = func(*args, **kwargs)

            # disable profiler and printout stats to stdout
            try:
                profiler.disable()
                import pstats
                from cStringIO import StringIO
                s = StringIO()
                ps = pstats.Stats(profiler, stream=s).sort_stats("cumulative")
                ps.print_stats(AgentProfiler.PSTATS_LIMIT)
                log.info(s.getvalue())
            except Exception:
                log.warn("Cannot disable profiler")

            return ret_val
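
A minimal standalone sketch of the same capture-profiler-output-to-a-string pattern, assuming Python 2 and cStringIO as in the example above; profile_call and its arguments are illustrative names, not part of the original project:

import cProfile
import pstats
from cStringIO import StringIO

def profile_call(func, *args, **kwargs):
    # Run func under cProfile and return (result, stats_text).
    profiler = cProfile.Profile()
    profiler.enable()
    try:
        result = func(*args, **kwargs)
    finally:
        profiler.disable()
    buf = StringIO()
    stats = pstats.Stats(profiler, stream=buf).sort_stats("cumulative")
    stats.print_stats(20)  # limit output to the 20 most expensive entries
    return result, buf.getvalue()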
Example #4
 def _get_store_info(self, value, min_compress_len):
     flags = 0
     if isinstance(value, unicode):
         value = value.encode("utf-8")
         min_compress_len = 0
     elif isinstance(value, str):
         pass
     elif isinstance(value, int):
         flags |= _FLAG_INTEGER
         value = "%d" % value
         min_compress_len = 0
     elif isinstance(value, long):
         flags |= _FLAG_LONG
         value = "%d" % value
     else:
         flags |= _FLAG_PICKLE
         f = StringIO()
         pickler = pickle.Pickler(f)
         pickler.dump(value)
         value = f.getvalue()
     lv = len(value)
     if min_compress_len and lv > min_compress_len:
         comp_val = zlib.compress(value)
         if len(comp_val) < lv:
             flags |= _FLAG_COMPRESSED
             value = comp_val
     return flags, value
Example #5
def createZip(path):

    def walktree (top = ".", depthfirst = True):
        names = os.listdir(top)
        if not depthfirst:
            yield top, names
        for name in names:
            try:
                st = os.lstat(os.path.join(top, name))
            except os.error:
                continue
            if stat.S_ISDIR(st.st_mode):
                for (newtop, children) in walktree (os.path.join(top, name),
                                                    depthfirst):
                    yield newtop, children
        if depthfirst:
            yield top, names

    names = []
    for (basepath, children) in walktree(path, False):
        for child in children:
            f = os.path.join(basepath, child)
            if os.path.isfile(f):
                f = f.encode(sys.getfilesystemencoding())
                names.append(f)

    f = StringIO()
    zf = zipfile.ZipFile(f, "w")
    for fname in names:
        nfname = os.path.join(os.path.basename(path), fname[len(path) + 1:])
        zf.write(fname, nfname, zipfile.ZIP_DEFLATED)
    zf.close()

    f.seek(0)
    return f
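
For reference, a smaller hedged sketch of the same build-a-zip-in-memory technique, assuming Python 2's StringIO; build_zip and the sample payload are made up for illustration:

import zipfile
from StringIO import StringIO

def build_zip(entries):
    # entries: mapping of archive name -> file contents (str).
    buf = StringIO()
    zf = zipfile.ZipFile(buf, "w", compression=zipfile.ZIP_DEFLATED)
    for name, payload in entries.items():
        zf.writestr(name, payload)
    zf.close()
    buf.seek(0)
    return buf  # file-like object, ready to stream or save

archive = build_zip({"readme.txt": "hello", "data/empty.csv": ""})
print len(archive.getvalue()), "bytes"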
Example #6
    def downloadmessage(self, msgidx, foldername):
        ''' downloads one message and returns the converted mbox-style mail '''
        pageurl = "%sGetMessageSource.aspx?msgid=%s" % (self.baseurl, msgidx)
        r = self.getpage(pageurl)
        messageblock = r.read()

        try:
            pre = self.findvar(messageblock, 'messageblock', "<pre>(.*)</pre>")
        except ValueError:
            return None
        try:
            unescapedmsg = self.htmlparser.unescape(pre).encode('latin1')
        except:
            logger.error("Unable to unescape html of message\n%s", pre)
            return None
        # create a message object to convert it to mbox format
        try:
            msg = email.message_from_string(unescapedmsg)
        except:
            logger.error(
                "Unable to create message object from text\n%s", unescapedmsg)
            return None
        # add headers
        msg.add_header("X-GetOutlook-Version", self.version())
        msg.add_header("X-GetOutlook-msgidx", msgidx)
        msg.add_header("X-GetOutlook-Folder", foldername)
        # make flat
        msg_out = StringIO()
        msg_gen = Generator(msg_out, mangle_from_=True)
        msg_gen.flatten(msg, unixfrom=True)
        return msg_out.getvalue()
Example #7
def scale_image(img_upload, img_max_size):
    """Crop and scale an image file."""
    try:
        img = Image.open(img_upload)
    except IOError:
        return None

    src_width, src_height = img.size
    src_ratio = float(src_width) / float(src_height)
    dst_width, dst_height = img_max_size
    dst_ratio = float(dst_width) / float(dst_height)

    if dst_ratio < src_ratio:
        crop_height = src_height
        crop_width = crop_height * dst_ratio
        x_offset = int(float(src_width - crop_width) / 2)
        y_offset = 0
    else:
        crop_width = src_width
        crop_height = crop_width / dst_ratio
        x_offset = 0
        y_offset = int(float(src_height - crop_height) / 3)

    img = img.crop(
        (x_offset, y_offset,
         x_offset + int(crop_width), y_offset + int(crop_height)))
    img = img.resize((dst_width, dst_height), Image.ANTIALIAS)

    if img.mode != "RGB":
        img = img.convert("RGB")
    new_img = StringIO()
    img.save(new_img, "JPEG")
    img_data = new_img.getvalue()

    return ContentFile(img_data)
Example #8
 def next(self):
     resultBuilder = StringIO()
     for _ in range(self.length):
         pos = self.rv.randint(0, len(self.alphabet) - 1)
         resultBuilder.write(self.alphabet[pos])
     return resultBuilder.getvalue()
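
The StringIO-as-string-builder pattern above can also be written with str.join; a hedged equivalent sketch in which random, alphabet and length stand in for the example's attributes:

import random

def random_string(alphabet, length, rng=random):
    # Equivalent to the StringIO builder above: pick `length` symbols at random.
    return ''.join(rng.choice(alphabet) for _ in range(length))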
Example #9
 def testReadLines(self):
     # "Test BZ2File.readlines()"
     self.createTempFile()
     with BZ2File(self.filename) as bz2f:
         self.assertRaises(TypeError, bz2f.readlines, None)
         sio = StringIO(self.TEXT)
         self.assertEqual(bz2f.readlines(), sio.readlines())
Example #10
def fetch_image_from_url(file_url):
    """Returns an UploadedFile object after retrieving the file at the given URL."""
    inStream = urllib2.urlopen(file_url)

    parser = ImageFile.Parser()
    file_size = 0
    max_file_size = 20 * 1024 * 1024 # 20 megabytes
    read_size = 1024
    while True:
        s = inStream.read(read_size)
        file_size += len(s)
        if not s:
            break
        if file_size > max_file_size:
            raise Exception("file size exceeded max size: %s bytes" % max_file_size)
        parser.feed(s)

    inImage = parser.close()
    # convert to RGB to avoid error with png and tiffs
    #if inImage.mode != "RGB":
    #    inImage = inImage.convert("RGB")

    img_temp = StringIO()
    inImage.save(img_temp, 'PNG')
    img_temp.seek(0)

    file_object = File(img_temp, 'img_temp.png')
    uploaded_file = UploadedFile(file=file_object, name=file_object.name, content_type='image/png', size=file_size, charset=None)

    return uploaded_file
Example #11
def email_as_string(mail):
    """
    Converts the given message to a string, without mangling "From" lines
    (like as_string() does).

    :param mail: email to convert to string
    :rtype: str
    """
    fp = StringIO()
    g = Generator(fp, mangle_from_=False, maxheaderlen=78)
    g.flatten(mail)
    as_string = RFC3156_canonicalize(fp.getvalue())

    if isinstance(mail, MIMEMultipart):
        # Get the boundary for later
        boundary = mail.get_boundary()

        # Workaround for http://bugs.python.org/issue14983:
        # Insert a newline before the outer mail boundary so that other mail
        # clients can verify the signature when sending an email which contains
        # attachments.
        as_string = re.sub(r'--(\r\n)--' + boundary,
                           '--\g<1>\g<1>--' + boundary,
                           as_string, flags=re.MULTILINE)

    return as_string
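
A self-contained sketch of the Generator-based flattening used above, assuming Python 2's email package and StringIO; the message built here is only a demonstration input:

from StringIO import StringIO
from email.mime.text import MIMEText
from email.generator import Generator

msg = MIMEText("From here on, nothing gets mangled.")
msg['Subject'] = 'demo'

fp = StringIO()
# mangle_from_=False keeps body lines starting with "From " intact,
# whereas as_string() would escape them as ">From ".
Generator(fp, mangle_from_=False, maxheaderlen=78).flatten(msg)
raw = fp.getvalue()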
Example #12
def create_zip_deck_file(deck):
    """Creates a zipped file containing the contents of the deck (XLS and media objects."""

    # create the string buffer to hold the contents of the zip file
    s = StringIO()

    # create the zipfile object
    zfile = zipfile.ZipFile(s, "w")

    # write the deck XLS file to the zip
    deck_file_output = utils.create_deck_file(deck.id)
    temp_dirpath = tempfile.mkdtemp()
    temp_filepath = os.path.join(temp_dirpath, "deck.xls")
    deck_file_output.save(temp_filepath)
    zfile.write(temp_filepath, arcname=os.path.split(temp_filepath)[1])
    shutil.rmtree(temp_dirpath) # must delete temp dir when we're done

    # lookup the unique field values in the deck of cards,
    # where the field values are the media object names
    card_list = queries.getDeckCardsList(deck.id)
    field_set = set()
    for c in card_list:
        for f in c['fields']:
            if f['type'] not in ('T', 'M'):
                field_set.add(f['value'])

    # add each media object to the zip file
    for file_name in field_set:
        file_contents = MediaStoreService.readFileContents(file_name)
        if file_contents is not None:
            zfile.writestr(file_name, file_contents)

    zfile.close()

    return s.getvalue()
Example #13
    def _download_manifest(self):
        """
        Download the manifest file, and process it to return an ISOManifest.

        :return: manifest of available ISOs
        :rtype:  pulp_rpm.plugins.db.models.ISOManifest
        """
        manifest_url = urljoin(self._repo_url, models.ISOManifest.FILENAME)
        # I probably should have called this manifest destination, but I couldn't help myself
        manifest_destiny = StringIO()
        manifest_request = request.DownloadRequest(manifest_url, manifest_destiny)
        self.downloader.download([manifest_request])
        # We can inspect the report status to see if we had an error when retrieving the manifest.
        if self.progress_report.state == self.progress_report.STATE_MANIFEST_FAILED:
            raise IOError(_("Could not retrieve %(url)s") % {'url': manifest_url})

        manifest_destiny.seek(0)
        try:
            manifest = models.ISOManifest(manifest_destiny, self._repo_url)
        except ValueError:
            self.progress_report.error_message = _('The PULP_MANIFEST file was not in the ' +
                                                   'expected format.')
            self.progress_report.state = self.progress_report.STATE_MANIFEST_FAILED
            raise ValueError(self.progress_report.error_message)

        return manifest
Example #14
def log_error(message, filename, action=None, label='Error'):
    """Writer error message to log file.

    Helper function for :func:`flush_log`, :func:`process_error`.

    :param message: error message
    :type message: string
    :param filename: image filename
    :type filename: string
    :param label: ``'Error'`` or ``'Warning'``
    :type label: string
    :returns: error log details
    :rtype: string
    """
    global ERROR_LOG_COUNTER
    details = ''
    if action:
        details += os.linesep + 'Action:' + \
                    pprint.pformat(action.dump())
    ERROR_LOG_FILE.write(os.linesep.join([
        u'%s %d:%s' % (label, ERROR_LOG_COUNTER, message),
        details,
        os.linesep,
    ]))
    try:
        traceback.print_exc(file=ERROR_LOG_FILE)
    except UnicodeDecodeError:
        stringio = StringIO()
        traceback.print_exc(file=stringio)
        traceb = stringio.getvalue()
        ERROR_LOG_FILE.write(unicode(traceb, ENCODING, 'replace'))
    ERROR_LOG_FILE.write('*' + os.linesep)
    ERROR_LOG_FILE.flush()
    ERROR_LOG_COUNTER += 1
    return details
Example #15
def error(component, message, stdout=False):
    """log an error message"""
    # In case of error print also the stacktrace
    stacktrace = StringIO()
    print_exc(file=stacktrace)
    emessage = "%s\n%s" % (message, stacktrace.getvalue())
    logger('error', component, emessage, stdout=stdout)
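
The same capture-a-traceback-into-a-buffer trick in isolation; a hedged sketch assuming Python 2, with format_last_error as an illustrative name:

import traceback
from StringIO import StringIO

def format_last_error():
    # Must be called from inside an except block so there is an active exception.
    buf = StringIO()
    traceback.print_exc(file=buf)
    return buf.getvalue()

try:
    1 / 0
except ZeroDivisionError:
    print format_last_error()

Note that traceback.format_exc() returns the same text directly, without an explicit buffer.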
Example #16
 def testXReadLines(self):
     # "Test BZ2File.xreadlines()"
     self.createTempFile()
     bz2f = BZ2File(self.filename)
     sio = StringIO(self.TEXT)
     self.assertEqual(list(bz2f.xreadlines()), sio.readlines())
     bz2f.close()
Example #17
def make_thumbnail(record):
    """Make small and medium thumbnails of given record."""
    id = record.get_header().subject_uri.split('/')[-1].split('.')[0]
    id = "%010d" % int(id)
    path = "/".join([id[0:3], id[3:6], id[6:9]])

    data = record.get_data()
    image = Image.open(StringIO(data))

    sizes = dict(S=(116, 58), M=(180, 360), L=(500, 500))

    yield id + "-O.jpg", data

    for size in "SML":
        imgpath = "%s-%s.jpg" % (id, size)
        try:
            if image.mode != 'RGB':
                image = image.convert('RGB')

            thumbnail = StringIO()
            image.resize(sizes[size], resample=Image.ANTIALIAS).save(thumbnail, format='jpeg')
            yield imgpath, thumbnail.getvalue()
        except Exception, e:
            print 'ERROR:', id, str(e)
            sys.stdout.flush()
Example #18
	def pack(self):
		"""Pack data for transfer."""
		s = StringIO()
		p = pickle.Pickler(s)
		p.dump(self)
		data = s.getvalue()
		return struct.pack('!HHI', VERSION[0], VERSION[1], len(data)) + data
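
A possible receiving-side counterpart to pack() above, offered as a hedged sketch: the '!HHI' header layout is taken from the example, but unpack_transfer is an invented name and the original project's actual decoder may differ:

import pickle
import struct

def unpack_transfer(blob):
    # Mirror of pack(): two unsigned shorts (version) plus an unsigned int payload length.
    header_size = struct.calcsize('!HHI')
    major, minor, length = struct.unpack('!HHI', blob[:header_size])
    payload = blob[header_size:header_size + length]
    return (major, minor), pickle.loads(payload)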
Example #19
 def zipAdded(self):
     "Add files to a zip until over SYNC_ZIP_SIZE. Return zip data."
     f = StringIO()
     z = zipfile.ZipFile(f, "w", compression=zipfile.ZIP_DEFLATED)
     sz = 0
     cnt = 0
     files = {}
     cur = self.db.execute(
         "select fname from log where type = ?", MEDIA_ADD)
     fnames = []
     while 1:
         fname = cur.fetchone()
         if not fname:
             # add a flag so the server knows it can clean up
             z.writestr("_finished", "")
             break
         fname = fname[0]
         fnames.append([fname])
         z.write(fname, str(cnt))
         files[str(cnt)] = fname
         sz += os.path.getsize(fname)
         if sz > SYNC_ZIP_SIZE:
             break
         cnt += 1
     z.writestr("_meta", simplejson.dumps(files))
     z.close()
     return f.getvalue(), fnames
Example #20
    def __resize(self, display):
        #resize and resample photo
        original_id = self._getDisplayId()
        string_image = StringIO(str(self.get_data(original_id)))
        if display == 'Original':
            return string_image

        crop = False
        width, height = self.displays.get(display, (0, 0))
        # Calculate image width, size
        if not (width and height):
            size = LISTING_DISPLAYS.get(display, self.width())
            width, height = self.__get_crop_aspect_ratio_size(size)
            crop = True
        else:
            width, height = self.__get_aspect_ratio_size(width, height)
        
        # Resize image
        newimg = StringIO()
        img = Image.open(string_image)
        fmt = img.format
        try: img = img.resize((width, height), Image.ANTIALIAS)
        except AttributeError: img = img.resize((width, height))
        
        # Crop if needed
        if crop:
            box = self.__get_crop_box(width, height)
            img = img.crop(box)
            #img.load()
        quality = self._photo_quality(string_image)
        img.save(newimg, fmt, quality=quality)
        newimg.seek(0)
        return newimg
Example #21
def load_url (url, referrer=None):
    """Attempt to load the url using pycurl and return the data
    (which is None if unsuccessful)"""

    data = None
    databuffer = StringIO()

    curl = pycurl.Curl()
    curl.setopt(pycurl.URL, url)
    curl.setopt(pycurl.FOLLOWLOCATION, 1)
    curl.setopt(pycurl.CONNECTTIMEOUT, 5)
    curl.setopt(pycurl.TIMEOUT, 8)
    curl.setopt(pycurl.WRITEFUNCTION, databuffer.write)
    curl.setopt(pycurl.USERAGENT, UA)
    curl.setopt(pycurl.COOKIEFILE, '')
    if referrer is not None:
        curl.setopt(pycurl.REFERER, referrer)
    try:
        curl.perform()
        data = databuffer.getvalue()
    except Exception:
        pass
    curl.close()

    return data
Example #22
 def _apply_watermark(self, datafile):
     text = self.aq_parent.watermark_text
     FONT = os.path.join(os.path.dirname(__file__), 'fonts', 'VeraSeBd.ttf')
     img = Image.open(datafile)
     newimg = StringIO()
     fmt = img.format
     watermark = Image.new("RGBA", (img.size[0], img.size[1]))
     draw = ImageDraw.ImageDraw(watermark, "RGBA")
     size = 0
     while True:
         size += 1
         nextfont = ImageFont.truetype(FONT, size)
         nexttextwidth, nexttextheight = nextfont.getsize(text)
         if nexttextwidth+nexttextheight/3 > watermark.size[0]:
             break
         font = nextfont
         textwidth, textheight = nexttextwidth, nexttextheight
     draw.setfont(font)
     draw.text(((watermark.size[0]-textwidth)/2,
                (watermark.size[1]-textheight)/2), text)
     watermark = watermark.rotate(degrees(atan(float(img.size[1])/img.size[0])),
                              Image.BICUBIC)
     mask = watermark.convert("L").point(lambda x: min(x, 88))
     watermark.putalpha(mask)
     img.paste(watermark, None, watermark)
     quality = self._photo_quality(datafile)
     img.save(newimg, fmt, quality=quality)
     newimg.seek(0)
     return newimg
Example #23
File: srj.py Project: ox-it/humfrey
    def _iter(self, sparql_results_type, fields, bindings, boolean, triples):
        if sparql_results_type not in ('resultset', 'boolean'):
            raise TypeError("Unexpected results type: {0}".format(sparql_results_type))

        # We'll spool to a buffer, and only yield when it gets a bit big.
        buffer = StringIO()

        # Do these attribute lookups only once.
        json_dumps, json_dump, buffer_write = json.dumps, json.dump, buffer.write

        buffer_write('{\n')
        if sparql_results_type == 'boolean':
            buffer_write('  "head": {},\n')
            buffer_write('  "boolean": %s' % ('true' if boolean else 'false'))
        elif sparql_results_type == 'resultset':
            buffer_write('  "head": {\n')
            buffer_write('    "vars": [ %s ]\n' % ', '.join(json_dumps(field) for field in fields))
            buffer_write('  },\n')
            buffer_write('  "results": {\n')
            buffer_write('    "bindings": [\n')
            for i, binding in enumerate(bindings):
                buffer_write('      {' if i == 0 else ',\n      {')
                j = 0
                for field in fields:
                    value = binding.get(field)
                    if value is None:
                        continue
                    buffer_write(',\n        ' if j > 0 else '\n        ')
                    json_dump(field, buffer)
                    if isinstance(value, rdflib.URIRef):
                        buffer_write(': { "type": "uri"')
                    elif isinstance(value, rdflib.BNode):
                        buffer_write(': { "type": "bnode"')
                    elif value.datatype is not None:
                        buffer_write(': { "type": "typed-literal", "datatype": ')
                        json_dump(value.datatype, buffer)
                    elif value.language is not None:
                        buffer_write(': { "type": "literal", "xml:lang": ')
                        json_dump(value.language, buffer)
                    else:
                        buffer_write(': { "type": "literal"')
                    buffer_write(', "value": ')
                    json_dump(value, buffer)
                    buffer_write(' }')

                    j += 1

                buffer_write('\n      }')

                # Yield a chunk and reset the buffer whenever it gets close to
                # 64k, so large result sets are streamed instead of accumulated.
                if buffer.tell() > 65000:  # Almost 64k
                    yield buffer.getvalue()
                    buffer.seek(0)
                    buffer.truncate()

            buffer_write('\n    ]')
            buffer_write('\n  }')

        buffer_write('\n}')
        yield buffer.getvalue()
        buffer.close()
Example #24
def get_build_info():
    """Returns a string containing the build info."""
    global __build_info__
    if __build_info__ is not None:
        return __build_info__

    build_info_buffer = StringIO()
    original_dir = os.getcwd()

    try:
        # We need to execute the git command in the source root.
        os.chdir(__source_root__)
        # Add in the e-mail address of the user building it.
        (_, packager_email) = run_command('git config user.email', exit_on_fail=True, command_name='git')
        print >>build_info_buffer, 'Packaged by: %s' % packager_email.strip()

        # Determine the last commit from the log.
        (_, commit_id) = run_command('git log --summary -1 | head -n 1 | cut -d \' \' -f 2',
                                     exit_on_fail=True, command_name='git')
        print >>build_info_buffer, 'Latest commit: %s' % commit_id.strip()

        # Include the branch just for safety sake.
        (_, branch) = run_command('git branch | cut -d \' \' -f 2', exit_on_fail=True, command_name='git')
        print >>build_info_buffer, 'From branch: %s' % branch.strip()

        # Add a timestamp.
        print >>build_info_buffer, 'Build time: %s' % strftime("%Y-%m-%d %H:%M:%S UTC", gmtime())

        __build_info__ = build_info_buffer.getvalue()
        return __build_info__
    finally:
        os.chdir(original_dir)

        if build_info_buffer is not None:
            build_info_buffer.close()
Example #25
def test():
    import sys
    base = ''
    if sys.argv[1:]:
        fn = sys.argv[1]
        if fn == '-':
            fp = sys.stdin
        else:
            fp = open(fn)
    else:
        try:
            from cStringIO import StringIO
        except ImportError:
            from StringIO import StringIO
        fp = StringIO(test_input)
    while 1:
        line = fp.readline()
        if not line: break
        words = line.split()
        if not words:
            continue
        url = words[0]
        parts = urlparse(url)
        print '%-10s : %s' % (url, parts)
        abs = urljoin(base, url)
        if not base:
            base = abs
        wrapped = '<URL:%s>' % abs
        print '%-10s = %s' % (url, wrapped)
        if len(words) == 3 and words[1] == '=':
            if wrapped != words[2]:
                print 'EXPECTED', words[2], '!!!!!!!!!!'
Example #26
    def test_exit_error_on_parser_error(self):
        # Command.process should produce an error message and exit on parser
        # errors, if invoked to execute. Same error message as expected_pattern
        # defined above

        expected_pattern = re.compile(
            '\r\n' \
            'ERROR\r\n' \
            '"ValueError at ""' + \
            os.path.abspath(os.path.join(os.path.dirname(__file__),
                                         "../../splunklib/searchcommands/search_command_internals.py")) + \
            '"", line \d\d\d : ' \
            'Unrecognized option: undefined_option = value"\r\n'
        )

        command = SearchCommand()
        result = StringIO()

        try:
            command.process(
                args=['foo.py', '__EXECUTE__', 'undefined_option=value'],
                input_file=StringIO('\r\n'), output_file=result)
        except SystemExit as e:
            result.reset()
            observed = result.read()
            self.assertNotEqual(e.code, 0)
            self.assertTrue(expected_pattern.match(observed))
        except BaseException as e:
            self.fail("Expected SystemExit, but caught %s" % type(e))
        else:
            self.fail("Expected SystemExit, but no exception was raised")
Example #27
def test_latex_units():
    """
    Check to make sure that Latex and AASTex writers attempt to fall
    back on the **unit** attribute of **Column** if the supplied
    **latexdict** does not specify units.
    """
    t = table.Table([table.Column(name='date', data=['a','b']),
               table.Column(name='NUV exp.time', data=[1,2])])
    latexdict = copy.deepcopy(ascii.latexdicts['AA'])
    latexdict['units'] = {'NUV exp.time':'s'}
    out = StringIO()
    expected = '''\
\\begin{table}{cc}
\\tablehead{\\colhead{date} & \\colhead{NUV exp.time}\\\\ \\colhead{ } & \\colhead{s}}
\\startdata
a & 1 \\\\
b & 2 \\\\
\\enddata
\\end{table}
'''
    ascii.write(t, out, format='aastex', latexdict=latexdict)
    assert out.getvalue() == expected
    # use unit attribute instead
    t['NUV exp.time'].unit = units.s
    t['date'].unit = units.yr
    out = StringIO()
    ascii.write(t, out, format='aastex', latexdict=ascii.latexdicts['AA'])
    assert out.getvalue() == expected.replace(
        'colhead{s}', 'colhead{$\mathrm{s}$}').replace(
        'colhead{ }', 'colhead{$\mathrm{yr}$}')
Example #28
    def test_error_when_getinfo_false(self):

        # Command.process should complain if supports_getinfo == False
        # We support dynamic configuration, not static

        # The exception line number may change, so we're using a regex match instead
        expected_pattern = re.compile(
            '\r\n'
            'ERROR' \
            '\r\n' \
            '"NotImplementedError at ' \
            '\"\"' + \
            os.path.abspath(os.path.join(os.path.dirname(__file__),
                                         "../../splunklib/searchcommands/search_command.py")) + \
            '\"\"' \
            ', line \d\d\d : ' \
            'Command search appears to be statically configured and static configuration is unsupported by splunklib.searchcommands. Please ensure that default/commands.conf contains this stanza:\n\[search\]\nfilename = foo.py\nsupports_getinfo = true\nsupports_rawargs = true\noutputheader = true"' \
            '\r\n'
        )

        command = SearchCommand()
        result = StringIO()

        self.assertRaises(
            SystemExit, command.process, ['foo.py'], output_file=result)

        result.reset()
        observed = result.read()
        self.assertTrue(expected_pattern.match(observed))
Example #29
 def download_tweets_csv(self, request, queryset):
     f = StringIO()
     w = unicodecsv.writer(f, encoding='utf-8')
     for tweet in queryset:
         w.writerow((
             tweet['data']['id'],
             tweet['data']['text'],
             tweet['data']['timestamp'],
             tweet['data']['retweet_count'],
             tweet['data']['favorite_count'],
             tweet['data']['in_reply_to_status_id'],
             tweet['data']['in_reply_to_user_id'],
             tweet['data']['retweeted_status_id'],
             tweet['data']['coords'],
             tweet['data']['user']['screen_name'],
             tweet['data']['user']['id'],
             tweet['data']['user']['name'],
         ))
     f.seek(0)
     response = HttpResponse(
         f.read(),
         content_type='text/csv'
     )
     response['Content-Disposition'] = 'attachment;filename=export.csv'
     return response
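
A stripped-down sketch of the same CSV-in-memory idea using only the standard library (csv instead of unicodecsv, so ASCII-safe values are assumed; the row data here is made up):

import csv
from StringIO import StringIO

buf = StringIO()
writer = csv.writer(buf)
writer.writerow(['id', 'text', 'retweets'])
writer.writerow([42, 'hello world', 7])
csv_bytes = buf.getvalue()  # hand this to an HttpResponse or write it to disk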
Example #30
    def test_sending_crap_ujson(self):
        test_dir = self._get_dir()
        os.chdir(os.path.dirname(__file__))

        data = StringIO()
        filepath = 'test_here.py'
        zf = zipfile.ZipFile(data, "w", compression=zipfile.ZIP_DEFLATED)
        info = zipfile.ZipInfo('test_here.py')
        info.external_attr = os.stat(filepath).st_mode << 16L

        with open(filepath) as f:
            zf.writestr(info, f.read())

        zf.close()
        data = data.getvalue()

        args = get_runner_args(
            fqn='test_here.TestWebSite.test_something',
            agents=1,
            users=1,
            hits=1,
            test_dir=test_dir,
            include_file=['test_here.py'])

        args['crap'] = data
        self.assertRaises(ValueError, start_runner, args)