예제 #1
0
    def test_deqatn_8_skip(self):
        """
        based off nast/tpl/ptdmi1.dat

        What's going on with the last line?
        """
        model = BDF(debug=None)
        card = [
            'deqatn  2       f(x,y,z)= 1.;',
            '        L=1+2+3+',
            '        + 4/min(1,2);',
            '        b= 4.;',
            '        h= 2.;',
            '        t1= 200.;',
            '        t2= 300.;',
            '        t=t1*(L-x)/L+t2*x/L;',
            '        +4'
        ]
        model.add_card(card, 'DEQATN', is_list=False)
        model.cross_reference()

        # round-trip the model through an in-memory file
        out = StringIO()
        model.write_bdf(out, close=False)
        out.getvalue()
        out.close()
예제 #2
0
    def test_deqatn_10(self):
        """
        based off nast/tpl/ptdmi1.dat

        def f(x, y, z):
            f = 1.
            l = x+y
            return l
        """
        model = BDF(debug=None)
        card = [
            'deqatn  2       f(x,y,z)= 1.;',
            '        L=x+y',
        ]
        model.add_card(card, 'DEQATN', is_list=False)
        model.cross_reference()

        # round-trip through an in-memory file
        out = StringIO()
        model.write_bdf(out, close=False)
        out.getvalue()
        out.close()

        eq = model.dequations[2]
        # with x = 1 and y = 2 everywhere, L = x + y == 3 == z
        x = np.ones(10, dtype='float32')
        y = 2. * np.ones(10, dtype='float32')
        z = 3. * np.ones(10, dtype='float32')
        assert np.array_equal(eq.func(x, y, z), z)
        assert eq.func(1.0, 2.0, 1.0) == 3.0
예제 #3
0
    def render(self):
        """Render ``rendered_content`` into an in-memory PDF (idempotent)."""
        # xhtml2pdf (pisa) performs the HTML -> PDF conversion
        import xhtml2pdf.pisa as pisa

        if self._is_rendered:
            return self

        # build the PDF entirely in memory; pisa writes into the buffer
        buffer = StringIO()
        pisa.CreatePDF(self.rendered_content, buffer,
                       link_callback=fetch_resources)

        # move the buffer contents into the response body
        pdf = buffer.getvalue()
        buffer.close()
        self.write(pdf)

        # force a download with the configured filename
        self['Content-Disposition'] = 'attachment; filename=%s' % (
            self.filename, )

        # mark as done so repeated calls are no-ops
        self._is_rendered = True

        for post_callback in self._post_render_callbacks:
            post_callback(self)
        return self
예제 #4
0
    def test_deqatn_7(self):
        """
        per nast/tpl/ptdmi1.dat

        def f(x, y, z):
            f = 1.
            l = 1+2+3++4/min(1,2)
            b = 4.
            h = 2.
            t1 = 200.
            t2 = 300.
            t = t1*(l-x)/l+t2*x/l
            return t
        """
        model = BDF(debug=None)
        card = [
            'deqatn  2       f(x,y,z)= 1.;',
            '        L=1+2+3+',
            '        + 4/min(1,2);',
            '        b= 4.;',
            '        h= 2.;',
            '        t1= 200.;',
            '        t2= 300.;',
            '        t=t1*(L-x)/L+t2*x/L',
        ]
        model.add_card(card, 'DEQATN', is_list=False)
        model.cross_reference()

        # round-trip through an in-memory file
        out = StringIO()
        model.write_bdf(out, close=False)
        out.getvalue()
        out.close()
예제 #5
0
    def test_deqatn_10(self):
        """
        per nast/tpl/ptdmi1.dat

        Builds f(x, y, z) whose equation reduces to L = x + y.
        """
        model = BDF(debug=None)
        model.cards_to_read.add('DEQATN')
        model.test_deqatn = True
        card = [
            'deqatn  2       f(x,y,z)= 1.;',
            '        L=x+y',
        ]
        model.add_card(card, 'DEQATN', is_list=False)
        model.cross_reference()

        s = StringIO()
        model.write_bdf(s, close=False)
        s.getvalue()
        s.close()
        eq = model.dequations[2]
        # np.zeros requires an integer shape; the original passed floats
        # (10., 11., 12.), which modern numpy rejects with a TypeError.
        x = zeros(10, dtype='float32')
        y = zeros(11, dtype='float32')
        z = zeros(12, dtype='float32')
        out = eq.func(1.0, 2.0)
        print(out)
예제 #6
0
    def render(self):
        """Render ``self.rows`` into an in-memory CSV (idempotent)."""
        if self._is_rendered:
            return self

        # build the CSV entirely in memory
        buffer = StringIO()
        writer = UnicodeWriter(buffer)
        for row in self.rows:
            writer.writerow([six.text_type(value) for value in row])

        # move the buffer contents into the response body
        csv_data = buffer.getvalue()
        buffer.close()
        self.write(csv_data)

        # force a download with the configured filename
        self['Content-Disposition'] = 'attachment; filename=%s' % (
            self.filename, )

        # mark as done so repeated calls are no-ops
        self._is_rendered = True

        for post_callback in self._post_render_callbacks:
            post_callback(self)
        return self
예제 #7
0
    def test_deqatn_10(self):
        """
        per nast/tpl/ptdmi1.dat

        Builds f(x, y, z) whose equation reduces to L = x + y.
        """
        model = BDF(debug=None)
        model.cards_to_read.add('DEQATN')
        model.test_deqatn = True
        card = [
            'deqatn  2       f(x,y,z)= 1.;',
            '        L=x+y',
        ]
        model.add_card(card, 'DEQATN', is_list=False)
        model.cross_reference()

        s = StringIO()
        model.write_bdf(s, close=False)
        s.getvalue()
        s.close()
        eq = model.dequations[2]
        # np.zeros requires an integer shape; the original passed floats
        # (10., 11., 12.), which modern numpy rejects with a TypeError.
        x = zeros(10, dtype='float32')
        y = zeros(11, dtype='float32')
        z = zeros(12, dtype='float32')
        out = eq.func(1.0, 2.0)
        print(out)
예제 #8
0
    def test_deqatn_9(self):
        """
        per nast/tpl/ptdmi1.dat
        """
        model = BDF(debug=None)
        model.cards_to_read.add('DEQATN')
        model.test_deqatn = True
        card = [
            'deqatn  2       f(x,y,z)= 1.;',
            '        L=1+2+3+',
            '        + 4/min(1,2);',
            '        b= 4.;',
            '        h= 2.;',
            '        t1= 200.;',
            '        t2= 300.;',
            '        t=t1*(L-x)/L+t2*x/L',
            '        +4'
        ]
        model.add_card(card, 'DEQATN', is_list=False)
        model.cross_reference()

        # round-trip through an in-memory file
        bdf_file = StringIO()
        model.write_bdf(bdf_file, close=False)
        bdf_file.getvalue()
        bdf_file.close()
예제 #9
0
    def render_and_save_variation(self, name, content, variation):
        """
        Renders the image variations and saves them to the storage.

        :param name: original file name; its extension decides the save format
        :param content: file-like object holding the source image bytes
        :param variation: dict with 'width', 'height' and 'crop' keys
        """
        content.seek(0)

        img = Image.open(content)

        if self.is_smaller(img, variation):
            # Cheap power-of-two pre-shrink before the final thumbnail pass.
            # NOTE(review): the ``* 2`` on the height term looks asymmetric
            # compared to the width term -- confirm it is intentional.
            factor = 1
            while (img.size[0] / factor > 2 * variation['width'] and
                   img.size[1] * 2 / factor > 2 * variation['height']):
                factor *= 2
            if factor > 1:
                img.thumbnail((int(img.size[0] / factor),
                               int(img.size[1] / factor)), resample=resample)

            if variation['crop']:
                img = ImageOps.fit(img, (variation['width'], variation['height']), method=resample)
            else:
                img.thumbnail((variation['width'], variation['height']), resample=resample)
        variation_name = self.get_variation_name(self.instance, self.field, variation)
        file_buffer = StringIO()
        # PIL expects 'jpeg', not 'jpg', hence the replace().  Renamed from
        # ``format`` which shadowed the builtin.
        save_format = self.get_file_extension(name).lower().replace('jpg', 'jpeg')
        img.save(file_buffer, save_format)
        self.storage.save(variation_name, ContentFile(file_buffer.getvalue()))
        file_buffer.close()
예제 #10
0
파일: facade.py 프로젝트: wilsonc86/ledger
def generateParserSummary(files):
    """Build a human-readable summary of a parse run.

    ``files`` maps the keys 'valid', 'other', 'failed' and 'processed' to
    lists of (name, payload) pairs; 'failed' pairs carry a reason string,
    the others carry a parsed object exposing ``transactions.all()``.
    Returns the report as one string.
    """
    buf = StringIO()

    # Successful files, listing each transaction CRN
    buf.write('Successful Files with transactions:\n')
    for name, parsed in files['valid']:
        buf.write('  File Name: {}\n'.format(name))
        buf.write('    Transactions:\n')
        for trans in parsed.transactions.all():
            buf.write('      CRN: {}\n'.format(trans.crn))

    # Successful files that produced no transactions
    buf.write('\nSuccessful Files without transactions:\n')
    for name, _parsed in files['other']:
        buf.write('  File Name: {}\n'.format(name))

    # Failures, with the reason each file was rejected
    buf.write('\nFailed Files:\n')
    for name, reason in files['failed']:
        buf.write('  File Name: {}\n'.format(name))
        buf.write('    Reason: {}\n'.format(reason))

    # Files skipped because they were already handled earlier
    buf.write('\nFiles previously processed:\n')
    for name, _parsed in files['processed']:
        buf.write('  File Name: {}\n'.format(name))

    try:
        return buf.getvalue()
    finally:
        buf.close()
예제 #11
0
def dumps(collection, **json_args):
    """
    Dump a collection of JSON objects into a string.  Primarily included to
    match the `json` library's functionality.  This may be more appropriate:

        >>> os.linesep.join(list(map(json.dumps, collection)))

    Parameters
    ----------
    collection : iter
        Iterable that produces one JSON object per iteration.
    json_args : **json_args, optional
        Additional keyword arguments for `NLJWriter()`.

    Returns
    -------
    str
    """
    # StringIO lacks __exit__ on older Pythons, so close it by hand.
    buf = StringIO()
    try:
        with NLJWriter(buf, 'w', **json_args) as dst:
            for obj in collection:
                dst.write(obj)
            # rewind and hand back everything the writer produced
            buf.seek(0)
            return buf.read()
    finally:
        buf.close()
예제 #12
0
class CaptureStdOut(object):
    """
    A logger that both prints to stdout and writes to file.

    Use as a context manager: on entry it installs itself as ``sys.stdout``
    and ``sys.stderr``; on exit it restores the previous streams and closes
    the log file (when one was opened).
    """
    def __init__(self, log_file_path=None, print_to_console=True):
        """
        :param log_file_path: The path to save the records, or None if you just want to keep it in memory
        :param print_to_console: If True, every message is also echoed to the original stdout.
        """
        self._print_to_console = print_to_console
        if log_file_path is not None:
            make_file_dir(log_file_path)
            self.log = open(log_file_path, 'w')
        else:
            # No path given: keep the whole log in memory.
            self.log = StringIO()
        self._log_file_path = log_file_path
        # Fallback stream used if write() is called outside the context.
        self.old_stdout = _ORIGINAL_STDOUT

    def __enter__(self):

        # Remember the current streams so __exit__ can restore them.
        self.old_stdout = sys.stdout
        self.old_stderr = sys.stderr

        sys.stdout = self
        sys.stderr = self
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Flush through self.write() before swapping the real streams back.
        sys.stdout.flush()
        sys.stderr.flush()
        sys.stdout = self.old_stdout
        sys.stderr = self.old_stderr
        self.close()

    def get_log_file_path(self):
        """Return the log file path; only valid when one was supplied."""
        assert self._log_file_path is not None, "You never specified a path when you created this logger, so don't come back and ask for one now"
        return self._log_file_path

    def write(self, message):
        """File-like hook: echo to the console (optionally) and append to the log."""
        if self._print_to_console:
            self.old_stdout.write(message)
        self.log.write(message)
        self.log.flush()

    def close(self):
        """Close the backing file; in-memory logs stay readable via read()."""
        if self._log_file_path is not None:
            self.log.close()

    def read(self):
        """Return everything logged so far (from memory or from the file)."""
        if self._log_file_path is None:
            return self.log.getvalue()
        else:
            with open(self._log_file_path) as f:
                txt = f.read()
            return txt

    def __getattr__(self, item):
        # Delegate any other file-like attribute (encoding, isatty, ...) to
        # the real stdout so this object passes as a stream.
        return getattr(self.old_stdout, item)
예제 #13
0
def stdout():
    """Capture output to STDOUT.

    Generator-based context helper: yields a ``StringIO`` that temporarily
    replaces ``sys.stdout``.  The real stream is restored -- and the buffer
    closed -- even when the caller's block raises; the original version
    skipped both on an exception, leaving ``sys.stdout`` hijacked.
    """
    old = sys.stdout
    sio = StringIO()
    sys.stdout = sio
    try:
        yield sio
    finally:
        # Restore first so any later output reaches the real stream.
        sys.stdout = old
        sio.close()
예제 #14
0
 def get(cls, year, month):
     """Download, parse and cache the REE 'PERFF' coefficient profile
     for the given year/month.  Results are memoized in ``cls._CACHE``
     under the key ``YYYYMM``; access is serialized via ``cls.down_lock``.
     Raises if the server does not return a gzip payload.
     """
     # Best-effort: disable HTTPS certificate verification where the ssl
     # module supports it (older Pythons lack _create_unverified_context).
     try:
         import ssl
         try:
             _create_unverified_https_context = ssl._create_unverified_context
         except AttributeError:
             pass
         else:
             ssl._create_default_https_context = _create_unverified_https_context
     except ImportError:
         pass
     try:
         cls.down_lock.acquire()
         import csv
         import httplib
         key = '%(year)s%(month)02i' % locals()
         conn = None
         if key in cls._CACHE:
             logger.debug('Using CACHE for REEProfile {0}'.format(key))
             return cls._CACHE[key]
         perff_file = 'PERFF_%(key)s.gz' % locals()
         conn = httplib.HTTPSConnection(cls.HOST)
         conn.request('GET', '%s/%s' % (cls.PATH, perff_file))
         logger.debug('Downloading REEProfile from {0}/{1}'.format(
             cls.PATH, perff_file))
         r = conn.getresponse()
         # NOTE(review): r.msg.type is Python 2 httplib API; under Python 3
         # this would be r.headers.get_content_type() -- confirm runtime.
         if r.msg.type == 'application/x-gzip':
             import gzip
             # Decompress the payload fully into a second in-memory buffer.
             c = StringIO(r.read())
             m = StringIO(gzip.GzipFile(fileobj=c).read())
             c.close()
             reader = csv.reader(m, delimiter=';')
             header = True
             cofs = []
             coeffs_list = get_tariff_coeffs_list(year, month)
             for vals in reader:
                 if header:
                     header = False
                     continue
                 # NOTE(review): n_hour is only reset when column 3 == 1;
                 # if the first data row is not hour 1 this raises
                 # NameError -- presumably the feed guarantees ordering.
                 if int(vals[3]) == 1:
                     n_hour = 1
                 dt = datetime(int(vals[0]), int(vals[1]), int(vals[2]))
                 # column 4 flags DST; invert it for is_dst
                 day = TIMEZONE.localize(dt, is_dst=bool(not int(vals[4])))
                 day += timedelta(hours=n_hour)
                 n_hour += 1
                 cofs.append(
                     Coefficent(
                         TIMEZONE.normalize(day),
                         # coefficient columns start at index 5
                         dict((k, float(vals[i]))
                              for i, k in enumerate(coeffs_list, 5))))
             cls._CACHE[key] = cofs
             return cofs
         else:
             raise Exception('Profiles from REE not found')
     finally:
         # Always release the connection and the download lock.
         if conn is not None:
             conn.close()
         cls.down_lock.release()
예제 #15
0
 def test_deqatn_6(self):
     func_str  = 'def f(x, y, z):\n'
     func_str += '    c = 3\n'
     func_str += '    return x + y + z + c\n'
     #func = exec(fnc_str)
     s = StringIO()
     s.write(s)
     s.close()
     exec (func_str)
     f(1, 2, 3)
     #func = exec func_str
     assert f(1, 2, 3) == 9, func(1, 2, 3)
예제 #16
0
 def test_deqatn_6(self):
     func_str = 'def f(x, y, z):\n'
     func_str += '    c = 3\n'
     func_str += '    return x + y + z + c\n'
     #func = exec(fnc_str)
     s = StringIO()
     s.write(s)
     s.close()
     exec(func_str)
     f(1, 2, 3)
     #func = exec func_str
     assert f(1, 2, 3) == 9, func(1, 2, 3)
예제 #17
0
    def test_deqatn_3(self):
        """A single MAXDIFF equation round-trips through write_bdf."""
        model = BDF(debug=None)
        model.cards_to_read.add('DEQATN')
        model.test_deqatn = True
        card = [
            'DEQATN  1000',
            '        MAXDIFF(t1,t2)=abs(t2-t1)/t1',
        ]
        model.add_card(card, 'DEQATN', is_list=False)
        model.cross_reference()

        out = StringIO()
        model.write_bdf(out, close=False)
        out.getvalue()
        out.close()
예제 #18
0
파일: facade.py 프로젝트: wilsonc86/ledger
def generateTransactionsSummary(files, unmatched_only=False):
    """Group transactions by biller code and build a text summary per code.

    :param files: iterable of file objects when ``unmatched_only`` is True,
        otherwise an iterable of (name, file) pairs; each file exposes
        ``transactions.all()``.
    :param unmatched_only: when True, only the unmatched section is emitted.
    :return: dict mapping biller code -> summary text.
    """
    try:
        biller_codes = {}
        biller_code_emails = {}
        # Normalize both input shapes to a flat stream of file objects;
        # the original duplicated the whole grouping loop for each shape.
        if unmatched_only:
            txn_sources = files
        else:
            txn_sources = (f for _name, f in files)
        for f in txn_sources:
            for t in f.transactions.all():
                biller_codes.setdefault(t.biller_code, []).append(t)
        # Generate summaries per biller code
        for code, txns in biller_codes.items():
            matched = [t for t in txns if t.matched]
            unmatched = [t for t in txns if not t.matched]
            output = StringIO()
            if not unmatched_only:
                output.write('Matched transactions:\n')
                for m in matched:
                    output.write('  CRN: {} Amount: ${}\n'.format(m.crn, m.amount))
            output.write('\nUnmatched transactions:\n')
            for u in unmatched:
                output.write('  CRN: {} Amount: ${}\n'.format(u.crn, u.amount))

            contents = output.getvalue()
            output.close()
            biller_code_emails[code] = contents
        return biller_code_emails
    except Exception:
        # traceback.print_exc() takes no exception argument; the original
        # passed the exception object as the ``limit`` parameter.
        traceback.print_exc()
        raise
예제 #19
0
    def test_deqatn_3(self):
        """A single MAXDIFF equation round-trips through write_bdf."""
        model = BDF(debug=None)
        model.cards_to_read.add('DEQATN')
        model.test_deqatn = True
        card = ['DEQATN  1000', '        MAXDIFF(t1,t2)=abs(t2-t1)/t1']
        model.add_card(card, 'DEQATN', is_list=False)
        model.cross_reference()

        out = StringIO()
        model.write_bdf(out, close=False)
        out.getvalue()
        out.close()
예제 #20
0
def format_lp(nodes, constraints_x, qa, constraints_y, qb):
    """
    Serialize a 0/1 integer program in CPLEX LP format, e.g.:

    Maximize
     4 x1 + 2 x2 + 3 x3 + x4
    Subject To
     x1 + x2 <= 1
    End
    """
    buf = StringIO()
    write = buf.write

    # Objective: one weighted term per node.
    write("Maximize\n ")
    for count, (i, score) in enumerate(nodes, start=1):
        write("+ %d x%d " % (score, i))
        # SCIP does not like really long string per row
        if count % 10 == 0:
            write("\n")
    write("\n")

    # Constraints: each group of variables is capped at qa (or qb).
    write("Subject To\n")
    for c in constraints_x:
        write(" %s <= %d\n" % (" + ".join("x%d" % (x + 1) for x in c), qa))
    num_of_constraints = len(constraints_x)

    # Only emit the second constraint set when it is a distinct collection.
    if constraints_x is not constraints_y:
        for c in constraints_y:
            write(" %s <= %d\n" % (" + ".join("x%d" % (x + 1) for x in c), qb))
        num_of_constraints += len(constraints_y)

    print(
        "number of variables (%d), number of constraints (%d)"
        % (len(nodes), num_of_constraints),
        file=sys.stderr,
    )

    # Declare every variable binary.
    write("Binary\n")
    for i, _score in nodes:
        write(" x%d\n" % i)

    write("End\n")

    try:
        return buf.getvalue()
    finally:
        buf.close()
예제 #21
0
class File(object):
    """Read-only file stand-in backed by the in-memory ``files`` mapping."""

    def __init__(self, path, mode=None):
        # Fall back to the basename when the full path is not a key.
        key = path if path in files else os.path.split(path)[-1]
        self.fp = StringIO(files[key])

    def __enter__(self):
        return self.fp

    def __exit__(self, *args):
        return

    def close(self, *args, **kwargs):
        self.fp.close()
예제 #22
0
class File(object):
    """Read-only file stand-in backed by the in-memory ``files`` mapping."""

    def __init__(self, path, mode=None):
        try:
            data = files[path]
        except KeyError:
            # Fall back to the basename when the full path is not a key.
            data = files[os.path.split(path)[-1]]
        self.fp = StringIO(data)

    def __enter__(self):
        return self.fp

    def __exit__(self, *args):
        return

    def close(self, *args, **kwargs):
        self.fp.close()
 def test_run_multiple_times_with_different_stdout_and_stderr(self):
     """Re-running the suite with fresh streams must not leak output between runs."""
     stdout, stderr = StringIO(), StringIO()
     self._run(stdout, stderr)
     self._assert_normal_stdout_stderr_are_empty()
     self._assert_output(stdout, [('My Suite', 2), ('My Test', 1)])
     self._assert_output(stderr, [('Hello, world!', 1)])
     stdout.close(); stderr.close()
     # Second run: one shared stream serves as both stdout and stderr; the
     # variable override changes the logged message.
     output = StringIO()
     self._run(output, output, variable='MESSAGE:Hi, again!')
     self._assert_normal_stdout_stderr_are_empty()
     self._assert_output(output, [('My Suite', 2), ('My Test', 1),
                                  ('Hi, again!', 1), ('Hello, world!', 0)])
     output.close()
     # Third run: no streams given, so output goes to the real process streams.
     self._run(variable='MESSAGE:Last hi!')
     self._assert_output(sys.__stdout__, [('My Suite', 2), ('My Test', 1)])
     self._assert_output(sys.__stderr__, [('Last hi!', 1), ('Hello, world!', 0)])
예제 #24
0
class TMemoryBuffer(TTransportBase, CReadableTransport):
    """Wraps a cStringIO object as a TTransport.

    NOTE: Unlike the C++ version of this class, you cannot write to it
    then immediately read from it.  If you want to read from a
    TMemoryBuffer, you must pass a string to the constructor.
    TODO(dreiss): Make this work like the C++ version.
    """
    def __init__(self, value=None):
        """value -- a value to read from for stringio

        If value is set, this will be a transport for reading,
        otherwise, it is for writing"""
        if value is not None:
            self._buffer = StringIO(value)
        else:
            self._buffer = StringIO()

    def isOpen(self):
        # Open until someone explicitly calls close().
        return not self._buffer.closed

    def open(self):
        # In-memory transport: nothing to open.
        pass

    def close(self):
        self._buffer.close()

    def read(self, sz):
        """Read up to sz bytes from the underlying buffer."""
        return self._buffer.read(sz)

    def write(self, buf):
        """Append buf to the underlying buffer."""
        self._buffer.write(buf)

    def flush(self):
        # In-memory transport: nothing to flush.
        pass

    def getvalue(self):
        """Return everything written so far."""
        return self._buffer.getvalue()

    # Implement the CReadableTransport interface.
    @property
    def cstringio_buf(self):
        return self._buffer

    def cstringio_refill(self, partialread, reqlen):
        # only one shot at reading...
        raise EOFError()
예제 #25
0
class StdoutCapture(object):
    """Redirects sys.stdout into an in-memory buffer until stop() is called."""

    def __init__(self):
        self.captured = StringIO()  # accumulates everything printed while capturing

    def start(self):
        """Install the capture buffer as sys.stdout; returns self for chaining."""
        sys.stdout = self.captured
        return self

    def stop(self):
        """Restore the interpreter's original stdout; returns self for chaining."""
        sys.stdout = sys.__stdout__
        return self

    def value(self):
        """Return the text captured so far."""
        self.captured.flush()
        return self.captured.getvalue()

    def close(self):
        """Release the buffer; value() is unusable afterwards."""
        self.captured.close()
예제 #26
0
파일: utils.py 프로젝트: oinume/tomahawk
class StdoutCapture(object):
    """Redirects sys.stdout into an in-memory buffer until stop() is called."""

    def __init__(self):
        self.captured = StringIO()  # accumulates everything printed while capturing

    def start(self):
        """Install the capture buffer as sys.stdout; returns self for chaining."""
        sys.stdout = self.captured
        return self

    def stop(self):
        """Restore the interpreter's original stdout; returns self for chaining."""
        sys.stdout = sys.__stdout__
        return self

    def value(self):
        """Return the text captured so far."""
        self.captured.flush()
        return self.captured.getvalue()

    def close(self):
        """Release the buffer; value() is unusable afterwards."""
        self.captured.close()
예제 #27
0
def pydot_to_image(pydot_graph):
    """
    Render a pydot graph to an image array in BGR channel order.

    References:
        http://stackoverflow.com/questions/4596962/display-graph-without-saving-using-pydot
    """
    from PIL import Image
    from six.moves import StringIO
    #from cStringIO import StringIO
    # NOTE(review): create_png() returns raw PNG bytes; routing them through
    # StringIO only works on Python 2 -- Python 3 would need BytesIO here.
    png_str = pydot_graph.create_png(prog='dot')
    sio = StringIO()
    sio.write(png_str)
    sio.seek(0)
    pil_img = Image.open(sio)
    img = np.asarray(pil_img.convert('RGB'))
    img = img[..., ::-1]  # to bgr
    pil_img.close()
    sio.close()
    return img
예제 #28
0
    def test_deqatn_9(self):
        """
        based off nast/tpl/ptdmi1.dat
        """
        model = BDF(debug=None)
        card = [
            'deqatn  2       f(x,y,z)= 1.;',
            '        L=1+2+3+',
            '        + 4/min(1,2);',
            '        b= 4.;',
            '        h= 2.;',
            '        t1= 200.;',
            '        t2= 300.;',
            '        t=t1*(L-x)/L+t2*x/L',
            '        +4'
        ]
        model.add_card(card, 'DEQATN', is_list=False)
        model.cross_reference()

        # round-trip through an in-memory file
        out = StringIO()
        model.write_bdf(out, close=False)
        out.getvalue()
        out.close()
 def test_run_multiple_times_with_different_stdout_and_stderr(self):
     """Re-running the suite with fresh streams must not leak output between runs."""
     stdout, stderr = StringIO(), StringIO()
     self._run(stdout, stderr)
     self._assert_normal_stdout_stderr_are_empty()
     self._assert_output(stdout, [('My Suite', 2), ('My Test', 1)])
     self._assert_output(stderr, [('Hello, world!', 1)])
     stdout.close()
     stderr.close()
     # Second run: one shared stream serves as both stdout and stderr; the
     # variable override changes the logged message.
     output = StringIO()
     self._run(output, output, variable='MESSAGE:Hi, again!')
     self._assert_normal_stdout_stderr_are_empty()
     self._assert_output(output, [('My Suite', 2), ('My Test', 1),
                                  ('Hi, again!', 1), ('Hello, world!', 0)])
     output.close()
     # Third run: no streams given, so output goes to the real process streams.
     self._run(variable='MESSAGE:Last hi!')
     self._assert_output(sys.__stdout__, [('My Suite', 2), ('My Test', 1)])
     self._assert_output(sys.__stderr__, [('Last hi!', 1),
                                          ('Hello, world!', 0)])
예제 #30
0
    def run(self, data, delimiter=',', quote_char='"'):
        """Serialize a list of flat dicts to CSV text.

        :param data: non-empty list of dicts; the first row's keys become
            the header and field order.
        :param delimiter: field separator passed to the CSV writer.
        :param quote_char: quoting character passed to the CSV writer.
        :return: the CSV document as a string (\\r\\n line endings).
        :raises InvalidActionParameterException: on malformed ``data``.
        """
        # Type-check before len(): the original called len() first, which
        # raised a bare TypeError for non-sized inputs such as None.
        if not isinstance(data, list):
            raise InvalidActionParameterException("data must be a list")
        if len(data) == 0:
            raise InvalidActionParameterException("data has no rows")
        if not isinstance(data[0], dict):
            raise InvalidActionParameterException(
                "data must be a list of dict")

        fieldnames = data[0].keys()
        sh = StringIO()
        # Pass the formatting options through; the original accepted
        # delimiter/quote_char but never handed them to the writer.
        writer = csv.DictWriter(sh, fieldnames=fieldnames,
                                delimiter=delimiter, quotechar=quote_char)

        writer.writeheader()
        for row in data:
            writer.writerow(row)

        out = sh.getvalue()
        sh.close()
        return out
예제 #31
0
def ssh_sign(private_key, message):
    """
    Sign a string message using the private key.

    :param private_key: The SSH RSA private key as a string.
    :param message: The message to sign as a string.
    :return: Base64 encoded signature as a string.
    """
    # Fail fast when paramiko is unavailable.
    if not EXTRA_MODULES['paramiko']:
        raise MissingModuleException('The paramiko module is not installed or faulty.')
    if PY3 and isinstance(message, str):
        message = message.encode()
    # paramiko reads the key from a file-like object.
    key_buffer = StringIO(private_key)
    priv_k = RSAKey.from_private_key(key_buffer)
    key_buffer.close()
    signature_stream = priv_k.sign_ssh_data(message)
    signature_stream.rewind()
    base64_encoded = base64.b64encode(signature_stream.get_remainder())
    return base64_encoded.decode() if PY3 else base64_encoded
예제 #32
0
    def test_deqatn_3(self):
        """
        Much simplier method of using add_card

        Creates the following equation:

        def maxdiff(t1,t2):
            maxdiff = abs(t2-t1)/t1
            return maxdiff
        """
        model = BDF(debug=None)
        card = ['DEQATN  1000', '        MAXDIFF(t1,t2)=abs(t2-t1)/t1']
        model.add_card(card, 'DEQATN', is_list=False)
        model.cross_reference()

        # round-trip through an in-memory file
        out = StringIO()
        model.write_bdf(out, close=False)
        out.getvalue()
        out.close()
class ClosableOutput(object):
    """In-memory write sink that snapshots its contents into ``value`` on close."""

    encoding = None

    def __init__(self, path):
        self._path = path
        self._buffer = StringIO()

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.close()

    def write(self, data):
        """Append data to the in-memory buffer."""
        self._buffer.write(data)

    def close(self):
        # Snapshot the text before the buffer becomes unusable.
        self.value = self._buffer.getvalue()
        self._buffer.close()

    def __str__(self):
        return self._path
예제 #34
0
    def test_deqatn_4(self):
        """
        per nast/tpl/ptdmi1.dat
        """
        model = BDF(debug=None)
        model.test_deqatn = True
        card = [
            'deqatn  2       f(x,y,z)= 1.;',
            '        l=10.;',
            '        b= 4.;',
            '        h= 2.;',
            '        t1= 200.;',
            '        t2= 300.;',
            '        t=t1*(l-x)/l+t2*(x)/l',
        ]
        model.add_card(card, 'DEQATN', is_list=False)
        model.cross_reference()

        # round-trip through an in-memory file
        out = StringIO()
        model.write_bdf(out, close=False)
        out.getvalue()
        out.close()
예제 #35
0
class ClosableOutput(object):
    """Write-only in-memory stream; closing captures the text in ``value``."""

    encoding = None

    def __init__(self, path):
        self._output = StringIO()
        self._path = path

    def __enter__(self):
        return self

    def __exit__(self, *exc_info):
        self.close()

    def write(self, data):
        """Append data to the underlying buffer."""
        self._output.write(data)

    def close(self):
        """Freeze the accumulated text into ``self.value`` and release the buffer."""
        self.value = self._output.getvalue()
        self._output.close()

    def __str__(self):
        return self._path
예제 #36
0
    def test_deqatn_4(self):
        """
        per nast/tpl/ptdmi1.dat
        """
        model = BDF(debug=None)
        model.test_deqatn = True
        card = [
            'deqatn  2       f(x,y,z)= 1.;',
            '        l=10.;',
            '        b= 4.;',
            '        h= 2.;',
            '        t1= 200.;',
            '        t2= 300.;',
            '        t=t1*(l-x)/l+t2*(x)/l',
        ]
        model.add_card(card, 'DEQATN', is_list=False)
        model.cross_reference()

        # round-trip through an in-memory file
        out = StringIO()
        model.write_bdf(out, close=False)
        out.getvalue()
        out.close()
예제 #37
0
파일: display.py 프로젝트: QUVA-Lab/artemis
class CaptureStdOut(object):
    """
    A logger that both prints to stdout and writes to file.

    Use as a context manager: on entry it installs itself as ``sys.stdout``
    and ``sys.stderr``; on exit it restores the previous streams and closes
    the log file (when one was opened).  An optional ``prefix`` is prepended
    to each console line.
    """

    def __init__(self, log_file_path = None, print_to_console = True, prefix = None):
        """
        :param log_file_path: The path to save the records, or None if you just want to keep it in memory
        :param print_to_console: If True, every message is also echoed to the original stdout.
        :param prefix: Optional string prepended to console output (not to the log).
        """
        self._print_to_console = print_to_console
        if log_file_path is not None:
            make_file_dir(log_file_path)
            self.log = open(log_file_path, 'w')
        else:
            # No path given: keep the whole log in memory.
            self.log = StringIO()
        self._log_file_path = log_file_path
        # Fallback stream used if write() is called outside the context.
        self.old_stdout = _ORIGINAL_STDOUT
        # NOTE(review): `None if prefix is None else prefix` is equivalent
        # to just `prefix` -- kept as-is to avoid behavior drift.
        self.prefix = None if prefix is None else prefix

    def __enter__(self):

        # Remember the current streams so __exit__ can restore them.
        self.old_stdout = sys.stdout
        self.old_stderr = sys.stderr

        sys.stdout = self
        sys.stderr = self
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Flush through self.write() before swapping the real streams back.
        sys.stdout.flush()
        sys.stderr.flush()
        sys.stdout = self.old_stdout
        sys.stderr = self.old_stderr
        self.close()

    def get_log_file_path(self):
        """Return the log file path; only valid when one was supplied."""
        assert self._log_file_path is not None, "You never specified a path when you created this logger, so don't come back and ask for one now"
        return self._log_file_path

    def write(self, message):
        """File-like hook: echo to the console (with prefix) and append to the log."""
        if self._print_to_console:
            # Bare newlines are passed through unprefixed so print() output lines up.
            self.old_stdout.write(message if self.prefix is None or message=='\n' else self.prefix+message)
        self.log.write(message)
        self.log.flush()

    def close(self):
        """Close the backing file; in-memory logs stay readable via read()."""
        if self._log_file_path is not None:
            self.log.close()

    def read(self):
        """Return everything logged so far (from memory or from the file)."""
        if self._log_file_path is None:
            return self.log.getvalue()
        else:
            with open(self._log_file_path) as f:
                txt = f.read()
            return txt

    def __getattr__(self, item):
        # Delegate any other file-like attribute (encoding, isatty, ...) to
        # the real stdout so this object passes as a stream.
        return getattr(self.old_stdout, item)
예제 #38
0
 def test_StringIO_read(self):
     """isstream() should recognise a StringIO instance as a stream."""
     # NOTE(review): StringIO() is handed the open file object itself rather
     # than f.read(); on Python 3 this raises TypeError -- confirm intent.
     with open(datafiles.PSF, "r") as f:
         obj = StringIO(f)
     assert_equal(util.isstream(obj), True)
     obj.close()
예제 #39
0
class DataTable(object):
    """
    import/export for a database table with encryption support.

    parameters:
    columns = names of supported columns
    coltypes = and their Python types
    required = names of required columns or False/True for none/all
    remap = optional remapping (dict) of objfield->column name
    gpg = GPGMIME instance with default key
    dataformat = name of data format
    fileformat = csv (file), json (list), jsondict (key/value), json-file,jsondict-file
    version = the major version is required version for reading,
        the minor version is the (backwards-compatible) version of the writer
    dialect = dialect for csv format (None=autodetect)
    ignore = write: fill missing data (except required) with '', read: ignore unknown fields

    import: open in 'r' mode, for row in table
    export: open in 'w' mode: table.write(row)

    CSV based table file format:
    format major.minor
    field1,field2,...
    data1.1,data1.2,...
    data2.1,data2.2,...
    ...

    JSON: fields, data=[row1,row2,...]
    JSONdict: fields, data=[{key=value,...},...]
    """

    def __init__(self,columns,coltypes=None,required=True,ignore=True,remap=None,gpg=None,
        dataformat='data',fileformat='csv',version=(1,0),pretty=True,dialect=csv.excel):
        self.columns = columns # supported columns
        assert not coltypes or type(coltypes)==dict, 'invalid coltypes'
        assert fileformat in ('csv','json','jsondict','json-file','jsondict-file'), 'invalid fileformat'
        self.coltypes = coltypes # and their types
        self.required = required # required columns
        self.ignore = ignore # whether to ignore unknown fields
        if not remap: remap = {}
        self.remap = remap # optional remapping {field:objfield}
        self.gpg = gpg
        self.dialect = dialect # csv format
        self.pretty = pretty
        self.dataformat = dataformat
        self.fileformat = fileformat
        self.version = version
        self.mode = '' # r or w
        self.csv = None # csv reader/writer
        self.file = None # the file to read/write unencrypted data
        self.origfile = None # the actual input/output file
        self.encrypt = False # recipients or False
        self.sign = False # False=no, True=default_key, other=sender
        self.fields = None # fields provided by input
        self.read_columns = None # (fields to read, and to ignore)
        self.rows = None # tmp store for JSON

    def get_columns(self):
        """Return (columns to read, unknown fields ignored) from the last open('r')."""
        return self.read_columns

    def open(self,f=None,mode='r',encrypt=False,sign=False):
        """write: encrypt = list of recipients, sign = sender or bool(default_key)
        read: encrypt = encrypted data expected, sign= expected key or True=defaultkey
        """
        from six.moves import StringIO
        from six import next, PY3, BytesIO
        self.mode,self.encrypt,self.sign = mode,encrypt,sign
        if self.required==True: self.required = self.columns
        if encrypt or sign: assert self.gpg, 'gpg not intialized'
        self.origfile = self.file = f
        assert mode in ('r','w'), 'invalid mode'
        if mode=='r':
            if sign:
                # Resolve the expected signer fingerprint up front.
                if sign==True:
                    fingerprint = self.gpg.default_key
                    if type(fingerprint) == tuple: fingerprint = fingerprint[0]
                else:
                    if type(sign) == tuple: sign = sign[0]
                    fingerprint = self.gpg.find_key(sign)
                assert fingerprint, "sender key not found"
            if self.fileformat=='csv':
                import re
                if encrypt:
                    # Decrypt into an in-memory buffer before parsing.
                    if PY3 and isinstance(f,StringIO):
                        result = self.gpg.decrypt_str(f.getvalue())
                    else:
                        result = self.gpg.decrypt_file(f)
                    assert result.ok, "decryption failed"
                    if sign: assert result.valid and result.fingerprint==fingerprint, 'invalid signature'
                    f = StringIO(str(result))
                elif sign:
                    # Verify a clear-signed file and strip the signature wrapper.
                    if PY3 and isinstance(f,StringIO):
                        result = self.gpg.verify_str(f.getvalue())
                        f = StringIO(self.gpg.without_signature(f.getvalue()))
                    else:
                        result = self.gpg.verify_file(f)
                        f.seek(0)
                        f = StringIO(self.gpg.without_signature(f.read()))
                    assert result.valid and result.fingerprint==fingerprint, 'invalid signature'
                self.file = f
                dialect = self.dialect
                if not dialect:
                    # Autodetect the CSV dialect from a sample, then rewind.
                    pos = f.tell()
                    dialect = csv.Sniffer().sniff(f.read(1024))
                    f.seek(pos) # rewind
                reader = csv.reader(f,dialect=dialect)
                preamble = next(reader)
                assert len(preamble), 'invalid file format'
                assert preamble[0]==self.dataformat, "file format not supported"
                # Parse "major.minor"; the dot must be escaped in the regex.
                preamble = re.match(r'^(\d+)\.(\d+)',preamble[1])
                # Compare the file's *major* version (group(1)) against the
                # reader's supported major version -- mirrors the JSON branch
                # below, which checks data['version'][0].
                assert int(preamble.group(1))<=self.version[0], "format version not supported"
                fields = next(reader)
                self.csv = reader
            else: # self.fileformat in ('json','jsondict','json-file','jsondict-file'):
                import json
                if self.fileformat in ('json-file','jsondict-file'):
                    self.file = f = json.load(f)
                data, encrypted, signed, result = json_decrypt(f,self.gpg)
                assert data, 'invalid input'
                if encrypt: assert encrypted==bool(encrypt), 'encryption expected'
                if sign:
                    assert signed==bool(sign), 'signature expected'
                    assert result.valid and result.fingerprint==fingerprint, 'invalid signature'
                assert 'format' in data and data['format']==self.dataformat, "file format not supported"
                assert 'version' in data and data['version'][0]<=self.version[0], "file version not supported"
                assert 'fields' in data , "fields missing"
                fields = data['fields']
                self.rows = data['data']
            # Split incoming fields into known columns and (optionally) ignored ones.
            columns, unknown = [], []
            for field in fields:
                if field in self.columns: columns.append(field)
                elif self.ignore: unknown.append(field)
                else: assert False, "unknown field '%s'" % field
            if self.required:
                for field in self.required:
                    assert field in columns, "missing required field '%s'" % field
            self.fields = fields
            self.read_columns = (columns,unknown)
        elif mode=='w':
            assert self.fileformat in ('json','jsondict') or self.file, 'file missing'
            if self.fileformat=='csv':
                # When encrypting/signing, write to a buffer first; close()
                # encrypts the buffer into the real output file.
                if encrypt or sign: self.file = StringIO()
                else: self.file = f
                self.csv = csv.writer(self.file,lineterminator='\n',dialect=self.dialect)
                self.csv.writerow((self.dataformat,'%i.%i' % tuple(self.version)))
                self.csv.writerow(self.columns)
            else: # self.fileformat in ('json','jsondict'):
                self.rows = []

    def close(self):
        "close input/output. StringIO output is left open"
        if not self.mode: return
        if self.mode=='r':
            if self.fileformat=='csv' and self.encrypt: self.file.close() # close tmp buffer
            elif self.fileformat in ('json','jsondict'): return
        elif self.fileformat in ('json','jsondict','json-file','jsondict-file'):
            import json
            data = {'format':self.dataformat,'version':self.version,
                'fields':self.columns,'data':self.rows}
            if self.fileformat in ('json','jsondict'): output = self.file
            else: output = None
            if self.encrypt or self.sign:
                data, result = json_encrypt(data, self.gpg, output=output,
                     encrypt=self.encrypt,sign=self.sign)
                assert data and result,'encryption failed'
            elif not output is None:
                output.update(data)
                data = output
            if self.fileformat in ('json','jsondict'): return data
            if self.pretty:
                json.dump(data,self.file, sort_keys=True, indent=2, separators=(',', ': '))
            else:
                json.dump(data,self.file)
        elif self.encrypt or self.sign:
            # CSV write mode: encrypt/sign the buffered output into origfile.
            from six import PY3, BytesIO, StringIO
            if PY3 and isinstance(self.file,StringIO):
                data = self.file.getvalue()
                if self.encrypt:
                    result = self.gpg.encrypt_str(data,self.encrypt,default_key=self.sign)
                else: #sign
                    result = self.gpg.sign_str(data)
            else:
                self.file.seek(0)
                if self.encrypt:
                    result = self.gpg.encrypt_file(self.file,self.encrypt,default_key=self.sign)
                else: #sign
                    result = self.gpg.sign_str(self.file)
            assert result, "encryption failed"
            self.origfile.write(str(result))
            self.file.close()
        from six.moves import cStringIO
        import sys
        # Leave caller-owned StringIO / stdout open.
        if type(self.origfile) == type(cStringIO()): return
        if self.origfile == sys.stdout: return
        self.origfile.close()

    def __iter__(self):
        """Yield one dict per input row, with values decoded and fields remapped."""
        assert self.mode=='r', 'file not opened for reading'
        if self.fileformat=='csv': rows = self.csv
        else: rows = self.rows
        for row in rows:
            if self.fileformat=='csv':
                assert len(row) == len(self.fields),\
                 "invalid number of columns in line %i" % self.csv.line_num
            else:
                assert len(row) == len(self.fields), "invalid number of columns"
            data = {}
            for i,field in enumerate(self.fields):
                if not field in self.read_columns[0]: continue
                if self.fileformat in ('jsondict','jsondict-file'): x = row[field]
                else: x = row[i]
                if self.coltypes and field in self.coltypes:
                     x = decode_field(x,self.coltypes[field])
                ofield = self.remap.get(field,field)
                data[ofield] = x
            yield data

    def write(self,data,extra={}):
        """Write one row. `data` is a dict or object; `extra` has precedence."""
        assert self.mode=='w', 'file not opened for writing'
        if self.fileformat in ('jsondict','jsondict-file'): row = {}
        else: row = []
        for field in self.columns:
            x = None
            ofield = self.remap.get(field,field)
            if type(data)==dict:
                if ofield in extra: x = extra[ofield]
                elif ofield in data: x = data[ofield]
                # Include the field name in the assertion message (the '%'
                # argument was previously missing).
                else: assert self.ignore or not field in self.required, "field '%s' missing" % field
            else:
                if ofield in extra: x = extra[ofield]
                elif hasattr(data,ofield): x = getattr(data,ofield)
                else: assert self.ignore or not field in self.required, "field '%s' missing" % field
            if self.coltypes and field in self.coltypes:
                x = encode_field(x,self.coltypes[field],self.fileformat)
            if self.fileformat in ('jsondict','jsondict-file'): row[field] = x
            else: row.append(x)
        if self.fileformat=='csv': self.csv.writerow(row)
        else: self.rows.append(row)
예제 #40
0
 def test_StringIO_write(self):
     """An empty, writable StringIO must be recognized as a stream."""
     buf = StringIO()
     assert_equal(util.isstream(buf), True)
     buf.close()
예제 #41
0
 def test_StringIO_read(self):
     """A StringIO holding a data file's contents must be recognized as a stream."""
     with open(datafiles.PSF, "r") as src:
         buf = StringIO(src.read())
     assert_equal(util.isstream(buf), True)
     buf.close()
예제 #42
0
 def test_StringIO_write(self):
     """A freshly created StringIO must be recognized as a stream."""
     stream = StringIO()
     assert_equal(util.isstream(stream), True)
     stream.close()
예제 #43
0
class BaseCommandTestCase(SimpleTestCase):
    u"""Tests for the base management-command class."""
    def __exit_handler(self):
        # Dump whatever the command printed so far when it attempts to exit,
        # so failures are diagnosable.
        if self.__stdout and self.__stderr:
            print_(self.__stdout.getvalue())
            print_(self.__stderr.getvalue())

    def setUp(self):
        # Fresh capture buffers for each test.
        self.__stdout = StringIO()
        self.__stderr = StringIO()

    def tearDown(self):
        self.__stdout.close()
        self.__stdout = None

        self.__stderr.close()
        self.__stderr = None

    def __check_result(self, output, errors):
        """Assert the command echoed its args/options as two JSON lines."""
        self.assertFalse(errors)

        # The test command prints positional args, then options, one JSON
        # document per line.
        args, kwargs = map(json.loads, output.split('\n')[:2])

        self.assertEqual(args, ['asd', '1'])

        for arg in ('verbosity', 'traceback', 'settings', 'pythonpath',
                    'test1', 'test2', 'test3'):
            self.assertIn(arg, kwargs)
        self.assertEqual(kwargs['verbosity'], 0)
        self.assertEqual(kwargs['traceback'], True)
        self.assertEqual(kwargs['test1'], 'qwe')
        self.assertTrue(kwargs['test2'])
        self.assertEqual(kwargs['test3'], 1)

    def test__run_from_argv(self):
        """Command invoked via run_from_argv parses argv correctly."""
        with _StreamReplacer(self.__stdout, self.__stderr):
            command = load_command_class('myapp', 'test_command')

            with _ExitHandler(self.__exit_handler):
                command.run_from_argv([
                    'python',
                    'test_command',
                    '-v',
                    '0',
                    '--traceback',
                    '--test1',
                    'qwe',
                    '--test2',
                    '--test3',
                    '1',
                    'asd',
                    '1',
                ])

        self.__check_result(self.__stdout.getvalue(), self.__stderr.getvalue())

    def test__call_command(self):
        """Command invoked via call_command receives the same args/options."""
        with _StreamReplacer(self.__stdout, self.__stderr):
            with _ExitHandler(self.__exit_handler):
                call_command(
                    'test_command',
                    'asd',
                    '1',
                    verbosity=0,
                    traceback=True,
                    test1='qwe',
                    test2=True,
                    test3=1,
                )

        self.__check_result(self.__stdout.getvalue(), self.__stderr.getvalue())

    def test__command_line(self):
        """Command invoked as a subprocess exits cleanly and echoes its args."""
        process = subprocess.Popen(
            [
                sys.argv[0], 'test_command', '-v', '0', '--traceback',
                '--test1', 'qwe', '--test2', '--test3', '1', 'asd', '1'
            ],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        process.wait()

        output = process.stdout.read().decode('utf-8')
        errors = process.stderr.read().decode('utf-8')

        # Print the child's output before failing so the cause is visible.
        # Previously the assertions lived in an `else` branch, so a nonzero
        # exit code made the test silently pass.
        if process.returncode != 0:
            print_(output)
            print_(errors)
        self.assertEqual(process.returncode, 0)
        self.__check_result(output, errors)
예제 #44
0
파일: data.py 프로젝트: edemocracy/ekklesia
class DataTable(object):
    """
    import/export for a database table with encryption support.

    parameters:
    columns = names of supported columns
    coltypes = and their Python types
    required = names of required columns or False/True for none/all
    remap = optional remapping (dict) of objfield->column name
    gpg = GPGMIME instance with default key
    dataformat = name of data format
    fileformat = csv (file), json (list), jsondict (key/value), json-file,jsondict-file
    version = the major version is required version for reading,
        the minor version is the (backwards-compatible) version of the writer
    dialect = dialect for csv format (None=autodetect)
    ignore = write: fill missing data (except required) with '', read: ignore unknown fields

    import: open in 'r' mode, for row in table
    export: open in 'w' mode: table.write(row)

    CSV based table file format:
    format major.minor
    field1,field2,...
    data1.1,data1.2,...
    data2.1,data2.2,...
    ...

    JSON: fields, data=[row1,row2,...]
    JSONdict: fields, data=[{key=value,...},...]
    """

    def __init__(self,columns=None,coltypes=None,required=True,ignore=True,remap=None,gpg=None,
        dataformat='data',fileformat='csv',version=(1,0),pretty=True,dialect=None):
        assert columns or coltypes, 'columns or coltypes must be specified'
        if columns is None: columns = list(coltypes.keys())
        self.columns = columns # supported columns
        assert not coltypes or isinstance(coltypes,dict), 'invalid coltypes'
        assert fileformat in ('csv','json','jsondict','json-file','jsondict-file'), 'invalid fileformat'
        self.coltypes = coltypes # and their types
        self.required = required # required columns
        self.ignore = ignore # whether to ignore unknown fields
        if not remap: remap = {}
        self.remap = remap # optional remapping {field:objfield}
        self.gpg = gpg
        self.dialect = dialect or csv.excel # csv format
        self.pretty = pretty
        self.dataformat = dataformat
        self.fileformat = fileformat
        self.version = version
        self.mode = '' # r or w
        self.csv = None # csv reader/writer
        self.file = None # the file to read/write unencrypted data
        self.origfile = None # the actual input/output file
        self.encrypt = False # recipients or False
        self.sign = False # False=no, True=default_key, other=sender
        self.fields = None # fields provided by input
        self.read_columns = None # (fields to read, and to ignore)
        self.rows = None # tmp store for JSON

    def get_columns(self):
        """Return (columns to read, unknown fields ignored) from the last open('r')."""
        return self.read_columns

    def open(self,f=None,mode='r',encrypt=False,sign=False):
        """write: encrypt =bool or list of recipients, sign = sender or bool(default_key)
        read: encrypt = bool encrypted data expected, sign= expected key or True=defaultkey
        """
        from six.moves import StringIO
        from six import next, PY3, BytesIO
        self.mode,self.encrypt,self.sign = mode,encrypt,sign
        if self.required is True: self.required = self.columns
        if encrypt or sign: assert self.gpg, 'gpg not intialized'
        self.origfile = self.file = f
        assert mode in ('r','w'), 'invalid mode'
        if mode=='r':
            if sign:
                # Resolve the expected signer fingerprint up front.
                if sign is True:
                    fingerprint = self.gpg.default_key
                    if type(fingerprint) == tuple: fingerprint = fingerprint[0]
                else:
                    if type(sign) == tuple: sign = sign[0]
                    fingerprint = self.gpg.find_key(sign)
                assert fingerprint, "sender key not found"
            if self.fileformat=='csv':
                import re
                if encrypt:
                    # Decrypt into an in-memory buffer before parsing.
                    if PY3 and isinstance(f,StringIO):
                        result = self.gpg.decrypt_str(f.getvalue())
                    else:
                        result = self.gpg.decrypt_file(f)
                    assert result.ok, "decryption failed"
                    if sign: assert result.valid and result.fingerprint==fingerprint, 'invalid signature'
                    f = StringIO(str(result))
                elif sign:
                    # Verify a clear-signed file and strip the signature wrapper.
                    if PY3 and isinstance(f,StringIO):
                        result = self.gpg.verify_str(f.getvalue())
                        f = StringIO(self.gpg.without_signature(f.getvalue()))
                    else:
                        result = self.gpg.verify_file(f)
                        f.seek(0)
                        f = StringIO(self.gpg.without_signature(f.read()))
                    assert result.valid and result.fingerprint==fingerprint, 'invalid signature'
                self.file = f
                dialect = self.dialect
                if not dialect:
                    # Autodetect the CSV dialect from a sample, then rewind.
                    pos = f.tell()
                    dialect = csv.Sniffer().sniff(f.read(1024))
                    f.seek(pos) # rewind
                if not PY3:
                    import unicodecsv
                    reader = unicodecsv.reader
                else:
                    reader = csv.reader
                reader = reader(f,dialect=dialect)
                preamble = next(reader)
                assert len(preamble), 'invalid file format'
                assert preamble[0]==self.dataformat, "file format not supported"
                # Parse "major.minor"; the dot must be escaped in the regex.
                preamble = re.match(r'^(\d+)\.(\d+)',preamble[1])
                # Compare the file's *major* version (group(1)) against the
                # reader's supported major version -- mirrors the JSON branch
                # below, which checks data['version'][0].
                assert int(preamble.group(1))<=self.version[0], "format version not supported"
                fields = next(reader)
                self.csv = reader
            else: # self.fileformat in ('json','jsondict','json-file','jsondict-file'):
                import json
                if self.fileformat in ('json-file','jsondict-file'):
                    self.file = f = json.load(f)
                data, encrypted, signed, result = json_decrypt(f,self.gpg)
                assert data, 'invalid input'
                if encrypt: assert encrypted==bool(encrypt), 'encryption expected'
                if sign:
                    assert signed==bool(sign), 'signature expected'
                    assert result.valid and result.fingerprint==fingerprint, 'invalid signature'
                assert 'format' in data and data['format']==self.dataformat, "file format not supported"
                assert 'version' in data and data['version'][0]<=self.version[0], "file version not supported"
                assert 'fields' in data , "fields missing"
                fields = data['fields']
                self.rows = data['data']
            # Split incoming fields into known columns and (optionally) ignored ones.
            columns, unknown = [], []
            for field in fields:
                if field in self.columns: columns.append(field)
                elif self.ignore: unknown.append(field)
                else: assert False, "unknown field '%s'" % field
            if self.required:
                for field in self.required:
                    assert field in columns, "missing required field '%s'" % field
            self.fields = fields
            self.read_columns = (columns,unknown)
        elif mode=='w':
            assert self.fileformat in ('json','jsondict') or self.file, 'file missing'
            if self.fileformat=='csv':
                # When encrypting/signing, write to a buffer first; close()
                # encrypts the buffer into the real output file.
                if encrypt or sign: self.file = StringIO()
                else: self.file = f
                if not PY3:
                    import unicodecsv
                    writer = unicodecsv.writer
                else:
                    writer = csv.writer
                self.csv = writer(self.file,lineterminator='\n',dialect=self.dialect)
                self.csv.writerow((self.dataformat,'%i.%i' % tuple(self.version)))
                self.csv.writerow(self.columns)
            else: # self.fileformat in ('json','jsondict'):
                self.rows = []

    def close(self):
        "close input/output. StringIO output is left open"
        if not self.mode: return
        if self.mode=='r':
            if self.fileformat=='csv' and self.encrypt: self.file.close() # close tmp buffer
            elif self.fileformat in ('json','jsondict'): return
        elif self.fileformat in ('json','jsondict','json-file','jsondict-file'):
            import json
            data = {'format':self.dataformat,'version':self.version,
                'fields':list(self.columns),'data':self.rows}
            if self.fileformat in ('json','jsondict'): output = self.file
            else: output = None
            if self.encrypt or self.sign:
                data, result = json_encrypt(data, self.gpg, output=output,
                     encrypt=self.encrypt,sign=self.sign)
                assert data and result,'encryption failed'
            elif not output is None:
                output.update(data)
                data = output
            if self.fileformat in ('json','jsondict'): return data
            if self.pretty:
                json.dump(data,self.file, sort_keys=True, indent=2, separators=(',', ': '))
            else:
                json.dump(data,self.file)
        elif self.encrypt or self.sign:
            # CSV write mode: encrypt/sign the buffered output into origfile.
            from six import PY3, BytesIO, StringIO
            if PY3 and isinstance(self.file,StringIO):
                data = self.file.getvalue()
                if self.encrypt:
                    result = self.gpg.encrypt_str(data,self.encrypt,default_key=self.sign)
                else: #sign
                    result = self.gpg.sign_str(data)
            else:
                self.file.seek(0)
                if self.encrypt:
                    result = self.gpg.encrypt_file(self.file,self.encrypt,default_key=self.sign)
                else: #sign
                    result = self.gpg.sign_str(self.file)
            assert result, "encryption failed"
            self.origfile.write(str(result))
            self.file.close()
        from six.moves import cStringIO
        import sys
        # Leave caller-owned StringIO / stdout open.
        if type(self.origfile) == type(cStringIO()): return
        if self.origfile == sys.stdout: return
        self.origfile.close()

    def __iter__(self):
        """Yield one dict per input row, with values decoded and fields remapped."""
        assert self.mode=='r', 'file not opened for reading'
        if self.fileformat=='csv': rows = self.csv
        else: rows = self.rows
        for row in rows:
            if self.fileformat=='csv':
                assert len(row) == len(self.fields),\
                 "invalid number of columns in line %i" % self.csv.line_num
            else:
                assert len(row) == len(self.fields), "invalid number of columns"
            data = {}
            for i,field in enumerate(self.fields):
                if not field in self.read_columns[0]: continue
                if self.fileformat in ('jsondict','jsondict-file'): x = row[field]
                else: x = row[i]
                ftype = self.coltypes.get(field) if self.coltypes else None
                if ftype is None: pass
                elif type(ftype)==tuple: # list of same type
                    if self.fileformat=='csv':
                        x = x.split(';') if x else [] # list-separator
                    else: assert type(x) in (tuple,list), "list expected for "+field
                    x = [decode_field(y,ftype[0]) for y in x]
                else:
                    x = decode_field(x,ftype)
                ofield = self.remap.get(field,field)
                data[ofield] = x
            yield data

    def write(self,data,extra={}):
        """Write one row. `data` is a dict or object; `extra` has precedence."""
        assert self.mode=='w', 'file not opened for writing'
        if self.fileformat in ('jsondict','jsondict-file'): row = {}
        else: row = []
        for field in self.columns:
            x = None
            ofield = self.remap.get(field,field)
            if isinstance(data,dict):
                if ofield in extra: x = extra[ofield]
                elif ofield in data: x = data[ofield]
                # Include the field name in the assertion message (the '%'
                # argument was previously missing).
                else: assert self.ignore or not field in self.required, "field '%s' missing" % field
            else:
                if ofield in extra: x = extra[ofield]
                elif hasattr(data,ofield): x = getattr(data,ofield)
                else: assert self.ignore or not field in self.required, "field '%s' missing" % field
            ftype = self.coltypes.get(field) if self.coltypes else None
            if ftype is None: pass
            elif type(ftype)==tuple: # list of same type
                # (removed leftover debug print of ftype/x/extra)
                if x is None: x = []
                else: assert type(x) in (tuple,list), "list expected for "+field
                x = [encode_field(y,ftype[0],self.fileformat) for y in x]
                if self.fileformat=='csv':
                    x = ';'.join(x) # list-separator
            else:
                x = encode_field(x,ftype,self.fileformat)
            if self.fileformat in ('jsondict','jsondict-file'): row[field] = x
            else: row.append(x)
        if self.fileformat=='csv': self.csv.writerow(row)
        else: self.rows.append(row)