def test_file_output(unicode_filename, verbose=False):
    data = open(unicode_filename, 'rb').read().decode('utf-8')
    handle, filename = tempfile.mkstemp()
    os.close(handle)
    try:
        # Reference output: dump to an in-memory stream.
        stream = StringIO.StringIO()
        yaml.dump(data, stream, allow_unicode=True)
        data1 = stream.getvalue()
        # Dump to a binary file with the default encoding.
        stream = open(filename, 'wb')
        yaml.dump(data, stream, allow_unicode=True)
        stream.close()
        data2 = open(filename, 'rb').read()
        # Dump with an explicit UTF-16-LE encoding; strip the BOM and
        # re-encode as UTF-8 for comparison.
        stream = open(filename, 'wb')
        yaml.dump(data, stream, encoding='utf-16-le', allow_unicode=True)
        stream.close()
        data3 = open(filename, 'rb').read().decode('utf-16-le')[1:].encode('utf-8')
        # Dump through a unicode-aware wrapper around a binary file.
        stream = _unicode_open(open(filename, 'wb'), 'utf-8')
        yaml.dump(data, stream, allow_unicode=True)
        stream.close()
        data4 = open(filename, 'rb').read()
        assert data1 == data2, (data1, data2)
        assert data1 == data3, (data1, data3)
        assert data1 == data4, (data1, data4)
    finally:
        if os.path.exists(filename):
            os.unlink(filename)
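A minimal sketch of the same idea under Python 3 and a current PyYAML (the filename and sample data are placeholders): given a binary stream and an explicit encoding, yaml.dump writes encoded bytes, so the file contents should match an in-memory dump of the same data.

import yaml

data = u'Hello, \u4e16\u754c'  # placeholder non-ASCII scalar

# Reference output: with no stream and encoding=None, dump returns text.
text = yaml.dump(data, allow_unicode=True)

# Dump the same data to a binary file with an explicit encoding.
with open('out.yaml', 'wb') as stream:
    yaml.dump(data, stream, encoding='utf-8', allow_unicode=True)

# The file holds the UTF-8 bytes of the same document.
with open('out.yaml', 'rb') as stream:
    assert stream.read().decode('utf-8') == text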
Example #2
def test_representer_types(code_filename, verbose=False):
    test_constructor._make_objects()
    for allow_unicode in [False, True]:
        for encoding in ['utf-8', 'utf-16-be', 'utf-16-le']:
            native1 = test_constructor._load_code(open(code_filename, 'rb').read())
            native2 = None
            output = None
            try:
                output = yaml.dump(native1, Dumper=test_constructor.MyDumper,
                            allow_unicode=allow_unicode, encoding=encoding)
                native2 = yaml.load(output, Loader=test_constructor.MyLoader)
                try:
                    if native1 == native2:
                        continue
                except TypeError:
                    pass
                value1 = test_constructor._serialize_value(native1)
                value2 = test_constructor._serialize_value(native2)
                if verbose:
                    print "SERIALIZED NATIVE1:"
                    print value1
                    print "SERIALIZED NATIVE2:"
                    print value2
                assert value1 == value2, (native1, native2)
            finally:
                if verbose:
                    print "NATIVE1:"
                    pprint.pprint(native1)
                    print "NATIVE2:"
                    pprint.pprint(native2)
                    print "OUTPUT:"
                    print output
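The test above boils down to a dump/load round trip across encodings. A condensed sketch with current PyYAML, substituting safe_dump/safe_load for the suite's custom MyDumper/MyLoader and a placeholder object for the code-file fixtures:

import yaml

native1 = {'name': u'caf\xe9', 'scores': [1, 2, 3]}  # placeholder data

for allow_unicode in [False, True]:
    for encoding in ['utf-8', 'utf-16-be', 'utf-16-le']:
        # With an explicit encoding, dump returns encoded bytes.
        output = yaml.safe_dump(native1, allow_unicode=allow_unicode,
                                encoding=encoding)
        # load detects the encoding from the BOM (or assumes UTF-8).
        native2 = yaml.safe_load(output)
        assert native1 == native2, (native1, native2)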
Example #3
def test_recursive(recursive_filename, verbose=False):
    # The test file is Python source that defines a recursive object
    # named `value`.
    exec open(recursive_filename, "rb").read()
    value1 = value
    output1 = None
    value2 = None
    output2 = None
    try:
        output1 = yaml.dump(value1)
        value2 = yaml.load(output1)
        output2 = yaml.dump(value2)
        assert output1 == output2, (output1, output2)
    finally:
        if verbose:
            # print "VALUE1:", value1
            # print "VALUE2:", value2
            print "OUTPUT1:"
            print output1
            print "OUTPUT2:"
            print output2
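For context, PyYAML serializes recursive structures with anchors and aliases, which is why the dump-load-dump cycle above is expected to stabilize. A minimal sketch:

import yaml

value = []
value.append(value)            # a list that contains itself

output1 = yaml.dump(value)     # '&id001\n- *id001\n'
value2 = yaml.safe_load(output1)
assert value2[0] is value2     # the alias restores the cycle
output2 = yaml.dump(value2)
assert output1 == output2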
Example #4
def main(file):
    stream = open(file, "r")
    docs = yaml.load(stream)
    dirname = os.path.dirname(sys.argv[0])
    for doc in docs:
        id = doc['id']

        newfile = open(dirname + "/../_papers/" + id + ".html", "w")
        doc['layout'] = 'singlepaper'
        doc['picture'] = 'paco2'

        # explicit_start=True emits the opening '---' of the Jekyll front
        # matter; the closing '---' is written by hand below.
        string = yaml.dump(doc,
                           explicit_start=True,
                           default_flow_style=False,
                           allow_unicode=True)
        newfile.write(string)
        newfile.write("---\n\n")
        newfile.write("{% include singlepaper.html paper=page %}")

        newfile.close()
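explicit_start=True is what produces the leading '---' of the Jekyll front matter; the closing '---' still has to be written by hand, as above. A condensed sketch of the resulting page shape (field values are placeholders):

import yaml

doc = {'id': 'paper42', 'layout': 'singlepaper', 'title': 'Example'}
front_matter = yaml.dump(doc, explicit_start=True,
                         default_flow_style=False, allow_unicode=True)
page = front_matter + "---\n\n{% include singlepaper.html paper=page %}"
print(page)
# ---
# id: paper42
# layout: singlepaper
# title: Example
# ---
#
# {% include singlepaper.html paper=page %}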
Example #5
def test_unicode_output(unicode_filename, verbose=False):
    data = open(unicode_filename, 'rb').read().decode('utf-8')
    value = ' '.join(data.split())
    for allow_unicode in [False, True]:
        data1 = yaml.dump(value, allow_unicode=allow_unicode)
        for encoding in [None, 'utf-8', 'utf-16-be', 'utf-16-le']:
            stream = StringIO.StringIO()
            yaml.dump(value, _unicode_open(stream, 'utf-8'), encoding=encoding, allow_unicode=allow_unicode)
            data2 = stream.getvalue()
            data3 = yaml.dump(value, encoding=encoding, allow_unicode=allow_unicode)
            stream = StringIO.StringIO()
            yaml.dump(value, stream, encoding=encoding, allow_unicode=allow_unicode)
            data4 = stream.getvalue()
            for copy in [data1, data2, data3, data4]:
                if allow_unicode:
                    try:
                        # Skip the leading characters (possible BOM), then
                        # check that non-ASCII characters actually survived.
                        copy[4:].encode('ascii')
                    except (UnicodeDecodeError, UnicodeEncodeError), exc:
                        if verbose:
                            print exc
                    else:
                        raise AssertionError("expected an exception")
                else:
                    # Without allow_unicode the output must be pure ASCII.
                    copy[4:].encode('ascii')
            assert isinstance(data1, str), (type(data1), encoding)
            data1.decode('utf-8')
            assert isinstance(data2, str), (type(data2), encoding)
            data2.decode('utf-8')
            if encoding is None:
                assert isinstance(data3, unicode), (type(data3), encoding)
                assert isinstance(data4, unicode), (type(data4), encoding)
            else:
                assert isinstance(data3, str), (type(data3), encoding)
                data3.decode(encoding)
                assert isinstance(data4, str), (type(data4), encoding)
                data4.decode(encoding)
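The str/unicode distinctions asserted above are Python 2 specifics. Under Python 3 the same rule reads: encoding=None yields str, any explicit encoding yields bytes. A quick sketch:

import yaml

value = u'\xfcnic\xf6de'  # placeholder non-ASCII scalar

text = yaml.dump(value, allow_unicode=True)                   # str
raw = yaml.dump(value, allow_unicode=True, encoding='utf-8')  # bytes
assert isinstance(text, str)
assert isinstance(raw, bytes)
assert raw.decode('utf-8') == text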
Example #6
def save(self):
    simpleyaml.dump(self.config['var'],
                    open(os.path.join('/var/tmp', self.configfilename), 'w'))
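The one-liner above never closes the file it opens. A sketch of the same method with a context manager, assuming simpleyaml.dump accepts an open stream the way yaml.dump does:

import os
import simpleyaml

def save(self):
    path = os.path.join('/var/tmp', self.configfilename)
    # `with` guarantees the handle is flushed and closed.
    with open(path, 'w') as stream:
        simpleyaml.dump(self.config['var'], stream)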
Example #7
    proximity_counts = get_proximity_counts(map_list, edge_class, directions, proximity_stats=save_prox)
    proximity_prob = calculate_probabilities(proximity_counts, min_n_size, edge_class)
    if class_prob_csv:
        classprobs = classify_byprob(map_list, proximity_prob, directions, edge_class)
        with open(class_prob_csv, 'w') as f:
            w = csv.writer(f)
            w.writerow(["i", "j"] + proximity_prob.keys() + ["intended"])
            for r in classprobs:
                w.writerow(r)
    prob_headers = csv_probs(proximity_prob, map_classification_type)
    header_names = ["i", "j", map_classification_type] + get_header_from_prox_prob(prob_headers)
    writer.writerow(header_names)
    for i in range(0, len(map_list)):
        for j in range(0, len(map_list[i])):
            cell_class = get_class(j, i, map_list)
            writer.writerow([i, j, cell_class]+prob_headers[cell_class].values())


if __name__ == "__main__":
    args = parse_args()
    if args.output:
        out_fd = open(args.output, "w")
    else:
        out_fd = sys.stdout
    prox = dict()
    if args.load:
        prox = yaml.load(open(args.load))
    main(args.map_csv, out_fd, args.edge_class, args.min_n_size, args.map_classification_type, save_prox=prox, class_prob_csv=args.probs)
    if args.save:
        open(args.save, 'w').write(yaml.dump(prox))
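The --load/--save handling above is a plain YAML persistence round trip. A self-contained sketch of the pattern (the cache filename and the 'runs' counter are placeholders):

import os
import yaml

CACHE = 'prox_cache.yaml'

# Load the cached statistics if present.
prox = {}
if os.path.exists(CACHE):
    with open(CACHE) as f:
        prox = yaml.safe_load(f) or {}

prox['runs'] = prox.get('runs', 0) + 1

# Write the updated statistics back out.
with open(CACHE, 'w') as f:
    f.write(yaml.safe_dump(prox))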
Example #8
            image=blend_images(imgs),
            n=len(imgs))

    # Update the metadata and global means: fold each new class image into
    # the running mean, weighted by its share of the total sample count.
    for name, newdata in img_by_class.items():
        if name in meta_data:
            data = meta_data[name]
            data['n'] += newdata['n']
            cell = newdata['image']
            if not args.noalign:
                cell = align_images.align_to(data['template'], cell)
            img = data['image']
            data['image'] = Image.blend(img, cell, float(newdata['n']) / data['n'])
        else:
            meta_data[name] = dict(
                image=newdata['image'],
                n=newdata['n'],
                path=name + '_mean.png',
                template_path=name + '_temp_mean.png',
                template=newdata['template'])

    # write out images and remove PIL data
    for name, data in meta_data.items():
        data['image'].save(data['path'])
        data['template'].save(data['template_path'])
        del data['image']
        del data['template']

    with open(args.output, 'w') as out:
        out.write(yaml.dump(meta_data))
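Deleting the PIL objects before the final dump is what keeps the YAML output to plain types: yaml.dump would serialize them with !!python/object tags, and safe_dump would refuse them outright. A minimal sketch of the same idea:

import yaml

meta_data = {
    'classA': {'n': 12, 'path': 'classA_mean.png',
               'image': object()},  # stand-in for a PIL image
}

# Strip the non-serializable values, then dump only the plain fields.
for data in meta_data.values():
    data.pop('image', None)

print(yaml.safe_dump(meta_data))
# classA:
#   n: 12
#   path: classA_mean.png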