Example 1
def build_metafiles(model):
    name = model['metadata']['name']
    return {
            'python/' + snake_case(name) + '/' + snake_case(name) + '.html' : env.get_template('main.html').render(standalone=False, **model),
            'python/' + snake_case(name) + '/index.html' : env.get_template('main.html').render(standalone=True, **model),
            'python/' + snake_case(name) + '/' + snake_case(name) + '_preview.html' : env.get_template('preview.html').render(**model)
            }
Example 2
def build_locals(model):
    locals = model["locals"]
    hardware = model['metadata']['hardware']
    module_name = snake_case(model['metadata']['name'])    
    moves = {}
    
    for local in locals:
        name = snake_case(local["name"])
        file = local["file"]
        row_type = local["type"]
        json_path = snake_case(model['metadata']['name']) + "_dataset.js"
        new_file = name+'.csv'
        new_small_file = name+'_small.csv'
        keys = set()
        with open(file, "r") as local_file:
            if row_type == "json":
                data_list = json.load(local_file)
                data = [JsonLeafNodes(name+'.[0]', item).result for item in data_list]
                key_names = set([key for row in data for key in row.keys()])
                short_key_names = shortest_unique_strings(key_names)
                key_name_map = dict(zip(short_key_names, key_names))
                data = [OrderedDict(sorted([(short, kill_unicode(row.get(long, '')))
                                            for short, long in key_name_map.items()]))
                        for row in data]
                
                #json_list = [OrderedDict(sorted(flatten_json(_byteify(element), '_').items()))
                #             for element in data_list]
                write_list_of_dictionaries(new_file, data)
                moves[new_file] = 'csv/'+module_name+'/'
                write_list_of_dictionaries(new_small_file, data[:hardware])
                moves[new_small_file] = 'csv/'+module_name+'/'
            elif row_type == "csv":
                pass
    return moves
Example 3
def build_csv(model):
    files = {}
    moves = {}
    descriptions = []

    # build_locals yields a (moves, full_key_descriptions) pair for each local dataset
    for local_moves, local_descriptions in build_locals(model):
        moves.update(local_moves)
        descriptions.append(local_descriptions)

    for appendix in model['metadata']['appendix']:
        moves[appendix['file']] = 'csv/' + snake_case(
            model['metadata']['name']) + '/'

    files.update(build_metafiles(model, descriptions))

    icon_file = model['metadata']['icon']
    name = snake_case(model['metadata']['name'])
    new_folder = 'csv/' + name + '/'
    if os.path.exists(icon_file):
        with open(icon_file, 'rb') as icon_data:
            files[new_folder + name + '.png'] = icon_data.read()
    else:
        model["metadata"]["icon"] = False

    return files, moves
Example 4
def build_metafiles(model):
    name = model['metadata']['name']
    return {
        'python/' + snake_case(name) + '/' + snake_case(name) + '.html':
        env.get_template('main.html').render(standalone=False, **model),
        'python/' + snake_case(name) + '/index.html':
        env.get_template('main.html').render(standalone=True, **model),
        'python/' + snake_case(name) + '/' + snake_case(name) + '_preview.html':
        env.get_template('preview.html').render(**model)
    }
Example 5
def build_locals(model):
    locals = model["locals"]
    hardware = model['metadata']['hardware']
    module_name = snake_case(model['metadata']['name'])
    moves = {}

    for local in locals:
        name = snake_case(local["name"])
        file = local["file"]
        row_type = local["type"]
        json_path = module_name + "_dataset.js"
        new_file = module_name + '.csv'
        new_small_file = module_name + '_small.csv'
        keys = set()
        with open(file, "r") as local_file:
            if row_type == "json":
                data_list = json.load(local_file)
                data = [
                    JsonLeafNodes(name + '.[0]', item).result
                    for item in data_list
                ]
                data_long_names = data
                key_names = set([key for row in data for key in row.keys()])
                short_key_names = shortest_unique_strings(key_names)
                key_name_map = dict(zip(short_key_names, key_names))
                comment_map = {
                    short: model['structures_comments'].get(long, '')
                    for short, long in key_name_map.items()
                }
                data = [
                    OrderedDict(
                        sorted([(short, kill_unicode(row.get(long, '')))
                                for short, long in key_name_map.items()]))
                    for row in data
                ]
                full_key_descriptions = [{
                    'name': short,
                    'short': short,
                    'type': value,
                    'comment': comment_map.get(short),
                    'example': value
                } for short, value in data[0].items()]

                #json_list = [OrderedDict(sorted(flatten_json(_byteify(element), '_').items()))
                #             for element in data_list]
                write_list_of_dictionaries(new_file, data)
                moves[new_file] = 'csv/' + module_name + '/'
                write_list_of_dictionaries(new_small_file, data[:hardware])
                moves[new_small_file] = 'csv/' + module_name + '/'
            elif row_type == "csv":
                pass
        yield moves, full_key_descriptions
Example 6
def build_database(model):
    name = snake_case(model['metadata']['name'])
    new_file = name+'.db'
    if os.path.exists(new_file):
        os.remove(new_file)
    database_file = sqlite3.connect(new_file)
    return new_file, database_file
Example 7
def build_racket(model, fast):
    name = snake_case(model['metadata']['name'])
    new_folder = 'racket/' + name + '/'
    
    files = {}
    
    icon_file = model['metadata']['icon']
    
    if os.path.exists(icon_file):
        with open(icon_file, 'rb') as icon_data:
            files[new_folder+name+'.png'] = icon_data.read()
    else:
        model["metadata"]["icon"] = False
    
    files.update(build_metafiles(model))
    files.update(build_main(model))
    
    if not fast:
        new_file, database_file = build_database(model)
        build_locals(model, database_file)
        
        database_file.close()
        moves = {new_file: new_folder}
        for appendix in model['metadata']['appendix']:
            moves[appendix['file']] = new_folder
    else:
        moves = {}
    
    return files, moves
Example 8
def build_main(model):
    name = model['metadata']['name']
    root = 'java/{name}/src/corgis/{name}/'.format(name=snake_case(name))
    return {
        root + camel_case_caps(name) + 'Library.java':
        env.get_template('main.java').render(**model)
    }
Example 9
def build_blockpy(model, fast):
    name = snake_case(model['metadata']['name'])
    new_folder = 'blockpy/' + name + '/'

    files = {}

    icon_file = model['metadata']['icon']

    if os.path.exists(icon_file):
        with open(icon_file, 'rb') as icon_data:
            files[new_folder + name + '.png'] = icon_data.read()
    else:
        model["metadata"]["icon"] = False

    results = list(build_locals(model, new_folder))
    moves = {f: new_folder for s in results for f in s[:2]}
    key_names = [k for s in results for k in s[2]]
    indexes = {k: v for s in results for k, v in s[3].items()}
    full_key_descriptions = [i[4] for i in results]

    tifa_definitions = []
    for interface, s in zip(model['interfaces'], results):
        tifa_definitions.append(
            (interface['name'], TifaDefinition(s[5]).result))

    files.update(
        build_metafiles(model, key_names, indexes, full_key_descriptions,
                        tifa_definitions))

    return files, moves
Example 10
def build_python(model, fast):
    name = snake_case(model['metadata']['name'])
    new_folder = 'python/' + name + '/'

    files = {}

    icon_file = model['metadata']['icon']
    if os.path.exists(icon_file):
        with open(icon_file, 'rb') as icon_data:
            files[new_folder + name + '.png'] = icon_data.read()
    else:
        model["metadata"]["icon"] = False
    splash_file = model['metadata']['splash']
    if os.path.exists(splash_file):
        with open(splash_file, 'rb') as splash_data:
            files[new_folder + name + "_splash.png"] = splash_data.read()
    else:
        model["metadata"]["splash"] = False

    files.update(build_metafiles(model))
    files.update(build_main(model))

    if not fast:
        new_file, database_file = build_database(model)
        build_locals(model, database_file)

        database_file.close()
        moves = {new_file: new_folder}
        for appendix in model['metadata']['appendix']:
            moves[appendix['file']] = new_folder
    else:
        moves = {}

    return files, moves
Example 11
def build_visualizer(model, fast):
    name = snake_case(model['metadata']['name'])
    new_folder = 'visualizer/' + name + '/'
    
    files = {}
    
    icon_file = model['metadata']['icon']
    if os.path.exists(icon_file):
        with open(icon_file, 'rb') as icon_data:
            files[new_folder+name+'.png'] = icon_data.read()
    else:
        model["metadata"]["icon"] = False
    splash_file = model['metadata']['splash']
    if os.path.exists(splash_file):
        with open(splash_file, 'rb') as splash_data:
            files[new_folder+name+"_splash.png"] = splash_data.read()
    else:
        model["metadata"]["splash"] = False
    
    if not fast:
        moves = {f: new_folder for f in build_locals(model, new_folder)}
    else:
        moves = {}
        
    files.update(build_metafiles(model))
    
    return files, moves
Example 12
def build_locals(model, database_file):
    locals = model["locals"]
    moves = {}
    
    for local in locals:
        name = snake_case(local["name"])
        file = local["file"]
        type = local["type"]
        with open(file, "r") as local_file:
            if type == "json":
                data_list = json.load(local_file)
                json_list = [json.dumps(element) for element in data_list]
                indexes = []
                index_titles = ""
                for index in local["indexes"]:
                    index_name = index["name"]
                    index_path = index["jsonpath"]
                    indexed_value = [json_path(index_path, element) for element in data_list]
                    indexes.append(indexed_value)
                    index_titles += ", {} text".format(index_name)
                
                header = "{}(data{})".format(name, index_titles)
                blanks = "?" + (", ?" * len(indexes))
                database_file.execute('CREATE TABLE '+header)
                if local["indexes"]:
                    the_list = zip(json_list, zip(*indexes))
                else:
                    the_list = zip(json_list, [tuple() for x in json_list])
                for row, indices in the_list:
                    database_file.execute("INSERT INTO {} VALUES ({})".format(name, blanks),
                                          (row,)+indices)
                database_file.commit()
            elif type == "csv":
                pass
Example 13
def build_database(model):
    name = snake_case(model['metadata']['name'])
    new_file = name+'.db'
    if os.path.exists(new_file):
        os.remove(new_file)
    database_file = sqlite3.connect(new_file)
    return new_file, database_file
Example 14
def build_sql(model, fast):
    name = snake_case(model['metadata']['name'])
    new_folder = 'sql/' + name + '/'
    
    files = {}
    
    icon_file = model['metadata']['icon']
    
    if os.path.exists(icon_file):
        with open(icon_file, 'rb') as icon_data:
            files[new_folder+name+'.png'] = icon_data.read()
    else:
        model["metadata"]["icon"] = False
    
    files.update(build_metafiles(model))
    
    if not fast:
        new_file, database_file = build_database(model)
        sql_file = name+".sql"
        build_locals(model, database_file, sql_file)
        database_file.close()
        moves = {new_file: new_folder, sql_file: new_folder}
        for appendix in model['metadata']['appendix']:
            moves[appendix['file']] = new_folder
    else:
        moves = {}
    
    return files, moves
Example 15
def build_metafiles(model):
    name = snake_case(model['metadata']['name'])
    root = 'csv/{name}/'.format(name=name)
    return {
            root+'index.html' : env.get_template('csv_main.html').render(standalone=True, **model),
            root+name+'.html' : env.get_template('csv_main.html').render(standalone=False, **model)
            }
Example 16
def build_metafiles(model, report):
    name = snake_case(model['metadata']['name'])
    root = 'metrics/' + name + '/'
    return {
            root+'index.html' : env.get_template('metrics_main.html').render(report=report, standalone=True, **model),
            root+name+'.html' : env.get_template('metrics_main.html').render(report=report, standalone=False, **model)
            }
Example 17
def build_metafiles(model, key_names, indexes, full_key_descriptions,
                    tifa_definitions):
    name = snake_case(model['metadata']['name'])
    root = 'blockpy/' + name + '/'
    return {
        root + 'index.html':
        env.get_template('blockpy_main.html').render(
            key_names=key_names,
            indexes=indexes,
            full_key_descriptions=full_key_descriptions,
            standalone=True,
            **model),
        root + name + '.html':
        env.get_template('blockpy_main.html').render(
            key_names=key_names,
            indexes=indexes,
            full_key_descriptions=full_key_descriptions,
            standalone=False,
            **model),
        root + name + '_skulpt.js':
        env.get_template('skulpt.js').render(key_names=key_names,
                                             indexes=indexes,
                                             tifa_definitions=tifa_definitions,
                                             **model),
        root + name + '_blockly.js':
        env.get_template('blockly.js').render(key_names=key_names,
                                              indexes=indexes,
                                              **model)
    }
Example 18
def build_database(model):
    name = snake_case(model['metadata']['name'])
    new_file = name+'.csv'
    if os.path.exists(new_file):
        os.remove(new_file)
    with open(new_file, 'w') as output:
        pass
    return new_file
Example 19
def build_database(model):
    name = snake_case(model['metadata']['name'])
    new_file = name + '.csv'
    if os.path.exists(new_file):
        os.remove(new_file)
    with open(new_file, 'w') as output:
        pass
    return new_file
Example 20
def build_metafiles(model):
    name = snake_case(model['metadata']['name'])
    root = 'racket/'+name+'/'
    return {
            root + name + '.scrbl' : env.get_template('main.scrbl').render(**model),
            root+'index.html' : env.get_template('racket_main.html').render(standalone=True, **model),
            root+name+'.html' : env.get_template('racket_main.html').render(standalone=False, **model)
            }
Example 21
def build_metafiles(model, key_names, indexes, full_key_descriptions):
    name = snake_case(model['metadata']['name'])
    root = 'blockpy/' + name + '/'
    return {
            root+'index.html' : env.get_template('blockpy_main.html').render(key_names=key_names, indexes=indexes, full_key_descriptions=full_key_descriptions, standalone=True, **model),
            root+name+'.html' : env.get_template('blockpy_main.html').render(key_names=key_names, indexes=indexes, full_key_descriptions=full_key_descriptions, standalone=False, **model),
            root + name + '_skulpt.js' : env.get_template('skulpt.js').render(key_names=key_names, indexes=indexes, **model),
            root + name + '_blockly.js' : env.get_template('blockly.js').render(key_names=key_names, indexes=indexes,  **model)
            }
Example 22
def build_metafiles(model):
    name = snake_case(model['metadata']['name'])
    root = 'json/{name}/'.format(name=name)
    return {
        root + 'index.html':
        env.get_template('json_main.html').render(standalone=True, **model),
        root + name + '.html':
        env.get_template('json_main.html').render(standalone=False, **model)
    }
Example 23
def build_csv(model):
    files = {}
    files.update(build_metafiles(model))
    
    moves = {}
    moves.update(build_locals(model))
    for appendix in model['metadata']['appendix']:
        moves[appendix['file']] = 'csv/' + snake_case(model['metadata']['name']) + '/'
        
    icon_file = model['metadata']['icon']
    name = snake_case(model['metadata']['name'])
    new_folder = 'csv/' + name + '/'
    if os.path.exists(icon_file):
        with open(icon_file, 'rb') as icon_data:
            files[new_folder+name+'.png'] = icon_data.read()
    else:
        model["metadata"]["icon"] = False
    
    return files, moves
Example 24
def build_locals(model, js_path):
    locals = model["locals"]
    metadata_name = snake_case(model['metadata']['name'])
    model['visualized_datasets'] = {}
    for local in locals:
        name = local["name"]
        file = local["file"]
        row_type = local["type"]
        row = local["row"]
        json_path = snake_case(model['metadata']['name']) + "_dataset.js"
        with open(file, "r") as local_file, open(json_path, 'w') as output_file:
            output_file.write("_IMPORTED_DATASETS['{}'] = ".format(metadata_name))
            if row_type == "json":
                data_list = json.load(local_file)
                data = [JsonLeafNodes(name+'.[0]', item).result for item in data_list]                
                data = lod_to_dol(data)
                remove_outliers(data)
                for row in data:
                    if row['name'] in model['structures_comments']:
                        row['comment'] = model['structures_comments'][row['name']]
                    is_index = row['name'] in [i['jsonpath'] for i in local['indexes']]
                    row['index'] = is_index
                    if is_index:
                        print("Index:", row["name"])
                key_names = [row['name'] for row in data]
                short_key_names = shortest_unique_strings(key_names)
                key_name_map = dict(zip(key_names, short_key_names))
                full_key_descriptions = [
                    {'name': row['name'], 
                     'short': key_name_map[row['name']],
                     'type': row['type'],
                     'comment': row.get('comment', ''),
                     'example': row['data'][0]}
                    for row in data]
                indexes = {key_name_map[row['name']]: row for row in data if row['index']}
                for index_data in indexes.values():
                    index_data['data'] = [str(val) for val in index_data['data']]
                data = {key_name_map[row['name']]: row for row in data}
                #sample_down(data)
                json.dump(data, output_file, indent=2)
                #model['visualized_datasets'][name] = data.keys()
            print("File sizes:", "{}mb".format(os.stat(json_path).st_size / 1024 / 1024))
        yield json_path, short_key_names, indexes, full_key_descriptions
Example 25
def to_python_variable(source):
    was_list = is_list(source)
    if was_list:
        source = strip_list(source) #chomp out the "list(" and ")"
    converted_type= python_types.get(source, None)
    if converted_type is None: # need to convert to custom class
        converted_type = snake_case(source)
    if was_list: # if it's a list, apply it to each element
        return "list_of_{}".format(converted_type)
    else: # otherwise just return it normally
        return "a_{}".format(converted_type)
Example 26
def build_main(model):
    name = model['metadata']['name']

    tifa_definitions = []
    locals = model["locals"]
    from languages.tifa import TifaDefinition
    for interface, local in zip(model['interfaces'], locals):
        file = local["file"]
        type = local["type"]
        with open(file, "r") as local_file:
            if type == "json":
                data_list = json.load(local_file)
                tifa_definitions.append(
                    (interface['name'], TifaDefinition(data_list).result))

    return {
        'python/' + snake_case(name) + '/' + snake_case(name) + '.py':
        env.get_template('main.py').render(tifa_definitions=tifa_definitions,
                                           **model)
    }
Example 27
def to_sql_variable(source):
    was_list = is_list(source)
    if was_list:
        source = strip_list(source) #chomp out the "list(" and ")"
    converted_type= sql_type_names.get(source, None)
    if converted_type is None: # need to convert to custom class
        converted_type = snake_case(source)
    if was_list: # if it's a list, apply it to each element
        return "list_of_{}".format(converted_type)
    else: # otherwise just return it normally
        return "a_{}".format(converted_type)
Example 28
def build_json(model):
    files = {}
    files.update(build_metafiles(model))

    moves = {}
    moves.update(build_locals(model))
    for appendix in model['metadata']['appendix']:
        moves[appendix['file']] = 'json/' + snake_case(
            model['metadata']['name']) + '/'

    icon_file = model['metadata']['icon']
    name = snake_case(model['metadata']['name'])
    new_folder = 'json/' + name + '/'
    if os.path.exists(icon_file):
        with open(icon_file, 'rb') as icon_data:
            files[new_folder + name + '.png'] = icon_data.read()
    else:
        model["metadata"]["icon"] = False

    return files, moves
Example 29
def build_report(model):
    locals = model["locals"]
    hardware = model['metadata']['hardware']
    module_name = snake_case(model['metadata']['name'])
    json_reports = {}
    for local in locals:
        name = snake_case(local["name"])
        file = local["file"]
        type = local["type"]
        row = local["row"]
        with open(file, "r") as local_file:
            if type == "json":
                data_list = json.load(local_file)
                metrics = JsonMetrics({}, data_list, row)
                json_reports[name] = metrics.report
                json_reports[name]['length'] = len(data_list)
                json_reports[name]['size'] = os.path.getsize(file)
                json_reports[name]['tags'] = model['metadata']['tags']
            elif type == "csv":
                pass
    return json.dumps(json_reports, indent=2)
Example 30
def build_classes(model):
    name = model['metadata']['name']
    root = 'java/{name}/src/corgis/{name}/domain/'.format(
        name=snake_case(name))
    files = {}
    template = env.get_template('domain.java')
    for structure_name, structure in model['structures'].items():
        for path, data in structure['dictionaries'].items():
            filename = root + camel_case_caps(
                clean_invalid_characters(data['name'])) + '.java'
            files[filename] = template.render(dictionary=data, **model)
    return files
Example 31
def build_metafiles(model):
    name = snake_case(model['metadata']['name'])
    root = 'java/{name}/'.format(name=name)
    return {
        root + '.classpath':
        env.get_template('.classpath').render(**model),
        root + '.project':
        env.get_template('.project').render(**model),
        root + 'build.xml':
        env.get_template('build.xml').render(**model),
        root + 'index.html':
        env.get_template('java_main.html').render(standalone=True, **model),
        root + name + '.html':
        env.get_template('java_main.html').render(standalone=False, **model)
    }
Example 32
def build_locals(model):
    locals = model["locals"]
    hardware = model['metadata']['hardware']
    module_name = snake_case(model['metadata']['name'])
    moves = {}

    for local in locals:
        name = module_name  #snake_case(local["name"])
        new_file = name + '.json'
        file = local["file"]
        # json.load/json.dump work with text, so open both files in text mode
        with open(file, 'r') as inp, open(new_file, 'w') as out:
            json.dump(json.load(inp), out, indent=2)
        moves[new_file] = 'json/' + module_name + '/'

    return moves
Example 33
def build_report(model):
    locals = model["locals"]
    hardware = model['metadata']['hardware']
    module_name = snake_case(model['metadata']['name'])
    json_reports = {}
    for local in locals:
        name = snake_case(local["name"])
        file = local["file"]
        type = local["type"]
        row = local["row"]
        with open(file, "r") as local_file:
            if type == "json":
                data_list = json.load(local_file)
                metrics = JsonMetrics({}, data_list, row)
                json_reports[name] = metrics.report
                json_reports[name]['length'] = len(data_list)
                json_reports[name]['size'] = os.path.getsize(file)
                json_reports[name]['tags'] = model['metadata']['tags']
                json_reports[name]['description'] = model['metadata']['description']['overview']
                json_reports[name]['indexes'] = [len(local['indexes'])
                                                 for local in locals]
            elif type == "csv":
                pass
    return json.dumps(json_reports, indent=2), json_reports
Example 34
def build_java(model, fast):
    name = snake_case(model['metadata']['name'])
    root = 'java/{name}/src/corgis/{name}/'.format(name=snake_case(name))
    new_folder = 'java/' + name + '/'

    files = {
        new_folder + 'libs/sqlite-jdbc-3.8.11.2.jar':
        copy_file(java_templates + 'libs/sqlite-jdbc-3.8.11.2.jar'),
        new_folder + 'libs/json-simple-1.1.1.jar':
        copy_file(java_templates + 'libs/json-simple-1.1.1.jar')
    }

    icon_file = model['metadata']['icon']

    if os.path.exists(icon_file):
        with open(icon_file, 'rb') as icon_data:
            files[new_folder + name + '.png'] = icon_data.read()
    else:
        model["metadata"]["icon"] = False

    files.update(build_metafiles(model))
    files.update(build_main(model))
    files.update(build_classes(model))

    if not fast:
        new_file, database_file = build_database(model)
        build_locals(model, database_file)

        database_file.close()
        moves = {new_file: new_folder}
        for appendix in model['metadata']['appendix']:
            moves[appendix['file']] = new_folder
    else:
        moves = {}

    return files, moves
Example 35
def post_build(model, files, moves, target):
    print("Building jar")

    name = snake_case(model['metadata']['name'])
    path = os.path.join(target, 'java', name)

    backup_location = os.getcwd()
    os.chdir(path)

    subprocess.call(["ant"], shell=True)

    #jar_file = os.path.join('./dist', name+'-doc-1.jar')
    #subprocess.call(["java", "-jar", "../../../tools/plantuml.jar", jar_file, "-o", "./docs/uml/"], shell=True)

    os.chdir(backup_location)
    return None
Example 36
def build_metrics(model, fast):
    module_name = snake_case(model['metadata']['name'])    
    new_folder = 'metrics/' + module_name + '/'
    
    json_data, report = build_report(model)
    
    files = {'metrics/' + module_name + '/' + module_name + '.json': json_data}
    
    icon_file = model['metadata']['icon']
    
    if os.path.exists(icon_file):
        with open(icon_file, 'rb') as icon_data:
            files[new_folder+module_name+'.png'] = icon_data.read()
    else:
        model["metadata"]["icon"] = False
    
    files.update(build_metafiles(model, report))
    
    return files, {}
    
Example 37
def build_blockpy(model, fast):
    name = snake_case(model['metadata']['name'])
    new_folder = 'blockpy/' + name + '/'
    
    files = {}
    
    icon_file = model['metadata']['icon']
    
    if os.path.exists(icon_file):
        with open(icon_file, 'rb') as icon_data:
            files[new_folder+name+'.png'] = icon_data.read()
    else:
        model["metadata"]["icon"] = False
    
    results = list(build_locals(model, new_folder))
    moves = {f: new_folder for f in first_items(results)}
    key_names = [k for s in results for k in s[1]]
    indexes = {k:v for s in results for k, v in s[2].items()}
    full_key_descriptions = [i[3] for i in results]
        
    files.update(build_metafiles(model, key_names, indexes, full_key_descriptions))
    
    return files, moves
Example 38
def build_locals(model, database_file):
    locals = model["locals"]
    moves = {}

    for local in locals:
        name = snake_case(local["name"])
        file = local["file"]
        type = local["type"]
        with open(file, "r") as local_file:
            if type == "json":
                data_list = json.load(local_file)
                json_list = [json.dumps(element) for element in data_list]
                indexes = []
                index_titles = ""
                for index in local["indexes"]:
                    index_name = index["name"]
                    index_path = index["jsonpath"]
                    indexed_value = [
                        json_path(index_path, element) for element in data_list
                    ]
                    indexes.append(indexed_value)
                    index_titles += ", {} text".format(index_name)

                header = "{}(data{})".format(name, index_titles)
                blanks = "?" + (", ?" * len(indexes))
                database_file.execute('CREATE TABLE ' + header)
                if local["indexes"]:
                    the_list = zip(json_list, zip(*indexes))
                else:
                    the_list = zip(json_list, [tuple() for x in json_list])
                for row, indices in the_list:
                    database_file.execute(
                        "INSERT INTO {} VALUES ({})".format(name, blanks),
                        (row, ) + indices)
                database_file.commit()
            elif type == "csv":
                pass
Example 39
def build_metrics(model, fast):
    module_name = snake_case(model['metadata']['name'])    
    files = {'metrics/' + module_name + '/' + module_name + '.json': build_report(model)}
    
    return files, {}
    
Example 40
def build_locals(model, js_path):
    locals = model["locals"]
    metadata_name = snake_case(model['metadata']['name'])
    model['visualized_datasets'] = {}
    for local in locals:
        name = local["name"]
        file = local["file"]
        row_type = local["type"]
        row = local["row"]
        # Complete Dataset
        complete_path = snake_case(model['metadata']['name']) + "_complete.js"
        with open(file, "r") as local_file, open(complete_path,
                                                 'w') as output_file:
            output_file.write(
                "_IMPORTED_COMPLETE_DATASETS['{}'] = Sk.ffi.remapToPy(".format(
                    metadata_name))
            if row_type == "json":
                data_list = json.load(local_file)
                json.dump(data_list, output_file, indent=2)
                output_file.write(");")
        # Linearized Dataset
        json_path = snake_case(model['metadata']['name']) + "_dataset.js"
        with open(file, "r") as local_file, open(json_path,
                                                 'w') as output_file:
            output_file.write(
                "_IMPORTED_DATASETS['{}'] = ".format(metadata_name))
            if row_type == "json":
                data_list = json.load(local_file)
                data = [
                    JsonLeafNodes(name + '.[0]', item).result
                    for item in data_list
                ]
                data = lod_to_dol(data)
                remove_outliers(data,
                                actually_keep=model['metadata']['outliers'])
                for row in data:
                    if row['name'] in model['structures_comments']:
                        row['comment'] = model['structures_comments'][
                            row['name']]
                    is_index = row['name'] in [
                        i['jsonpath'] for i in local['indexes']
                    ]
                    row['index'] = is_index
                    if is_index:
                        print("Index:", row["name"])
                key_names = [row['name'] for row in data]
                short_key_names = shortest_unique_strings(key_names)
                key_name_map = dict(zip(key_names, short_key_names))
                full_key_descriptions = [{
                    'name': row['name'],
                    'short': key_name_map[row['name']],
                    'type': row['type'],
                    'comment': row.get('comment', ''),
                    'example': row['data'][0]
                } for row in data]
                indexes = {
                    key_name_map[row['name']]: row
                    for row in data if row['index']
                }
                for index_data in indexes.values():
                    index_data['data'] = [
                        str(val) for val in index_data['data']
                    ]
                data = {key_name_map[row['name']]: row for row in data}
                json.dump(data, output_file, indent=2)
                #model['visualized_datasets'][name] = data.keys()
            print("File sizes:",
                  "{}mb".format(os.stat(json_path).st_size / 1024 / 1024))
        yield json_path, complete_path, short_key_names, indexes, full_key_descriptions, data_list
Example 41
def build_main(model):
    name = model['metadata']['name']
    return {
        'python/' + snake_case(name) + '/' + snake_case(name) + '.py':
        env.get_template('main.py').render(**model)
    }
Example 42
def build_locals(model, js_path):
    locals = model["locals"]
    model['visualized_datasets'] = {}
    print("")
    for local in locals:
        name = snake_case(local["name"])
        file = local["file"]
        row_type = local["type"]
        row = local["row"]
        json_path = name + ".js"
        json_bar_path = name + "_bar.js"
        with open(file, "r") as local_file, open(json_path, 'w') as output_file, open(json_bar_path, 'w') as output_bar_file:
            output_file.write(name+" = ")
            output_bar_file.write(name+"_bar = ")
            if row_type == "json":
                data_list = json.load(local_file)
                data = [JsonLeafNodes(name+'.[0]', item).result for item in data_list]
                data = lod_to_dol(data)
                bar_data = []
                key_name_map = remove_outliers(data, actually_keep=model['metadata']['outliers'])
                for row in data:
                    if row['name'] in model['structures_comments']:
                        row['comment'] = model['structures_comments'][row['name']]
                    is_index = row['name'] in [i['jsonpath'] for i in local['indexes']]
                    row['index'] = is_index
                dol = {d['name']: d['data'] for d in data}
                lod = [dict(zip(dol, t)) for t in zip(*dol.values())]
                
                for index in local['indexes']:
                    index_path = index['jsonpath']
                    indexed_values = {}
                    aggregated_values = {}
                    for chunk in lod:
                        if index_path in chunk:
                            category = str(chunk[index_path])
                        else:
                            category = ""
                        if category not in indexed_values:
                            indexed_values[category] = {}
                        for key, value in chunk.items():
                            if not isinstance(value, (int, float)):
                                continue
                            if key not in indexed_values[category]:
                                indexed_values[category][key] = []
                            indexed_values[category][key].append(value)
                            
                    for category, items in indexed_values.items():
                        category = str(category)
                        for key, values in items.items():
                            if key not in aggregated_values:
                                aggregated_values[key] = {}
                            positives = [v for v in values if v > 0]
                            aggregated_values[key][category.replace(',', '')] = {
                                'count': len(values),
                                'sum': sum(values),
                                'average': statistics.mean(values) if sum(values) else 0,
                                'average w/o zero': statistics.mean(positives) if positives else 0
                            }
                    #inter = index_path.split('.', 2)[2]
                    #if '.' in inter:
                    #    category, name = inter.rsplit('.', 1)
                    #else:
                    #    category, name = inter, inter
                    #category = category.replace('.', ' ')
                    
                    bar_data.append({
                        'data': aggregated_values,
                        'name': index_path,
                        'indexes': [k.replace(',', '') for k in indexed_values.keys()],
                        'best_indexes': [k.replace(',', '') for k, v in sorted([(k, v['count']) 
                                            for k, v in list(aggregated_values.values())[0].items()], key=lambda i: -i[1])[:10]],
                        #'pretty': category.title() + ": "+name.title()
                        'pretty': key_name_map[index_path]
                    })
                #sample_down(data)
                json.dump(data, output_file, indent=2)
                json.dump(bar_data, output_bar_file, indent=2)
                #model['visualized_datasets'][name] = data.keys()
        print("File sizes:", "{}mb".format(os.stat(json_path).st_size/1024/1024), "{}mb".format(os.stat(json_bar_path).st_size/1024/1024))
        yield json_path
        yield json_bar_path
Example 43
def build_main(model):
    name = model['metadata']['name']
    return {'python/' + snake_case(name) + '/' + snake_case(name) + '.py' :
                env.get_template('main.py').render(**model)}
Example 44
def build_main(model):
    name = model['metadata']['name']
    return {'racket/' + snake_case(name) + '/' + snake_case(name) + '.rkt' :
                env.get_template('main.rkt').render(**model)}