示例#1
0
    def compress(self, data, file_name, k):
        """Compress *data* and write the resulting 16-bit code stream to
        a binary file under ./data/large_inputs/compression/.

        :param data: raw input handed to the Compressor.
        :param file_name: output base name; its extension (text after the
            last '.') selects the initial code dictionary.
        :param k: code-word parameter forwarded to the compressor.
        :return: dict with the compressed file size in bytes, the elapsed
            compression time, and the number of emitted indices.
        """
        # Hoist the output path: the original rebuilt this f-string three
        # times, inviting divergence.
        output_path = f'./data/large_inputs/compression/{file_name}.bin'

        compressor = Compressor(data=data,
                                dictionary=self.init_code_dictionary(
                                    k=k, file_type=file_name.split(".")[-1]),
                                k=k)
        compressor_response = compressor.run()

        elapsed_time = compressor_response["time"]
        self.compressed_message = compressor_response["message"]

        # Each code is packed as a big-endian unsigned 16-bit integer.
        # The context manager closes the file even if packing fails
        # (the original leaked the handle on any exception).
        with open(output_path, 'wb') as compressed_file:
            compressed_file.write(
                struct.pack(f">{'H' * len(self.compressed_message)}",
                            *self.compressed_message))

        print(f"\nSIZE AFTER COMPRESSION: {os.path.getsize(output_path)}")

        return {
            "Compression file size": os.path.getsize(output_path),
            "Elapsed Time": elapsed_time,
            "Indices": len(self.compressed_message),
        }
def main(dataPath, datacollector, display):
    """Main function of the odb2collector.

    Loops forever: writes a batch of OBD frames to a timestamped log
    file, reports the running byte total on the display, and hands each
    finished file to the background compressor.

    :param String        dataPath:         Directory where the data will be stored.
    :param DataCollector datacollector:    DataCollector instance used.
    :param Display       display:          Display instance used to show messages.
    """
    ## Number of bytes stored across all log files so far.
    loggedBytes = 0

    ## Initiate the compressor (start() suggests it runs concurrently --
    ## presumably a thread; confirm against the Compressor class).
    gzip = Compressor()
    gzip.start()

    logEnding = CONFIGURATION["logending"]

    # NOTE(review): this loop never terminates on its own; shutdown is
    # presumably external (signal/kill) -- confirm with the caller.
    while True:
        message = ["Logged %skB" % (loggedBytes / 1024)]

        print "\n".join(message)

        display.write_message(message)

        # One log file per iteration, named after the current time.
        datafile = "%s/%s.%s" % (dataPath, _getCurrentTime(), logEnding)

        loggedBytes += datacollector.write_data_log(datafile,
                                                    nbrOfOBDFrames=1000,
                                                    messagesPerTimestamp=20)

        print "Collected data log..."
        gzip.add_file_for_compression(datafile)
示例#3
0
文件: engine.py 项目: alanshepard/tcc
 def __init__(self):
     """Initialize the engine model: compressor and turbine components
     plus fixed thermodynamic constants used by the cycle calculations."""
     self.compressor = Compressor()
     self.turbine = Turbine()
     # Solver starting point, copied so instances can mutate it freely.
     self.initial_guess = self.__class__.DEFAULT_PARAMS.copy()
     self.cpc = 1004  # J/(kg K)
     self.cpt = 1225  # J/(kg K)
     self.T01 = 273.15  # K
     self.P01 = 101e3  # Pa
     # Specific gas constants derived as R = (1 - 1/gamma) * cp.
     self.R_c = (1 - 1 / self.compressor.gam) * self.cpc  # from gam and cp
     self.R_t = (1 - 1 / self.turbine.gam) * self.cpt
     # NOTE(review): the comment says D8 = 45mm but the code uses 35e-3
     # as the diameter -- confirm which value is correct.
     self.A8 = pi / 4 * 35e-3**2  # Measured: D8 = 45mm
     self.FHV = 43e6  #J/kg Fuel lower calorific value
     self.eta_combustion = 0.5
示例#4
0
    def test_generateHeader(self):
        """Header generated for a four-symbol code map matches the
        reference header built from the same symbols and format."""
        code_map = {
            ord('r'): '01',
            ord('e'): '00',
            ord('j'): '11',
            ord('l'): '10',
        }
        header_format = '001{0[0]}1{0[1]}01{0[2]}1{0[3]}'
        expected = self.getHeader(['e', 'r', 'l', 'j'], header_format)

        produced = Compressor(None, None).generateHeader(code_map)

        self.assertEqual(expected, produced)
示例#5
0
    def __init__(self,V=1000.,cmp_eff=80.,trb_eff=90.,p=200000):
        """Build the plant model (ambient, compressor, tank, turbine).

        :param V: tank volume (units per Tank -- presumably m^3; TODO confirm).
        :param cmp_eff: compressor efficiency (presumably percent; TODO confirm).
        :param trb_eff: turbine efficiency (presumably percent; TODO confirm).
        :param p: initial tank pressure (presumably Pa; TODO confirm).
        """

        # Air Side
        self.amb = Ambient()
        self.cmp = Compressor(eff=cmp_eff)
        self.tank = Tank(V=V,p=p)
        self.trb = Turbine(eff=trb_eff)
        # Mass flow rates into and out of the tank, initially zero.
        self.m_dot_i = 0.0
        self.m_dot_o = 0.0

        # Heat Side

        # Create pandas DataFrame to store results
        # (`variables` is a module-level column list defined elsewhere).
        self.df = pd.DataFrame(columns=variables)
示例#6
0
    def test_generateHeader3(self):
        """Header generated for an eight-symbol code map (mixed 2-, 3-
        and 4-bit codes) matches the reference header."""
        code_map = {
            ord('b'): '100',
            ord('p'): '01',
            ord('e'): '00',
            ord('o'): '101',
            ord('r'): '1110',
            ord('x'): '1111',
            ord('!'): '1101',
            ord(' '): '1100'
        }
        header_format = '001{0[0]}1{0[1]}001{0[2]}1{0[3]}001{0[4]}1{0[5]}01{0[6]}1{0[7]}'
        expected = self.getHeader(['e', 'p', 'b', 'o', ' ', '!', 'r', 'x'],
                                  header_format)

        produced = Compressor(None, None).generateHeader(code_map)

        self.assertEqual(expected, produced)
示例#7
0
def build():
    """Flask view: receive a .tar.xz source archive, run its build
    commands inside a temporary sandbox directory, and return either the
    requested target files as an archive or the failing command output.

    Expects form fields 'password', 'commands_file', 'workdir',
    'targets', 'exact_lib_versions' and an uploaded 'file'.
    """
    logger = logging.create_logger(app)
    print(request.form)
    password = request.form['password']
    if not is_password_acceptable(password):
        return make_response('', 400)
    # Extracting files
    tempdir = tempfile.TemporaryDirectory()
    print(request.files)

    f = request.files['file']
    commands_file = request.form['commands_file'].strip('/')

    archive_file = tempfile.NamedTemporaryFile(suffix='.tar.xz', dir=tempdir.name)
    archive_filename = archive_file.name.split('/')[-1]
    print(archive_filename)

    f.save(archive_file.name)

    compressor = Compressor(tempdir.name)
    compressor.extract_files(archive_filename)
    # Extraction presumably creates a directory named after the archive's
    # basename -- confirm against Compressor.extract_files.
    new_root = f'{tempdir.name}/{archive_filename.split(".")[0]}/'

    archive_file.close()

    # Run commands
    print(new_root)

    command_runner = CommandRunner(new_root + request.form['workdir'].strip('/'), app.config['LIBRARIES'])

    output, code = command_runner.run_commands(new_root + commands_file, new_root, logger, request.form['exact_lib_versions'] == 'true')
    if code != 0:
        logger.debug(code)
        response = output, 400
    else:
        # Sending files back
        target_files = [target.strip('/') for target in request.form['targets'].split(', ')]
        compressor.root_path = new_root
        output_file = tempfile.NamedTemporaryFile(suffix='.tar.xz')
        compressor.compress(target_files, output_file.name)
        # NOTE(review): output_file is never explicitly closed; it is
        # handed to send_file and cleaned up on garbage collection --
        # confirm this is intentional.
        response = send_file(output_file, mimetype='application/x-object')

    tempdir.cleanup()

    return response
示例#8
0
def dump_tables(fontfile, output):
    """Dump every OpenType table of *fontfile* into `<output>_tables/`,
    RLE-encode each table file in place, gzip it, and print per-table
    size statistics.

    :param fontfile: path to the font to analyse.
    :param output: prefix for the dump folder (`<output>_tables`).
    """
    font = TTFont(fontfile, lazy=True)
    dump_folder = output + '_tables'
    print('dump results in {0}'.format(dump_folder))
    # exist_ok replaces the original errno-checking try/except dance.
    os.makedirs(dump_folder, exist_ok=True)

    header_dict = FontInfo.getInformation(fontfile, FontInfo.TAGS.keys())
    bin_header = BaseHeaderPrepare.prepare(BaseFonter.BASE_VERSION,
                                           header_dict)
    print('Base header total size=', len(bin_header))

    base_fonter = BaseFonter(fontfile)
    base_dump = dump_folder + '/base_dump'
    base_fonter.dump_base(base_dump)

    # OpenType tables: copy each table's byte range out of the base dump,
    # RLE-encode it, then gzip it in place to measure compressibility.
    # Context managers fix the original's leaked file handles.
    tables = font.reader.tables
    with open(base_dump, 'r+b') as dump_file:
        for name in tables:
            table = tables[name]
            offset = table.offset
            length = table.length
            # '/' in table names would escape the dump folder; flatten it.
            table_file_name = dump_folder + '/' + name.replace('/', '_')
            dump_file.seek(offset)
            with open(table_file_name, 'w+b') as table_file:
                table_file.write(dump_file.read(length))
            rle_table = RleFont(table_file_name)
            rle_table.encode()
            rle_table.write(table_file_name)
            compressor = Compressor(Compressor.GZIP_INPLACE_CMD)
            compressor.compress(table_file_name)
            print('{0}: offset={1:9d}\tlen={2:9d}\tcmp_len={3:9d}'.format(
                name, offset, length, os.path.getsize(table_file_name + '.gz')))

    print('TODO(bstell) save and compress the CFF parts.')
    if 'CFF ' in font:
        dumpCFFTable(font)

    font.close()
示例#9
0
async def main():
    """Validate the makefile with GNU make, parse it, and build the
    default target through a RequestsManager; build failures are printed
    rather than propagated."""
    # Dry-run (-n) lets make validate the file without executing recipes.
    check_result = sp.Popen(f'make -n -f {config.MAKEFILE}',
                            stderr=sp.PIPE,
                            shell=True)
    output = check_result.communicate()[1]
    if check_result.returncode != 0:
        print("File seems to be of bad format")
        print(str(output))
        exit(1)

    # Fix: read via a context manager instead of leaking the file handle
    # (the original used open(...).readlines() with no close).
    with open(config.MAKEFILE, 'r') as makefile:
        lines = makefile.readlines()
    p = Parser(lines)
    p.replace_all_variables()
    p.get_build_targets()
    rm = await RequestsManager.create(p.default_target, hosts_file,
                                      Compressor(), p)
    try:
        await rm.build_targets()
    except ExecutionError as e:
        print(e.commands_output, e.message)
示例#10
0
import matplotlib.pyplot as plt

from compressor import Compressor

# Render the compressor performance map and save it as a PDF.
figure, axis = plt.subplots(1, 1, figsize=(4, 3))
Compressor().plot_map(axis, samples=144)

plt.savefig('compressor_map.pdf')
示例#11
0
 def compress_files(self, files):
     """Archive *files* into a zip under the runner's results path and
     return the compressor's zip() result."""
     archiver = Compressor(files, self.runner.results_path, debug=self.debug)
     return archiver.zip()
示例#12
0
                return total_work / self.eta
            else:
                return self.compressor_in.work_done / self.eta
        except AttributeError as e:
            raise e


if __name__ == '__main__':
    from inlet import Inlet
    from fan import Fan
    from bypass import Bypass
    from compressor import Compressor

    # Demo: chain turbofan stations from free stream to the HPC exit and
    # print the resulting work and flow quantities.
    free_stream = FlowCondition(corrected_mass_flow=1400.,
                                mach=0.8, t_static=216, p_static=22632, station_number='1', medium='air')
    intake = Inlet(ambient=free_stream, eta=0.98)
    fan_stage = Fan(inflow=intake.outflow, eta=0.92, pressure_ratio=1.6, station_number='21')
    splitter = Bypass(inflow=fan_stage.outflow, bypass_ratio=8.)
    low_pressure_compressor = Compressor(inflow=splitter.outflow_core, eta=0.9, pressure_ratio=1.4, station_number='25')
    high_pressure_compressor = Compressor(inflow=low_pressure_compressor.outflow, eta=0.9, pressure_ratio=19, station_number='3')
    low_spool = Spool(compressor_in=(fan_stage, low_pressure_compressor), eta=0.99)

    print(low_spool.work_required)
    print(low_pressure_compressor.work_done)
    print(high_pressure_compressor.inflow.mass_flow)
    print(high_pressure_compressor.inflow.t_total)
    print(high_pressure_compressor.p_total)
    print(high_pressure_compressor.t_total)
    print(high_pressure_compressor.work_done)

    # print(obj.p_total)
    # print(obj.t_total)
示例#13
0
from compressor import Compressor
from decompressor import Decompressor

# Round-trip demo: compress the sample file, then decompress the result.
SOURCE_FILE = 'debug.txt'
ARCHIVE_FILE = 'compress.zip'
RESTORED_FILE = 'uncompress.txt'

Compressor(SOURCE_FILE, ARCHIVE_FILE).run()
Decompressor(ARCHIVE_FILE, RESTORED_FILE).run()
示例#14
0
import matplotlib.pyplot as plt

# Use LaTeX text rendering with a serif font for publication output.
plt.rc('text', usetex=True)
plt.rc('font', family='serif')

from compressor import Compressor

# Draw the compressor map on a fresh figure and save it as a PDF.
plt.figure(figsize=(6,8),dpi=150)
Compressor().plot_map(plt.gca(), samples=100, plot=True)
plt.savefig('map.pdf', dpi=300)

plt.show()
示例#15
0
from server import Server
from parse import Parser
from proxy_server import Proxy

# NOTE(review): Scraper, Compressor and MongoDB are used below without a
# visible import -- presumably defined elsewhere in this module or this
# is a partial view; confirm before running.
# The single-character print banners below look like debug progress
# markers between initialization steps.
print("SSSSSSSSSSSSSSSSSSSSSSSSS")
# initiate server object.
server = Server()
print("PPPPPPPPPPPPPPPPPPPPPPPPPPPP")
# initiate parser object.
parsers = Parser()
print("SSSSSSSSCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC")
# initiate scraper object.
scrape = Scraper()
print("CoCoCoCoCoCoCoCoCoCoCo")
# initiate compressor object.
compressor = Compressor()
print("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
# creates a connection pool to Server database.
server_mongodb = MongoDB(db_name='Server')
print("XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX")
# creates a connection pool to Compressor database.
compressor_mongodb = MongoDB(db_name='Compressor')
print("YYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYY")

# counter and document id  variables initial values.
crawler_doc_id = 0
count_crawler = 0
compressor_doc_id = 0
mongodb_doc_id = 0
#doc_id = 0
示例#16
0
def learn_sparse_softmax(session, model, verbose=False):
    """Learn a sparse-coding approximation of the softmax weight matrix.

    The first ``model.basis_size`` columns of ``softmax_w`` are kept as a
    dense basis; every remaining output vector is re-expressed as a sparse
    combination of at most ``model.sparsity`` basis vectors, fitted in
    parallel via ``fit_wrapper``. The resulting sparse ids/weights are
    saved as .npy files under the finetune save path.

    :param session: session used to read model parameters.
    :param model: model exposing ``softmax_w``, vocab sizes, basis_size
        and sparsity.
    :param verbose: unused; kept for interface compatibility.
    :return: ``(softmax_w_basis, softmax_sp_ids_val, softmax_sp_weights_val)``.
    """
    softmax_param = session.run(model.softmax_w)
    # Choose first model.basis_size columns from softmax_w as the basis.
    softmax_w_basis = softmax_param[:, :model.basis_size]
    model.assign_sparse_softmax_params(session, softmax_w_basis)

    softmax_sp_ids_val = np.zeros(model.vocab_size_out * model.sparsity,
                                  dtype=np_index_data_type())
    softmax_sp_weights_val = np.zeros(model.vocab_size_out * model.sparsity)

    # Columns are bases. No need to transpose.
    compressor = Compressor(bases=softmax_w_basis,
                            max_non_zero_entry=model.sparsity)

    # Basis vectors have exact one-hot expressions: index i gets weight
    # 1.0, padded with (sparsity - 1) distinct random indices whose
    # weights stay zero.
    for i in range(model.basis_size):
        random_basis = {i: True}
        j = 1  # j: number of elements in random_basis
        while j < model.sparsity:
            r = random.randint(0, model.basis_size - 1)
            if r not in random_basis:
                random_basis[r] = True
                j += 1
        del random_basis[i]
        # BUG FIX: dict.keys() must be materialized with list() -- on
        # Python 3, np.array(dict_keys) builds a 0-d object array and the
        # slice assignment below fails.
        indices = np.r_[
            np.array([i], dtype=np_index_data_type()),
            np.array(list(random_basis.keys()), dtype=np_index_data_type())]
        softmax_sp_ids_val[i * model.sparsity:(i + 1) *
                           model.sparsity] = indices
        # Other weights are zero by the definition of softmax_sp_weights_val.
        softmax_sp_weights_val[i * model.sparsity] = 1.0

    t1 = time.time()
    pool = multiprocessing.Pool(processes=FLAGS.cpu_count)
    # Transpose so rows are output-vocabulary vectors.
    softmax_param = softmax_param.T
    parallel_params = [(compressor, softmax_param[i, :])
                       for i in range(model.basis_size, model.vocab_size_out)]
    results = pool.map(fit_wrapper, parallel_params)
    for i_, result in enumerate(results):
        indices, values = result
        i = i_ + model.basis_size
        softmax_sp_ids_val[i * model.sparsity:(i + 1) *
                           model.sparsity] = indices
        softmax_sp_weights_val[i * model.sparsity:(i + 1) *
                               model.sparsity] = values

    t2 = time.time()
    print("Parallel time: ", t2 - t1)

    finetune_save_path = os.path.join(FLAGS.save_path,
                                      "finetune-" + FLAGS.model_config)
    # exist_ok avoids the check-then-create race of the original
    # isdir/mkdir pair.
    os.makedirs(finetune_save_path, exist_ok=True)

    sparse_parameters_path = finetune_save_path

    np.save(os.path.join(sparse_parameters_path, "softmax_sp_ids_val.npy"),
            softmax_sp_ids_val)
    np.save(os.path.join(sparse_parameters_path, "softmax_sp_weights_val.npy"),
            softmax_sp_weights_val)
    return softmax_w_basis, softmax_sp_ids_val, softmax_sp_weights_val
示例#17
0
def learn_sparse_embedding(session,
                           model,
                           verbose=False,
                           output_frequency=500):
    """Learn a sparse-coding approximation of the input embedding matrix.

    The first ``model.basis_size`` rows of the embedding are kept as a
    dense basis; every remaining input vector is re-expressed as a sparse
    combination of at most ``model.sparsity`` basis vectors, fitted in
    parallel via ``fit_wrapper``. The resulting sparse ids/weights are
    saved as .npy files under the finetune save path.

    :param session: session used to read model parameters.
    :param model: model exposing ``embedding``, vocab sizes, basis_size
        and sparsity.
    :param verbose: unused; kept for interface compatibility.
    :param output_frequency: unused; kept for interface compatibility.
    :return: ``(embedding_basis, embedding_sp_ids_val, embedding_sp_weights_val)``.
    """
    embedding_param = session.run(model.embedding)
    # Choose first model.basis_size rows as basis.
    embedding_basis = embedding_param[:model.basis_size, :]

    embedding_sp_ids_val = np.zeros(model.vocab_size_in * model.sparsity,
                                    dtype=np_index_data_type())
    embedding_sp_weights_val = np.zeros(model.vocab_size_in * model.sparsity)

    # Basis vectors have exact one-hot expressions: index i gets weight
    # 1.0, padded with (sparsity - 1) distinct random indices whose
    # weights stay zero.
    for i in range(model.basis_size):
        random_basis = {i: True}
        j = 1  # j: number of elements in random_basis
        while j < model.sparsity:
            r = random.randint(0, model.basis_size - 1)
            if r not in random_basis:
                random_basis[r] = True
                j += 1
        del random_basis[i]
        # BUG FIX: dict.keys() must be materialized with list() -- on
        # Python 3, np.array(dict_keys) builds a 0-d object array and the
        # slice assignment below fails.
        indices = np.r_[
            np.array([i], dtype=np_index_data_type()),
            np.array(list(random_basis.keys()), dtype=np_index_data_type())]
        embedding_sp_ids_val[i * model.sparsity:(i + 1) *
                             model.sparsity] = indices
        # Other weights are zero by the definition of embedding_sp_weights_val.
        embedding_sp_weights_val[i * model.sparsity] = 1.0

    t1 = time.time()

    # Columns are bases, so the row-basis matrix must be transposed here.
    compressor = Compressor(bases=embedding_basis.T,
                            max_non_zero_entry=model.sparsity)
    pool = multiprocessing.Pool(processes=FLAGS.cpu_count)
    parallel_params = [(compressor, embedding_param[i, :])
                       for i in range(model.basis_size, model.vocab_size_in)]
    results = pool.map(fit_wrapper, parallel_params)
    for i_, result in enumerate(results):
        indices, values = result
        i = i_ + model.basis_size
        embedding_sp_ids_val[i * model.sparsity:(i + 1) *
                             model.sparsity] = indices
        embedding_sp_weights_val[i * model.sparsity:(i + 1) *
                                 model.sparsity] = values

    t2 = time.time()
    print("Parallel time: ", t2 - t1)

    finetune_save_path = os.path.join(FLAGS.save_path,
                                      "finetune-" + FLAGS.model_config)
    # exist_ok avoids the check-then-create race of the original
    # isdir/mkdir pair.
    os.makedirs(finetune_save_path, exist_ok=True)
    sparse_parameters_path = finetune_save_path

    np.save(os.path.join(sparse_parameters_path, "embedding_sp_ids_val.npy"),
            embedding_sp_ids_val)
    np.save(
        os.path.join(sparse_parameters_path, "embedding_sp_weights_val.npy"),
        embedding_sp_weights_val)
    model.assign_sparse_embedding_params(session, embedding_basis)
    return embedding_basis, embedding_sp_ids_val, embedding_sp_weights_val
示例#18
0
import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk
from interface import interface
from compressor import Compressor
import sys

if __name__ == "__main__":
    # Read the API key; the context manager closes the file even if the
    # read fails (the original used a manual open/close pair).
    with open('apikey.txt') as key:
        # rstrip('\n') instead of [:-1]: the old slice chopped the last
        # character of the key whenever the file lacked a trailing newline.
        API_KEY = key.readline().rstrip('\n')
    print(API_KEY)
    if API_KEY:
        try:
            c = Compressor(API_KEY)
        except Exception:
            print("Please enter the right api key in apikey.txt file")
            sys.exit(1)
    else:
        print("Please enter the api key in apikey.txt file")
        sys.exit(1)
    window = interface(c)
    window.connect("delete-event", Gtk.main_quit)
    window.show_all()
    Gtk.main()
示例#19
0
 def test_processFile4(self):
     """processEOF on the 7-bit remainder '1000001' yields [130, 1]."""
     trailing_bits = '1000001'
     result = Compressor(None, None).processEOF(trailing_bits, [])
     self.assertEqual([130, 1], result)
示例#20
0
from compressor import Compressor

compressor = Compressor("static/test.png")

# Show the pixel matrix of the test image, row by row.
print('')
print("====================")
print("=      MATRICE     =")
print("====================")
print('')
for row in compressor.get_matrice():
    print(row)

# Show the flattened array form of the same image.
print('')
print("====================")
print("=       ARRAY      =")
print("====================")
print('')
print(compressor.get_array())
示例#21
0
def main():
    """Entry point for the loghogd daemon.

    Parses options and the facilities configuration, optionally
    daemonizes and drops privileges, wires up the
    compressor/writer/processor/server pipeline, installs signal
    handlers, and runs the server loop until shutdown. Exits with
    EX_CONFIG on configuration errors and EX_SOFTWARE on runtime errors.
    """
    try:
        init_options()
    except OptionsError as e:
        sys.stderr.write("Error: {0}\n\n".format(e))
        sys.stderr.write(usage())
        sys.stderr.write("\n")
        sys.stderr.flush()
        sys.exit(os.EX_CONFIG)

    if options.main.generate_config:
        print(generate_sample_config()
              )  # XXX: this doesn't yet work properly because of groper
        sys.exit()

    # Relative paths in the config are resolved against the config file's
    # own directory.
    conf_root = os.path.dirname(os.path.abspath(options.main.config))

    facility_db = FacilityDB()
    try:
        facility_db.load_config(
            normalize_path(options.main.facilities_config, conf_root))
    except (IOError) as e:
        sys.stderr.write("Error reading {0}: {1}\n".format(
            options.main.facilities_config, e))
        sys.stderr.flush()
        sys.exit(os.EX_CONFIG)
    except (FacilityError, configparser.Error) as e:
        sys.stderr.write("{0} contains errors:\n\n".format(
            options.main.facilities_config))

        # Parser errors carrying a line number are reformatted so the user
        # sees only the first line of the underlying message.
        if hasattr(e, 'lineno'):
            e = 'Error on line {0}: {1}'.format(
                e.lineno,
                e.message.split("\n")[0].strip())

        sys.stderr.write("{0}\n\n".format(e))
        sys.stderr.write("Exiting now.\n")
        sys.stderr.flush()
        sys.exit(os.EX_CONFIG)

    if options.main.check_config:
        sys.exit()  # We are just checking the config file, so exit here.

    cache_config_checksum()
    create_dirs()

    # Daemonize before dropping privileges / writing the pidfile so the
    # recorded pid belongs to the detached process.
    if options.main.daemon:
        daemonize()

    if options.main.user:
        drop_privileges(options.main.user)

    if options.main.pidfile:
        write_pid(options.main.pidfile)
        atexit.register(exit_handler)

    setup_logging()

    try:
        logging.getLogger().info("Starting loghogd.")

        # Pick up rotated, still-uncompressed logs left over from a
        # previous run.
        compressor = Compressor()
        compressor.find_uncompressed(options.main.logdir, r'.+\.log\..+')

        writer = Writer(facility_db, compressor, options.main.logdir)
        processor = Processor(facility_db, writer)

        server = Server(processor.on_message, conf_root)

        # SIGINT/SIGTERM trigger an orderly shutdown; SIGHUP triggers the
        # reload handler for the facility database and writer.
        signal_handler = make_shutdown_handler(server, writer, compressor)

        signal.signal(signal.SIGINT, signal_handler)
        signal.signal(signal.SIGTERM, signal_handler)

        signal.signal(signal.SIGHUP, make_reload_handler(facility_db, writer))
    except Exception as e:
        logging.getLogger().error(e)
        logging.getLogger().error(
            'Exiting abnormally due to an error at startup.')
        sys.exit(os.EX_CONFIG)

    try:
        compressor.start()
        server.run()
    except Exception as e:
        logging.getLogger().exception(e)
        logging.getLogger().error(
            'Exiting abnormally due to an error at runtime.')
        shutdown(None, server, writer, compressor)
        sys.exit(os.EX_SOFTWARE)

    logging.getLogger().info('Shutdown complete. Exiting.')