Example no. 1
0
def process_queue(source_directory, destination_directory, queue_directory, encoding_profile):
    """Encode every file named by an entry in the queue directory.

    Each file name found in ``queue_directory`` is looked up in
    ``source_directory``.  Existing sources are encoded into
    ``destination_directory`` using ``encoding_profile`` and their queue
    marker is deleted; missing sources are reported and their marker is
    left in place for a later run.
    """
    for entry_name in listdir(queue_directory):
        candidate = join(source_directory, entry_name)
        if not exists(candidate):
            print("Warning: {0} does not exist".format(candidate))
            continue
        target = generate_output_name(candidate, destination_directory)
        generate_output(candidate, target, encoding_profile)
        # Drop the queue marker only after the encode has been produced.
        remove(join(queue_directory, entry_name))
Example no. 2
0
def mirror_videos(source_directory, destination_directory, log_directory, exclusions, only, encoding_profile):
    """Mirror MKV sources into MP4 outputs, tracking completed work via log files.

    Scans ``source_directory`` for MKV files, optionally narrowed by
    ``exclusions`` (glob patterns to drop) or ``only`` (glob patterns to
    keep) — passing both is an error.  For each remaining source: if a log
    file already exists the conversion is considered done; if only the
    destination MP4 exists a log is created retroactively; otherwise the
    file is queued for encoding with ``encoding_profile``.  Log files with
    no matching source are deleted as stale.
    """
    # Sets give O(1) membership tests in the per-file loop below; the
    # original log glob list is kept to preserve deletion order.
    source_set = set(glob(join(source_directory, MKV_SEARCH)))
    destination_set = set(glob(join(destination_directory, MP4_SEARCH)))
    log_glob = glob(join(log_directory, MP4_SEARCH))
    logs_remaining = set(log_glob)
    encode_list = []

    if exclusions and only:
        print('mirror_videos was passed both exclusions and only')
        exit_with_error()
        return

    if exclusions:
        # Subtract every file matching an exclusion pattern.
        for exclusion in exclusions:
            source_set -= set(glob(join(source_directory, exclusion)))
    elif only:
        # Rebuild the source set from scratch using only the kept patterns.
        source_set = set()
        for pattern in only:
            source_set |= set(glob(join(source_directory, pattern)))

    for source_file in source_set:
        destination_file = generate_output_name(source_file, destination_directory)
        log_file = generate_output_name(source_file, log_directory)
        if log_file in logs_remaining:
            # We have a log of doing this conversion. We should make sure we keep the log
            # intact and not convert it again.
            logs_remaining.discard(log_file)
        elif destination_file in destination_set:
            # We have the destination file, but not the log of doing it. Create the log.
            create_log_file(log_file)
        else:
            # We haven't converted this video yet. Queue it up
            encode_list.append((source_file, destination_file, log_file))

    # Any log still unclaimed has no matching source: it is stale.
    # Iterate the original glob list so deletion order matches the scan.
    for log_file in log_glob:
        if log_file in logs_remaining:
            print("Deleting " + log_file)
            remove(log_file)

    for input_file, output_file, log_file in encode_list:
        generate_output(input_file, output_file, encoding_profile)
        create_log_file(log_file)
Example no. 3
0
def main():
    """Run the benchmark suite and persist the results as CSV and graph."""
    cli_args = parse_args()

    suite = BenchmarkSuite(cli_args.capsule_dir, cli_args.parallelism)
    records = suite.test(cli_args.num_samples)

    # Sort on every column so repeated runs produce a stable ordering.
    frame = pd.DataFrame.from_records(
        records, columns=BenchmarkSuite.Result._fields)
    frame.sort_values(by=list(frame.columns), inplace=True, ignore_index=True)

    output.generate_output(output=frame,
                           csv_path=cli_args.output_csv,
                           graph_path=cli_args.output_graph)
Example no. 4
0
 def on_ok(self, evt):
     """Validate the input panel, run the report query and emit its output.

     The first two rows yielded by the query iterator are metadata
     (column headers, then column types); if no data row follows, the
     user is informed and nothing is generated.
     """
     if not self.pan.Validate():
         return
     params = self.pan.get_data()
     data = get_query(self.report, params)

     # next(data) instead of data.next(): the .next() method is the
     # Python-2-only iterator protocol and breaks under Python 3.
     headers = next(data)
     col_types = next(data)
     try:
         first_row = next(data)
     except StopIteration:
         wx.MessageBox('Sorry, no data.', 'Report', wx.OK|wx.ICON_INFORMATION)
         return

     out_name = self.available_outputs[self.output_doc.GetSelection()]
     generate_output(self, self.report, out_name,
                     headers, col_types, first_row, data)
Example no. 5
0
def main():
    """Solve the cache-placement problem for the file named on the command line.

    Reads the problem description from ``sys.argv[1]``, distributes videos
    to caches using the active-endpoints strategy, writes the solution
    file and prints the resulting score.
    """
    input_file = sys.argv[1]

    [endpoints, caches, videos] = parse(input_file)

    # The popular-content strategy variant that used to live here was
    # removed as dead (commented-out) code; active_endpoints is the one
    # actually in use.
    solution = CacheDistributor.distribute(
        DistributionStrategy.active_endpoints,
        endpoints,
        caches,
        videos
    )

    result = score(endpoints, solution)

    generate_output(len(caches), solution)
    print(result)
Example no. 6
0
 def open_file_dialog(self):
     """Ask the user for an output file and write the current results to it.

     NOTE(review): assumes QtGui.QFileDialog.getSaveFileName returns a
     ``(filename, selected_filter)`` pair (PySide/PyQt5-style binding) —
     confirm against the Qt binding in use.
     """
     # Renamed the second result from `filter` — it shadowed the builtin.
     filename, selected_filter = QtGui.QFileDialog.getSaveFileName(
         parent=self, caption='Select output file', dir='.', filter=FILE_FILTERS)
     if filename:
         seq = self.get_results()
         output.generate_output(filename, seq)
Example no. 7
0
    print('start training new model')

# training
# Switch the network into training mode (enables dropout / batch-norm
# updates) before handing it to the training loop.
model.train()
model, loss = train(train_dir, train_dataloader, model, optimizer, loss_fn,
                    num_epoches, use_gpu, quiet)
print('Training complete, final loss=', loss.data)
# Persist only the weights (state_dict), not the whole model object.
torch.save(model.state_dict(), model_file)
print('Model saved')

# generate output
if make_validation_output:
    # empty folder
    # Best-effort cleanup: delete every regular file already present in
    # the validation directory; failures are printed but do not abort.
    for the_file in os.listdir(validation_dir):
        file_path = os.path.join(validation_dir, the_file)
        try:
            if os.path.isfile(file_path):
                os.unlink(file_path)
        except Exception as e:
            print(e)

    generate_output(validation_dir, validation_dataloader, model, use_gpu)
    print('Validation output written')
    # evaluate validation outputs
    # NOTE(review): evaluate() appears to expect the normal/gt/mask
    # subdirectory layout produced by generate_output — confirm.
    mae = evaluate(validation_dir + '/normal', validation_dir + '/gt',
                   validation_dir + '/mask')
    print('MAE = ', mae)
else:
    # No validation requested: write outputs for the test split instead.
    generate_output(test_dir, test_dataloader, model, use_gpu)
    print('Test output written')