# Imports assumed by this snippet; `funcs`, `constants` and the
# run_tf_*_containment_process targets are defined elsewhere in the module.
import gc
import os
from multiprocessing import Process, Queue

import psutil
import tensorflow as tf  # uses the TF 1.x tf.logging API


def main():
    funcs.setup_logging()
    process = psutil.Process(os.getpid())
    model_res = funcs.initialize_model_res_dict()
    plots = funcs.create_plots()
    results = funcs.init_res_dict()
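    # Spread of noise_mix across the evaluated parameter sets; its inverse is
    # passed to funcs.update_plots as c_scale (assumed to normalise plot colours).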
    noise_mix_vals = [
        param['noise_mix'] for param in constants.PARAMS_TO_EVALUATE.values()
    ]
    c_scale = 1 / (max(noise_mix_vals) - min(noise_mix_vals))
    for prob in constants.PARAMS_TO_EVALUATE.keys():
        tf.logging.info(prob)
        params = funcs.get_params(prob)
        tf.logging.info(params)
        sess_name = funcs.get_session_name(prob, 0)
        results_queue = Queue()
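        # Run training in a separate "containment" process so its TensorFlow
        # state (and memory) is released when the child exits; the model results
        # and the nodes to preserve come back through the queue.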
        train_proc = Process(target=run_tf_train_containment_process,
                             args=(results_queue, params, sess_name))
        train_proc.start()
        model_res = results_queue.get()
        nodes_to_preserve = results_queue.get()
        funcs.optimize_frozen_graph(nodes_to_preserve)
        train_proc.join()
        tf.logging.info('memory used after train_proc.join: %s GB',
                        process.memory_info().rss / 1e9)
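        # The Hessian computation likewise runs in its own containment process.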
        hess_proc = Process(target=run_tf_hessian_containment_process,
                            args=(results_queue, nodes_to_preserve, model_res,
                                  params, sess_name))
        hess_proc.start()
        model_res = results_queue.get()
        hess_proc.join()
        tf.logging.info('memory used after hess_proc.join: %s GB',
                        process.memory_info().rss / 1e9)
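        # Explicitly collect garbage before plotting to keep memory usage down.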
        gc.collect()
        funcs.update_plots(model_res,
                           params,
                           sess_name,
                           plots,
                           c_scale=c_scale)
        funcs.add_sample_to_res(model_res, prob, 0, results)
        funcs.calc_stats(results, prob)
Example #2
def remove_old_posts(keyw):
    """ remove all previous HTML frame files """
    for filename in os.listdir(POSTSDIR):
        file_path = os.path.join(POSTSDIR, filename)
        try:
            if (os.path.isfile(file_path)
                    and (keyw in filename)
                    and filename.endswith('.html')):
                os.unlink(file_path)
        except Exception as e:
            print('Failed to delete %s. Reason: %s' % (file_path, e))
    return None


if __name__ == "__main__":
    modname, keyw = get_parms(sys.argv)
    logger = setup_logging(__name__, modname)

    with open('permalink.txt', 'w') as perm_file:
        os.makedirs(POSTSDIR, exist_ok=True)
        dot = showProgress()
        with open('postlist.txt', 'r') as in_file:
            lines = in_file.read().splitlines()
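            # Each line of postlist.txt is expected to be "<topic> <postlink>".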
            for line in lines:
                dot.show()
                topic, postlink = line.split(' ')
                follow(postlink, topic)
        dot.end()

    print('Done')
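
            # Update the checksum recorded for this framework in the package manifest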
            content = re.sub('(^ +"' + framework + '"\: ")([\w.]+)',
                             r'\g<1>' + checksum,
                             content,
                             flags=re.M)

        # Update the version
        content = re.sub(r'(^let latestVersion = ")([\w.]+)',
                         r'\g<1>' + version,
                         content,
                         flags=re.M)
        package_manifest_file.seek(0)
        package_manifest_file.write(content)
        package_manifest_file.truncate()


setup_logging()
version = str(sys.argv[1])
if not validate_version(version):
    logging.error("Version is invalid, exiting")
    sys.exit(1)

project_dir = os.getcwd()
xcframework_path = os.path.join(project_dir, "xcframeworks", "output", "XCF")
archive_path = os.path.join(project_dir, "xcframeworks", "output", "archives")
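# Relative path to a local checkout of the SPM manifest repository (assumed layout)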
spm_manifest_repo = './aws-sdk-ios-spm'

logging.info(f"Creating archives from {xcframework_path}")
create_archives(xcframework_path, archive_path, version)

logging.info(f"Calculating checksum from {archive_path}")
framework_to_checksum = create_checksum(archive_path, spm_manifest_repo,