Example #1
def do_polysel(parameters):
    name = parameters.myparams({"name": str}, ['tasks'])["name"]
    workdir = parameters.myparams({"workdir": str}, ['tasks'])["workdir"]
    result = {}
    if stage_required('polysel'):
        poly_start = time.time()
        polysel_offset = utils.slurm_cpu_time_start()
        p = polysel.Polysel(parameters)
        result['polyfile'] = p.run()
        poly_finish = time.time()
        result['duration'] = poly_finish - poly_start
        logger.info("\tPolysel in %s", utils.str_time(result['duration']))

        if parameters.myparams({
                "collect_cputime": [str]
        }, ['tasks', 'polyselect']).get("collect_cputime"):
            logger.info("--- Collecting polysel cumulative CPUTime ---")
            polysel_slurm_outfile = str(
                os.path.join(workdir, name + ".slurm_polysel"))
            result['cputime'] = utils.slurm_cpu_time_end(
                polysel_offset, polysel_slurm_outfile)
            logger.info("\tPolysel cumulative CPUTime %s",
                        utils.str_time(result['cputime']))
        else:
            result['cputime'] = 0

        checkpoint_stage('polysel', result)
    else:
        result = load_stage('polysel')

    return result
Example #2
def do_polysel(parameters):
    name = parameters.myparams({"name": str}, ['tasks'])["name"]
    workdir = parameters.myparams({"workdir": str}, ['tasks'])["workdir"]
    result = {}
    if stage_required('polysel'):
        poly_start = time.time()
        polysel_offset = utils.slurm_cpu_time_start()
        p = polysel.Polysel(parameters)
        result['polyfile'] = p.run()
        poly_finish = time.time()
        result['duration'] = poly_finish - poly_start
        logger.info("\tPolysel in %s", utils.str_time(result['duration']))

        if parameters.myparams({"collect_cputime": [str]}, ['tasks', 'polyselect']).get("collect_cputime"):
            logger.info("--- Collecting polysel cumulative CPUTime ---")
            polysel_slurm_outfile = str(os.path.join(workdir, name + ".slurm_polysel"))
            result['cputime'] = utils.slurm_cpu_time_end(polysel_offset, polysel_slurm_outfile)
            logger.info("\tPolysel cumulative CPUTime %s", utils.str_time(result['cputime']))
        else:
            result['cputime'] = 0
        
        checkpoint_stage('polysel', result)
    else:
        result = load_stage('polysel')

    return result
Example #3
    def write_intervention(self, start, end, wait, last_trade, this_trade):
        # Write one row with this intervention's identifiers, state flags,
        # timing, and the previous/current trade details to the intervention log.
        utils.write([
            self.m_name,
            self.m_id,
            self.intervention_num,
            self.buy_state,
            self.sell_state,
            self.buy_eats_block,
            self.sell_eats_block,
            self.condition,
            self.success,
            utils.str_time(start),
            utils.str_time(end),
            wait,
            last_trade['initiate_ordertype'],
            last_trade['tradeid'],
            last_trade['datetime'],
            last_trade['quantity'],
            last_trade['tradeprice'],
            last_trade['total'],
            this_trade['order_id'],
            this_trade['tradeid'],
            this_trade['datetime'],
            this_trade['quantity'],
            this_trade['tradeprice'],
            this_trade['total'],
        ], self.intervention_file)
Example #4
def do_sieve(parameters, polysel_result):
    result = {}
    name = parameters.myparams({"name": str}, ['tasks'])["name"]
    workdir = parameters.myparams({"workdir": str}, ['tasks'])["workdir"]
    # msieve_filter is the last checkpointed stage in this block
    if stage_required('sieve'):
        sieve_start = time.time()
        sieving_offset = utils.slurm_cpu_time_start()
        s = sieving.Sieve(parameters, polysel_result['polyfile'])
        relation_files = s.run()
        result['relation_files'] = relation_files
        sieve_finish = time.time()
        result['duration'] = sieve_finish - sieve_start
        logger.info("\tSieving in %s", utils.str_time(result['duration']))

        if parameters.myparams({
                "collect_cputime": [str]
        }, ['tasks', 'sieve']).get("collect_cputime"):
            logger.info("--- Collecting sieving cumulative CPUTime ---")
            sieving_slurm_outfile = str(
                os.path.join(workdir, name + ".slurm_sieving"))
            result['cputime'] = utils.slurm_cpu_time_end(
                sieving_offset, sieving_slurm_outfile)
            logger.info("\tSieving cumulative CPUTime %s",
                        utils.str_time(result['cputime']))
        else:
            result['cputime'] = 0

        relation_files_file = str(
            os.path.join(workdir, name + '.relation_files'))
        logger.info("Saving used relation files from this run to %s",
                    relation_files_file)
        with open(relation_files_file, 'w') as f:
            for line in relation_files:
                f.write(line + '\n')

        if s.completed_factorization:
            logger.info(
                "Completed factorization using trial division. Check log file for factors."
            )
            utils.update_checkpoint({'trial_division': True})

        checkpoint_stage('sieve', result)
    else:
        result = load_stage('sieve')

    post_sieve = parameters.myparams({
        'post_sieve': None
    }, ['commands']).get('post_sieve')
    if post_sieve is not None:
        logger.info('Post-sieve command %s', post_sieve)
        utils.run_command(post_sieve, logger=logger)

    return result
Example #5
    def run_polysel2(self, parameters, polysel1_bestpolys):
        # get parameters for polysel2
        polysel2_paths = ["tasks", "polyselect", "polyselect2", "polyselect_ropt"]
        polysel2_program = cadoprograms.PolyselectRopt
        polysel2_keys = polysel2_program.get_accepted_keys()
        polysel2_keys.update({"batch_size": int, "N": int, "I": int, "alim": int, "rlim": int})
        polysel2_params = parameters.myparams(polysel2_keys, polysel2_paths)
        polysel2_params.update({"outputdir": str(os.path.join(self.workdir, self.name + ".upload"))})
        if not os.path.exists(polysel2_params["outputdir"]):
            logger.info("Creating directory for polysel2 output files %s", polysel2_params["outputdir"])
            os.makedirs(polysel2_params["outputdir"])

        logger.info("Starting polysel2")
        start_time = time.time()

        poly_path = os.path.join(self.workdir, "polysel2_input_%d" % time.time())
        generator = self.generate_polysel2_progparams(polysel2_params, polysel1_bestpolys)

        polys = []
        while True:
            new_polys = self.submit_polysel2_batch(generator, polysel2_params)
            if len(new_polys) == 0:
                break
            polys += new_polys
        
        polysel2_bestpoly = max(polys, key=lambda s: s.MurphyE)
        logger.debug("Polysel2 best poly: %s", polysel2_bestpoly)

        poly_file = os.path.join(self.workdir, self.name + ".polyselect2.poly")
        polysel2_bestpoly.create_file(poly_file)
        logger.info("Polysel2: Saving best polynomial at %s", poly_file)
        logger.info("Polysel2 finished in %s", utils.str_time(time.time() - start_time))
        return poly_file
Example #6
    def run_polysel1(self, parameters):
        logger.info("Starting polysel1")
        start_time = time.time()

        # get parameters for polysel1
        polysel1_paths = ["tasks", "polyselect", "polyselect1", "polyselect2l"]
        polysel1_program = cadoprograms.Polyselect2l
        polysel1_keys = polysel1_program.get_accepted_keys()
        polysel1_keys.update({"batch_size": int, "nrkeep": int, "admin": 0, "admax": int, "adrange": int})
        polysel1_params = parameters.myparams(polysel1_keys, polysel1_paths)
        polysel1_params.update({"outputdir": str(os.path.join(self.workdir, self.name + ".upload"))})
        if not os.path.exists(polysel1_params["outputdir"]):
            logger.info("Creating directory for polysel1 output files %s", polysel1_params["outputdir"])
            os.makedirs(polysel1_params["outputdir"])

        nrkeep = polysel1_params["nrkeep"]
        batch_size = polysel1_params["batch_size"]

        generator = self.generate_polysel1_task_commands(polysel1_params)
        polys = []
        while True:
            new_polys = self.submit_polysel1_batch(generator, polysel1_params)
            if len(new_polys) == 0:
                break
            polys += new_polys

        polysel1_bestpolys = heapq.nsmallest(nrkeep, polys, key=lambda s: s.lognorm)
        if not polysel1_bestpolys:
            logger.critical("No polys received from polysel1. Exiting...")
            sys.exit(0)
        
        logger.info("Found best %d polynomials", len(polysel1_bestpolys))
        logger.info("Polysel1 finished in %s", utils.str_time(time.time() - start_time))
        return polysel1_bestpolys
Example #7
def do_sieve(parameters, polysel_result):
    result = {}
    name = parameters.myparams({"name": str}, ['tasks'])["name"]
    workdir = parameters.myparams({"workdir": str}, ['tasks'])["workdir"]
    # msieve_filter is the last checkpointed stage in this block
    if stage_required('sieve'):
        sieve_start = time.time()
        sieving_offset = utils.slurm_cpu_time_start()
        s = sieving.Sieve(parameters, polysel_result['polyfile'])
        relation_files = s.run()
        result['relation_files'] = relation_files
        sieve_finish = time.time()
        result['duration'] = sieve_finish - sieve_start
        logger.info("\tSieving in %s", utils.str_time(result['duration']))

        if parameters.myparams({"collect_cputime": [str]}, ['tasks', 'sieve']).get("collect_cputime"):
            logger.info("--- Collecting sieving cumulative CPUTime ---")
            sieving_slurm_outfile = str(os.path.join(workdir, name + ".slurm_sieving"))
            result['cputime'] = utils.slurm_cpu_time_end(sieving_offset, sieving_slurm_outfile)
            logger.info("\tSieving cumulative CPUTime %s", utils.str_time(result['cputime']))
        else:
            result['cputime'] = 0

        relation_files_file = str(os.path.join(workdir, name + '.relation_files'))
        logger.info("Saving used relation files from this run to %s", relation_files_file)
        with open(relation_files_file, 'w') as f:
            for line in relation_files:
                f.write(line+'\n')

        if s.completed_factorization:
            logger.info("Completed factorization using trial division. Check log file for factors.")
            utils.update_checkpoint({'trial_division': True})

        checkpoint_stage('sieve', result)
    else:
        result = load_stage('sieve')

    post_sieve = parameters.myparams({'post_sieve': None}, ['commands']).get('post_sieve')
    if post_sieve is not None:
        logger.info('Post-sieve command %s', post_sieve)
        utils.run_command(post_sieve, logger=logger)
    
    return result
Example #8
    def run_status_thread(self):
        while not self.is_finished():

            # give the user a status update
            stage = self.get_stage()
            if stage == 'sieve':
                elapsed = time.time() - self.start_time
                rels_total = self.get_rels_total()
                rels_wanted = self.get_rels_wanted()

                rate = rels_total / elapsed
                eta = 0
                if rate > 0:
                    eta = (rels_wanted - rels_total) / rate
                    logger.info(
                        "Status: %d/%d relations at %d rels/sec - elapsed: %s, ETA: %s",
                        rels_total, rels_wanted, int(rate),
                        utils.str_time(elapsed), utils.str_time(eta))
            elif stage == 'filter':
                logger.info("Status: performing filtering")

            sleep(10)
Example #9
    def write_monitor(self, now, past_volumes, past_totals, volume, total,
                      last_trade, buy_order, sell_order):
        # Write one row with this monitoring snapshot: combined and per-side
        # (buy/sell) past volumes and totals, the last trade, and the current
        # buy/sell orders.
        utils.write([
            self.m_name,
            self.m_id,
            self.intervention_num,
            self.monitor_num,
            self.buy_state,
            self.sell_state,
            self.buy_eats_block,
            self.sell_eats_block,
            self.condition,
            self.success,
            utils.str_time(now),
            past_volumes['buy'][0] + past_volumes['sell'][0],
            past_volumes['buy'][1] + past_volumes['sell'][1],
            past_volumes['buy'][2] + past_volumes['sell'][2],
            past_totals['buy'][0] + past_totals['sell'][0],
            past_totals['buy'][1] + past_totals['sell'][1],
            past_totals['buy'][2] + past_totals['sell'][2],
            volume['buy'] + volume['sell'],
            total['buy'] + total['sell'],
            past_volumes['buy'][0],
            past_volumes['buy'][1],
            past_volumes['buy'][2],
            past_totals['buy'][0],
            past_totals['buy'][1],
            past_totals['buy'][2],
            volume['buy'],
            total['buy'],
            past_volumes['sell'][0],
            past_volumes['sell'][1],
            past_volumes['sell'][2],
            past_totals['sell'][0],
            past_totals['sell'][1],
            past_totals['sell'][2],
            volume['sell'],
            total['sell'],
            last_trade['initiate_ordertype'],
            last_trade['tradeid'],
            last_trade['datetime'],
            last_trade['quantity'],
            last_trade['tradeprice'],
            last_trade['total'],
            buy_order['buyprice'],
            buy_order['total'],
            buy_order['quantity'],
            sell_order['sellprice'],
            sell_order['total'],
            sell_order['quantity'],
        ], self.monitor_file)
Example #10
    def run_polysel2(self, parameters, polysel1_bestpolys):
        # get parameters for polysel2
        polysel2_paths = [
            "tasks", "polyselect", "polyselect2", "polyselect_ropt"
        ]
        polysel2_program = cadoprograms.PolyselectRopt
        polysel2_keys = polysel2_program.get_accepted_keys()
        polysel2_keys.update({
            "batch_size": int,
            "N": int,
            "I": int,
            "alim": int,
            "rlim": int
        })
        polysel2_params = parameters.myparams(polysel2_keys, polysel2_paths)
        polysel2_params.update({
            "outputdir":
            str(os.path.join(self.workdir, self.name + ".upload"))
        })
        if not os.path.exists(polysel2_params["outputdir"]):
            logger.info("Creating directory for polysel2 output files %s",
                        polysel2_params["outputdir"])
            os.makedirs(polysel2_params["outputdir"])

        logger.info("Starting polysel2")
        start_time = time.time()

        poly_path = os.path.join(self.workdir,
                                 "polysel2_input_%d" % time.time())
        generator = self.generate_polysel2_progparams(polysel2_params,
                                                      polysel1_bestpolys)

        polys = []
        while True:
            new_polys = self.submit_polysel2_batch(generator, polysel2_params)
            if len(new_polys) == 0:
                break
            polys += new_polys

        polysel2_bestpoly = max(polys, key=lambda s: s.MurphyE)
        logger.debug("Polysel2 best poly: %s", polysel2_bestpoly)

        poly_file = os.path.join(self.workdir, self.name + ".polyselect2.poly")
        polysel2_bestpoly.create_file(poly_file)
        logger.info("Polysel2: Saving best polynomial at %s", poly_file)
        logger.info("Polysel2 finished in %s",
                    utils.str_time(time.time() - start_time))
        return poly_file
Example #11
    def run_polysel1(self, parameters):
        logger.info("Starting polysel1")
        start_time = time.time()

        # get parameters for polysel1
        polysel1_paths = ["tasks", "polyselect", "polyselect1", "polyselect2l"]
        polysel1_program = cadoprograms.Polyselect2l
        polysel1_keys = polysel1_program.get_accepted_keys()
        polysel1_keys.update({
            "batch_size": int,
            "nrkeep": int,
            "admin": 0,
            "admax": int,
            "adrange": int
        })
        polysel1_params = parameters.myparams(polysel1_keys, polysel1_paths)
        polysel1_params.update({
            "outputdir":
            str(os.path.join(self.workdir, self.name + ".upload"))
        })
        if not os.path.exists(polysel1_params["outputdir"]):
            logger.info("Creating directory for polysel1 output files %s",
                        polysel1_params["outputdir"])
            os.makedirs(polysel1_params["outputdir"])

        nrkeep = polysel1_params["nrkeep"]
        batch_size = polysel1_params["batch_size"]

        generator = self.generate_polysel1_task_commands(polysel1_params)
        polys = []
        while True:
            new_polys = self.submit_polysel1_batch(generator, polysel1_params)
            if len(new_polys) == 0:
                break
            polys += new_polys

        polysel1_bestpolys = heapq.nsmallest(nrkeep, polys,
                                             key=lambda s: s.lognorm)
        if not polysel1_bestpolys:
            logger.critical("No polys received from polysel1. Exiting...")
            sys.exit(0)

        logger.info("Found best %d polynomials", len(polysel1_bestpolys))
        logger.info("Polysel1 finished in %s",
                    utils.str_time(time.time() - start_time))
        return polysel1_bestpolys
Example #12
def do_sqrt(parameters, linalg_result):
    result = {}
    if stage_required('sqrt'):

        sqrt_start = time.time()
        result['factors'] = sqrt.run(parameters)
        sqrt_finish = time.time()
        result['duration'] = sqrt_finish - sqrt_start
        logger.info("\tSqrt in %s", utils.str_time(result['duration']))

        checkpoint_stage('sqrt', result)
    else:
        result = load_stage('sqrt')

    post_sqrt = parameters.myparams({'post_sqrt': None}, ['commands']).get('post_sqrt')
    if post_sqrt is not None:
        logger.info('Post-sqrt command %s', post_sqrt)
        utils.run_command(post_sqrt, logger=logger)

    return result
Example #13
def do_sqrt(parameters, linalg_result):
    result = {}
    if stage_required('sqrt'):

        sqrt_start = time.time()
        result['factors'] = sqrt.run(parameters)
        sqrt_finish = time.time()
        result['duration'] = sqrt_finish - sqrt_start
        logger.info("\tSqrt in %s", utils.str_time(result['duration']))

        checkpoint_stage('sqrt', result)
    else:
        result = load_stage('sqrt')

    post_sqrt = parameters.myparams({
        'post_sqrt': None
    }, ['commands']).get('post_sqrt')
    if post_sqrt is not None:
        logger.info('Post-sqrt command %s', post_sqrt)
        utils.run_command(post_sqrt, logger=logger)

    return result
Example #14
def do_linalg(parameters, sieve_result):
    result = {}
    # Check the rare case that factorization was completed in msieve's filtering
    if utils.get_checkpoint().get('trial_division') is not None:
        result['duration'] = 0
    elif stage_required('linalg'):
        linalg_start = time.time()
        linalg.run(parameters)
        linalg_finish = time.time()
        result['duration'] = linalg_finish - linalg_start
        logger.info("\tLinalg in %s", utils.str_time(result['duration']))

        checkpoint_stage('linalg', result)
    else:
        result = load_stage('linalg')

    post_linalg = parameters.myparams({'post_linalg': None}, ['commands']).get('post_linalg')
    if post_linalg is not None:
        logger.info('Post-linalg command %s', post_linalg)
        utils.run_command(post_linalg, logger=logger)

    return result
Example #15
def do_linalg(parameters, sieve_result):
    result = {}
    # Check the rare case that factorization was completed in msieve's filtering
    if utils.get_checkpoint().get('trial_division') is not None:
        result['duration'] = 0
    elif stage_required('linalg'):
        linalg_start = time.time()
        linalg.run(parameters)
        linalg_finish = time.time()
        result['duration'] = linalg_finish - linalg_start
        logger.info("\tLinalg in %s", utils.str_time(result['duration']))

        checkpoint_stage('linalg', result)
    else:
        result = load_stage('linalg')

    post_linalg = parameters.myparams({
        'post_linalg': None
    }, ['commands']).get('post_linalg')
    if post_linalg is not None:
        logger.info('Post-linalg command %s', post_linalg)
        utils.run_command(post_linalg, logger=logger)

    return result
Example #16
def main():
    
    signal.signal(signal.SIGINT, signal_handler)

    parser = argparse.ArgumentParser(description="Integer Factorization with "
                                         "the Number Field Sieve")
    parser.add_argument("parameters", help="A file with the parameters to use")
    parser.add_argument("options", metavar="OPTION", help="An option as in "
                            "parameter file (format: key=value)", nargs="*")
    parser.add_argument('--resume','-r', help="checkpoint file to resume from")
    parser.add_argument('--stage','-s', action='append', help="stage to complete ('start','polysel','sieving','linalg','complete'), add + to run all subsequent stages")
    
    
    args = parser.parse_args()
    parameters = utils.get_params(args.parameters, args.options)

    name = parameters.myparams({"name": str}, ['tasks'])["name"]
    workdir = parameters.myparams({"workdir": str}, ['tasks'])["workdir"]

    if not os.path.exists(workdir):
        logger.info("Creating work directory %s", workdir)
        os.makedirs(workdir)

    setup_logging(workdir, name)

    # Load or create initial checkpoint
    checkpoint_file = args.resume
    if not checkpoint_file:
        checkpoint_file = os.path.join(workdir, "checkpoint.dat")
    utils.init_checkpoint(checkpoint_file)

    # set parameters that are unlikely to change from run to run, such as filenames and directories
    parameters = set_static_parameters(parameters)

    # check that all required parameters are present
    params = check_parameters(parameters)
    utils.update_checkpoint({'params': params})

    # set parameters that will likely change from run to run
    parameters = set_dynamic_parameters(parameters)

    # Write a snapshot of the parameters to a file
    snapshot_filename = "%s/%s.parameters_snapshot" % (workdir, name)
    with open(snapshot_filename, "w") as snapshot_file:
        logger.debug("Writing parameter snapshot to %s", snapshot_filename)
        snapshot_file.write(str(parameters))
        snapshot_file.write("\n")

    start_time = time.time()

    # For each checkpointed stage, check if the stage should be run again.
    # A stage should be run again under the following circumstances:
    #   - The user manually requested to run the stage
    #   - No checkpoint exists for the stage
    #   - A stage on which this stage depends will be re-run
    #   - Parameters on which the stage depends have been changed since the last run
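    # (A minimal sketch of this decision procedure appears after this example.)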
    if args.stage:
        for stage in args.stage:
            if stage.endswith('+'):
                stage = stage[:-1]
                if stage not in stages:
                    continue
                stage_required.manual_stages = range(stages.index(stage), len(stages))
                break
            if stage not in stages:
                args.stage.remove(stage)
            else:
                stage_required.manual_stages.append(stages.index(stage))
    else:
        # since no stages were specified to run manually, choose the first stage based on the checkpoint file
        stage_required.manual_stages = set_manual_stages(params)

    # Run polynomial selection
    polysel_result = do_polysel(parameters)

    # Run sieving
    sieve_result = do_sieve(parameters, polysel_result)

    # Run linalg
    linalg_result = do_linalg(parameters, sieve_result)

    # Run square root
    sqrt_result = do_sqrt(parameters, linalg_result)

    factoring_duration = polysel_result['duration'] + sieve_result['duration'] + linalg_result['duration'] + sqrt_result['duration']
    logger.info('Factoring completed in %s', utils.str_time(factoring_duration))
    logger.info('\tPolysel in real/cpu %s/%s', utils.str_time(polysel_result['duration']), utils.str_time(polysel_result['cputime']))
    logger.info("\tSieving in real/cpu %s/%s", utils.str_time(sieve_result['duration']), utils.str_time(sieve_result['cputime']))
    logger.info("\tLinalg in %s", utils.str_time(linalg_result['duration']))
    logger.info("\tSqrt in %s", utils.str_time(sqrt_result['duration']))
    logger.info("\tFactors %s", ','.join(sqrt_result['factors']))

    post_factor = parameters.myparams({'post_factor': None}, ['commands']).get('post_factor')
    if post_factor is not None:
        logger.info('Post-factor command %s', post_factor)
        utils.run_command(post_factor, logger=logger)
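
Note on the stage re-run logic: the comment block inside main() above lists the four conditions under which a checkpointed stage is re-run, but the stage_required helper itself is not shown in these examples. The sketch below is a minimal, self-contained illustration of how such a helper could combine those conditions; it is not the project's actual implementation. The names stages and manual_stages mirror names used in the examples, while checkpoint and changed_params are hypothetical stand-ins for the checkpoint state and parameter tracking that utils handles in the real code.

# Sketch only: one way to implement the re-run rules described in main().
stages = ['start', 'polysel', 'sieve', 'linalg', 'sqrt']  # assumed stage order
manual_stages = set()   # stage indices requested via --stage/-s
checkpoint = {}         # stage name -> saved result (cf. checkpoint_stage/load_stage)
changed_params = set()  # stage names whose parameters differ from the last run


def stage_required(stage):
    index = stages.index(stage)
    # 1. The user manually requested to run the stage.
    if index in manual_stages:
        return True
    # 2. No checkpoint exists for the stage.
    if stage not in checkpoint:
        return True
    # 3. A stage on which this stage depends (any earlier stage) will be re-run.
    if any(stage_required(earlier) for earlier in stages[:index]):
        return True
    # 4. Parameters on which the stage depends changed since the last run.
    return stage in changed_params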
Example #17
    def run_status_thread(self):
        while not self.is_finished():

            # give the user a status update
            stage = self.get_stage()
            if stage == 'sieve':
                elapsed = time.time() - self.start_time
                rels_total = self.get_rels_total()
                rels_wanted = self.get_rels_wanted()

                rate = rels_total / elapsed
                eta = 0
                if rate > 0:
                    eta = (rels_wanted - rels_total) / rate
                    logger.info("Status: %d/%d relations at %d rels/sec - elapsed: %s, ETA: %s", rels_total, rels_wanted, int(rate), utils.str_time(elapsed), utils.str_time(eta))
            elif stage == 'filter':
                logger.info("Status: performing filtering")
                    
            sleep(10)
Example #18
def main():

    signal.signal(signal.SIGINT, signal_handler)

    parser = argparse.ArgumentParser(description="Integer Factorization with "
                                     "the Number Field Sieve")
    parser.add_argument("parameters", help="A file with the parameters to use")
    parser.add_argument("options",
                        metavar="OPTION",
                        help="An option as in "
                        "parameter file (format: key=value)",
                        nargs="*")
    parser.add_argument('--resume',
                        '-r',
                        help="checkpoint file to resume from")
    parser.add_argument(
        '--stage',
        '-s',
        action='append',
        help=
        "stage to complete ('start','polysel','sieving','linalg','complete'), add + to run all subsequent stages"
    )

    args = parser.parse_args()
    parameters = utils.get_params(args.parameters, args.options)

    name = parameters.myparams({"name": str}, ['tasks'])["name"]
    workdir = parameters.myparams({"workdir": str}, ['tasks'])["workdir"]

    if not os.path.exists(workdir):
        logger.info("Creating work directory %s", workdir)
        os.makedirs(workdir)

    setup_logging(workdir, name)

    # Load or create initial checkpoint
    checkpoint_file = args.resume
    if not checkpoint_file:
        checkpoint_file = os.path.join(workdir, "checkpoint.dat")
    utils.init_checkpoint(checkpoint_file)

    # set parameters that are unlikely to change from run to run, such as filenames and directories
    parameters = set_static_parameters(parameters)

    # check that all required parameters are present
    params = check_parameters(parameters)
    utils.update_checkpoint({'params': params})

    # set parameters that will likely change from run to run
    parameters = set_dynamic_parameters(parameters)

    # Write a snapshot of the parameters to a file
    snapshot_filename = "%s/%s.parameters_snapshot" % (workdir, name)
    with open(snapshot_filename, "w") as snapshot_file:
        logger.debug("Writing parameter snapshot to %s", snapshot_filename)
        snapshot_file.write(str(parameters))
        snapshot_file.write("\n")

    start_time = time.time()

    # For each checkpointed stage, check if the stage should be run again.
    # A stage should be run again under the following circumstances:
    #   - The user manually requested to run the stage
    #   - No checkpoint exists for the stage
    #   - A stage on which this stage depends will be re-run
    #   - Parameters on which the stage depends have been changed since the last run
    if args.stage:
        for stage in args.stage:
            if stage.endswith('+'):
                stage = stage[:-1]
                if stage not in stages:
                    continue
                stage_required.manual_stages = range(stages.index(stage),
                                                     len(stages))
                break
            if stage not in stages:
                args.stage.remove(stage)
            else:
                stage_required.manual_stages.append(stages.index(stage))
    else:
        # since no stages were specified to run manually, choose the first stage based on the checkpoint file
        stage_required.manual_stages = set_manual_stages(params)

    # Run polynomial selection
    polysel_result = do_polysel(parameters)

    # Run sieving
    sieve_result = do_sieve(parameters, polysel_result)

    # Run linalg
    linalg_result = do_linalg(parameters, sieve_result)

    # Run square root
    sqrt_result = do_sqrt(parameters, linalg_result)

    factoring_duration = polysel_result['duration'] + sieve_result[
        'duration'] + linalg_result['duration'] + sqrt_result['duration']
    logger.info('Factoring completed in %s',
                utils.str_time(factoring_duration))
    logger.info('\tPolysel in real/cpu %s/%s',
                utils.str_time(polysel_result['duration']),
                utils.str_time(polysel_result['cputime']))
    logger.info("\tSieving in real/cpu %s/%s",
                utils.str_time(sieve_result['duration']),
                utils.str_time(sieve_result['cputime']))
    logger.info("\tLinalg in %s", utils.str_time(linalg_result['duration']))
    logger.info("\tSqrt in %s", utils.str_time(sqrt_result['duration']))
    logger.info("\tFactors %s", ','.join(sqrt_result['factors']))

    post_factor = parameters.myparams({
        'post_factor': None
    }, ['commands']).get('post_factor')
    if post_factor is not None:
        logger.info('Post-factor command %s', post_factor)
        utils.run_command(post_factor, logger=logger)