Example #1
def phosolve(options):
    jobset = JobSet(name="Apollo 15 Metric Camera Photometry")
    jobset.save()

    # Chain the three phases: iteration jobs, then albedo polishing, then a final mipmap job.
    last_time_job_list = create_iteration_jobs(options, jobset)
    polish_albedo_jobs = create_albedo_jobs(jobset, options.albedo_jobs,
                                            options.polish_max_level,
                                            dependencies=last_time_job_list)
    create_mipmap_job(options.platefile, dependencies=polish_albedo_jobs)
Example #2
def create_snapshot_jobs(mmjobset=None,
                         interval=256,
                         input_platefile=None,
                         output_platefile=None,
                         lowest_snapshot_level=3,
                         activate=True):
    if not input_platefile:
        raise Exception("'input_platefile' argument required.")
    if not mmjobset:
        mmjobset = JobSet.objects.filter(name__contains="MipMap").latest('pk')
    snapshot_jobset = JobSet()
    snapshot_jobset.name = "mosaic snapshots (js%d)" % mmjobset.id
    snapshot_jobset.command = "snapshot"
    snapshot_jobset.save()

    i = 0
    transaction_range_start = None
    jobs_for_dependency = []
    endjob = None
    for mmjob in mmjobset.jobs.all().order_by('transaction_id'):
        i += 1
        jobs_for_dependency.append(mmjob)
        if transaction_range_start is None:  # 'is None', so a transaction_id of 0 is handled correctly
            transaction_range_start = mmjob.transaction_id
        if i % interval == 0:
            transaction_range = (transaction_range_start, mmjob.transaction_id)
            startjob, endjob = _build_snapshot_start_end(
                transaction_range, jobs_for_dependency, snapshot_jobset,
                endjob, input_platefile, output_platefile)
            startjob.context['lowest_snapshot_level'] = lowest_snapshot_level
            startjob.save()  # StartSnapshot JobCommand needs access to this value

            # Clear the transaction range and jobs-for-dependency list for the next batch
            transaction_range_start = mmjob.transaction_id + 1  # Set the start of the next snapshot
            jobs_for_dependency = []

    else:  # for/else runs once the loop finishes: snapshot whatever's left after the last full batch.
        if jobs_for_dependency:
            transaction_range = (transaction_range_start, mmjob.transaction_id)
            startjob, endjob = _build_snapshot_start_end(
                transaction_range, jobs_for_dependency, snapshot_jobset,
                endjob, input_platefile, output_platefile)
            startjob.context['lowest_snapshot_level'] = lowest_snapshot_level
            startjob.save()

    print "Setting priority to 1 and " + ("activating."
                                          if activate else "NOT activating.")
    snapshot_jobset.priority = 1
    snapshot_jobset.active = activate
    snapshot_jobset.save()
    return snapshot_jobset
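The batching logic above is the subtle part: mipmap jobs are grouped into runs of interval, each run defining the transaction-ID range covered by one snapshot, and the for/else clause snapshots whatever is left at the end. A minimal standalone sketch of the same pattern, with plain integers standing in for jobs (illustrative only, not part of the original code):

def batch_ranges(transaction_ids, interval):
    # Mirrors create_snapshot_jobs: collect ids until a batch is full, emit
    # the (start, end) range, then begin the next range at end + 1.
    ranges = []
    batch = []
    range_start = None
    for tid in transaction_ids:
        batch.append(tid)
        if range_start is None:
            range_start = tid
        if len(batch) == interval:
            ranges.append((range_start, tid))
            range_start = tid + 1
            batch = []
    if batch:  # the trailing partial batch, as in the for/else clause
        ranges.append((range_start, batch[-1]))
    return ranges

assert batch_ranges(range(1, 11), 4) == [(1, 4), (5, 8), (9, 10)]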
Example #4
def make_jobset(commandfile_name):
    commandfile = open(commandfile_name, 'r')
    jobset_name = commandfile.next().strip()  # first line names the JobSet; strip the trailing newline
    print "Creating JobSet."
    jobset = JobSet()
    jobset.name = jobset_name
    jobset.command = options.command
    jobset.priority = options.priority
    jobset.save()
    make_jobs(jobset, commandfile)
    print "JobSet %d created." % jobset.id
    if options.activate:
        print "Activating!"
        jobset.active = True
        jobset.save()
    else:
        print "Not activated."
Example #6
def create_scale2int8_jobset():
    assets = Asset.objects.filter(class_label='mocprocd image')
    jobset = JobSet()  # 'jobset' rather than 'set', which shadows the builtin
    jobset.name = "int8 scaling"
    jobset.command = 'scale2int8'
    jobset.output_asset_label = "scaled image int8"
    jobset.save()
    for asset in Tracker(iter=assets.iterator(), target=assets.count(), progress=True):
        jobset.assets.add(asset)
    return jobset
Example #7
def create_mipmap_jobs(n_jobs=None, basemap=True):
    # where n_jobs is the number of jobs to generate.  Default (None) builds jobs for all assets in the queryset.
    transaction_id_sequence.setval(1) # reset the transaction_id sequence
    mola_assets = Asset.objects.filter(class_label='mola basemap')
    hrsc_assets = Asset.objects.filter(class_label='hrsc')[:n_jobs]
    assets = itertools.chain(mola_assets, hrsc_assets)
    jobset = JobSet()
    jobset.name = "HRSC MipMap (%s)" % (n_jobs or 'all')
    jobset.command = "mipmap"
    jobset.priority = 3
    jobset.save()
    _build_mipmap_jobs(jobset, assets, count=mola_assets.count() + hrsc_assets.count())
    return jobset
Example #10
def create__mocproc_jobset():
    assets = Asset.objects.filter(class_label='MOC SDP', md5_check=True)
    jobset = JobSet()  # 'jobset' rather than 'set', which shadows the builtin
    jobset.name = 'MOC Staging'
    jobset.command = 'moc-stage'
    jobset.save()
    print "Saved JobSet %d.  Adding assets." % jobset.id
    for asset in Tracker(iter=assets.iterator(), target=assets.count(), progress=True):
        jobset.assets.add(asset)
    transaction.commit()
    return jobset
Example #11
def create_mipmap_jobs(n_jobs=None, basemap=False, platefile=PLATEFILE):
    # where n_jobs is the number of jobs to generate.  Default (None) builds jobs for all assets in the queryset.
    transaction_id_sequence.setval(1)  # reset the transaction_id sequence
    assets = Asset.objects.filter(class_label='hirise product',
                                  md5_check=True)[:n_jobs]
    jobset = JobSet()
    jobset.name = "hirise2plate (%s)" % (n_jobs or 'all')
    jobset.command = "hirise2plate"
    jobset.priority = 3
    jobset.save()
    if basemap:
        _build_jobs(MipMapCommand, jobset,
                    Asset.objects.filter(class_label='color basemap'),
                    platefile)
    _build_jobs(hirise2plateCommand, jobset, assets, platefile)
    return jobset
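Note how these builders implement their "all assets" default: the queryset is sliced with [:n_jobs], and a slice whose bound is None applies no LIMIT, so n_jobs=None selects everything. The same slice semantics on a plain list:

items = [1, 2, 3, 4, 5]
assert items[:3] == [1, 2, 3]
assert items[:None] == items  # a None bound is unbounded -- hence 'all'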
Example #14
def create_mipmap_jobs(n_jobs=None,
                       platefile=DEFAULT_PLATEFILE,
                       name=None,
                       options=None):
    # where n_jobs is the number of jobs to generate.  Default (None) builds jobs for all assets in the queryset.
    #transaction_id_sequence.setval(1) # reset the transaction_id sequence
    jobset = JobSet()
    jobset.name = name or "CTX MipMap (%s)" % (n_jobs or 'all')
    jobset.command = "ctx2plate"
    jobset.priority = 3
    jobset.save()
    _build_mipmap_jobs(jobset,
                       generate_urls(is_test=options.test),
                       platefile,
                       n_jobs=n_jobs,
                       options=options)
    return jobset
Example #15
def create_mipmap_jobs(n_jobs=None,
                       basemap=False,
                       platefile=PLATEFILE,
                       name=None):
    # where n_jobs is the number of jobs to generate.  Default (None) builds jobs for all assets in the queryset.
    transaction_id_sequence.setval(1)  # reset the transaction_id sequence
    assets = Asset.objects.filter(has_errors=False,
                                  class_label='MOC SDP')[:n_jobs]
    jobset = JobSet()
    jobset.name = name or "MOC MipMap (%s)" % (n_jobs or 'all')
    jobset.command = "moc2plate"
    jobset.priority = 3
    jobset.save()
    if basemap:
        _build_mipmap_jobs(jobset,
                           Asset.objects.filter(class_label='color basemap'),
                           platefile)
    _build_mipmap_jobs(jobset, assets, platefile)
    return jobset
Example #16
def main():
    parser = optparse.OptionParser()
    parser.add_option(
        '-p',
        '--platefile',
        action='store',
        dest='platefile',
        help=
        'Platefile URL to which the images should be written (e.g. pf://wwt10one/index/collectible.plate)'
    )
    parser.add_option('--njobs',
                      action="store",
                      dest="n_jobs",
                      type="int",
                      help="Limit the number of image2plate jobs generated")
    parser.add_option(
        '--name',
        action='store',
        dest='jobset_name',
        help="Override the default name for the image2plate jobset.")
    parser.add_option('--no-activate',
                      action='store_false',
                      dest='activate',
                      help='Do not activate the new jobsets after creation.')
    parser.add_option('--no-snapshots',
                      action='store_false',
                      dest='do_snapshots',
                      help="Don't create a snapshot JobSet.")
    parser.add_option('--downsample',
                      action='store',
                      type='int',
                      dest='downsample',
                      help="Percentage to downsample during preprocessing.")
    parser.add_option('--bandnorm',
                      action='store_true',
                      dest='bandnorm',
                      help="Perform ISIS band normalization.")
    parser.add_option(
        '--clipping',
        action='store',
        type='float',
        dest='clipping',
        help=
        "Clip to within N standard deviations of the mean intensity value (0 disables)"
    )
    parser.add_option(
        '--nocache',
        action='store_false',
        dest='use_cache',
        help='If there is a cached output cube, reprocess anyway')
    parser.add_option(
        '--use-cache',
        action='store_true',
        dest='use_cache',
        help='Use a cached copy of the preprocessed output, if one exists.')
    parser.add_option(
        '--percentages',
        dest='use_percentages',
        action='store_true',
        help=
        "Use percentages instead of values for the stretch step (overrides clipping setting)"
    )
    parser.add_option('--noplate',
                      dest='no_plate',
                      action='store_true',
                      help="Skip platefile insertion.  Just preprocess.")
    parser.add_option(
        '--test',
        dest='test',
        action='store_true',
        help=
        "Draw image urls from a particular set of test images rather than the PDS cumulative label (default)."
    )
    parser.add_option(
        '--augment',
        dest='augment',
        action='store',
        type='int',
        help=
        "Augment an existing jobset by creating new jobs only for products that do not already exist within the jobset.  Requires a jobset ID."
    )
    parser.set_defaults(
        platefile=DEFAULT_PLATEFILE,
        activate=True,
        jobset_name=None,  # --name stores to dest='jobset_name'
        do_snapshots=True,
        n_jobs=None,
        downsample=None,
        bandnorm=False,
        clipping=3.0,
        use_cache=False,
        use_percentages=False,
        test=False,
        augment=None,
        no_plate=False,
    )
    (options, args) = parser.parse_args()

    if options.augment:
        options.test = False
        options.activate = False
        options.do_snapshots = False
        mm_jobset = None  # nothing new to snapshot or activate on the augment path
        augment_jobset(options.augment,
                       platefile=options.platefile,
                       options=options)
    else:
        mm_jobset = create_mipmap_jobs(n_jobs=options.n_jobs,
                                       platefile=options.platefile,
                                       name=options.jobset_name,
                                       options=options)
    if options.do_snapshots:
        sn_jobset = create_snapshot_jobs(mmjobset=mm_jobset,
                                         input_platefile=options.platefile)
    else:
        sn_jobset = None
    if options.activate:
        for js in (mm_jobset, sn_jobset):
            if js:
                JobSet.activate(js)
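A quick sanity check of the flag pattern used throughout main(): --no-activate and --no-snapshots are store_false options whose booleans default to True via set_defaults, so each flag only switches its setting off. A self-contained sketch, independent of the script above:

import optparse

parser = optparse.OptionParser()
parser.add_option('--no-activate', action='store_false', dest='activate',
                  help='Do not activate the new jobsets after creation.')
parser.set_defaults(activate=True)

opts, _ = parser.parse_args([])                 # no flag: stays True
assert opts.activate is True
opts, _ = parser.parse_args(['--no-activate'])  # flag flips it to False
assert opts.activate is False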