def generate_mocproc_jobs(jobset):
    """Yield one saved mocproc Job per asset in *jobset*.

    Each job's arguments are a JSON-encoded argv list of the form
    ``[source, dest, '--map', projection]`` where the projection is chosen
    from the footprint's center latitude.
    """
    assets = jobset.assets
    progress = None
    for asset in assets.get_query_set():
        # Lazily create the tracker so count() is only evaluated once work starts.
        if progress is None:
            progress = Tracker(target=assets.count())
        job = Job(command=jobset.command)
        source = asset.file_path
        # The last two path components (volume dir + filename) identify the image.
        short_name = '/'.join(source.split('/')[-2:])
        cub_name = os.path.splitext(short_name)[0] + '.cub'
        dest = os.path.join(DESTPATH, asset.volume.lower(), cub_name)
        try:
            center_latitude = asset.footprint.centroid.y
        except AttributeError:
            # An incomplete or funky geometry has no usable centroid;
            # fall back to the centroid of its convex hull.
            center_latitude = asset.footprint.convex_hull.centroid.y
        projection = 'PolarStereographic' if abs(center_latitude) > 85 else 'Sinusoidal'
        job.arguments = json.dumps(
            ("%s %s --map %s" % (source, dest, projection)).split(' '))
        progress.next()
        job.jobset = jobset
        job.save()
        job.assets.add(asset)
        yield job
def main(): index = build_index() mocprocd_assets = Asset.objects.filter(class_label='mocprocd image') print "Fixing %d mocproc'd assets" % mocprocd_assets.count() i = 0 for asset in Tracker(mocprocd_assets, target=mocprocd_assets.count(), progress=True): try: if index[asset.product_id].strip() == 'ERRORS': asset.has_errors = True asset.save() i += 1 except KeyError: print "Error: %s not in index!" % asset.product_id print "%d assets have errors" % i int8_assets = Asset.objects.filter(class_label='scaled image int8') print "Fixing %d int8 assets" % int8_assets.count() i = 0 for asset in Tracker(int8_assets, target=int8_assets.count(), progress=True): asset.has_errors = asset.creator_job.assets.all()[0].has_errors if asset.has_errors: asset.save() i += 1 print "%d assets have errors" % i print "Done!"
def generate_index_rows():
    """Yield every record from each volume's ``imgindex`` label/table pair."""
    for volume in Tracker(list(generate_volnames()), progress=True):
        index_dir = os.path.join(moc_rootpath, volume, 'index')
        label_path = os.path.join(index_dir, 'imgindex.lbl')
        table_path = os.path.join(index_dir, 'imgindex.tab')
        for record in Table(label_path, table_path):
            yield record
def _build_mipmap_jobs(jobset, urls, platefile, n_jobs=None, options=None, initial_transaction_id=0, presave_callback=None): if options: downsample = options.downsample bandnorm = options.bandnorm clipping = options.clipping else: downsample = None bandnorm = False clipping = 0 transaction_ids = gen_transaction_ids(initial_transaction_id) i = 0 for url in Tracker(iter=urls, target=27859, progress=True): job = Job() job.transaction_id = transaction_ids.next() job.command = 'ctx2plate' job.arguments = job.wrapped().build_arguments( url=url, platefile=platefile, transaction_id=job.transaction_id, downsample=downsample) if bandnorm: job.arguments.append('--bandnorm') if not options.use_cache: job.arguments.append('--nocache') if options.use_percentages: job.arguments.append('--percentages') if options.no_plate: job.arguments.append('--noplate') job.arguments.append('--clipping=%f' % clipping) job.jobset = jobset if presave_callback: job = presave_callback(job) job.save() i += 1 if n_jobs and i >= n_jobs: break print "Created %d jobs." % i
def populate_mocproc_jobs(jobset):
    """Attach freshly generated mocproc jobs to *jobset*, commit, and return
    the jobset's jobs manager."""
    # NOTE(review): generate_jobs is not defined in this chunk — presumably
    # an alias for generate_mocproc_jobs; confirm against the full module.
    job_stream = generate_jobs(jobset)
    tracked = Tracker(iter=job_stream, target=jobset.assets.count(),
                      progress=True)
    for job in tracked:
        jobset.jobs.add(job)
    transaction.commit()
    return jobset.jobs
import urllib
import re
import os
import sys

from BeautifulSoup import BeautifulSoup as Soup

sys.path.insert(0, '/home/ted/alderaan-wc/')
from ngt.utils.tracker import Tracker

# Mirror each MOC volume's index files from the PDS imaging node into
# targetpath/<volume>/index/.
rooturl = 'http://pds-imaging.jpl.nasa.gov/data/mgs-m-moc-na_wa-2-sdp-l0-v1.0/'
targetpath = '/home/ted/data/moc_meta'
indexfiles = ['imgindx.lbl', 'imgindx.tab', 'imgindex.lbl', 'imgindex.tab']

root = urllib.urlopen(rooturl)
soup = Soup(root.read())

# Volume dirs look like "mgsc_0123/"; the trailing slash is optional in hrefs.
volpattern = re.compile(r'^mgsc_\d+/?$')
dirlinks = soup.findAll('a', href=volpattern)

for voldir in Tracker(iter=[l['href'] for l in dirlinks]):
    try:
        target_dir = os.path.join(targetpath, voldir, 'index')
        os.makedirs(target_dir)
    except os.error:
        pass  # best-effort: directory probably exists already
    # Fix: the regex accepts hrefs without a trailing slash, which previously
    # produced broken URLs like ".../mgsc_0123index/imgindx.lbl". Normalize so
    # the URL join is always correct.
    volurl = voldir if voldir.endswith('/') else voldir + '/'
    for ifile in indexfiles:
        img_response = urllib.urlopen(rooturl + volurl + 'index/' + ifile)
        if img_response.getcode() == 200:
            out = open(os.path.join(target_dir, ifile), 'w')
            try:
                # Fix: close the file even if the read/write fails, so a bad
                # response doesn't leak a file handle mid-run.
                out.write(img_response.read())
            finally:
                out.close()