Example #1
File: test.py Project: banados/lsd
def worker(x):
	""" Example with context manager """
	with locking.lock(lockfn, timeout=None):
		with open(sumfile, "r+") as fp:
			n = int(fp.readline().strip()) + 1
			fp.seek(0)
			fp.truncate()
			fp.write(str(n) + "\n")
	yield n
Example #2
File: test.py Project: gregreen/lsd
def worker(x):
    """ Example with context manager """
    with locking.lock(lockfn, timeout=None):
        with open(sumfile, "r+") as fp:
            n = int(fp.readline().strip()) + 1
            fp.seek(0)
            fp.truncate()
            fp.write(str(n) + "\n")
    yield n
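Both snippets above reference names defined elsewhere in test.py: lockfn and sumfile are module-level paths, locking is presumably lsd's file-locking module, and the generator is presumably consumed by a worker pool. As a rough, self-contained sketch of the same lock-protected counter pattern, here is a version that swaps lsd's locking.lock for the standard library's fcntl.flock; the file names and the sequential driver loop are assumptions for illustration, not part of the original test.

import fcntl
import os
from contextlib import contextmanager

sumfile = "sum.txt"   # shared counter file (name assumed for this sketch)
lockfn = "sum.lock"   # lock file guarding the counter (name assumed for this sketch)

@contextmanager
def file_lock(path):
    """Stand-in for locking.lock(): hold an exclusive flock on `path` (Unix only)."""
    fd = os.open(path, os.O_CREAT | os.O_RDWR)
    try:
        fcntl.flock(fd, fcntl.LOCK_EX)
        yield
    finally:
        fcntl.flock(fd, fcntl.LOCK_UN)
        os.close(fd)

def worker(x):
    """Same pattern as the test: read, increment, and rewrite a shared counter."""
    with file_lock(lockfn):
        with open(sumfile, "r+") as fp:
            n = int(fp.readline().strip()) + 1
            fp.seek(0)
            fp.truncate()
            fp.write(str(n) + "\n")
    yield n

if __name__ == "__main__":
    with open(sumfile, "w") as fp:  # seed the counter
        fp.write("0\n")
    # The original test presumably feeds worker() to a pool; a plain loop shows the effect.
    results = [n for x in range(5) for n in worker(x)]
    print(results)  # -> [1, 2, 3, 4, 5]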
Example #3
File: ptf.py Project: gregreen/lsd
def import_from_catalogs(db,
                         det_tabname,
                         exp_tabname,
                         catalog_files,
                         create=False,
                         all=False):
    """ Import a PTF catalog from a collection of SExtractor catalog files.

	    Note: Assumes underlying shared storage for all output table
	          cells (i.e., any worker is able to write to any cell).
	"""
    with locking.lock(db.path[0] + "/.__smf-import-lock.lock"):
        if not db.table_exists(det_tabname) and create:
            # Set up commit hooks
            exp_table_def['commit_hooks'] = [
                ('Updating neighbors', 1, 'lsd.smf', 'make_image_cache',
                 [det_tabname])
            ]

            # Create new tables
            det_table = db.create_table(det_tabname, det_table_def)
            exp_table = db.create_table(exp_tabname, exp_table_def)

            # Set up a one-to-X join relationship between the two tables (join det_table:exp_id->exp_table:exp_id)
            db.define_default_join(det_tabname,
                                   exp_tabname,
                                   type='indirect',
                                   m1=(det_tabname, "det_id"),
                                   m2=(det_tabname, "exp_id"))
        else:
            det_table = db.table(det_tabname)
            exp_table = db.table(exp_tabname)

    # MJD of import
    now = datetime.datetime.now()
    djm = astropy.time.Time(datetime.datetime(now.year, now.month,
                                              now.day)).mjd
    djm -= 55682

    t0 = time.time()
    at = 0
    ntot = 0
    pool = pool2.Pool()
    explist_file = open('explist.txt', 'w')
    for (file, nloaded, error_type,
         expID) in pool.imap_unordered(catalog_files,
                                       import_from_catalogs_aux,
                                       (det_table, exp_table, djm, all),
                                       progress_callback=pool2.progress_pass):
        at = at + 1
        ntot = ntot + nloaded
        t1 = time.time()
        time_pass = (t1 - t0) / 60
        time_tot = time_pass / at * len(catalog_files)
        #		sfile = (file)[-65:] if len(file) > 70 else file
        sfile = file
        if error_type == 0:
            print(
                '  ===> Imported %s [%d/%d, %5.2f%%] +%-6d %9d (%.0f/%.0f min.)'
                % (sfile, at, len(catalog_files), 100 * float(at) /
                   len(catalog_files), nloaded, ntot, time_pass, time_tot))
            explist_file.write('%s\n' % str(expID))
        elif error_type == 1:
            print('%s is missing!' % (sfile))
        elif error_type == 2:
            print('%s has bad data type in exposure database!' % (sfile))
        elif error_type == 3:
            print('%s has bad data type in detection database!' % (sfile))
        elif error_type == 4:
            print('%s has bad WCS transform parameters!' % (sfile))
        else:
            print "Nothing"

        # show the dirt every 100 ingests
        # if at % 100. == 0:
        #     gc.collect()
        #     n_collected = gc.collect()
        #     if n_collected > 0:
        #         dump_garbage()

    del pool
    explist_file.close()
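The loop above distinguishes five error_type codes with an if/elif chain. A table-driven variant keeps the codes and their diagnostics in one place; this is only a sketch (the message strings are copied from the function above, and the helper name report_import_error is made up here, not part of lsd):

# Non-zero error_type codes seen in import_from_catalogs and their messages.
ERROR_MESSAGES = {
    1: "%s is missing!",
    2: "%s has bad data type in exposure database!",
    3: "%s has bad data type in detection database!",
    4: "%s has bad WCS transform parameters!",
}

def report_import_error(sfile, error_type):
    """Print the diagnostic for a failed catalog import (illustrative helper, not lsd API)."""
    msg = ERROR_MESSAGES.get(error_type)
    if msg is not None:
        print(msg % sfile)
    else:
        print("Nothing")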
Example #4
File: ptf.py Project: schlafly/lsd
def import_from_catalogs(db, det_tabname, exp_tabname, catalog_files, create=False, all=False):
    """ Import a PTF catalog from a collection of SExtractor catalog files.

	    Note: Assumes underlying shared storage for all output table
	          cells (i.e., any worker is able to write to any cell).
	"""
    with locking.lock(db.path[0] + "/.__smf-import-lock.lock"):
        if not db.table_exists(det_tabname) and create:
            # Set up commit hooks
            exp_table_def["commit_hooks"] = [("Updating neighbors", 1, "lsd.smf", "make_image_cache", [det_tabname])]

            # Create new tables
            det_table = db.create_table(det_tabname, det_table_def)
            exp_table = db.create_table(exp_tabname, exp_table_def)

            # Set up a one-to-X join relationship between the two tables (join det_table:exp_id->exp_table:exp_id)
            db.define_default_join(
                det_tabname, exp_tabname, type="indirect", m1=(det_tabname, "det_id"), m2=(det_tabname, "exp_id")
            )
        else:
            det_table = db.table(det_tabname)
            exp_table = db.table(exp_tabname)

    # MJD of import
    now = datetime.datetime.now()
    (djm, j) = sla_caldj(now.year, now.month, now.day)
    djm -= 55682

    t0 = time.time()
    at = 0
    ntot = 0
    pool = pool2.Pool()
    explist_file = open("explist.txt", "w")
    for (file, nloaded, error_type, expID) in pool.imap_unordered(
        catalog_files, import_from_catalogs_aux, (det_table, exp_table, djm, all), progress_callback=pool2.progress_pass
    ):
        at = at + 1
        ntot = ntot + nloaded
        t1 = time.time()
        time_pass = (t1 - t0) / 60
        time_tot = time_pass / at * len(catalog_files)
        # 		sfile = (file)[-65:] if len(file) > 70 else file
        sfile = file
        if error_type == 0:
            print(
                "  ===> Imported %s [%d/%d, %5.2f%%] +%-6d %9d (%.0f/%.0f min.)"
                % (
                    sfile,
                    at,
                    len(catalog_files),
                    100 * float(at) / len(catalog_files),
                    nloaded,
                    ntot,
                    time_pass,
                    time_tot,
                )
            )
            explist_file.write("%s\n" % str(expID))
        elif error_type == 1:
            print("%s is missing!" % (sfile))
        elif error_type == 2:
            print("%s has bad data type in exposure database!" % (sfile))
        elif error_type == 3:
            print("%s has bad data type in detection database!" % (sfile))
        elif error_type == 4:
            print("%s has bad WCS transform parameters!" % (sfile))
        else:
            print("Nothing")

        # show the dirt every 100 ingests
        # if at % 100. == 0:
        #     gc.collect()
        #     n_collected = gc.collect()
        #     if n_collected > 0:
        #         dump_garbage()

    del pool
    explist_file.close()