def __init__(self, fname, acqimage):
    """Load a GMOS IFU slit MDF and group its fiber records into bundles.

    If *fname* is None, a default mask-definition file is selected based
    on the acquisition image's instrument (GMOS North or South);
    otherwise the supplied file is loaded directly.

    Raises:
        ValueError: if *acqimage* is neither GMOS_N nor GMOS_S type.
    """
    if fname is None:
        # Choose the packaged default MDF for the detected instrument.
        if acqimage.is_type("GMOS_N"):
            fname = ConfigSpace.lookup_path("Gemini/GMOS/MDF/gnifu_slits_mdf.fits")
        elif acqimage.is_type("GMOS_S"):
            fname = ConfigSpace.lookup_path("Gemini/GMOS/MDF/gsifu_slits_mdf.fits")
        else:
            raise ValueError("Only GMOS North and South supported")

    self.ad = AstroData(fname)

    # Group the table rows by bundle index -- the portion of the BLOCK
    # field before the first underscore -- counting fibers as we go.
    grouped = defaultdict(list)
    self.num_fibers = 0
    for row in self.ad.data:
        bundle_key = row.field("BLOCK").split("_")[0]
        grouped[bundle_key].append(row)
        self.num_fibers += 1

    # One FiberBundle per group, kept in sorted order.
    self.bundles = sorted(FiberBundle(key, rows)
                          for key, rows in grouped.items())
def do_check(self): ### get files now = datetime.now() self._last_check = now ret = {} slist = self.ingest_package.get_store_list(elements = self.source["elements"]) for filnam in slist: basename = os.path.basename(filnam) if len(basename)>0: if "transferred_files" not in ret: ret["transferred_files"] = [] indivpkg = self.ingest_package.__class__(storename = filnam) indivpkg.deliver_from_warehouse(move = "_ingested") ret["transferred_files"].append(filnam) ## RUN COMMANDS # if document is a known type (see warehouse_daemon.py in lookups) xfers = None if "transferred_files" in ret: xfers = ret["transferred_files"] if xfers: if "commands" in self.source: commands = self.source["commands"] for command in commands: patt = command["file_pattern"] for filename in xfers: if re.match(patt, filename): if False: # if command["clean_working_directory"]: # @@WARN: possibly dangerous, Ideally isolate # the daemon with it's own account/permisions rmcontents = os.getcwd() shutil.rmtree(rmcontents) os.mkdir(rmcontent) for command_line in command["command_lines"]: vargs = {"dataset": os.path.basename(filename), "context": ConfigSpace.get_current_default_context() } command_line = command_line.format(**vargs) print tc.colored("-"*(len(command_line)+len("running:")+2), None, "on_green") print tc.colored("running:", None, "on_green"), "%s" % command_line print tc.colored("-"*(len(command_line)+len("running:")+2), None, "on_green") cmdparts = command_line.split() # check parts you want to glob convparts = [] for part in cmdparts: if "*" in part: convparts.extend(glob(part)) else: convparts.append(part) exit_code = subprocess.call(convparts) print " exit_code = %s" % exit_code if command["clean_working_directory"]: # @@WARN: possibly dangerous, Ideally isolate # the daemon with it's own account/permisions rmcontents = os.getcwd() clear_directory(rmcontents) if not ret: return None else: return ret
# Script top level: final command-line flag (`parser` is built earlier in
# the file) and the context-sensitive import sequence.
parser.add_argument("--verbose", default = False, action="store_true")
args = parser.parse_args()

########
######
#### TAKE CARE OF CONTEXT IN A GLOBAL WAY
### @@NOTE: the issue is, we don't want to, nor do we have arguments
###       : for, passing context through the system. Instead we take
###       : advantage of the ability to set the current default context.
###       : This necessitates setting the context string prior to importing
###       : anything that uses the ConfigSpace or Lookups modules.
from astrodata import ConfigSpace
if args.context:
    # Set the process-wide default context BEFORE any ConfigSpace/Lookups
    # consumers are imported below; order here is load-bearing.
    ConfigSpace.set_current_default_context(args.context)
    print "setting context='%s' (dw142)" % args.context

# These imports read ConfigSpace at import time, so they must follow the
# set_current_default_context(...) call above -- do not reorder.
from astrodata.adutils.dwutil.dwsettings import package_classes
from astrodata.adutils.dwutil.dwsettings import warehouse_packages
from astrodata.adutils.dwutil.dwsettings import dataset_extensions
# this relies on ConfigSpace, and therefore context
# AGAIN: the point is we have to be careful to set the context before
# any modules use the ConfigSpace or Lookups modules.
from astrodata.adutils.dwutil import daemon_process as dp
###
####
######
########