def run_file_operation(outdoc, filenames, use_segment_table, operation, preserve=True):
    """
    Performs an operation (intersect, union or diff) across a set of files.
    That is, given a set of files each with segment definers DMT-FLAG1,
    DMT-FLAG2 etc the result is a file where
    DMT-FLAG1 = (file 1's DMT-FLAG1 operation file 2's DMT-FLAG1 operation ...)
    DMT-FLAG2 = (file 1's DMT-FLAG2 operation file 2's DMT-FLAG2 operation ...)
    etc

    Parameters:
      outdoc            -- ligolw document to which results are appended;
                           must already contain a process table row.
      filenames         -- list of ligolw XML file names to combine.
      use_segment_table -- if true, write results to the segment table,
                           otherwise to the segment_summary table.
      operation         -- one of the module constants INTERSECT, UNION, DIFF.
      preserve          -- if true, also copy the input documents' contents
                           into outdoc and merge the tables.

    Returns (outdoc, total duration of the last computed segmentlist).

    Raises NameError if operation is not one of the known constants.
    (NOTE(review): ValueError would be more conventional, but callers may
    catch NameError, so the exception type is preserved.)
    """
    proc_id = table.get_table(outdoc, lsctables.ProcessTable.tableName)[0].process_id

    # load up the files into individual documents
    xmldocs = [ligolw_add.ligolw_add(ligolw.Document(), [fname]) for fname in filenames]

    # Get the set of distinct segment definers across all docs, keyed by
    # (ifos, name, version).  An explicit loop replaces the original
    # side-effecting map() call, which is a silent no-op under Python 3
    # (map() is lazy there and the iterator was never consumed).
    segment_definers = {}
    for xmldoc in xmldocs:
        seg_def_table = table.get_table(xmldoc, lsctables.SegmentDefTable.tableName)
        for seg_def in seg_def_table:
            segment_definers[(seg_def.ifos, seg_def.name, seg_def.version)] = True

    # Bug fix: initialize result so the final abs(result) cannot raise
    # NameError when there are no segment definers at all (e.g. empty
    # filenames); an empty list gives a total duration of 0.
    result = glue.segments.segmentlist([])

    # For each unique segment definer, apply the requested operation
    for ifo, name, version in segment_definers:
        flag = '%s:%s:%d' % (ifo, name, version)
        if operation == INTERSECT:
            # Start from the whole timeline and intersect downwards.
            result = glue.segments.segmentlist([glue.segments.segment(-glue.segments.infinity(), glue.segments.infinity())])
            for xmldoc in xmldocs:
                result &= find_segments(xmldoc, flag, use_segment_table)
        elif operation == UNION:
            result = glue.segments.segmentlist([])
            for xmldoc in xmldocs:
                result |= find_segments(xmldoc, flag, use_segment_table)
        elif operation == DIFF:
            # First file's segments minus each subsequent file's.
            result = find_segments(xmldocs[0], flag, use_segment_table)
            for xmldoc in xmldocs[1:]:
                result -= find_segments(xmldoc, flag, use_segment_table)
        else:
            raise NameError("%s is not a known operation (intersect, union or diff)" % operation)

        # Add a segment definer for the result
        seg_def_id = add_to_segment_definer(outdoc, proc_id, ifo, name, version)

        # Add the segments
        if use_segment_table:
            add_to_segment(outdoc, proc_id, seg_def_id, result)
        else:
            add_to_segment_summary(outdoc, proc_id, seg_def_id, result)

    # If we're preserving, also load up everything into the output document.
    if preserve:
        # Add them to the output document (explicit loop instead of the
        # original side-effecting map(), which never runs under Python 3)
        for xmldoc in xmldocs:
            outdoc.appendChild(xmldoc.childNodes[0])

        # Merge the ligolw elements and tables
        ligolw_add.merge_ligolws(outdoc)
        ligolw_add.merge_compatible_tables(outdoc)

    return outdoc, abs(result)
def run_file_operation(outdoc, filenames, use_segment_table, operation, preserve=True):
    """
    Performs an operation (intersect, union or diff) across a set of files.
    That is, given a set of files each with segment definers DMT-FLAG1,
    DMT-FLAG2 etc the result is a file where
    DMT-FLAG1 = (file 1's DMT-FLAG1 operation file 2's DMT-FLAG1 operation ...)
    DMT-FLAG2 = (file 1's DMT-FLAG2 operation file 2's DMT-FLAG2 operation ...)
    etc

    Parameters:
      outdoc            -- ligolw document to which results are appended;
                           must already contain a process table row.
      filenames         -- list of ligolw XML file names to combine.
      use_segment_table -- if true, write results to the segment table,
                           otherwise to the segment_summary table.
      operation         -- one of the module constants INTERSECT, UNION, DIFF.
      preserve          -- if true, also copy the input documents' contents
                           into outdoc and merge the tables.

    Returns (outdoc, total duration of the last computed segmentlist).

    Raises NameError if operation is not a known constant.
    """
    proc_id = table.get_table(outdoc, lsctables.ProcessTable.tableName)[0].process_id

    # load up the files into individual documents
    xmldocs = [ligolw_add.ligolw_add(ligolw.Document(), [fname]) for fname in filenames]

    # Collect the distinct (ifos, name, version) segment definers across
    # all docs.  Explicit iteration replaces a side-effecting map() call,
    # which would silently do nothing under Python 3 (lazy iterator,
    # never consumed).
    segment_definers = {}
    for xmldoc in xmldocs:
        for seg_def in table.get_table(xmldoc, lsctables.SegmentDefTable.tableName):
            segment_definers[(seg_def.ifos, seg_def.name, seg_def.version)] = True

    # Bug fix: guarantee result is bound even when there are no segment
    # definers (e.g. empty input list), so abs(result) below cannot raise
    # NameError; an empty segmentlist yields duration 0.
    result = glue.segments.segmentlist([])

    # For each unique segment definer, apply the requested operation
    for ifo, name, version in segment_definers:
        flag = '%s:%s:%d' % (ifo, name, version)
        if operation == INTERSECT:
            # Intersect downwards from the full (-inf, +inf) timeline.
            result = glue.segments.segmentlist([glue.segments.segment(-glue.segments.infinity(), glue.segments.infinity())])
            for xmldoc in xmldocs:
                result &= find_segments(xmldoc, flag, use_segment_table)
        elif operation == UNION:
            result = glue.segments.segmentlist([])
            for xmldoc in xmldocs:
                result |= find_segments(xmldoc, flag, use_segment_table)
        elif operation == DIFF:
            # Subtract every later file's segments from the first file's.
            result = find_segments(xmldocs[0], flag, use_segment_table)
            for xmldoc in xmldocs[1:]:
                result -= find_segments(xmldoc, flag, use_segment_table)
        else:
            raise NameError("%s is not a known operation (intersect, union or diff)" % operation)

        # Add a segment definer for the result
        seg_def_id = add_to_segment_definer(outdoc, proc_id, ifo, name, version)

        # Add the segments
        if use_segment_table:
            add_to_segment(outdoc, proc_id, seg_def_id, result)
        else:
            add_to_segment_summary(outdoc, proc_id, seg_def_id, result)

    # If we're preserving, also load up everything into the output document.
    if preserve:
        # Copy each input document's root element into the output document
        # (explicit loop rather than the original side-effecting map()).
        for xmldoc in xmldocs:
            outdoc.appendChild(xmldoc.childNodes[0])

        # Merge the ligolw elements and tables
        ligolw_add.merge_ligolws(outdoc)
        ligolw_add.merge_compatible_tables(outdoc)

    return outdoc, abs(result)
data=ligolw_table_to_array(sims), compression='gzip', compression_opts=1) h5file.create_dataset("/sngl_inspiral", data=ligolw_table_to_array(tmplts), compression='gzip', compression_opts=1) h5file.create_dataset("/match_map", data=match_map, compression='gzip', compression_opts=1) h5file.flush() # merge process and process_params tables, then complete ourselves lsctables.reset_next_ids( (lsctables.ProcessTable, lsctables.ProcessParamsTable)) ligolw_add.reassign_ids(fake_xmldoc) ligolw_add.merge_ligolws(fake_xmldoc) ligolw_add.merge_compatible_tables(fake_xmldoc) ligolw_process.set_process_end_time(process) # output process proc = lsctables.ProcessTable.get_table(fake_xmldoc) for p in proc: p.cvs_entry_time = 0 p.end_time = 0 h5file.create_dataset("/process", data=ligolw_table_to_array(proc)) pp = lsctables.ProcessParamsTable.get_table(fake_xmldoc) h5file.create_dataset("/process_params", data=ligolw_table_to_array(pp)) h5file.close()
if verbose: print "\tbest matching template: ", print bank._templates[match_tup[1]].params print "\tbest match: %f\n" % match_tup[0] match_map[inj_ind] = (inj_ind, inj_wf.sigmasq) + match_tup inj_wf.clear() # prune inj waveform if verbose: print "total number of match calculations:", bank._nmatch # merge process and process_params tables, then complete ourselves table.reset_next_ids((lsctables.ProcessTable, lsctables.ProcessParamsTable)) ligolw_add.reassign_ids(fake_xmldoc) ligolw_add.merge_ligolws(fake_xmldoc) ligolw_add.merge_compatible_tables(fake_xmldoc) ligolw_process.set_process_end_time(process) # output h5file.create_dataset("/match_map", data=match_map, compression='gzip', compression_opts=1) proc = lsctables.ProcessTable.get_table(fake_xmldoc) for p in proc: p.cvs_entry_time = 0 p.end_time = 0 h5file.create_dataset("/process", data=ligolw_table_to_array(proc)) pp = lsctables.ProcessParamsTable.get_table(fake_xmldoc) h5file.create_dataset("/process_params", data=ligolw_table_to_array(pp)) h5file.close()
def main():
    """Command-line entry point for the GraCEDb client.

    Parses the command line, dispatches to the requested sub-command
    (ping, upload, download, log, tag, delete_tag, label, search,
    replace, or event creation) and prints the server's response.

    Returns a shell-style exit code (0 on success, 1 on error).

    NOTE(review): the original source had all whitespace collapsed, so
    the exact line-wrapping of the usage text below is reconstructed.
    """
    usage = """%%prog [options] GROUP TYPE EVENTFILE
    where GROUP is one of %(groups)s
          TYPE is one of %(types)s
          EVENTFILE is file containing event data. '-' indicates stdin.

%%prog [options] replace GRACEID EVENTFILE
    where GROUP is one of %(groups)s
          TYPE is one of %(types)s
          EVENTFILE is file containing event data. '-' indicates stdin.

%%prog [options] ping
    Test server connection

%%prog [options] upload GRACEID FILE [COMMENT]
    where GRACEID is the id of an existing candidate event in GraCEDb
          FILE is the name of the file to upload. '-' indicates stdin.
          COMMENT is an optional annotation to enter into the log
    Upload FILE to the private data area for a candidate event. To apply
    a tag, use the --tag-name option (and --tag-display-name if desired.)

%%prog [options] download GRACEID FILE [DESTINATION]
    where GRACEID is the id of an existing candidate event in GraCEDb
          FILE is the name of the file previously uploaded.
          DESTINATION is the download destination. '-' indicates stdout.
              default is same file name as FILE
    Download FILE from private data area of a candidate event

%%prog [options] log GRACEID COMMENT
    where GRACEID is the id of an existing candidate event in GraCEDb
          COMMENT is text that will be entered into the event's log
    Enter a comment into the log for a candidate event. To apply a tag,
    use the --tag-name option (and --tag-display-name if desired).

%%prog [options] label GRACEID LABEL
    Label event with GRACEID with LABEL. LABEL must already exist.

%%prog [options] tag GRACEID LOG_N TAG_NAME [DISP_NAME]
    where GRACEID is the id of an existing candidate event in GraCEDb
          LOG_N is the number of the log message.
          TAG_NAME is the name of the tag
          DISP_NAME is the tag display name (ignored for existing tags)
    Tag an existing log message. Alternatively, the tag name and display
    name can be passed in with the --tag-name and --tag-display-name
    options.

%%prog [options] delete_tag GRACEID LOG_N TAG_NAME
    Remove a tag from a log message. Alternatively, the tag name can be
    passed in with the --tag-name option.

%%prog [options] search SEARCH PARAMS
    Search parameters are a list of requirements to be satisfied. They
    may be GPS times, GPS time ranges, graceids and ranges, group(s),
    analysis type(s), labels, etc. Note that text is case insensitive
    Example: %%prog search G0100..G0200 mbta LUMIN_GO

Environment Variables:
    GRACEDB_SERVICE_URL   (can be overridden by --service-url)
    HTTP_PROXY            (can be overridden by --proxy)
    X509_USER_PROXY
    X509_USER_CERT
    X509_USER_KEY

Credentials are looked for in this order:
    (1) $(X509_USER_CERT) / $(X509_USER_KEY)
    (2) $(X509_USER_PROXY)
    (3) Default location of grid proxy ( /tmp/x509up_u$(UID) )
    (4) $(HOME)/.globus/usercert.pem / $(HOME)/.globus/userkey.pem

Note that comments can only be 200 characters long.
Longer strings will be truncated.""" % {
        'groups': 'CBC, Burst, Stochastic, Coherent, Test, External',
        'types': ", ".join(validTypes),
    }

    from optparse import OptionParser
    op = OptionParser(usage=usage)
    op.add_option("-p", "--proxy", dest="proxy",
                  help="HTTP Proxy", metavar="PROXY[:PORT]")
    op.add_option("-s", "--service-url", dest="service",
                  help="GraCEDb Service URL", metavar="URL")
    op.add_option("-f", "--filename", dest="filename",
                  help="If data is read from stdin, use this as the filename.",
                  metavar="NAME")
    op.add_option("-a", "--alert", dest="alert",
                  help="Send an LV alert (deprecated; alerts sent by default)",
                  action="store_true", default=None)
    op.add_option("-c", "--columns", dest="columns",
                  help="Comma separated list of event attributes to include in results (only meaningful in search)",
                  default=DEFAULT_COLUMNS)
    op.add_option("-l", "--ligolw", dest="ligolw",
                  help="Download ligolw file of combined search results (not meaningful outside of search). NOTE: Produces an ERROR if any of the events returned by the search do not have coinc.xml files.",
                  action="store_true", default=False)
    op.add_option("-t", "--tag-name", dest="tagName",
                  help="tag name in database (only used for log, upload, tag, and delete_tag)",
                  default=None)
    op.add_option("-d", "--tag-display-name", dest="tagDispName",
                  help="tag display name (ignored for existing tags)",
                  default=None)

    options, args = op.parse_args()

    # The ligolw modules are only needed for --ligolw search output;
    # otherwise a missing installation is tolerated.
    try:
        from glue.ligolw import ligolw
        from glue.ligolw import lsctables
        from glue.ligolw import utils
        from glue.ligolw.utils import ligolw_add
    except ImportError:
        if options.ligolw:
            error("ligolw modules not found")
            exit(1)

    proxy = options.proxy or os.environ.get('HTTP_PROXY', None)
    service = options.service or \
        os.environ.get('GRACEDB_SERVICE_URL', None) or \
        DEFAULT_SERVICE_URL

    if options.alert is not None:
        warning("alert option is deprecated. Alerts are now sent by default.")

    # Split an optional ":PORT" suffix off the proxy host.
    proxyport = None
    if proxy and proxy.find(':') > 0:
        try:
            proxy, proxyport = proxy.split(':')
            proxyport = int(proxyport)
        except:
            op.error("Malformed proxy: '%s'" % proxy)

    if proxyport:
        client = Client(service, proxy_host=proxy, proxy_port=proxyport)
    else:
        client = Client(service, proxy_host=proxy)

    if len(args) < 1:
        op.error("not enough arguments")
    elif args[0] == 'ping':
        response = client.ping()
        if response.status == 200:
            output("%s: 200 OK" % service)
            exit(0)
        # On failure, fall through to the generic response handler below.
    elif args[0] == 'upload':
        if len(args) < 3:
            op.error("not enough arguments for upload")
        graceid = args[1]
        filename = args[2]
        comment = " ".join(args[3:])
        response = client.writeLog(graceid, comment, filename, None,
                                   options.tagName, options.tagDispName)
    elif args[0] == 'download':
        if len(args) not in [2, 3, 4]:
            op.error("not enough arguments for download")
        graceid = args[1]
        if len(args) == 2:
            # No file named: get/print the event's file listing.
            response = client.files(graceid)
            if response and response.status == 200:
                for fname in json.loads(response.read()):
                    print(fname)
                exit(0)
            print(response.reason)
            exit(1)
        filename = args[2]
        if len(args) == 4:
            outfile = args[3]
        else:
            outfile = os.path.basename(filename)
        response = client.download(graceid, filename, outfile)
        if response:
            # no response means file saved. any other response is an error message.
            print(response)
            exit(1)
        exit(0)
    elif args[0] == 'log':
        if len(args) < 3:
            op.error("not enough arguments for log")
        graceid = args[1]
        message = " ".join(args[2:])
        # Bug fix: the tag arguments were previously passed positionally
        # into the filename/filecontents slots of writeLog (compare the
        # 'upload' branch, which passes filename and filecontents first).
        response = client.writeLog(graceid, message, None, None,
                                   options.tagName, options.tagDispName)
    elif args[0] == 'tag':
        if options.tagName:
            # 'tag GRACEID LOG_N' with --tag-name: three positionals
            # (args[0] is the sub-command).  Bug fix: this was compared
            # against 2, which rejected every valid invocation and led
            # to an IndexError on args[2].
            if len(args) != 3:
                op.error("wrong number of arguments for tag")
            tagName = options.tagName
            tagDispName = options.tagDispName
        else:
            if len(args) not in [4, 5]:
                op.error("wrong number of arguments for tag")
            tagName = args[3]
            tagDispName = None
            if len(args) == 5:
                tagDispName = args[4]
        graceid = args[1]
        logN = args[2]
        response = client.createTag(graceid, logN, tagName, tagDispName)
    elif args[0] == 'delete_tag':
        # (Removed two leftover debug error() calls that unconditionally
        # dumped the argument list on every invocation.)
        if options.tagName:
            # Bug fix: same off-by-one as 'tag' -- three positionals
            # ('delete_tag GRACEID LOG_N') are the valid form here.
            if len(args) != 3:
                op.error("wrong number of arguments for delete_tag")
            tagName = options.tagName
        else:
            if len(args) != 4:
                op.error("wrong number of arguments for delete_tag")
            tagName = args[3]
        graceid = args[1]
        logN = args[2]
        response = client.deleteTag(graceid, logN, tagName)
    elif args[0] == 'label':
        if len(args) != 3:
            op.error("wrong number of arguments for label")
        graceid = args[1]
        label = args[2]
        response = client.writeLabel(graceid, label)
    elif args[0] == 'search':
        query = " ".join(args[1:])
        columns = options.columns.replace('DEFAULTS', DEFAULT_COLUMNS).split(',')
        count = None    # XXX Let's just get rid of this?
        orderby = None  # XXX Should we implement this?
        events = client.events(query, orderby, count, columns)

        if options.ligolw:
            # Combine each matching event's coinc.xml into one document.
            xmldoc = ligolw.Document()
            for e in events:
                graceid = e['graceid']
                try:
                    r = client.files(graceid, "coinc.xml")
                    utils.load_fileobj(r, xmldoc=xmldoc)
                except:
                    error("Missing coinc.xml for %s. Cannot build ligolw output." % graceid)
                    exit(1)
            ligolw_add.reassign_ids(xmldoc)
            ligolw_add.merge_ligolws(xmldoc)
            ligolw_add.merge_compatible_tables(xmldoc)
            xmldoc.write()
        else:
            # Tab-separated table; a few columns need special accessors.
            accessFun = {
                "labels": lambda e: ",".join(e['labels'].keys()),
                "dataurl": lambda e: e['links']['files'],
            }
            output("#" + "\t".join(columns))
            for e in events:
                row = [accessFun.get(column, lambda e: defaultAccess(e, column))(e)
                       for column in columns]
                output("\t".join(row))
        return 0
    elif args[0] == 'replace':
        if len(args) != 3:
            op.error("wrong number of args for replace")
        graceid = args[1]
        filename = args[2]
        response = client.replaceEvent(graceid, filename)
    elif len(args) == 3:
        # Create a new event.
        group = args[0]
        event_type = args[1]  # renamed locally: 'type' shadowed the builtin
        filename = args[2]

        # Check that the group and type are known to the API.
        # NB: the dictionary returned by the API has keys and values
        # reversed w.r.t. the typeCodeMap above.
        foundType = False
        for key, value in client.analysis_types.items():
            if event_type == str(value):
                event_type = key
                foundType = True
        if not foundType:
            error("Type must be one of: ", ", ".join(client.analysis_types.values()))
            sys.exit(1)

        if unicode(group) not in client.groups:
            error("Group must be one of: ", ", ".join(client.groups))
            sys.exit(1)

        response = client.createEvent(group, event_type, filename)
        if not response:
            error("There was a problem. Did you do grid-proxy-init -rfc?")
            sys.exit(1)

        # XXX Must output graceid for consistency with earlier client.
        # Therefore, must deal with response here rather than at the end.
        exitCode = 0
        status = response.status
        if status >= 400:
            exitCode = 1
        try:
            rv = response.read()
        except:
            rv = response
        try:
            rv = json.loads(rv)
        except:
            pass
        if 'graceid' in rv.keys():
            output(rv['graceid'])
        elif 'error' in rv.keys():
            exitCode = 1
            error(rv['error'])
        return exitCode
    else:
        op.error("")
        sys.exit(1)

    # Output the response.
    exitCode = 0
    status = 0  # bug fix: was unbound below if response.read() raised
    try:
        rv = response.read()
        status = response.status
    except:
        rv = response
    try:
        responseBody = json.loads(rv)
    except:
        responseBody = rv

    if status >= 400:
        exitCode = 1
    if isinstance(responseBody, str):
        output("%d: %s" % (status, responseBody))
    else:
        output("Server returned %d" % status)
        # Bug fix: these checks previously indexed the HTTP response
        # object ('response[...]') instead of the decoded body.
        if ('error' in responseBody) and responseBody['error']:
            error(responseBody['error'])
            exitCode = 1
        if ('warning' in responseBody) and responseBody['warning']:
            warning(responseBody['warning'])
        if ('output' in responseBody) and responseBody['output']:
            output(responseBody['output'])
    return exitCode