filename="overview.xml", directory=data["base_output_dir"], beam_id = beam["id"], pointing_id = pointing["id"] ) output_dps.append(dp) # Update xml to MongoDB client = MongoClient('mongodb://{}:{}@10.98.76.190:30003/'.format(os.environ['MONGO_USERNAME'], os.environ['MONGO_PASSWORD'])) # Add another secret for MongoDB doc = parker.data(lxml.etree.fromstring(open(data["base_output_dir"]+"/overview.xml", "rb").read())) client.trapum.peasoup_xml_files.update(doc, doc, True) return output_dps if __name__ == '__main__': parser = optparse.OptionParser() pika_wrapper.add_pika_process_opts(parser) TrapumPipelineWrapper.add_options(parser) opts,args = parser.parse_args() #processor = pika_wrapper.PikaProcess(...) processor = pika_wrapper.pika_process_from_opts(opts) pipeline_wrapper = TrapumPipelineWrapper(opts,peasoup_pipeline) processor.process(pipeline_wrapper.on_receive)
# NOTE(review): reconstructed formatting — this whitespace-mangled chunk is
# the tail of a function plus the script entry point.
    # Dataproduct record describing the tarball of fold/score results for
    # this beam; `tar_name`, `data`, `beam` and `pointing` come from the
    # enclosing (not visible) scope — confirm against the full function.
    dp = dict(
        type="candidate_tar_file",
        filename=tar_name,
        directory=data["base_output_dir"],
        beam_id=beam["id"],
        pointing_id=pointing["id"],
        #metainfo=json.dumps(meta_info)
    )
    return dp


if __name__ == '__main__':
    # Script entry point: parse CLI options, build the PIKA (AMQP) consumer,
    # and process incoming messages with the fold-and-score pipeline.
    parser = optparse.OptionParser()
    pika_wrapper.add_pika_process_opts(parser)
    TrapumPipelineWrapper.add_options(parser)
    opts,args = parser.parse_args()
    #processor = pika_wrapper.PikaProcess(...)
    processor = pika_wrapper.pika_process_from_opts(opts)
    pipeline_wrapper = TrapumPipelineWrapper(opts,fold_and_score_pipeline)
    processor.process(pipeline_wrapper.on_receive)

# Superseded manual wiring kept for reference by the original author.
################# Old style ###################
# Update all input arguments
#consume_parser = optparse.OptionParser()
#pika_process.add_pika_process_opts(consume_parser)
#opts,args = consume_parser.parse_args()
# NOTE(review): reconstructed formatting — whitespace-mangled chunk: tail of
# the candidate-filter pipeline function plus the script entry point.
    remove_dir(tmp_dir)
    log.info("Removed temporary files")

    # Add tar file to dataproduct
    # NOTE(review): assumes beam_id_list is non-empty — an empty list would
    # raise IndexError below; confirm upstream guarantees at least one beam.
    dp = dict(
        type="candidate_tar_file",
        filename=tar_name,
        directory=output_dir,
        # Note: This is just for convenience. Technically needs all beam
        # ids
        beam_id=beam_id_list[0],
        pointing_id=pointing["id"],
        metainfo=json.dumps("tar_file:filtered_csvs")
    )
    output_dps.append(dp)
    return output_dps


if __name__ == "__main__":
    # Script entry point: parse CLI options, build the PIKA (AMQP) consumer,
    # and process incoming messages with the candidate-filter pipeline.
    parser = optparse.OptionParser()
    pika_wrapper.add_pika_process_opts(parser)
    TrapumPipelineWrapper.add_options(parser)
    opts, args = parser.parse_args()
    processor = pika_wrapper.pika_process_from_opts(opts)
    pipeline_wrapper = TrapumPipelineWrapper(opts, candidate_filter_pipeline)
    processor.process(pipeline_wrapper.on_receive)
''' for pointing in data["data"]["pointings"]: for beam in pointing["beams"]: # Processing happens here dp = dict( type="peasoup_xml", filename="overview.xml", directory=data["base_output_dir"], beam_id = beam["id"], pointing_id = pointing["id"] ) output_dps.append(dp) return output_dps if __name__ == '__main__': parser = optparse.OptionParser() pika_wrapper.add_pika_process_opts(parser) TrapumPipelineWrapper.add_options(...,parser) opts,args = parser.parse_args() #processor = pika_wrapper.PikaProcess(...) processor = pika_wrapper.pika_process_from_opts(opts) pipeline_wrapper = TrapumPipelineWrapper(opts.database,null_pipeline) processor.process(pipeline_wrapper.on_receive)
# NOTE(review): reconstructed formatting — whitespace-mangled chunk that
# begins mid-expression: these keyword arguments continue a metadata
# construction whose opening lies above the visible excerpt.
        tsamp=subband_header['tsamp'],
        nsamples=subband_header['nsamples'],
        ra=subband_header['ra'],
        dec=subband_header['dec'],
        refdm=ref_dm)
    # Dataproduct record for the subbanded filterbank; the type string
    # encodes channel count, sampling interval and reference DM.
    dp = dict(type="filterbank-iqrm-%d-%dus-%ddm" % (new_chans, sampling_number, ref_dm),
              filename=os.path.basename(subbanded_file),
              directory=data["base_output_dir"],
              beam_id=beam["id"],
              pointing_id=pointing["id"],
              metainfo=json.dumps(meta_info))
    output_dps.append(dp)
    return output_dps


if __name__ == '__main__':
    # Script entry point: parse CLI options, build the PIKA (AMQP) consumer,
    # and process incoming messages with the subband pipeline.
    parser = optparse.OptionParser()
    pika_wrapper.add_pika_process_opts(parser)
    TrapumPipelineWrapper.add_options(parser)
    opts, args = parser.parse_args()
    # processor = pika_wrapper.PikaProcess(...)
    processor = pika_wrapper.pika_process_from_opts(opts)
    pipeline_wrapper = TrapumPipelineWrapper(opts, subband_pipeline)
    processor.process(pipeline_wrapper.on_receive)