def __init__(self,
                 context: AmpelContext,
                 logger: AmpelLogger,
                 directives: Sequence[IngestDirective | DualIngestDirective],
                 process_name: str,
                 db_log_format: str = "standard") -> None:
        """
		:raises: ValueError if no process can be loaded or if a process is
		associated with an unknown channel
		"""

        embed = db_log_format == "compact"

        # Create FilterBlock instances (instantiates channel filter and loggers)
        self.filter_blocks = [
            FilterBlock(i,
                        context,
                        channel=model.channel,
                        filter_model=model.filter,
                        process_name=process_name,
                        logger=logger,
                        check_new=isinstance(model, DualIngestDirective),
                        embed=embed) for i, model in enumerate(directives)
        ]

        # Robustness: at least one directive must be provided
        if len(self.filter_blocks) == 0:
            raise ValueError("No directive loaded, please check your config")

        # Note: channel names can be integers
        self.chan_names = [
            f"{fb.channel}" for fb in self.filter_blocks
            if fb.channel in context.config._config['channel']
        ]

        # Check that channels defined in directives exist in ampel config
        if len(self.chan_names) != len(self.filter_blocks):
            for fb in self.filter_blocks:
                if fb.channel not in context.config._config['channel']:
                    raise ValueError(
                        f"Channel {fb.channel} unknown in ampel config")

        if len(self.filter_blocks) == 1 and db_log_format == "compact":
            logger.warn(
                "You should not use db_log_format='compact' with only one channel"
            )

        # Deactivated for now partly because of lack of time
        """
Example #2
def t0_ingester(patch_mongo, dev_context):
    run_id = 0
    logger = AmpelLogger.get_logger()
    updates_buffer = DBUpdatesBuffer(dev_context.db, run_id=run_id, logger=logger)
    ingester = MongoT0Ingester(updates_buffer=updates_buffer)
    compiler = T0Compiler(tier=0, run_id=run_id)
    return ingester, compiler


def test_get_channel():
    template = LegacyChannelTemplate(
        channel="FOO",
        version=0,
        t0_filter={"unit": "NoFilter"},
    )
    channel = template.get_channel(logger=AmpelLogger.get_logger())
    assert ChannelModel(**channel).dict() == channel
Example #4
def t0_process(kwargs, first_pass_config):
    first_pass_config["resource"]["ampel-ztf/kafka"] = {
        "group": "nonesuch",
        "broker": "nonesuch:9092",
    }
    # explicitly add version, which would otherwise be synthesized by ProcessMorpher
    template = ZTFLegacyChannelTemplate(**kwargs)
    return ProcessModel(**(
        template.get_processes(AmpelLogger.get_logger(), first_pass_config)[0]
        | {
            "version": template.version
        }))
Example #5
def _make_muxer(context: AmpelContext, model: UnitModel) -> ZiArchiveMuxer:
    run_id = 0
    logger = AmpelLogger.get_logger()
    updates_buffer = DBUpdatesBuffer(context.db, run_id=run_id, logger=logger)

    muxer = context.loader.new_context_unit(
        model=model,
        sub_type=ZiArchiveMuxer,
        context=context,
        logger=logger,
        updates_buffer=updates_buffer,
    )

    return muxer
Example #6
def get_handler(context, directives, run_id=0) -> ChainedIngestionHandler:
    logger = AmpelLogger.get_logger(console={"level": DEBUG})
    updates_buffer = DBUpdatesBuffer(context.db, run_id=run_id, logger=logger)
    return ChainedIngestionHandler(
        context=context,
        logger=logger,
        run_id=run_id,
        updates_buffer=updates_buffer,
        directives=directives,
        compiler_opts=ZiCompilerOptions(),
        shaper=UnitModel(unit="ZiDataPointShaper"),
        trace_id={},
        tier=0,
    )
Example #7
    def __init__(self, **kwargs) -> None:

        if kwargs.get('logger') is None:
            kwargs['logger'] = AmpelLogger.get_logger()

        super().__init__(**kwargs)

        self.chained_tal: 'None | TarAlertLoader' = None

        if self.file_obj:
            self.tar_file = tarfile.open(fileobj=self.file_obj,
                                         mode=self.tar_mode)
        elif self.file_path:
            self.tar_file = tarfile.open(self.file_path, mode=self.tar_mode)
        else:
            raise ValueError(
                "Please provide value either for 'file_path' or 'file_obj'")

        if self.start != 0:
            count = 0
            for tarinfo in self.tar_file:
                count += 1
                if count < self.start:
                    continue
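
Note: the final loop above fast-forwards the tar iterator so processing starts at member
number `start`. A self-contained sketch of the same skip pattern, using an in-memory archive
with made-up file names:

import io
import tarfile

# Build a small in-memory tar archive to iterate over
buf = io.BytesIO()
with tarfile.open(fileobj=buf, mode="w") as tf:
    for name in ("alert1.avro", "alert2.avro", "alert3.avro"):
        data = name.encode()
        info = tarfile.TarInfo(name=name)
        info.size = len(data)
        tf.addfile(info, io.BytesIO(data))
buf.seek(0)

# Skip members until the requested start index is reached
start = 2
with tarfile.open(fileobj=buf, mode="r") as tf:
    count = 0
    for tarinfo in tf:
        count += 1
        if count < start:
            continue
        print(tarinfo.name)   # prints alert2.avro, alert3.avro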
Example #8
class PlotCommand(AbsCoreCommand):

	def __init__(self):
		self.parsers = {}

	# Mandatory implementation
	def get_parser(self, sub_op: None | str = None) -> ArgumentParser | AmpelArgumentParser:

		if sub_op in self.parsers:
			return self.parsers[sub_op]

		sub_ops = ["show", "save", "clipboard", "watch"]
		if sub_op is None or sub_op not in sub_ops:
			return AmpelArgumentParser.build_choice_help(
				'plot', sub_ops, h, description = 'Show or export ampel plots.'
			)

		builder = ArgParserBuilder("plot")
		builder.add_parsers(sub_ops, h)

		builder.notation_add_note_references()
		builder.notation_add_example_references()

		# Required
		builder.add_arg('show|save|clipboard|watch.required', 'config', type=str)
		builder.add_arg('save.required', 'out', type=str)

		# Optional
		builder.add_arg('show|save.optional', 'limit', type=int)
		builder.add_arg('show|save|watch.optional', 'secrets')
		builder.add_arg('optional', 'debug', action="store_true")
		builder.add_arg('show|save.optional', 'id-mapper', type=str)
		builder.add_arg('show|save.optional', 'base-path', type=str)
		builder.add_arg('show|save.optional', 'unit', type=str)
		builder.add_arg('show|save.optional', 'enforce-base-path', action="store_true")
		builder.add_arg('show|save.optional', 'last-body', action="store_true")
		builder.add_arg('show|save.optional', 'latest-doc', action="store_true")
		builder.add_arg('optional', 'scale', nargs='?', type=float, default=1.0)
		builder.add_arg('optional', 'max-size', nargs='?', type=int)
		builder.add_arg('show|save|clipboard.optional', "db", type=str, nargs="+")
		builder.add_arg('show|save|watch|clipboard.optional', "one-db", action="store_true")
		builder.add_arg('watch.required', "db", type=str, nargs="+")
		builder.add_arg('watch.required', "col", type=str, nargs="?")

		# Optional mutually exclusive args
		builder.add_x_args('optional',
			{'name': 'png', 'nargs': '?', 'type': int, 'const': 96},
			{'name': 'html', 'action': 'store_true'},
		)
		builder.add_arg('optional', 'stack', action='store', metavar='#', const=100, nargs='?', type=int, default=0)

		builder.add_group('show|save.match', 'Plot selection arguments')

		for el in (0, 1, 2, 3):
			builder.add_arg('show|save.match', f'no-t{el}', action='store_true', help=f"Ignore t{el} plots")

		for el in (0, 1, 2, 3):
			builder.add_arg('show|save.match', f't{el}', action='store_true', help=f"Match only t{el} plots")

		builder.add_arg('show|save.match', 'plots-col', action='store_true', help="Match only plots from plots collections")
		builder.add_arg('show|save.match', "stock", action=MaybeIntAction, nargs="+")
		builder.create_logic_args('show|save.match', "channel", "Channel")
		builder.create_logic_args('show|save.match', "with-doc-tag", "Doc tag", json=False)
		builder.create_logic_args('show|save.match', "without-doc-tag", "Doc tag", json=False)
		builder.create_logic_args('show|save.match', "with-plot-tag", "Plot tag", json=False)
		builder.create_logic_args('show|save.match', "without-plot-tag", "Plot tag", json=False)
		builder.add_arg('show|save.match', "custom-match", metavar="#", action=LoadJSONAction)

		builder.add_example('show', "-stack -300 -t2")
		builder.add_example('show', "-html -t3 -base-path body.plot -latest-doc -db HelloAmpel -one-db")
		builder.add_example('show', "-html -t2 -stock 123456 -db DB1 DB2")
		builder.add_example('show', "-stack -t2 -png 300 -limit 10")
		builder.add_example('show', "-stack -limit 10 -t2 -with-plot-tag SNCOSMO -with-doc-tag NED_NEAREST_IS_SPEC -custom-match '{\"body.data.ned.sep\": {\"$lte\": 10}}'")
		builder.add_example('show', "-stack -t2 -with-doc-tag NED_NEAREST_IS_SPEC -unit T2PS1ThumbNedSNCosmo -mongo.prefix Dipole2 -resource.mongo localhost:27050 -debug")
		builder.add_example('clipboard', "-html")
		builder.add_example('watch', "-db DipoleAP -col t3 -one-db -config ampel_conf.yaml -stack -png 200")
		
		self.parsers.update(
			builder.get()
		)

		return self.parsers[sub_op]


	# Mandatory implementation
	def run(self, args: dict[str, Any], unknown_args: Sequence[str], sub_op: None | str = None) -> None:

		stack = args.get("stack")
		limit = args.get("limit") or 0
		db_prefixes = args.get("db")
		dbs = []

		config = self.load_config(args['config'], unknown_args, freeze=False)
		vault = self.get_vault(args)

		if db_prefixes:
			for el in db_prefixes:
				config._config['mongo']['prefix'] = el
				dbs.append(
					self.get_db(
						config, vault, require_existing_db=True,
						one_db=args.get('one_db', False)
					)
				)
		else:
			dbs = [
				self.get_db(
					config, vault, require_existing_db=True,
					one_db=args.get('one_db', False)
				)
			]

		if sub_op == "clipboard":
			from ampel.plot.util.keyboard import InlinePynput
			ipo = InlinePynput()
			read_from_clipboard(
				PlotBrowseOptions(**args),
				plots_col = dbs[0].get_collection('plots'),
				keyboard_callback = ipo.is_ctrl_pressed
			)

		
		if (x := args.get('base_path')) and not x.startswith("body."):
			raise ValueError("Option 'base-path' must start with 'body.'")

		if sub_op == "watch":
			read_from_db(
				dbs[0].get_collection(args['col']),
				PlotBrowseOptions(**args)
			)

		if 'id_mapper' in args:
			AuxUnitRegister.initialize(config)
			maybe_load_idmapper(args)

		logger = AmpelLogger.from_profile(
			self.get_context(args, unknown_args, ContextClass=AmpelContext),
			'console_debug' if args['debug'] else 'console_info',
			base_flag=LogFlag.MANUAL_RUN
		)

		ptags: dict = {}
		dtags: dict = {}

		for el in ("with_doc_tag", "with_doc_tags_and", "with_doc_tags_or"):
			if args.get(el):
				dtags['with'] = args.get(el)
				break

		for el in ("without_doc_tag", "without_doc_tags_and", "without_doc_tags_or"):
			if args.get(el):
				dtags['without'] = args.get(el)
				break

		for el in ("with_plot_tag", "with_plot_tags_and", "with_plot_tags_or"):
			if args.get(el):
				ptags['with'] = args.get(el)
				break

		for el in ("without_plot_tag", "without_plot_tags_and", "without_plot_tags_or"):
			if args.get(el):
				ptags['without'] = args.get(el)
				break

		if stack:
			scol = SVGCollection()

		# Plot counter, used for periodic stacking and for the final "No plot matched" check
		i = 1

		for db in dbs:

			loader = SVGLoader(
				db,
				logger = logger,
				limit = limit,
				enforce_base_path= args['enforce_base_path'],
				last_body = args['last_body'],
				latest_doc = args['latest_doc']
			)

			if args['plots_col']:
				loader.add_query(
					SVGQuery(
						col = "plots",
						path = "",
						plot_tag = ptags,
						doc_tag = dtags,
						unit = args.get("unit"),
						stock = args.get("stock"),
						custom_match = args.get("custom_match")
					)
				)
			else:
				if [k for k in ("t0", "t1", "t2", "t3") if args.get(k, False)]:
					for el in ("t0", "t1", "t2", "t3"):
						if args[el]:
							loader.add_query(
								SVGQuery(
									col = el, # type: ignore[arg-type]
									path = args.get('base_path') or 'body.data.plot',
									plot_tag = ptags,
									doc_tag = dtags,
									unit = args.get("unit"),
									stock = args.get("stock"),
									custom_match = args.get("custom_match")
								)
							)
				else:
					for el in ("t0", "t1", "t2", "t3"):
						if not args.get(f"no-{el}"):
							loader.add_query(
								SVGQuery(
									col = el, # type: ignore[arg-type]
									path = args.get('base_path') or 'body.data.plot',
									plot_tag = ptags,
									doc_tag = dtags,
									unit = args.get("unit"),
									stock = args.get("stock"),
									custom_match = args.get("custom_match")
								)
							)

			loader.run()

			for v in loader._plots.values():

				pbo = PlotBrowseOptions(**args)
				if stack:
					for svg in v._svgs:
						if len(dbs) > 1:
							svg._record['title'] += f"\n<span style='color: steelblue'>{db.prefix}</span>"
						scol.add_svg_plot(svg)
						i += 1
						if i % stack == 0:
							show_collection(scol, pbo, print_func=print)
							scol = SVGCollection()
				else:
					for svg in v._svgs:
						show_svg_plot(svg, pbo)
						i += 1

		if stack:
			show_collection(scol, PlotBrowseOptions(**args), print_func=print)

		if i == 1:
			AmpelLogger.get_logger().info("No plot matched")

	def __init__(self, **kwargs) -> None:
		super().__init__(**kwargs)
		self.logger: AmpelLogger = AmpelLogger.get_logger()


def ampel_logger():
    return AmpelLogger.get_logger()
Example #11
    def __init__(self, **kwargs):
        if "logger" not in kwargs:
            kwargs["logger"] = AmpelLogger.get_logger()
        super().__init__(**kwargs)

    def __init__(
        self,
        run_config,
        t_min,
        resource=None,
        filter_class=DecentFilter,
        cone_nside=64,
        cones_to_scan=None,
        logger=None,
    ):
        self.cone_nside = cone_nside
        self.t_min = t_min

        if not hasattr(self, "prob_threshold"):
            self.prob_threshold = None

        if resource is None:
            resource = {
                "ampel-ztf/catalogmatch":
                "https://ampel.zeuthen.desy.de/api/catalogmatch/",
            }

        if logger is None:
            self.logger = logging.getLogger(__name__)
        else:
            self.logger = logger

        self.logger.info("AMPEL run config:")
        self.logger.info(run_config)

        lvl = self.logger.level

        if lvl > 10:
            logger_ampel = logging.getLogger("AMPEL_filter")
            logger_ampel.setLevel(logging.WARNING)
        else:
            from ampel.log.AmpelLogger import AmpelLogger

            logger_ampel = AmpelLogger()

        self.ampel_filter_class = filter_class(logger=logger_ampel,
                                               resource=resource,
                                               **run_config)

        self.dap = DevAlertProcessor(self.ampel_filter_class)

        self.scanned_pixels = []

        if cones_to_scan is None:
            self.cone_ids, self.cone_coords = self.find_cone_coords()
        else:
            self.cone_ids, self.cone_coords = cones_to_scan

        self.cache = dict()
        self.default_t_max = t_min + 10.0

        self.overlap_prob = None
        self.overlap_fields = None
        self.first_obs = None
        self.last_obs = None
        self.n_fields = None
        self.rectangular_area = None
        self.double_extragalactic_area = None

        if not hasattr(self, "dist"):
            self.dist = None
Example #13
    def __init__(self,
                 index: int,
                 context: AmpelContext,
                 channel: ChannelId,
                 filter_model: None | FilterModel,
                 process_name: str,
                 logger: AmpelLogger,
                 check_new: bool = False,
                 embed: bool = False) -> None:
        """
		:param index: index of the parent AlertConsumerDirective used for creating this FilterBlock
		:param check_new: check whether a stock already exists in the stock collection
		(first tuple member of method filter (directive index) will be negative then)
		:param in_stock: whished behaviors when a stock with a given id (from the alert)
		already exists in the stock collection.
		:param process_name: associated T0 process name (as defined in the ampel conf)
		:param embed: use compact logging (channel embedded in messages).
		Produces fewer (and bigger) log documents.
		"""

        self._stock_col = context.db.get_collection('stock')
        self.filter_model = filter_model
        self.context = context
        self.idx = index

        # Channel name (ex: HU_SN or 1)
        self.channel = channel
        self.chan_str = str(self.channel)

        # stats
        self._stat_accepted = stat_accepted.labels(self.chan_str)
        self._stat_rejected = stat_rejected.labels(self.chan_str)
        self._stat_autocomplete = stat_autocomplete.labels(self.chan_str)
        self._stat_time = stat_time.labels(f"filter.{self.chan_str}")

        self.check_new = check_new
        self.rej = self.idx, False
        self.stock_ids: set[StockId] = set()

        if filter_model:

            # Minimal log entry in case filter does not log anything
            self.min_log_msg = {'c': self.channel} if embed else None

            # Instantiate/get filter class associated with this channel
            logger.info(f"Loading filter: {filter_model.unit}",
                        extra={'c': self.channel})

            self.buf_hdlr: EnclosedChanRecordBufHandler | ChanRecordBufHandler = (
                EnclosedChanRecordBufHandler(logger.level, self.channel) if embed
                else ChanRecordBufHandler(logger.level, self.channel)
            )

            self.unit_instance = context.loader.new_logical_unit(
                model=filter_model,
                sub_type=AbsAlertFilter,
                logger=AmpelLogger.get_logger(
                    name="buf_" + self.chan_str,
                    base_flag=(getattr(logger, 'base_flag', 0) & ~LogFlag.CORE)
                    | LogFlag.UNIT,
                    console=False,
                    handlers=[self.buf_hdlr]))

            # Log entries potentially logged by filter post_init method
            if self.buf_hdlr.buffer:
                self.buf_hdlr.forward(logger)
                self.buf_hdlr.buffer = []

            self.forward = self.buf_hdlr.forward  # type: ignore
            self.buffer = self.buf_hdlr.buffer

            self.filter_func = self.unit_instance.process

            if osm := filter_model.on_stock_match:
                self.overrule = self.idx, osm in [
                    'overrule', 'silent_overrule'
                ]
                self.bypass = self.idx, osm == 'bypass'
                self.update_rej = osm == 'overrule'
            else:
                self.overrule = self.idx, False
                self.bypass = self.idx, False
                self.update_rej = True

            self.rej_log_handle: None | Callable[[LightLogRecord | LogRecord],
                                                 None] = None
            self.rej_log_handler: None | LoggingHandlerProtocol = None
            self.file: None | Callable[[AmpelAlertProtocol, None | int],
                                       None] = None
            self.register: None | AbsAlertRegister = None
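
Note: the buf_hdlr set up above buffers whatever the filter unit logs and later forwards it
to the parent logger with the channel attached. A rough standard-library equivalent of that
buffer-and-forward pattern (the class and names below are illustrative, not part of the
Ampel API):

import logging

class ChannelBufferHandler(logging.Handler):
    """Buffer log records, then forward them to a target logger tagged with a channel."""

    def __init__(self, level: int, channel: str) -> None:
        super().__init__(level)
        self.channel = channel
        self.buffer: list[logging.LogRecord] = []

    def emit(self, record: logging.LogRecord) -> None:
        self.buffer.append(record)          # collect instead of emitting right away

    def forward(self, target: logging.Logger) -> None:
        for record in self.buffer:
            record.channel = self.channel   # attach the channel to each record
            target.handle(record)
        self.buffer = []

unit_logger = logging.getLogger("buf_HU_SN")
unit_logger.setLevel(logging.DEBUG)
unit_logger.propagate = False
buf_hdlr = ChannelBufferHandler(logging.DEBUG, "HU_SN")
unit_logger.addHandler(buf_hdlr)

unit_logger.info("filter initialized")                  # buffered, not printed
buf_hdlr.forward(logging.getLogger("main"))             # flushed to the 'main' logger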