def client_pub(
    *, host: str, port: int, username: str, password: str, db_name: str
) -> Callable[[
    NamedArg(int, "time"),
    NamedArg(Dict[str, str], "tags"),
    NamedArg(Dict[str, float], "data"),
], None, ]:  # pragma: no cover
    """Connect to InfluxDB and return a keyword-only publisher callable.

    Ensures ``db_name`` exists (creating it if absent) and switches the
    client to it, then returns ``pub(time=..., tags=..., data=...)`` which
    writes one measurement point per ``data`` key with second precision.

    Raises via ``__missing_influxdb()`` when the influxdb client library
    was not importable (``client`` is ``None``).
    """
    if client is None:
        __missing_influxdb()
    # No database selected yet — we switch after making sure it exists.
    c = client(host, port, username, password, None)
    # Idiomatic membership test instead of len(list(filter(...))) == 0.
    if not any(db["name"] == db_name for db in c.get_list_database()):
        c.create_database(db_name)
    c.switch_database(db_name)

    def pub(*, time: int, tags: Dict[str, str], data: Dict[str, float]) -> None:
        """Write each (key, value) in ``data`` as a measurement point."""
        c.write_points(
            [{
                "measurement": k,
                "tags": tags,
                "time": time,
                "fields": {"value": v},
            } for k, v in data.items()],
            time_precision="s",
        )

    return pub
def hasLabel(
    *labels: str,
) -> Tuple[Callable[[
    VarArg(Any),
    NamedArg(Any, "traversal"),
    NamedArg(Union[str, Tuple[str, str]], "vertex"),
], bool, ], Tuple[str, ...], None, ]:
    """Build a hasLabel step as a (predicate, positional args, kwargs) triple.

    The predicate ``_hasLabel`` is applied later with ``labels`` as its
    positional arguments; the trailing element carries no keyword arguments.

    NOTE(review): the annotation declares the third element as ``None`` while
    an empty dict is returned — confirm which the consumers expect.
    """
    return _hasLabel, labels, {}
def client_pub(
    *, host: str, port: int, username: str, password: str, db_name: str
) -> Callable[[
    NamedArg(int, "time"),
    NamedArg(Dict[str, str], "tags"),
    NamedArg(Dict[str, float], "data"),
], None, ]:
    """Return a publisher callable with the standard signature.

    The connection parameters are accepted for interface compatibility but
    ignored; the returned ``pub`` discards every measurement.
    """

    def pub(*, time: int, tags: Dict[str, str], data: Dict[str, float]) -> None:
        """Accept a measurement and do nothing with it."""
        return None

    return pub
def get_instance_config(
    service: str,
    instance: str,
    cluster: str,
    soa_dir: str = DEFAULT_SOA_DIR,
    load_deployments: bool = False,
    instance_type: Optional[str] = None,
) -> InstanceConfig:
    """ Returns the InstanceConfig object for whatever type of instance
    it is. (chronos or marathon) """
    # Detect the instance type when the caller did not supply one.
    if instance_type is None:
        instance_type = validate_service_instance(
            service=service,
            instance=instance,
            cluster=cluster,
            soa_dir=soa_dir,
        )

    instance_config_load_function: Callable[
        [
            NamedArg(str, 'service'),
            NamedArg(str, 'instance'),
            NamedArg(str, 'cluster'),
            NamedArg(bool, 'load_deployments'),
            NamedArg(str, 'soa_dir'),
        ],
        InstanceConfig,
    ]
    # Table-driven dispatch: every loader shares the same keyword signature.
    loaders_by_type = {
        'marathon': load_marathon_service_config,
        'chronos': load_chronos_job_config,
        'adhoc': load_adhoc_job_config,
        'kubernetes': load_kubernetes_service_config,
        'tron': load_tron_instance_config,
        'flinkcluster': load_flinkcluster_instance_config,
    }
    if instance_type not in loaders_by_type:
        raise NotImplementedError(
            "instance is %s of type %s which is not supported by paasta"
            % (instance, instance_type),
        )
    instance_config_load_function = loaders_by_type[instance_type]
    return instance_config_load_function(
        service=service,
        instance=instance,
        cluster=cluster,
        load_deployments=load_deployments,
        soa_dir=soa_dir,
    )
def convert_test_func(test_func: Callable[
        [NamedArg(type=random.Random, name='random_object')], None]):
    """Wrap ``test_func`` so pytest parametrizes it over nightly seeds.

    Each seed produces one ``random_object`` fixture value and a readable
    test id of the form ``Random(<seed>)``.
    """
    # NOTE(review): `n_nightly_runs` and `seed` are free variables from an
    # enclosing scope not visible in this chunk — presumably parameters of
    # the surrounding decorator factory; confirm against the full file.
    seeds = _get_seeds(n_nightly_runs=n_nightly_runs, seed=seed)

    def fixate_seed_and_yield_test_run(*args, **kwargs):
        # Yield (not return) so the wrapper is a generator; the conversion
        # helper below presumably drives it for both sync and async callees.
        yield test_func(*args, **kwargs)

    # One parametrize mark per seed: a fresh Random(seed) instance is built
    # eagerly here, so every run of a given id starts from the same state.
    return pytest.mark.parametrize(
        'random_object', [random.Random(seed) for seed in seeds],
        ids=[f'Random({seed})' for seed in seeds],
    )(_convert_function_to_function_or_coroutine(
        caller_func=fixate_seed_and_yield_test_run,
        callee_func=test_func))
CheckPluginName, PiggybackRawData, SectionCacheInfo, SectionName, ) from cmk.base.snmp_utils import ( # pylint: disable=unused-import OIDInfo, SNMPTable, RawSNMPData, PersistedSNMPSections, SNMPSections, SNMPSectionContent, SNMPCredentials, ) from cmk.base.api import PluginName from cmk.base.api.agent_based.section_types import SNMPTree from cmk.fetchers import SNMPDataFetcher # pylint: disable=cmk-module-layer-violation from .abstract import DataSource, management_board_ipaddress, verify_ipaddress from .host_sections import AbstractHostSections PluginNameFilterFunction = Callable[[ SNMPHostConfig, NamedArg(str, 'on_error'), NamedArg(bool, 'do_snmp_scan'), NamedArg(bool, 'for_mgmt_board') ], Set[CheckPluginName]] #. # .--SNMP----------------------------------------------------------------. # | ____ _ _ __ __ ____ | # | / ___|| \ | | \/ | _ \ | # | \___ \| \| | |\/| | |_) | | # | ___) | |\ | | | | __/ | # | |____/|_| \_|_| |_|_| | # | | # +----------------------------------------------------------------------+ # | Realize the data source for dealing with SNMP data | # '----------------------------------------------------------------------'
exit(1) return service def get_jenkins_build_output_url(): """Returns the URL for Jenkins job's output. Returns None if it's not available. """ build_output = os.environ.get("BUILD_URL") if build_output: build_output = build_output + "console" return build_output InstanceListerSig = Callable[[ NamedArg(str, "service"), NamedArg(Optional[str], "cluster"), NamedArg(str, "instance_type"), NamedArg(str, "soa_dir"), ], List[Tuple[str, str]], ] InstanceLoaderSig = Callable[[ NamedArg(str, "service"), NamedArg(str, "instance"), NamedArg(str, "cluster"), NamedArg(bool, "load_deployments"), NamedArg(str, "soa_dir"), ], InstanceConfig, ] LongRunningServiceListerSig = Callable[[ NamedArg(str, "service"),
IndexingFn = Callable[..., invocation.SyncOrAsync[Optional[object]]] WatchingFn = Callable[..., invocation.SyncOrAsync[Optional[object]]] ChangingFn = Callable[..., invocation.SyncOrAsync[Optional[object]]] WebhookFn = Callable[..., invocation.SyncOrAsync[None]] DaemonFn = Callable[..., invocation.SyncOrAsync[Optional[object]]] TimerFn = Callable[..., invocation.SyncOrAsync[Optional[object]]] WhenFilterFn = Callable[..., bool] # strictly sync, no async! MetaFilterFn = Callable[..., bool] # strictly sync, no async! else: from mypy_extensions import Arg, DefaultNamedArg, KwArg, NamedArg # TODO:1: Split to specialised LoginFn, ProbeFn, StartupFn, etc. -- with different result types. # TODO:2: Try using ParamSpec to support index type checking in callbacks # when PEP 612 is released (https://www.python.org/dev/peps/pep-0612/) ActivityFn = Callable[[ NamedArg(configuration.OperatorSettings, "settings"), NamedArg(ephemera.Index, "*"), NamedArg(int, "retry"), NamedArg(datetime.datetime, "started"), NamedArg(datetime.timedelta, "runtime"), NamedArg(typedefs.Logger, "logger"), NamedArg(Any, "memo"), DefaultNamedArg(Any, "param"), KwArg(Any), ], invocation.SyncOrAsync[Optional[object]]] IndexingFn = Callable[[ NamedArg(bodies.Annotations, "annotations"), NamedArg(bodies.Labels, "labels"), NamedArg(bodies.Body, "body"), NamedArg(bodies.Meta, "meta"),
(no intermediate storage on disk) or that supports both ``__getitem__`` and ``__setitem__``. The ``chunks`` on intermediates are technically redundant (they the elementwise minimum of the read and write chunks) but they are provided for convenience. write : ArrayProxy Write proxy with an ``array`` attribute that supports ``__setitem__``. """ read: ArrayProxy intermediate: ArrayProxy write: ArrayProxy Config = Any # TODO: better typing for config SingleArgumentStageFunction = Callable[[ Any, NamedArg(type=Any, name="config") ], None] # noqa: F821 NoArgumentStageFunction = Callable[[NamedArg(type=Any, name="config")], None] # noqa: F821 StageFunction = Union[NoArgumentStageFunction, SingleArgumentStageFunction] @dataclass(frozen=True) class Stage: function: StageFunction name: str mappable: Optional[Iterable] = None @dataclass(frozen=True) class Pipeline:
task_result.as_task(__task) p.models.db.session.commit() lint_instances = _lint_instances_1.delay # pylint: disable=invalid-name add = _add_1.delay # pylint: disable=invalid-name send_done_mail = _send_done_mail_1.delay # pylint: disable=invalid-name send_grader_status_mail = _send_grader_status_mail_1.delay # pylint: disable=invalid-name run_plagiarism_control = _run_plagiarism_control_1.delay # pylint: disable=invalid-name notify_broker_of_new_job = _notify_broker_of_new_job_1.delay # pylint: disable=invalid-name notify_broker_end_of_job = _notify_broker_end_of_job_1.delay # pylint: disable=invalid-name notify_broker_kill_single_runner = _notify_broker_kill_single_runner_1.delay # pylint: disable=invalid-name adjust_amount_runners = _adjust_amount_runners_1.delay # pylint: disable=invalid-name kill_runners_and_adjust = _kill_runners_and_adjust_1.delay # pylint: disable=invalid-name update_latest_results_in_broker = _update_latest_results_in_broker_1.delay # pylint: disable=invalid-name clone_commit_as_submission = _clone_commit_as_submission_1.delay # pylint: disable=invalid-name delete_file_at_time = _delete_file_at_time_1.delay # pylint: disable=invalid-name send_direct_notification_emails = _send_direct_notification_emails_1.delay # pylint: disable=invalid-name send_email_as_user = _send_email_as_user_1.delay # pylint: disable=invalid-name send_reminder_mails: t.Callable[ [t.Tuple[int], NamedArg(t.Optional[DatetimeWithTimezone], 'eta')], t. Any] = _send_reminder_mails_1.apply_async # pylint: disable=invalid-name check_heartbeat_auto_test_run: t.Callable[ [t.Tuple[str], DefaultNamedArg(t.Optional[DatetimeWithTimezone], 'eta')], t. Any] = _check_heartbeat_stop_test_runner_1.apply_async # pylint: disable=invalid-name
from paasta_tools.utils import load_system_paasta_config from paasta_tools.utils import SPACER from paasta_tools.utils import SystemPaastaConfig try: import yelp_meteorite except ImportError: yelp_meteorite = None log = logging.getLogger(__name__) CheckServiceReplication = Callable[[ Arg(InstanceConfig_T, "instance_config"), Arg(Sequence[Union[MarathonTask, V1Pod]], "all_tasks_or_pods"), Arg(Any, "replication_checker"), NamedArg(bool, "dry_run"), ], Optional[bool], ] def parse_args() -> argparse.Namespace: parser = argparse.ArgumentParser() parser.add_argument( "-d", "--soa-dir", dest="soa_dir", metavar="SOA_DIR", default=DEFAULT_SOA_DIR, help="define a different soa config directory", ) parser.add_argument( "--crit",
'FilterReceiverMapping', 'SignalHandlerT', 'SignalHandlerRefT', 'SignalT', 'SyncSignalT', 'T', 'T_contra', ] T = TypeVar('T') T_contra = TypeVar('T_contra', contravariant=True) signal = None # just here to fix flake8 bug SignalHandlerT = Union[Callable[ [T, VarArg(), NamedArg('BaseSignalT', name='signal'), KwArg()], None, ], Callable[ [T, VarArg( ), NamedArg('BaseSignalT', name='signal'), KwArg()], Awaitable[None], ], ] if typing.TYPE_CHECKING: SignalHandlerRefT = Union[Callable[[], SignalHandlerT], ReferenceType[SignalHandlerT]] else: SignalHandlerRefT = Any FilterReceiverMapping = MutableMapping[Any, MutableSet[SignalHandlerRefT]] class BaseSignalT(Generic[T]):
Type, Union, ) SerializableData = Any SerializedData = Union[str, bytes] PropertyName = Hashable PropertyValueType = Type[SerializableData] PropertyPathEntry = Tuple[PropertyName, PropertyValueType] PropertyPath = Tuple[PropertyPathEntry, ...] try: from mypy_extensions import NamedArg PropertyMatcher = Callable[ [ NamedArg(SerializableData, "data"), # noqa: F821 NamedArg(PropertyPath, "path"), # noqa: F821 ], Optional[SerializableData], ] PropertyFilter = Callable[ [ NamedArg(PropertyName, "prop"), # noqa: F821 NamedArg(PropertyPath, "path"), # noqa: F821 ], bool, ] except ImportError: globals()["PropertyMatcher"] = Callable[..., Optional[SerializableData]] globals()["PropertyFilter"] = Callable[..., bool]
value = snmp_modes.get_single_oid( oid, cp_name, do_snmp_scan=do_snmp_scan, backend=backend, ) if value is None: # check for "not_exists" return pattern == '.*' and not flag # ignore case! return bool(regex(pattern, re.IGNORECASE).fullmatch(value)) is flag SectionNameFilterFunction = Callable[[ Iterable[SNMPScanSection], NamedArg(str, 'on_error'), NamedArg(bool, 'do_snmp_scan'), NamedArg(bool, "binary_host"), NamedArg(ABCSNMPBackend, 'backend'), ], Set[CheckPluginNameStr]] # gather auto_discovered check_plugin_names for this host def gather_available_raw_section_names(sections: Iterable[SNMPScanSection], on_error: str, do_snmp_scan: bool, *, binary_host: bool, backend: ABCSNMPBackend) -> Set[CheckPluginNameStr]: try: return _snmp_scan( sections, on_error=on_error, do_snmp_scan=do_snmp_scan,
passback_grades = _passback_grades_1.delay # pylint: disable=invalid-name lint_instances = _lint_instances_1.delay # pylint: disable=invalid-name add = _add_1.delay # pylint: disable=invalid-name send_done_mail = _send_done_mail_1.delay # pylint: disable=invalid-name send_grader_status_mail = _send_grader_status_mail_1.delay # pylint: disable=invalid-name run_plagiarism_control = _run_plagiarism_control_1.delay # pylint: disable=invalid-name notify_broker_of_new_job = _notify_broker_of_new_job_1.delay # pylint: disable=invalid-name notify_broker_end_of_job = _notify_broker_end_of_job_1.delay # pylint: disable=invalid-name notify_broker_kill_single_runner = _notify_broker_kill_single_runner_1.delay # pylint: disable=invalid-name adjust_amount_runners = _adjust_amount_runners_1.delay # pylint: disable=invalid-name send_reminder_mails: t.Callable[ [t. Tuple[int], NamedArg(t.Optional[datetime.datetime], 'eta')], t.Any] = _send_reminder_mails_1.apply_async # pylint: disable=invalid-name stop_auto_test_run: t.Callable[ [t. Tuple[int], NamedArg(t.Optional[datetime.datetime], 'eta')], t.Any] = _stop_auto_test_run_1.apply_async # pylint: disable=invalid-name notify_slow_auto_test_run: t.Callable[ [t. Tuple[int], NamedArg(t.Optional[datetime.datetime], 'eta')], t.Any] = _notify_slow_auto_test_run_1.apply_async # pylint: disable=invalid-name check_heartbeat_auto_test_run: t.Callable[ [t.Tuple[str], DefaultNamedArg(t.Optional[datetime.datetime], 'eta')],
"FilterReceiverMapping", "SignalHandlerT", "SignalHandlerRefT", "SignalT", "SyncSignalT", "T", "T_contra", ] T = TypeVar("T") T_contra = TypeVar("T_contra", contravariant=True) signal = None # just here to fix flake8 bug SignalHandlerT = Union[Callable[ [T, VarArg(), NamedArg("BaseSignalT", name="signal"), KwArg()], None, ], Callable[ [T, VarArg( ), NamedArg("BaseSignalT", name="signal"), KwArg()], Awaitable[None], ], ] if typing.TYPE_CHECKING: SignalHandlerRefT = Union[Callable[[], SignalHandlerT], ReferenceType[SignalHandlerT]] else: SignalHandlerRefT = Any FilterReceiverMapping = MutableMapping[Any, MutableSet[SignalHandlerRefT]] class BaseSignalT(Generic[T]):