def _is_latest_version(suppress_on_error=True): try: from urllib.request import Request, urlopen import json from jina import __version__ import warnings req = Request('https://api.jina.ai/latest', headers={'User-Agent': 'Mozilla/5.0'}) with urlopen( req, timeout=5 ) as resp: # 'with' is important to close the resource after use latest_ver = json.load(resp)['version'] from distutils.version import LooseVersion latest_ver = LooseVersion(latest_ver) cur_ver = LooseVersion(__version__) if cur_ver < latest_ver: from jina.logging.predefined import default_logger default_logger.warning( f'You are using Jina version {cur_ver}, however version {latest_ver} is available. ' f'You should consider upgrading via the "pip install --upgrade jina" command.' ) return False return True except: # no network, two slow, api.jina.ai is down if not suppress_on_error: raise
def _extract_parameters(executor_yml):
    """Load an executor from its YAML config and return its default
    optimization parameters.

    :param executor_yml: path/stream of the executor YAML configuration
    :return: the executor's ``DEFAULT_OPTIMIZATION_PARAMETER`` mapping,
        an empty dict when it declares none, or ``None`` when the config
        cannot be built (a warning is logged in that case).
    """
    try:
        with BaseExecutor.load_config(executor_yml) as executor:
            # fall back to an empty mapping when the executor declares
            # no DEFAULT_OPTIMIZATION_PARAMETER attribute
            return getattr(executor, "DEFAULT_OPTIMIZATION_PARAMETER", {})
    except TypeError:
        logger.warning(
            f"Failing building from {executor_yml}. All environment variables in {executor_yml} must be defined!"
        )
def parse(
    self,
    cls: Type['BaseExecutor'],
    data: Dict,
    runtime_args: Optional[Dict[str, Any]] = None,
) -> 'BaseExecutor':
    """
    Build an executor instance from an already-loaded YAML dict.

    :param cls: target class type to parse into, must be a :class:`JAMLCompatible` type
    :param data: flow yaml file loaded as python dict
    :param runtime_args: Optional runtime_args to be directly passed without being parsed into a yaml config
    :return: the constructed executor instance
    """
    from jina.logging.predefined import default_logger

    # merge the default metas with whatever 'metas' the yaml provides;
    # the merged mapping is written back into `data`
    _meta_config = get_default_metas()
    _meta_config.update(data.get('metas', {}))
    if _meta_config:
        data['metas'] = _meta_config

    # flag tells cls.__init__ it is being constructed from yaml;
    # reset right after construction
    cls._init_from_yaml = True
    # tmp_p = {kk: expand_env_var(vv) for kk, vv in data.get('with', {}).items()}
    obj = cls(
        **data.get('with', {}),
        metas=data.get('metas', {}),
        requests=data.get('requests', {}),
        runtime_args=runtime_args,
    )
    cls._init_from_yaml = False

    # check if the yaml file used to instantiate 'cls' has arguments that are not in 'cls' arguments
    arguments_from_cls = LegacyParser._get_all_arguments(cls)
    arguments_from_yaml = set(data.get('with', {}))
    difference_set = arguments_from_yaml - arguments_from_cls
    # only log warnings about unknown args for main Pod
    if any(difference_set) and not LegacyParser.is_tail_or_head(data):
        default_logger.warning(
            f'The given arguments {difference_set} are not defined in `{cls.__name__}.__init__`'
        )

    if not _meta_config:
        default_logger.warning(
            '"metas" config is not found in this yaml file, '
            'this map is important as it provides an unique identifier when '
            'persisting the executor on disk.'
        )

    # for compound executor
    if 'components' in data:
        obj.components = lambda: data['components']
    obj.is_updated = False
    return obj
def wrapper(*args, **kwargs):
    """Cache ``func``'s result in a shelve DB keyed on the call signature.

    Returns a tuple ``(result, was_cached)``.  A cache hit is bypassed when
    ``kwargs['force']`` is truthy; on a network failure (``URLError``) a
    previously cached value is served if one exists, otherwise the error
    propagates.  Failing to open the cache is non-fatal: the call is simply
    executed uncached.
    """
    # key is built from positional args only; kwargs (except 'force') do not
    # affect the cache key
    call_hash = f'{func.__name__}({", ".join(map(str, args))})'

    pickle_protocol = 4
    # default to a no-op lock; upgraded to a real file lock when the
    # optional `filelock` package is importable
    file_lock = nullcontext()
    with ImportExtensions(
        required=False,
        help_text=f'FileLock is needed to guarantee non-concurrent access to the'
        f'cache_file {cache_file}',
    ):
        import filelock

        file_lock = filelock.FileLock(f'{cache_file}.lock', timeout=-1)

    cache_db = None
    with file_lock:
        try:
            cache_db = shelve.open(
                cache_file, protocol=pickle_protocol, writeback=True
            )
        except Exception:
            if os.path.exists(cache_file):
                # cache is in an unsupported format, reset the cache
                os.remove(cache_file)
                cache_db = shelve.open(
                    cache_file, protocol=pickle_protocol, writeback=True
                )

        if cache_db is None:
            # if we failed to load cache, do not raise, it is only an optimization thing
            return func(*args, **kwargs), False
        else:
            with cache_db as dict_db:
                try:
                    if call_hash in dict_db and not kwargs.get('force', False):
                        return dict_db[call_hash], True
                    result = func(*args, **kwargs)
                    dict_db[call_hash] = result
                except urllib.error.URLError:
                    # network failed: fall back to a stale cached value when
                    # available, otherwise let the error propagate
                    if call_hash in dict_db:
                        default_logger.warning(
                            message.format(func_name=func.__name__)
                        )
                        return dict_db[call_hash], True
                    else:
                        raise
            return result, False
def _write_optimization_parameter(
    executor_configurations, target_file, overwrite_parameter_file
):
    """Flatten all executor configurations and dump them as YAML.

    :param executor_configurations: mapping whose values are iterables of
        optimization parameters
    :param target_file: path of the YAML file to write
    :param overwrite_parameter_file: when False, an existing ``target_file``
        is left untouched and a warning is logged
    """
    flattened = []
    for config in executor_configurations.values():
        flattened.extend(config)

    if os.path.exists(target_file) and not overwrite_parameter_file:
        logger.warning(
            f"{target_file} already exists. Skip writing. Please remove it before parameter discovery."
        )
    else:
        with open(target_file, "w") as outfile:
            JAML.dump(flattened, outfile)
def _replace_parameters(executor_yml, default_parameters): for parameter in default_parameters: if "\nwith:\n" not in executor_yml: executor_yml = executor_yml + "\nwith:\n" if f"{parameter.parameter_name}:" in executor_yml: logger.warning( f"Skipping the following parameter, since it is already defined: {parameter.parameter_name}" ) continue executor_yml = executor_yml.replace( "\nwith:\n", f"\nwith:\n {parameter.parameter_name}: ${parameter.jaml_variable}\n", ) return executor_yml
def wrapper(*args, **kwargs):
    """Invoke ``func``, retrying up to ``num_retry`` times on any exception.

    Every failed attempt is logged via ``default_logger``; the exception of
    the final attempt is re-raised to the caller.
    """
    for attempt in range(1, num_retry + 1):
        try:
            return func(*args, **kwargs)
        except Exception as err:
            default_logger.warning(
                message.format(
                    func_name=func.__name__,
                    attempt=attempt,
                    num_retry=num_retry,
                    error=err,
                )
            )
            # last attempt failed: propagate instead of swallowing
            if attempt == num_retry:
                raise
def __init__(self, args: 'argparse.Namespace'):
    """
    Create a new :class:`NetworkChecker`.

    Pings the worker runtime at ``args.host:args.port`` for ``args.retries``
    rounds (1s apart), logs per-round responsiveness, message-loss
    percentage and average latency, then terminates the process: exit
    status 0 when reached normally, 1 after a KeyboardInterrupt.

    :param args: args provided by the CLI.
    """
    import time
    from jina.logging.profile import TimeContext
    from jina.serve.runtimes.worker import WorkerRuntime

    ctrl_addr = f'{args.host}:{args.port}'
    try:
        total_time = 0
        total_success = 0
        for j in range(args.retries):
            # TimeContext measures the duration of each readiness probe
            with TimeContext(
                f'ping {ctrl_addr} at {j} round', default_logger
            ) as tc:
                r = WorkerRuntime.is_ready(ctrl_addr)
                if not r:
                    default_logger.warning(
                        'not responding, retry (%d/%d) in 1s'
                        % (j + 1, args.retries)
                    )
                else:
                    total_success += 1
                    # only successful rounds contribute to the latency average
                    total_time += tc.duration
            time.sleep(1)
        if total_success < args.retries:
            default_logger.warning(
                'message lost %.0f%% (%d/%d) '
                % (
                    (1 - total_success / args.retries) * 100,
                    args.retries - total_success,
                    args.retries,
                )
            )
        if total_success > 0:
            default_logger.info(
                'avg. latency: %.0f ms' % (total_time / total_success * 1000)
            )
        exit(0)
    except KeyboardInterrupt:
        pass

    # returns 1 (anomaly) when it comes to here
    exit(1)
"""Module for helper functions in the parser""" import argparse import os from typing import Tuple _SHOW_ALL_ARGS = 'JINA_FULL_CLI' in os.environ if _SHOW_ALL_ARGS: from jina.logging.predefined import default_logger default_logger.warning( f'Setting {_SHOW_ALL_ARGS} will make remote Pods with sharding not work when using JinaD' ) def add_arg_group(parser, title): """Add the arguments for a specific group to the parser :param parser: the parser configure :param title: the group name :return: the new parser """ return parser.add_argument_group(f'{title} arguments') class KVAppendAction(argparse.Action): """argparse action to split an argument into KEY=VALUE form on the first = and append to a dictionary. This is used for setting up --env """ def __call__(self, parser, args, values, option_string=None): """