def parse_args(argv):
    """Parse an array of command-line options into an argparse.Namespace."""
    parser = argparse.ArgumentParser(
        description='Simulating small waves on the surface of a small planet.',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-n', type=int, default=200,
                        help='Size of simulation grid; an n x n grid')
    parser.add_argument('--decay', default='1 week',
                        help='The half-life of a wave')
    parser.add_argument('--gravity', type=float, default=9.8e-4,
                        help="The planet's gravity, in metres per second squared")
    parser.add_argument('--width', default='100km',
                        help='Circumference of the planet, travelling east-west')
    parser.add_argument('--height', default='100km',
                        help='Circumference of the planet, travelling north-south')
    parser.add_argument('--depth', default='4km',
                        help="Average depth of the planet's ocean")
    parser.add_argument('--time-per-frame', default='1 hour',
                        help='How much planetary time should pass in each frame of animation')
    parser.add_argument('--frames-per-drop', type=int, default=100,
                        help='On average, how many frames before a new drop of water is created')
    parser.add_argument('--maximum-speed', type=float, default=0.003,
                        help='For colouring the visualisation: the speed that '
                             'should correspond to the lightest colour')
    parser.add_argument('-v', '--debug', action='store_true',
                        help='Verbose logging')
    args = parser.parse_args(argv[1:])
    # parse human-readable units
    args.seconds_per_frame = parse_timespan(args.time_per_frame)
    args.h_background = parse_length(args.depth)
    width = parse_length(args.width)
    height = parse_length(args.height)
    args.drag = 1 / parse_timespan(args.decay)
    # set dx and dy
    args.dx = width / args.n
    args.dy = height / args.n
    return args

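# Illustrative call into parse_args above. The flag values are made up, and
# parse_length is a helper referenced but not shown here (assumed to return
# metres), so treat this as a sketch rather than the module's own test.
args = parse_args(['wave-sim', '-n', '100', '--time-per-frame', '30 minutes'])
# args.seconds_per_frame == 1800.0; args.dx == 1000.0 (100 km across 100 cells)
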
def register(self, timer, make, first=False):
    if isinstance(timer, str):
        timer = wuji.counter.Time(humanfriendly.parse_timespan(timer), first)
    elif isinstance(timer, int):
        timer = wuji.counter.Number(timer, first)
    self.task.append((timer, make))

def _parse_field(self, field_metric, field_key, field_value):
    try:
        # sizes
        if (field_metric, field_key) in self._size_fields:
            return humanfriendly.parse_size(field_value)
        # time spans
        elif (field_metric, field_key) in self._time_span_fields:
            return sum(humanfriendly.parse_timespan(v) if v else 0
                       for v in re.findall(r'([\d,\.]+\s*\D+)', field_value))
        # percentages
        elif (field_metric, field_key) in self._percentage_fields:
            return int(field_value.replace('%', '').strip())
        # floats
        elif (field_metric, field_key) in self._float_fields:
            return float(decimal.Decimal(field_value))
        # integers
        elif (field_metric, field_key) in self._integer_fields:
            return int(decimal.Decimal(field_value))
    except ValueError:
        pass

def initialize(self):
    logger.info("Initializing projects follower")
    self.projects_store_mode = (
        mlrun.mlconf.httpdb.projects.follower_projects_store_mode)
    if self.projects_store_mode not in self.ProjectsStoreMode.all():
        raise mlrun.errors.MLRunInvalidArgumentError(
            f"Provided projects store mode is not supported. mode={self.projects_store_mode}"
        )
    self._projects: typing.Dict[str, mlrun.api.schemas.Project] = {}
    self._projects_store_for_deletion = self.ProjectsStore(self)
    self._leader_name = mlrun.mlconf.httpdb.projects.leader
    self._sync_session = None
    if self._leader_name == "iguazio":
        self._leader_client = mlrun.api.utils.clients.iguazio.Client()
        if not mlrun.mlconf.httpdb.projects.iguazio_access_key:
            raise mlrun.errors.MLRunInvalidArgumentError(
                "Iguazio access key must be configured when the leader is Iguazio"
            )
        self._sync_session = mlrun.mlconf.httpdb.projects.iguazio_access_key
    elif self._leader_name == "nop":
        self._leader_client = mlrun.api.utils.projects.remotes.nop_leader.Member()
    else:
        raise NotImplementedError("Unsupported project leader")
    self._periodic_sync_interval_seconds = humanfriendly.parse_timespan(
        mlrun.mlconf.httpdb.projects.periodic_sync_interval)
    self._synced_until_datetime = None
    # only if we're storing the projects in cache do we need to maintain it,
    # i.e. run the periodic sync
    if self.projects_store_mode == self.ProjectsStoreMode.cache:
        # run one sync to start off on the right foot and fill the cache,
        # but don't fail initialization on it
        try:
            self._sync_projects()
        except Exception as exc:
            logger.warning("Initial projects sync failed", exc=str(exc))
        self._start_periodic_sync()

async def ptimer(self, ctx: commands.Context, *, timespan: str = "5 minutes"):
    timespan = humanfriendly.parse_timespan(timespan)
    embed = discord.Embed(title="PUG Timer",
                          description=format_remaining(timespan),
                          colour=discord.Colour(0x358bbb))
    embed.set_footer(
        text="Timer by Echo",
        icon_url="https://cdn.discordapp.com/app-icons/581523092363411493/9f85d39eb6321ad12b2d13396c4595f5.png?size=256")
    msg: discord.Message = await ctx.send(embed=embed)
    while True:
        await asyncio.sleep(10)
        timespan -= 10
        if timespan > 0:
            embed = discord.Embed(title="PUG Timer",
                                  description=format_remaining(timespan),
                                  colour=discord.Colour(0x358bbb))
            embed.set_footer(
                text="Timer by Echo",
                icon_url="https://cdn.discordapp.com/app-icons/581523092363411493/9f85d39eb6321ad12b2d13396c4595f5.png?size=256")
            await msg.edit(embed=embed)
        else:
            await msg.delete()
            await ctx.send(f"{ctx.author.mention} Timer is up!")
            break

def _load_timeout():
    """Load the maximum timeout value from the `model.yaml` metadata file."""
    metadata = load_metadata(__file__)
    max_timeout = max(
        map(lambda timeout_: math.ceil(parse_timespan(str(timeout_))),
            metadata.timeout.values()))
    return max_timeout

def __init__(self) -> None:
    super().__init__()
    http_adapter = requests.adapters.HTTPAdapter(
        max_retries=urllib3.util.retry.Retry(total=3, backoff_factor=1)
    )
    self._session = requests.Session()
    self._session.mount("http://", http_adapter)
    self._api_url = mlrun.mlconf.httpdb.authorization.opa.address
    self._permission_query_path = (
        mlrun.mlconf.httpdb.authorization.opa.permission_query_path
    )
    self._permission_filter_path = (
        mlrun.mlconf.httpdb.authorization.opa.permission_filter_path
        # a little hack to make this work until the provisioners of mlrun
        # start configuring the filter path
        # TODO: remove me
        or mlrun.mlconf.httpdb.authorization.opa.permission_query_path.replace(
            "allow", "filter_allowed"
        )
    )
    self._request_timeout = int(
        mlrun.mlconf.httpdb.authorization.opa.request_timeout
    )
    self._log_level = int(mlrun.mlconf.httpdb.authorization.opa.log_level)
    self._leader_name = mlrun.mlconf.httpdb.projects.leader
    self._allowed_project_owners_cache_ttl_seconds = humanfriendly.parse_timespan(
        mlrun.mlconf.httpdb.projects.project_owners_cache_ttl
    )
    # owner id -> allowed project -> ttl
    self._allowed_project_owners_cache: typing.Dict[
        str, typing.Dict[str, datetime]
    ] = {}

def create_model_repo_from_args(args) -> SQLAlchemyModelRepository:
    return SQLAlchemyModelRepository(
        db_endpoint=args.db,
        fs_root=args.fs,
        max_cache_mem=humanfriendly.parse_size(args.cache_size),
        ttl=int(humanfriendly.parse_timespan(args.cache_ttl)),
        engine_kwargs=args.db_kwargs)

def triggerIn(self, form, message=""):
    timers = self.get()
    if message:
        print(f"Got message {message}")
    values = form.split()
    total_seconds = 0
    for item in values:
        total_seconds += parse_timespan(item)
    validated = arrow.now().shift(seconds=+total_seconds)
    if validated <= arrow.now():
        return -2
    # snapshot "now" once so the formatted parts can't straddle a second boundary
    now = arrow.now()
    fnow = (f"{now.year}/{self.zerospace(now.month)}/{self.zerospace(now.day)} "
            f"{self.zerospace(now.hour)}:{self.zerospace(now.minute)}:{self.zerospace(now.second)}")
    formatted = (f"{validated.year}/{self.zerospace(validated.month)}/{self.zerospace(validated.day)} "
                 f"{self.zerospace(validated.hour)}:{self.zerospace(validated.minute)}:{self.zerospace(validated.second)}")
    data = {'timestamp': formatted, 'created_at': fnow, 'message': message}
    timers.append(data)
    self.setTimers(timers)
    index = timers.index(data)
    # check whether the date has passed; if so, shift it one day forward, then process
    return index  # when complete, return the index number

def initialize(self):
    logger.info("Initializing projects follower")
    self._projects: typing.Dict[str, mlrun.api.schemas.Project] = {}
    self._leader_name = mlrun.config.config.httpdb.projects.leader
    self._sync_session = None
    if self._leader_name == "iguazio":
        self._leader_client = mlrun.api.utils.clients.iguazio.Client()
        if not mlrun.config.config.httpdb.projects.iguazio_access_key:
            raise mlrun.errors.MLRunInvalidArgumentError(
                "Iguazio access key must be configured when the leader is Iguazio"
            )
        self._sync_session = mlrun.config.config.httpdb.projects.iguazio_access_key
    elif self._leader_name == "nop":
        self._leader_client = mlrun.api.utils.projects.remotes.nop_leader.Member()
    else:
        raise NotImplementedError("Unsupported project leader")
    self._periodic_sync_interval_seconds = humanfriendly.parse_timespan(
        mlrun.config.config.httpdb.projects.periodic_sync_interval
    )
    self._synced_until_datetime = None
    # run one sync to start off on the right foot and fill the cache,
    # but don't fail initialization on it
    try:
        self._sync_projects()
    except Exception as exc:
        logger.warning("Initial projects sync failed", exc=str(exc))
    self._start_periodic_sync()

def initialize(self):
    logger.info("Initializing projects leader")
    self._initialize_followers()
    self._periodic_sync_interval_seconds = humanfriendly.parse_timespan(
        mlrun.config.config.httpdb.projects.periodic_sync_interval)
    # run one sync to start off on the right foot
    self._sync_projects()
    self._start_periodic_sync()

def __init__(self, *args, **kwargs):
    super().__init__(*args, **kwargs)
    assert not hasattr(self, _name)
    setattr(
        self, _name,
        wuji.counter.Time(
            humanfriendly.parse_timespan(self.config.get('record', 'scalar'))))

def do_wait(
    self,
    path: List[str],
    notify: bool,
    recursive: bool,
    poll_interval: Optional[str],
    notify_interval: Optional[str],
) -> None:
    """
    Wait on the job(s) at PATH.

    This command will periodically poll the driver for updates on the given
    jobs, and will display a tally of the jobs' status.

    Note: notifications will only be sent if a notification provider is
    configured in the config file.
    """
    update_interval: Optional[datetime.timedelta]
    if (notify and notify_interval is not None
            and notify_interval not in ("none", "None")):
        update_interval = datetime.timedelta(
            seconds=humanfriendly.parse_timespan(notify_interval))
    else:
        update_interval = None
    logger.debug("Update interval is %s", update_interval)

    poll_interval_seconds: Optional[int] = None
    if poll_interval is not None:
        if poll_interval.isdigit():
            poll_interval_seconds = int(poll_interval)
        else:
            poll_interval_seconds = int(
                humanfriendly.parse_timespan(poll_interval))
    logger.debug("Poll interval is %s", poll_interval_seconds)

    self.state.wait(
        path,
        notify=notify,
        recursive=recursive,
        poll_interval=poll_interval_seconds,
        update_interval=update_interval,
    )

def time_to_numexposures(timestring):
    """
    Convert a human-readable time string (e.g. '3m', '1h', etc.) to a number
    of exposures.
    """
    import humanfriendly

    def roundup(x):
        return int(np.ceil(x / 10.0)) * 10

    return roundup(config.frames_per_second *
                   humanfriendly.parse_timespan(timestring))

def duration_str_to_time_delta(duration_str):
    if duration_str.startswith("P"):
        match = isodate.parse_duration(duration_str)
        now = datetime.datetime.utcnow()
        if isinstance(match, isodate.Duration):
            return to_rounded_delta(match, now)
        else:  # isinstance(match, datetime.timedelta)
            return to_rounded_delta(match)
    return datetime.timedelta(seconds=int(parse_timespan(str(duration_str))))

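# Standalone sketch of the two branches duration_str_to_time_delta distinguishes,
# assuming the isodate and humanfriendly packages used above are installed.
import datetime
import isodate
import humanfriendly

assert isodate.parse_duration('PT15M') == datetime.timedelta(minutes=15)  # "P..." branch
assert humanfriendly.parse_timespan('15 minutes') == 900.0                # fallback branch
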
def __init__(self, *args, **kwargs):
    super().__init__(*args, **kwargs)
    assert not hasattr(self, _name)
    attr = types.SimpleNamespace(
        start=time.time(),
        max=humanfriendly.parse_timespan(
            self.config.get(*end.split('/')) if isinstance(end, str) else end))
    assert attr.max > 0, attr.max
    setattr(self, _name, attr)

def humanfriendly_time_parser(
        humanfriendly_input: tp.Union[int, float, str]) -> int:
    """
    Convert a time definition from a string to an int.

    :param humanfriendly_input: Strings like '5s', '10m', '24h' or '1d'
    :returns: the input time in seconds as int
    """
    if isinstance(humanfriendly_input, str):
        # parse_timespan returns a float; truncate to match the annotation
        return int(humanfriendly.parse_timespan(humanfriendly_input))
    return int(humanfriendly_input)

def test_parse_timespan(self):
    self.assertEqual(0, humanfriendly.parse_timespan('0'))
    self.assertEqual(0, humanfriendly.parse_timespan('0s'))
    self.assertEqual(5, humanfriendly.parse_timespan('5s'))
    self.assertEqual(60 * 2, humanfriendly.parse_timespan('2m'))
    self.assertEqual(60 * 60 * 3, humanfriendly.parse_timespan('3 h'))
    self.assertEqual(60 * 60 * 24 * 4, humanfriendly.parse_timespan('4d'))
    self.assertEqual(60 * 60 * 24 * 7 * 5, humanfriendly.parse_timespan('5 w'))
    self.assertRaises(humanfriendly.InvalidTimespan,
                      humanfriendly.parse_timespan, '1z')

def backup_archive_options(self) -> Dict[str, Union[str, float]]:
    section = "Backup"
    # backward compatible with the old 'max_archive_size' and the newer 'archive_max_size'
    archive_max_size = self.con.get(section, "max_archive_size", fallback=None)
    if archive_max_size:
        archive_max_size = humanfriendly.parse_size(archive_max_size)
    elif self.con.get(section, "archive_max_size", fallback=None):
        archive_max_size = humanfriendly.parse_size(
            self.con.get(section, "archive_max_size", fallback=None))
    # backward compatible with the old 'max_archive_duration' and the newer 'archive_max_duration'
    archive_max_duration = self.con.get(section, "max_archive_duration", fallback=None)
    if archive_max_duration:
        archive_max_duration = humanfriendly.parse_timespan(archive_max_duration)
    elif self.con.get(section, "archive_max_duration", fallback=None):
        archive_max_duration = humanfriendly.parse_timespan(
            self.con.get(section, "archive_max_duration", fallback=None))
    return {
        "archive_dir": self.con.get(section, "archive_dir", fallback=None),  # type: ignore
        "prepare_archive": self.con.get(section, "prepare_archive", fallback=None),  # type: ignore
        "move_archive": self.con.get(section, "move_archive", fallback=None),  # type: ignore
        "archive_max_size": str(archive_max_size),
        "archive_max_duration": str(archive_max_duration),
    }

def __call__(self, smart_status, dangers, **kwargs):
    """yeah, TackleTask has fixed args, and custom kwargs"""
    cooldown = int(parse_timespan(kwargs.get('cooldown', '1m')))
    strategy = self.name
    key = CITADEL_TACKLE_TASK_THROTTLING_KEY.format(id_=smart_status.name,
                                                    strategy=strategy)
    if key in rds:
        logger.debug('Skip tackle strategy {}'.format(strategy))
        return
    logger.debug('Mark {} with ttl {}'.format(key, cooldown))
    rds.setex(key, 'true', cooldown)
    super(TackleTask, self).__call__(smart_status, dangers, **kwargs)

def main():
    args = parser.parse_args()
    size_kb = humanfriendly.parse_size(args.size) / 1024
    read_units_per_second = humanfriendly.parse_timespan(args.time_span)
    # Scan operation is eventually consistent, which is half the cost (hence / 2).
    # Each 4 KB consumes one capacity unit.
    read_capacity_units_needed = size_kb / 2 / 4
    time_needed = read_capacity_units_needed / read_units_per_second
    print('It will take (roughly): {0}'.format(humanfriendly.format_timespan(time_needed)))

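# Standalone arithmetic check of the estimate above, with illustrative numbers:
# a table reported as '1GB', scanned at 100 read units per second.
import humanfriendly

size_kb = humanfriendly.parse_size('1GB') / 1024   # ~976562.5 KB (decimal gigabyte)
read_capacity_units_needed = size_kb / 2 / 4       # eventually consistent scans of 4 KB units
time_needed = read_capacity_units_needed / 100     # at 100 read units per second
print(humanfriendly.format_timespan(time_needed))  # roughly 20 minutes
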
def better_parse_timespan(s):
    if isinstance(s, str):
        try:
            seconds = parse_timespan(s)
        except InvalidTimespan as e:
            raise ValidationError(str(e))
    elif isinstance(s, Number):
        seconds = float(s)
    else:
        raise ValidationError('erection_timeout should be int or str')
    return seconds

def __init__(self, endpoint=None):
    if not endpoint:
        cc = tell_cluster_config()
        endpoint = cc.get('prometheus')
        if not endpoint:
            raise click.Abort(
                f'prometheus not provided in cluster config: {cc}')
    ctx = context(silent=True)
    self.query_range = (ctx.obj.get('values', {}).get(
        'prometheus_query_range', '7d') if ctx else '7d')
    self.query_step = int(int(parse_timespan(self.query_range)) / 1440)
    self.endpoint = endpoint

def backup_options(self) -> Dict[str, Union[str, float]]:
    section = "Backup"
    return {
        "pid_dir": self.con.get(section, "pid_dir", fallback="/tmp/"),
        "tmp_dir": self.con.get(section, "tmp_dir"),
        "pid_runtime_warning": humanfriendly.parse_timespan(
            self.con.get(section, "pid_runtime_warning")),
        "backup_dir": self.con.get(section, "backup_dir"),
        "full_dir": self.con.get(section, "backup_dir") + "/full",
        "inc_dir": self.con.get(section, "backup_dir") + "/inc",
        "backup_tool": self.con.get(section, "backup_tool"),
        "prepare_tool": self.con.get(section, "prepare_tool", fallback=None),  # type: ignore
        "xtra_backup": self.con.get(section, "xtra_backup", fallback=None),  # type: ignore
        "xtra_prepare_options": self.con.get(section, "xtra_prepare_options", fallback=None),  # type: ignore
        "xtra_options": self.con.get(section, "xtra_options", fallback=None),  # type: ignore
        "full_backup_interval": humanfriendly.parse_timespan(
            self.con.get(section, "full_backup_interval", fallback="86400.0")),
        "partial_list": self.con.get(section, "partial_list", fallback=None),  # type: ignore
    }

def create_model_repo_from_args(
        args: argparse.Namespace) -> SQLAlchemyModelRepository:
    """
    Get SQLAlchemyModelRepository from command line arguments.

    :param args: `argparse` parsed arguments.
    :return: Constructed instance of SQLAlchemyModelRepository.
    """
    return SQLAlchemyModelRepository(
        db_endpoint=args.db,
        fs_root=args.fs,
        max_cache_mem=humanfriendly.parse_size(args.cache_size),
        ttl=int(humanfriendly.parse_timespan(args.cache_ttl)),
        engine_kwargs=args.db_kwargs)

def poll_connectors(config, computes, queue):
    consul = ConsulRegistry(config['consul']['host'],
                            config['consul']['port'],
                            config['consul']['resources_key_prefix'])
    consul_services = consul.get_services(config['consul']['service_prefix'])
    queue.put(consul_services)
    poll_interval = parse_timespan(config['connector']['poll_interval'])
    while True:
        services = queue.get()
        queue.put(services)
        for service in services:
            ensure_resources_for_service(consul, computes, service)
        time.sleep(poll_interval)

def _validate_cron_trigger(
    self,
    cron_trigger: schemas.ScheduleCronTrigger,
    # accepting now from outside for testing purposes
    now: datetime = None,
):
    """
    Enforce no more than one job per min_allowed_interval
    """
    logger.debug("Validating cron trigger")
    apscheduler_cron_trigger = self.transform_schemas_cron_trigger_to_apscheduler_cron_trigger(
        cron_trigger
    )
    now = now or datetime.now(apscheduler_cron_trigger.timezone)
    next_run_time = None
    second_next_run_time = now

    # doing 60 checks to allow one-minute precision; if the _min_allowed_interval
    # is less than one minute, validation won't fail in certain scenarios where
    # it should. See test_validate_cron_trigger_multi_checks for a detailed
    # explanation
    for index in range(60):
        next_run_time = apscheduler_cron_trigger.get_next_fire_time(
            None, second_next_run_time
        )
        # will be None if we got a schedule that has no next fire time - for
        # example a schedule with year=1999
        if next_run_time is None:
            return
        second_next_run_time = apscheduler_cron_trigger.get_next_fire_time(
            next_run_time, next_run_time
        )
        # will be None if we got a schedule that has no next fire time - for
        # example a schedule with year=2050
        if second_next_run_time is None:
            return
        min_allowed_interval_seconds = humanfriendly.parse_timespan(
            self._min_allowed_interval
        )
        if second_next_run_time < next_run_time + timedelta(
            seconds=min_allowed_interval_seconds
        ):
            logger.warn(
                "Cron trigger too frequent. Rejecting",
                cron_trigger=cron_trigger,
                next_run_time=next_run_time,
                second_next_run_time=second_next_run_time,
                delta=second_next_run_time - next_run_time,
            )
            raise ValueError(
                f"Cron trigger too frequent. No more than one job "
                f"per {self._min_allowed_interval} is allowed"
            )

def parse_timespan(s):
    if isinstance(s, Number):
        return s
    elif isinstance(s, string_types):
        try:
            return humanfriendly.parse_timespan(s)
        except humanfriendly.InvalidTimespan:
            raise ValidationError(
                f'failed to parse timespan {s}, you can write int or humanfriendly timespan, '
                f'see https://humanfriendly.readthedocs.io/en/latest/api.html#humanfriendly.parse_timespan'
            )
    else:
        raise ValidationError(
            f'failed to parse timespan {s}, you can write int or humanfriendly timespan, '
            f'see https://humanfriendly.readthedocs.io/en/latest/api.html#humanfriendly.parse_timespan'
        )

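# Quick standalone check of the humanfriendly behaviour the wrappers above
# build on: numbers take their isinstance branch, strings are parsed, and a
# bad unit raises InvalidTimespan (which the wrappers turn into ValidationError).
import humanfriendly

assert humanfriendly.parse_timespan('1.5m') == 90.0
try:
    humanfriendly.parse_timespan('1z')
except humanfriendly.InvalidTimespan:
    pass
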
def __init__(self, params=None):
    Configurable.__init__(self, params)
    self.period = 0
    self.start_time = 0
    self.end_time = 24 * 60 * 60
    if 'period' in self.params:
        self.period = humanfriendly.parse_timespan(self.params.period)
    if 'interval' in self.params:
        if 'start' in self.params.interval:
            self.start_time = self._parse_time_as_sec(
                self.params.interval.start, self.start_time)
        # check the interval itself, matching the 'start' branch above
        if 'end' in self.params.interval:
            self.end_time = self._parse_time_as_sec(
                self.params.interval.end, self.end_time)

def clean_fs_age():
    try:
        cron_cfg = config.frontend_config['cron_clean_file_age']
        max_age_file = cron_cfg['clean_fs_max_age']
        # 0 means disabled
        if max_age_file == "0":
            log.debug("disabled by config")
            return 0
        # convert to seconds
        max_age_secs = int(humanfriendly.parse_timespan(max_age_file))
        nb_files = file_ctrl.remove_files(max_age_secs)
        log.info("removed %d files (older than %s)", nb_files, max_age_file)
        return nb_files
    except (IrmaDatabaseError, IrmaFileSystemError) as e:
        log.exception(e)

def humanfriendly_time_parser(
        humanfriendly_input: tp.Optional[tp.Union[int, float, str]]) -> timedelta:
    """
    Convert a time definition from a string to a timedelta.

    :param humanfriendly_input: Strings like '5s', '10m', '24h' or '1d'
    :returns: the input time as a timedelta
    """
    time_seconds = 0
    if humanfriendly_input is not None:
        if isinstance(humanfriendly_input, str):
            time_seconds = humanfriendly.parse_timespan(humanfriendly_input)
        elif isinstance(humanfriendly_input, (int, float)):
            time_seconds = int(humanfriendly_input)
    return timedelta(0, time_seconds)

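# Hedged demo of the timedelta-returning humanfriendly_time_parser defined just
# above (it needs that definition, plus its imports, in scope to run):
import datetime

assert humanfriendly_time_parser('10m') == datetime.timedelta(seconds=600)
assert humanfriendly_time_parser(None) == datetime.timedelta(0)
assert humanfriendly_time_parser(45) == datetime.timedelta(seconds=45)
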
async def on_message(m):
    if m.author == cl.user:
        return
    msg = m.content.split()
    if len(msg) == 0:
        return
    if msg[0] == "!remindme":
        await cl.send_message(m.channel, m.author.mention + ", I'll remind you ;)")
        try:
            await asyncio.sleep(parse_timespan(msg[1]))
            await cl.send_message(m.channel, m.author.mention + ", " + ' '.join(msg[2:]))
        except Exception as e:
            await cl.send_message(
                m.channel,
                m.author.mention + ", something went wrong.\n```" + str(e) + "```")

def makeService(self, options):
    # create Twisted application
    application = service.Application(TAP_NAME)
    serviceCollection = service.IServiceCollection(application)

    # check that a configuration file is specified and exists
    if not options["config"]:
        raise ValueError('Configuration file not specified (try to check --help option)')
    cfgFileName = options["config"]
    if not os.path.isfile(cfgFileName):
        raise ConfigurationError('Configuration file not found:', cfgFileName)

    # read configuration file
    cfg = ConfigParser()
    with codecs.open(cfgFileName, 'r', encoding='utf-8') as f:
        cfg.readfp(f)

    # get Google login and password from configuration
    if not cfg.has_option('account', 'login') or not cfg.has_option('account', 'password'):
        raise ConfigurationError('Google account login and password must be specified '
                                 'in configuration file [account] section')
    self.googleLogin = cfg.get('account', 'login')
    self.googlePassword = cfg.get('account', 'password')
    self.googleDeveloperId = cfg.get('account', 'developer_id') \
        if cfg.has_option('account', 'developer_id') else None

    # get ANDROID_ID from configuration
    if not cfg.has_option('account', 'android_id'):
        raise ConfigurationError('ANDROID_ID must be specified in configuration file [account] section')
    self.androidId = cfg.get('account', 'android_id')

    # get apps to monitor reviews
    apps = cfg.items('apps')
    if not apps:
        raise ConfigurationError('No apps to monitor reviews defined '
                                 'in configuration file [apps] section')
    for appId, appName in apps:
        self.apps.append(Application(appId, appName))

    # open database
    dbFilename = cfg.get('db', 'filename') if cfg.has_option('db', 'filename') else DEFAULT_DB_FILENAME
    self.dbpool = adbapi.ConnectionPool("sqlite3", dbFilename, check_same_thread=False)

    # create XMPP client
    client = XMPPClient(JID(cfg.get('xmpp', 'jid')), cfg.get('xmpp', 'password'))
    # client.logTraffic = True
    client.setServiceParent(application)

    # join all MUC rooms
    nickname = cfg.get('xmpp', 'nickname') if cfg.has_option('xmpp', 'nickname') else DEFAULT_NICKNAME
    notifications = cfg.items('chats')
    for chat, appIdPatterns in notifications:
        mucNotifier = MUCNotifier(JID(chat), nickname, appIdPatterns.split(','))
        mucNotifier.setHandlerParent(client)
        self.mucNotifiers.append(mucNotifier)

    self.pollPeriod = humanfriendly.parse_timespan(cfg.get('poll', 'period')) \
        if cfg.has_option('poll', 'period') else DEFAULT_POLL_PERIOD
    self.pollDelay = humanfriendly.parse_timespan(cfg.get('poll', 'delay')) \
        if cfg.has_option('poll', 'delay') else DEFAULT_POLL_DELAY
    self.langs = [lang.strip() for lang in cfg.get('poll', 'lang').split(',')] \
        if cfg.has_option('poll', 'lang') else [DEFAULT_LANG]

    templateLoader = None
    if cfg.has_option('notification', 'template'):
        templateFullName = cfg.get('notification', 'template')
        templatePath, self.templateName = os.path.split(templateFullName)
        templateLoader = FileSystemLoader(templatePath)
    else:
        self.templateName = DEFAULT_TEMPLATE_NAME
        templateLoader = PackageLoader('reviewnotify', 'templates')
    self.templateEnvironment = Environment(loader=templateLoader, extensions=['jinja2.ext.i18n'])

    localeDir = pkg_resources.resource_filename('reviewnotify', 'locales')
    locale = None
    if cfg.has_option('i18n', 'locale'):
        locale = cfg.get('i18n', 'locale')
    translations = babel.support.Translations.load(dirname=localeDir, locales=locale)
    self.templateEnvironment.install_gettext_translations(translations)
    self.templateEnvironment.filters['datetime'] = format_datetime
    self.templateEnvironment.filters['review_url'] = review_url

    reactor.callLater(3.0, self.run)  # TODO make initial delay configurable

    return serviceCollection

def collect_options():
    parser = OptionParser()
    parser.add_option("-b", "--bridge", help="The IP of the bridge to connect to")
    parser.add_option("-d", "--duration", type="str", default="",
                      help="How long to run Hueni for")
    parser.add_option("-i", "--interval", type="int", default=15,
                      help="How often to check for departures")
    parser.add_option("-t", "--token", help="A file containing a 511 API token")
    parser.add_option("--list-lights", action="store_true",
                      help="Dump all the known lights")
    parser.add_option("--list-routes", action="store_true",
                      help="Dump all monitored routes")
    parser.add_option("--list-stops", default=False,
                      help="Dump stops along a specified route")
    options, args = parser.parse_args()

    if options.list_lights:
        if not options.bridge:
            parser.error("You must specify a bridge to connect to")
        lights = list_lights(get_bridge(options))['resource']
        if lights:
            print("ID\tName")
            for light in lights:
                print("%d\t%s" % (light['id'], light['name']))
        else:
            print("No lights found.")
        sys.exit(0)

    if options.list_routes:
        if not options.token:
            parser.error("You must supply a 511 API token!")
        token = load_config(options.token)
        print("Code\tName")
        for route in list_routes(token):
            print("%s\t%s" % (route.code, route.name))
        sys.exit(0)

    if options.list_stops:
        if not options.token:
            parser.error("You must supply a 511 API token!")
        token = load_config(options.token)
        print("Direction\tCode\tStop")
        for direction, stop in list_stops(token, options.list_stops):
            print("%s\t%s\t%s" % (direction.ljust(9), stop.code, stop.name))
        sys.exit(0)

    if not args or len(args) > 1:
        parser.error("You must specify a single configuration!")
    if not options.bridge:
        parser.error("You must specify a bridge to connect to")
    if not options.token:
        parser.error("You must supply a 511 API token!")
    if options.duration:
        options.duration = parse_timespan(options.duration)

    config_file = args[0]
    config = load_config(config_file)
    token = load_config(options.token)
    return options, config, token

def test_parse_timespan(self):
    """Test :func:`humanfriendly.parse_timespan()`."""
    self.assertEqual(0, humanfriendly.parse_timespan('0'))
    self.assertEqual(0, humanfriendly.parse_timespan('0s'))
    self.assertEqual(0.001, humanfriendly.parse_timespan('1ms'))
    self.assertEqual(0.001, humanfriendly.parse_timespan('1 millisecond'))
    self.assertEqual(0.5, humanfriendly.parse_timespan('500 milliseconds'))
    self.assertEqual(0.5, humanfriendly.parse_timespan('0.5 seconds'))
    self.assertEqual(5, humanfriendly.parse_timespan('5s'))
    self.assertEqual(5, humanfriendly.parse_timespan('5 seconds'))
    self.assertEqual(60 * 2, humanfriendly.parse_timespan('2m'))
    self.assertEqual(60 * 2, humanfriendly.parse_timespan('2 minutes'))
    self.assertEqual(60 * 60 * 3, humanfriendly.parse_timespan('3 h'))
    self.assertEqual(60 * 60 * 3, humanfriendly.parse_timespan('3 hours'))
    self.assertEqual(60 * 60 * 24 * 4, humanfriendly.parse_timespan('4d'))
    self.assertEqual(60 * 60 * 24 * 4, humanfriendly.parse_timespan('4 days'))
    self.assertEqual(60 * 60 * 24 * 7 * 5, humanfriendly.parse_timespan('5 w'))
    self.assertEqual(60 * 60 * 24 * 7 * 5, humanfriendly.parse_timespan('5 weeks'))
    self.assertRaises(humanfriendly.InvalidTimespan,
                      humanfriendly.parse_timespan, '1z')

def main():
    """Command line interface for the ``apache-manager`` program."""
    # Configure logging output.
    coloredlogs.install()
    # Command line option defaults.
    data_file = '/tmp/apache-manager.txt'
    dry_run = False
    max_memory_active = None
    max_memory_idle = None
    max_ss = None
    watch = False
    zabbix_discovery = False
    verbosity = 0
    # Parse the command line options.
    try:
        options, arguments = getopt.getopt(sys.argv[1:], 'wa:i:t:f:znvqh', [
            'watch', 'max-memory-active=', 'max-memory-idle=', 'max-ss=',
            'max-time=', 'data-file=', 'zabbix-discovery', 'dry-run',
            'simulate', 'verbose', 'quiet', 'help',
        ])
        for option, value in options:
            if option in ('-w', '--watch'):
                watch = True
            elif option in ('-a', '--max-memory-active'):
                max_memory_active = parse_size(value)
            elif option in ('-i', '--max-memory-idle'):
                max_memory_idle = parse_size(value)
            elif option in ('-t', '--max-ss', '--max-time'):
                max_ss = parse_timespan(value)
            elif option in ('-f', '--data-file'):
                data_file = value
            elif option in ('-z', '--zabbix-discovery'):
                zabbix_discovery = True
            elif option in ('-n', '--dry-run', '--simulate'):
                logger.info("Performing a dry run ..")
                dry_run = True
            elif option in ('-v', '--verbose'):
                coloredlogs.increase_verbosity()
                verbosity += 1
            elif option in ('-q', '--quiet'):
                coloredlogs.decrease_verbosity()
                verbosity -= 1
            elif option in ('-h', '--help'):
                usage(__doc__)
                return
    except Exception as e:
        sys.stderr.write("Error: %s!\n" % e)
        sys.exit(1)
    # Execute the requested action(s).
    manager = ApacheManager()
    try:
        if max_memory_active or max_memory_idle or max_ss:
            manager.kill_workers(
                max_memory_active=max_memory_active,
                max_memory_idle=max_memory_idle,
                timeout=max_ss,
                dry_run=dry_run,
            )
        if watch and connected_to_terminal(sys.stdout):
            watch_metrics(manager)
        elif zabbix_discovery:
            report_zabbix_discovery(manager)
        elif data_file != '-' and verbosity >= 0:
            for line in report_metrics(manager):
                if line_is_heading(line):
                    line = ansi_wrap(line, color=HIGHLIGHT_COLOR)
                print(line)
    finally:
        if (not watch) and (data_file == '-' or not dry_run):
            manager.save_metrics(data_file)

from humanfriendly import format_timespan, parse_timespan

print(format_timespan(12))
print(format_timespan(1209))
print(format_timespan(234451))
"""
12 seconds
20 minutes and 9 seconds
2 days, 17 hours and 7 minutes
"""

print(parse_timespan('1s'))
print(parse_timespan('1m'))
print(parse_timespan('1h'))
print(parse_timespan('1d'))
print(parse_timespan('1y'))
"""
1.0
60.0
3600.0
86400.0
31449600.0
"""

def main():
    """Command line interface for the ``executor`` program."""
    # Enable logging to the terminal and system log.
    coloredlogs.install(syslog=True)
    # Command line option defaults.
    command_timeout = 0
    exclusive = False
    fudge_factor = 0
    lock_name = None
    lock_timeout = 0
    # Parse the command line options.
    try:
        options, arguments = getopt.getopt(sys.argv[1:], 'eT:l:t:f:vqh', [
            'exclusive', 'lock-timeout=', 'lock-file=', 'timeout=',
            'fudge-factor=', 'verbose', 'quiet', 'help',
        ])
        for option, value in options:
            if option in ('-e', '--exclusive'):
                exclusive = True
            elif option in ('-T', '--lock-timeout'):
                lock_timeout = parse_timespan(value)
            elif option in ('-l', '--lock-file'):
                lock_name = value
            elif option in ('-t', '--timeout'):
                command_timeout = parse_timespan(value)
            elif option in ('-f', '--fudge-factor'):
                fudge_factor = parse_timespan(value)
            elif option in ('-v', '--verbose'):
                coloredlogs.increase_verbosity()
            elif option in ('-q', '--quiet'):
                coloredlogs.decrease_verbosity()
            elif option in ('-h', '--help'):
                usage(__doc__)
                sys.exit(0)
            else:
                assert False, "Unhandled option!"
        # Make sure the operator provided a program to execute.
        if not arguments:
            usage(__doc__)
            sys.exit(0)
        # Make sure the program actually exists.
        program_name = arguments[0]
        if not os.path.isfile(program_name):
            # Only search the $PATH if the given program name
            # doesn't already include one or more path segments.
            if program_name == os.path.basename(program_name):
                matching_programs = which(program_name)
                if matching_programs:
                    program_name = matching_programs[0]
        # The subprocess.Popen() call later on doesn't search the $PATH so we
        # make sure to give it the absolute pathname to the program.
        arguments[0] = program_name
    except Exception as e:
        warning("Failed to parse command line arguments: %s", e)
        sys.exit(1)
    # Apply the requested fudge factor.
    apply_fudge_factor(fudge_factor)
    # Run the requested command.
    try:
        if exclusive:
            # Select a default lock file name?
            if not lock_name:
                lock_name = os.path.basename(arguments[0])
                logger.debug("Using base name of command as lock file name (%s).", lock_name)
            lock_file = get_lock_path(lock_name)
            lock = InterProcessLock(path=lock_file, logger=logger)
            logger.debug("Trying to acquire exclusive lock: %s", lock_file)
            if lock.acquire(blocking=(lock_timeout > 0), max_delay=lock_timeout):
                logger.info("Successfully acquired exclusive lock: %s", lock_file)
                run_command(arguments, timeout=command_timeout)
            else:
                logger.error("Failed to acquire exclusive lock: %s", lock_file)
                sys.exit(1)
        else:
            run_command(arguments, timeout=command_timeout)
    except ExternalCommandFailed as e:
        logger.error("%s", e.error_message)
        sys.exit(e.command.returncode)

def get_seconds(self, key, value=None):
    """Get a value that represents a number of seconds."""
    val = self.get(key, value)
    if isinstance(val, str):
        val = int(humanfriendly.parse_timespan(val))
    return val

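# A minimal, hypothetical harness for get_seconds above: a dict-backed config
# with the same .get contract, just to show the string-to-seconds conversion.
import humanfriendly

class _DemoConfig(dict):
    def get_seconds(self, key, value=None):
        val = self.get(key, value)
        if isinstance(val, str):
            val = int(humanfriendly.parse_timespan(val))
        return val

assert _DemoConfig({'timeout': '5m'}).get_seconds('timeout') == 300
assert _DemoConfig().get_seconds('delay', 30) == 30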