Example #1
0
    def determine_action(self, issue):
        """Work out which audit action applies to *issue*.

        Looks up the alert schedule for the issue's resource type (falling
        back to the wildcard ``'*'`` schedule), then picks REMOVE, STOP,
        ALERT or IGNORE based on how long the issue has existed.

        Args:
            issue: Issue to determine action for

        Returns:
            `dict`: action metadata (action, description, last_alert, etc.)
        """
        rtype = self.resource_types[issue.resource.resource_type_id]
        if rtype in self.alert_schedule:
            schedule = self.alert_schedule[rtype]
        else:
            schedule = self.alert_schedule['*']

        action_item = {
            'action': None,
            'action_description': None,
            'last_alert': issue.last_alert,
            'issue': issue,
            'resource': self.resource_classes[rtype](issue.resource),
            'owners': [],
            'stop_after': schedule['stop'],
            'remove_after': schedule['remove'],
            'notes': issue.notes,
            'missing_tags': issue.missing_tags
        }

        age = time.time() - issue.created
        stop_after = pytimeparse.parse(schedule['stop'])
        remove_after = pytimeparse.parse(schedule['remove'])

        if self.collect_only:
            # Collection-only mode never acts on issues.
            action_item['action'] = AuditActions.IGNORE
        elif remove_after and age >= remove_after:
            action_item['action'] = AuditActions.REMOVE
            action_item['action_description'] = 'Resource removed'
            action_item['last_alert'] = remove_after
        elif stop_after and age >= stop_after:
            if issue.get_property('state').value == AuditActions.STOP:
                # Already stopped; nothing further to do yet.
                action_item['action'] = AuditActions.IGNORE
            else:
                action_item['action'] = AuditActions.STOP
                action_item['action_description'] = 'Resource stopped'
                action_item['last_alert'] = stop_after
        else:
            alert = self.determine_alert(
                schedule['alert'],
                issue.get_property('created').value,
                issue.get_property('last_alert').value
            )
            if not alert:
                action_item['action'] = AuditActions.IGNORE
            else:
                action_item['action'] = AuditActions.ALERT
                action_item['action_description'] = '{} alert'.format(alert)
                action_item['last_alert'] = alert

        return action_item
Example #2
0
    def determine_alert(self, action_schedule, issue_creation_time,
                        last_alert):
        """Decide whether an alert is due and which one to send.

        Args:
            action_schedule (`list`): A list contains the alert schedule
            issue_creation_time (`int`): Time we create the issue
            last_alert (`str`): Time we sent the last alert

        Returns:
            (`None` or `str`)
            None if no alert should be sent. Otherwise return the alert we should send
        """
        age = time.time() - issue_creation_time
        last_alert_seconds = pytimeparse.parse(last_alert)
        # Map each schedule entry's duration in seconds back to its label.
        labels_by_seconds = {
            pytimeparse.parse(label): label
            for label in action_schedule
        }
        # Fire the earliest schedule entry that is strictly after the last
        # alert and has already elapsed.
        for seconds in sorted(labels_by_seconds):
            if last_alert_seconds < seconds <= age:
                return labels_by_seconds[seconds]
        return None
Example #3
0
    def _human_readable_to_number(self, query):
        """Convert human-readable queryStats values to plain numbers, in place.

        Duration fields (e.g. '2.5m') go through parse(); size fields
        (e.g. '1GB') go through parse_size(). Unparseable values become 0.

        Args:
            query: query dict containing a 'queryStats' mapping.

        Returns:
            The same query dict, mutated.
        """
        # Field lists replace 22 near-identical assignment lines.
        time_fields = (
            'elapsedTime', 'queuedTime', 'resourceWaitingTime',
            'executionTime', 'analysisTime', 'totalPlanningTime',
            'finishingTime', 'totalScheduledTime', 'totalCpuTime',
            'totalBlockedTime',
        )
        size_fields = (
            'userMemoryReservation', 'totalMemoryReservation',
            'peakUserMemoryReservation', 'peakTotalMemoryReservation',
            'peakTaskUserMemory', 'peakTaskTotalMemory',
            'rawInputDataSize', 'processedInputDataSize', 'outputDataSize',
            'physicalWrittenDataSize', 'logicalWrittenDataSize',
            'spilledDataSize',
        )

        stats = query['queryStats']
        for field in time_fields:
            stats[field] = parse(stats[field]) or 0
        for field in size_fields:
            stats[field] = parse_size(stats[field]) or 0
        return query
def reply(msg_time):
    """Start a timer and a countdown for the user-supplied duration.

    Args:
        msg_time: duration text from the user, parsed with parse().
    """
    # Fix: the original called parse() on the same input three times.
    seconds = parse('{}'.format(msg_time))
    user_time = str(seconds)
    message_id = bot.send_message(
        telegram_id_chat,
        'Таймер запущен на {} {}'.format(user_time, 'секунд'))
    bot.create_timer(seconds, notify, "Время вышло")
    bot.create_countdown(seconds,
                         notify_progress,
                         update_message=message_id,
                         full_time=int(user_time))
Example #5
0
def kickban(client, arg: str, ban_hdid):
    """Kick and ban clients by IPID, optionally banning their HDID too.

    Args:
        client: the moderator issuing the command; supplies server config
            and the client lookup, and receives OOC feedback.
        arg: raw argument string, shlex-split into either
            (ipid, ban_id) or (ipid, reason, duration).
        ban_hdid: when truthy, also ban the hardware ID of each connected
            client matching the IPID.

    Raises:
        ArgumentError: on too few/ambiguous arguments or a bad duration.
    """
    args = shlex.split(arg)
    ban_id = None
    default_ban_duration = client.server.config['default_ban_duration']

    if len(args) < 2:
        raise ArgumentError('Not enough arguments.')

    elif len(args) == 2:
        # Two args: <ipid> <ban_id>; no reason, default duration from config.
        ipid = _convert_ipid_to_int(args[0])
        ban_id = args[1]
        reason = None
        # NOTE(review): if the configured default duration fails to parse,
        # ban_duration is None and .shift() below raises TypeError —
        # confirm the config value is always a valid duration.
        ban_duration = parse(str(default_ban_duration), granularity='hours')
        unban_date = arrow.get().shift(hours=ban_duration).datetime

    elif len(args) == 3:
        # Three args: <ipid> <reason> <duration>.
        ipid = _convert_ipid_to_int(args[0])
        reason = args[1]
        duration = args[2]
        ban_duration = parse(str(duration), granularity='hours')

        # NOTE(review): this check is dead — `duration` comes from
        # shlex.split and is never None; the author likely meant
        # `ban_duration is None` (that case is handled in the else below).
        if duration is None:
            raise ArgumentError('Invalid ban duration.')
        elif 'perma' in duration.lower():
            unban_date = None  # permanent ban: no unban date
        else:
            if ban_duration is not None:
                unban_date = arrow.get().shift(hours=ban_duration).datetime
            else:
                raise ArgumentError(f'{duration} is an invalid ban duration')

    else:
        raise ArgumentError(
            f'Ambiguous input: {arg}\nPlease wrap your arguments '
            'in quotes.')

    # Record the IPID ban; database.ban returns the (possibly new) ban id.
    ban_id = database.ban(ipid,
                          reason,
                          ban_type='ipid',
                          banned_by=client,
                          ban_id=ban_id,
                          unban_date=unban_date)

    targets = client.server.client_manager.get_targets(client, TargetType.IPID,
                                                       ipid, False)
    if targets:
        # Kick (and optionally HDID-ban) every currently connected match.
        for c in targets:
            if ban_hdid:
                database.ban(c.hdid, reason, ban_type='hdid', ban_id=ban_id)
            c.send_command('KB', reason)
            c.disconnect()
            database.log_misc('ban', client, target=c, data={'reason': reason})
        client.send_ooc(f'{len(targets)} clients were kicked.')
    client.send_ooc(f'{ipid} was banned. Ban ID: {ban_id}')
Example #6
0
def parse_whitelist_times(whitelist):
    """Parse a whitelist of [start, end] duration strings into timedelta pairs.

    Args:
        whitelist: None/empty for the full-day default, or a list of
            [start, end] duration strings (parsed with minute granularity).

    Returns:
        list of (timedelta, timedelta) windows.

    Raises:
        ValueError: when whitelist is neither None/empty nor a list.
    """
    if whitelist is None or len(whitelist) == 0:
        # No restriction: whole 24-hour day.
        return [(timedelta(0), timedelta(hours=24))]
    elif isinstance(whitelist, list):  # fix: isinstance over `type(...) is`
        # Renamed loop vars: the original shadowed builtins `min`/`max`.
        return [
            (timedelta(seconds=pytimeparse.parse(start, granularity='minutes')),
             timedelta(seconds=pytimeparse.parse(end, granularity='minutes')))
            for [start, end] in whitelist
        ]
    else:
        raise ValueError(
            f"whitelist value was not an list, was a {type(whitelist)} ({whitelist})"
        )
Example #7
0
 def _remind_me_parse(self):
     """
     Given a note, we should process and save a reminder which maps back to the content.
     Examples:
         '!remindme 3d i should take out the bins!' - should set a task with reminder to today + 3 days
         '!remindme that i need to make dinner' - should set a task with a reminder thats due now
         '!remindme that in 3d i should create the 3d models!' - will parse the first 3d but not the second
     """
     # Fix: parse each token once (the original called parse() twice per token).
     parse_times = [
         seconds
         for seconds in (parse(token) for token in self.content.split(" "))
         if seconds is not None
     ]
     # First parseable duration wins; default to "due now" (0 seconds).
     delta_seconds = parse_times[0] if parse_times else 0
     self._task_parse(timedelta(seconds=delta_seconds))
Example #8
0
def adjust_time_range(queries, time_range):
    """
    Adjust the range setting of a list of queries to a new minimum value (so
    we're not fetching a ton of unused data). Derivative and moving-average
    aggregations get extra headroom so partial buckets don't skew results.
    :param queries: list of ES json queries (mutated in place)
    :param time_range: time range for the status check, in minutes
    :return: the new query
    """
    minimum = '{}m'.format(time_range)

    for n, query in enumerate(queries):
        # Minimum adjustment for derived metrics
        date_histogram = _get_date_histogram(query)

        aggs = query['aggs']
        # Try to find derivative or moving average to extend bounds if necessary. This avoids situations where
        # we use partial/empty buckets to calculate derivates/moving averages.
        while aggs.get('agg'):
            aggs = aggs['agg']['aggs']
            if aggs.get('derivative'):
                # Derivatives are calculated based on the previous 1 datapoint, so extend the minimum
                # by one interval.
                # parse() converts the interval string to seconds; /60 → minutes.
                minimum = '{}m'.format(time_range +
                                       parse(date_histogram['interval']) / 60)
                break
            elif aggs.get('moving_avg'):
                # Moving averages are calculated based on the previous <window> datapoints, so extend the minimum
                # by <window> intervals.
                window = int(aggs['moving_avg']['moving_avg'].get('window', 0))
                minimum = '{}m'.format(
                    time_range +
                    (parse(date_histogram['interval']) / 60) * window)
                break

        # Find the minimum range value and set it
        for m, subquery in enumerate(query['query']['bool']['must']):
            # NOTE: `range` here shadows the builtin; it is the ES range
            # clause dict, presumably {<time_field>: {'gte': ...}}.
            range = subquery.get('range')
            if range is not None:
                # There should only be one value in range
                for value in range:
                    time_field = value
                curr_minimum = range[time_field]['gte']
                # Only rewrite when the parsed durations actually differ.
                if parse(minimum) != parse(curr_minimum):
                    query['query']['bool']['must'][m]['range'][time_field][
                        'gte'] = 'now-{}'.format(minimum)
                    queries[n] = query

        _adjust_extended_bounds(date_histogram, minimum)

    return queries
Example #9
0
    def test(self, d):
        """
        Test, for purposes of type inference, whether a value could be valid
        for this column type. Accepts None, timedelta instances, configured
        null strings, and strings pytimeparse can interpret.
        """
        if d is None or isinstance(d, datetime.timedelta):
            return True

        if not isinstance(d, six.string_types):
            return False

        candidate = d.strip()

        if candidate.lower() in self.null_values:
            return True

        # Valid iff pytimeparse recognises the string as a duration.
        return pytimeparse.parse(candidate) is not None
Example #10
0
def timestamps(mp4_filename: str) -> str:
    """
    Return multi-line string with marker timestamps for given mp4 file. e.g.
    07:26 — Marker 37
    24:44 — Marker 38
    30:05 — Marker 39
    :rtype: str
    """
    sesx_filename = re.sub(r'\.mp4$', ' ru.sesx', mp4_filename)
    markers = _get_markers(sesx_filename)
    clips_recorded = _get_clips(sesx_filename, 'Track 1')
    clips_translation = _get_clips(sesx_filename, 'Translation')

    # Fall back to no skip when the metadata has no skip time recorded.
    skip_time_str = meta.get_skip_time(mp4_filename)
    skip_time = pytimeparse.parse('0:00' if skip_time_str == '' else skip_time_str)

    adjusted_markers = _adjust_markers(markers, clips_recorded, clips_translation, skip_time)

    # One "MM:SS — name" line per marker that has a resolvable time.
    lines = [
        _seconds_to_time_stamp(marker[0]) + ' — ' + marker[1] + '\n'
        for marker in adjusted_markers
        if marker[0] is not None
    ]
    return ''.join(lines)
Example #11
0
 def parse_check(cls, check):
     """Build the list of TimelineRule schedules for one check config.

     A check with neither "period" nor "schedule" gets a 300-second
     default rule; "period" (string durations accepted) and "schedule"
     (dict or list of dicts) each contribute their own rules.
     """
     rules = []
     has_period = "period" in check
     has_schedule = "schedule" in check

     if not has_period and not has_schedule:
         # Default: run every 5 minutes.
         rules.append(
             TimelineRule(interval=300, unit='seconds', at=None))
     elif has_period:
         period = check["period"]
         if isinstance(period, six.string_types):
             # Human-readable durations like "5m" become whole seconds.
             seconds = int(pytimeparse.parse(period))
             logger.debug('Parsed "%s" to %d seconds', period, seconds)
             period = seconds
         rules.append(
             TimelineRule(interval=period, unit='seconds', at=None))

     if has_schedule:
         entry = check['schedule']
         if isinstance(entry, dict):
             rules.append(cls._clean_single_schedule(check, entry))
         elif isinstance(entry, list):
             for item in entry:
                 rules.append(cls._clean_single_schedule(check, item))
         else:
             raise ConfigurationError(
                 f"Check {check['name']} has invalid schedule configuration: {check['schedule']}"
             )
     return rules
Example #12
0
    def __init__(self,
                 id,
                 frequency='one-time',
                 time_delta=None,
                 load_hour=None,
                 load_minutes=None,
                 **kwargs):
        """Constructor for the Schedule class

        Args:
            id(str): id of the Schedule object
            frequency(enum): rate at which pipeline should be run \
                can be daily, hourly and one-time
            time_delta(timedelta): Additional offset provided to the schedule
            load_hour(int): Hour at which the pipeline should start
            load_minutes(int): Minutes at which the pipeline should be run
            **kwargs(optional): Keyword arguments directly passed to base class

        Raises:
            ETLInputError: on a bad time_delta type or unknown frequency.
        """
        current_time = datetime.utcnow()

        # Set the defaults for load hour and minutes
        if load_minutes is None:
            load_minutes = 0

        if load_hour is None:
            load_hour = DAILY_LOAD_TIME

        # Normalise time_delta: None → zero offset, int → that many days.
        if time_delta is None:
            time_delta = timedelta(seconds=0)
        elif isinstance(time_delta, int):
            time_delta = timedelta(days=time_delta)
        elif not isinstance(time_delta, timedelta):
            raise ETLInputError('time_delta must be an instance of timedelta or int')

        # NOTE: constant name is spelled FEQUENCY/CONVERTION upstream; kept as-is.
        if frequency in FEQUENCY_PERIOD_CONVERTION:
            period, occurrences = FEQUENCY_PERIOD_CONVERTION[frequency]
        else:
            raise ETLInputError(
                'Frequency for the pipeline must be daily, hourly and one-time')

        # Calculate the start time of the pipeline
        start_time = current_time.replace(minute=load_minutes)
        if frequency == 'daily':
            start_time = start_time.replace(hour=load_hour)

        # If the load hour hasn't been reached today, pull the schedule back
        # so the first run still happens on the intended cycle.
        if current_time.hour < load_hour:
            if frequency == 'one-time':
                time_delta -= timedelta(days=1)
            else:
                # period is a duration string (e.g. '1 day'); parse → seconds.
                time_delta -= timedelta(seconds=parse(period))

        start_time += time_delta

        super(Schedule, self).__init__(
            id=id,
            type='Schedule',
            startDateTime=start_time.strftime('%Y-%m-%dT%H:%M:%S'),
            period=period,
            occurrences=occurrences
        )
Example #13
0
def index():
    """View function for the home page. If there is form data, check it for
    validity and create a drop from that data if it's valid; otherwise (or on
    a GET request) render the form."""
    form = DropForm()
    if form.validate_on_submit():
        drop = models.Drop()
        # Fix: PEP 8 — compare to None with `is (not)`, never `==`/`!=`.
        if form.title.data is not None and form.title.data != '':
            drop.title = form.title.data
        drop.created_at = datetime.datetime.utcnow()
        drop.data = form.drop_data.data
        drop.publicly_listed = form.publicly_listed.data
        # NOTE(review): assigns the form *field* object, not .data — kept
        # as-is; confirm whether form.expires.data was intended.
        drop.expires = form.expires
        if form.expires_in.data is not None:
            expires_in = pytimeparse.parse(form.expires_in.data)
            if expires_in is not None:
                drop.expires_in = expires_in
            else:  # the string provided in the post request isn't valid
                drop.expires_in = app.config['DATADROP_DEFAULT_EXPIRES_IN']
                flash(
                    "The given expiration time isn't valid; defaulted to {}.".
                    format(app.config['DATADROP_DEFAULT_EXPIRES_IN']))
        drop.self_destructs = form.self_destructs
        if form.self_destructs_in.data is not None:
            drop.self_destructs_in = form.self_destructs_in.data
        db.save(drop)
        # now that the drop is in the database (and has a unique urlstring), redirect the user to it's page
        return redirect(url_for('show_drop', urlstring=drop.urlstring))
    else:  # form didn't validate; probably a get request
        return render_template('index.html', form=form)
Example #14
0
    async def mute(self, ctx: commands.Context, user: discord.Member, dur: str = "", *, reason: str = "No reason.") -> None:
        """Mute a user (mod only)

        Example usage:
        --------------
        `!mute <@user/ID> <duration> <reason (optional)>`

        Parameters
        ----------
        user : discord.Member
            Member to mute
        dur : str
            Duration of mute (i.e 1h, 10m, 1d)
        reason : str, optional
            Reason for mute, by default "No reason."

        """
        await self.check_permissions(ctx, user)

        # Neutralise markdown/mentions in the user-supplied reason.
        reason = discord.utils.escape_markdown(reason)
        reason = discord.utils.escape_mentions(reason)

        now = datetime.datetime.now()
        # None when `dur` is not a recognisable duration string.
        delta = pytimeparse.parse(dur)

        if delta is None:
            # `dur` wasn't a duration — treat it as the start of the reason
            # (permanent mute path).
            if reason == "No reason." and dur == "":
                reason = "No reason."
            elif reason == "No reason.":
                reason = dur
            else:
                reason = f"{dur} {reason}"

        mute_role = self.bot.settings.guild().role_mute
        mute_role = ctx.guild.get_role(mute_role)

        if mute_role in user.roles:
            raise commands.BadArgument("This user is already muted.")

        case = Case(
            _id=self.bot.settings.guild().case_id,
            _type="MUTE",
            date=now,
            mod_id=ctx.author.id,
            mod_tag=str(ctx.author),
            reason=reason,
        )

        if delta:
            try:
                # Timed mute: record the expiry and schedule the unmute task.
                time = now + datetime.timedelta(seconds=delta)
                case.until = time
                case.punishment = humanize.naturaldelta(
                    time - now, minimum_unit="seconds")
                self.bot.settings.tasks.schedule_unmute(user.id, time)
            except Exception:
                raise commands.BadArgument(
                    "An error occured, this user is probably already muted")
        else:
            case.punishment = "PERMANENT"
        # NOTE(review): within this view the case is never persisted and the
        # mute role is never applied — confirm the remainder of this handler
        # exists elsewhere.
def qstatus(url, influxdb_client):
    """Fetch SABnzbd queue status and write a "qstatus" point to InfluxDB.

    Args:
        url: base SABnzbd API URL (query-string form; '&mode=queue' appended).
        influxdb_client: client exposing write_points().

    Returns:
        None. Logs and returns early on fetch errors or an empty payload.
    """
    log.debug("Getting queue status")
    try:
        data = requests.get('{0}{1}'.format(url, '&mode=queue'),
                            verify=False).json()
    except Exception:
        log.exception("Error getting queue status from sabnzbd.")
        return
    if not data:
        log.debug("No data returned.")
        return
    log.debug("Data from sabnzbd: %s", data)

    queue = data['queue']
    # speedlimit_abs may be a non-numeric placeholder; treat that as 0.
    try:
        speedlimit_abs = float(queue["speedlimit_abs"])
    except ValueError:
        speedlimit_abs = 0.0

    # NOTE(review): pytimeparse.parse returns None for an unparseable
    # "timeleft"; the eta_timestamp arithmetic below would then raise —
    # confirm the upstream format is always parseable.
    seconds_left = pytimeparse.parse(queue.get("timeleft"))

    json_body = {
        "measurement": "qstatus",
        "time": datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ'),
        "fields": {
            "speed": float(queue["kbpersec"]),
            "total_mb_left": float(queue["mbleft"]),
            "speedlimit": float(queue["speedlimit"]),
            "speedlimit_abs": speedlimit_abs,
            "total_jobs": float(queue["noofslots"]),
            "status": queue.get("status"),
            "diskspace1": float(queue.get("diskspace1")),
            "diskspace2": float(queue.get("diskspace2")),
            "diskspacetotal1": float(queue.get("diskspacetotal1")),
            "diskspacetotal2": float(queue.get("diskspacetotal2")),
            "diskspace1_norm": queue.get("diskspace1_norm"),
            "diskspace2_norm": queue.get("diskspace2_norm"),
            # loadavg arrives as "1m|5m|15m".
            "loadavg_1m": float(queue.get("loadavg").split('|')[0]),
            "loadavg_5m": float(queue.get("loadavg").split('|')[1]),
            "loadavg_15m": float(queue.get("loadavg").split('|')[2]),
            "have_warnings": queue.get("have_warnings"),
        }
    }

    # Time-remaining fields only make sense while the queue is running.
    if not queue.get('paused'):
        json_body["fields"].update({
            "timeleft":
            queue.get("timeleft"),
            "seconds_left":
            seconds_left,
            "eta":
            queue.get("eta"),
            "eta_timestamp":
            1000 * (seconds_left + time.time())
        })

    try:
        influxdb_client.write_points([json_body])
    except Exception:
        log.exception("Error posting queue status to InfluxDB")
Example #16
0
 def handle(self, event):
     """Handle the ".schedule" chat command: queue a delayed message.

     Message shape: ".schedule ?? <delay> ?? <#channel> <message words...>"
     — only tokens 2 (delay), 4 (channel) and 5+ (message) are read.
     """
     if event.msg.startswith(".schedule"):
         try:
             split = event.msg.split()
             if len(split) < 6:
                 self.error(event.channel)
                 return
             chan = split[4]
             message = ' '.join(split[5:])
             delay = split[2]
             send_time = datetime.now(timezone.utc)
             # NOTE(review): parse(delay) returns None for an unparseable
             # delay, which makes timedelta() raise TypeError — that is not
             # caught by the IndexError handler below; confirm intended.
             td = timedelta(seconds=parse(delay))
             # `target` is only used for the confirmation text; the
             # scheduler receives the raw `delay` string.
             target = send_time + td
             if not chan.startswith("#"):
                 self.say(event.channel, "invalid channel.")
                 return
             self.bot.scheduler.schedule_task(chan,
                                              message,
                                              event.user,
                                              trigger_delay=delay,
                                              source_channel=event.channel)
             self.say(
                 event.channel, "registered event for channel " + chan +
                 " at " + str(target)[:-13] + " UTC.")
         except IndexError:
             self.error(event.channel)
             return
Example #17
0
def parse_timedelta(obj):
    """Coerce `obj` into a ``timedelta``.

    Args:
        obj (str|number|timedelta): Object to parse. Strings are parsed as
            human-readable durations; numbers are taken as seconds.

    Returns:
        timedelta

    Raises:
        TypeError: When `obj` is not a string or timedelta.
        ParseError: When `obj` can't be parsed as a timedelta.
    """
    if isinstance(obj, timedelta):
        return obj

    if isinstance(obj, string_types):
        seconds = pytimeparse.parse(obj)
        if seconds is None:
            raise ParseError('Value "{0}" is not a recognized duration format'
                             .format(obj))
    elif isinstance(obj, number_types):
        seconds = obj
    else:
        raise TypeError('Expected string or number type, not {0}'
                        .format(type(obj).__name__))

    return timedelta(seconds=seconds)
Example #18
0
    async def mute(self, ctx, member: discord.Member, time, *, reason):
        """
        Mutes a member for the given time and reason
        Usage: .mute <member> <time> <reason>

        :param ctx: context object
        :param member: member to mute
        :param time: time to mute for
        :param reason: reason for the mute (dm'ed to the user)
        """
        muted_role = ctx.guild.get_role(615956736616038432)

        # Is user already muted
        if muted_role in member.roles:
            await ctx.send("This member is already muted")
            return

        # Check role hierarchy
        if ctx.author.top_role.position <= member.top_role.position:
            await ctx.send("You're not high enough in the role hierarchy to do that.")
            return

        username = member.name + "#" + str(member.discriminator)

        seconds = pytimeparse.parse(time)
        if seconds is None:
            await ctx.send("Not a valid time, try again")
            # Bug fix: without this return, execution fell through and
            # timedelta(seconds=None) below raised TypeError.
            return

        delta = timedelta(seconds=seconds)
        if len(reason) < 1:
            await ctx.send("You must include a reason for the mute")
            return

        Config.add_mute(member, datetime.now() + timedelta(seconds=seconds))

        mute_time = time_delta_string(datetime.utcnow(), datetime.utcnow() + delta)

        mute_embed = discord.Embed(
            title="Member muted",
            color=0xbe4041,
            description=(
                    "**Muted:** <@" + str(member.id) + ">\n**Time:** " + mute_time + "\n**__Reason__**\n> " + reason +
                    "\n\n**Muter:** <@" + str(ctx.author.id) + ">"
            )
        )

        mute_embed.set_author(name=member.name + "#" + member.discriminator, icon_url=member.avatar_url)
        mute_embed.set_footer(text="ID: " + str(member.id))
        mute_embed.timestamp = datetime.utcnow()

        await member.add_roles(muted_role)
        await ctx.send("**Muted** user **" + username + "** for **" + mute_time + "** for: **" + reason + "**")
        if Config.guilds[ctx.guild]["logging"]["overwrite_channels"]["mod"] is not None:
            await Config.guilds[ctx.guild]["logging"]["overwrite_channels"]["mod"].send(embed=mute_embed)
        await member.send(
            "**You were muted in the " + ctx.guild.name + " for " + mute_time + ". Reason:**\n> " +
            reason + "\n\nYou can respond here to contact staff."
        )

        await self.mute_helper(member, seconds, muted_role)
Example #19
0
def parse_duration(text):
    """Parse a human-readable duration string into a TimeeDuration."""
    return TimeeDuration(pytimeparse.parse(text))
Example #20
0
def pin(bot, update, args):
    """Pin the replied-to message; optionally schedule an auto-unpin.

    args[0], when present, is a human-readable duration (e.g. "10m")
    after which the message is unpinned via the job queue.
    """
    msg = update.message.reply_to_message
    if msg == None:
        update.message.reply_text(
            "Usage:\n\nReplying to the message you wish to pin.\n/pin [time to pin]\n"
        )
        return
    gid = update.message.chat.id
    mid = msg.message_id
    # force_notify config inverts the default silent pin.
    force_notify = check_config(gid, "force_notify")
    disable_notify = not bool(force_notify)
    bot.pin_chat_message(chat_id=gid,
                         message_id=mid,
                         disable_notification=disable_notify)

    if len(args) == 0:
        return
    # NOTE(review): pytimeparse.parse returns None for bad input, in which
    # case queue.run_once(unpin, None) is called — confirm the job queue
    # tolerates a None delay.
    delay = pytimeparse.parse(args[0])
    # Replace any previously-scheduled unpin for this chat.
    if gid in unpin_events:
        unpin_events[gid].schedule_removal()

    def unpin(bot, job):
        # Runs later via the job queue; also drops the bookkeeping entry.
        bot.unpin_chat_message(chat_id=gid)
        del unpin_events[gid]

    event = queue.run_once(unpin, delay)
    unpin_events[gid] = event
Example #21
0
def logs(obj, api_id, line, output):
    """
    Get API logs for the last day, filter them with JMESPath and echo them
    in the requested output format.
    """
    api_client = obj['api_client']

    (logs, _, to_timestamp) = api_client.logs(api_id,
                                              line,
                                              time_frame_seconds=parse("1d"))

    query = "[].{Date: datetime(timestamp,'%d-%m-%Y %H:%M:%S %f'), App: application_name, verbe: upper(method), Status: status, Path: path, Latency: responseTime}"

    try:
        logs_filtered = jmespath.search(
            query, logs['logs'],
            jmespath.Options(custom_functions=GioFunctions()))

        if logs_filtered and len(logs_filtered) > 0:
            # Bug fix: `header` was only bound inside the isinstance check,
            # raising NameError at echo() when the first result wasn't a dict.
            header = None
            if type(logs_filtered) is list and type(
                    logs_filtered[0]) is dict:
                header = logs_filtered[0].keys()

            OutputFormatType.value_of(output).echo(logs_filtered,
                                                   header=header)
        else:
            click.echo("No logs")
    except exceptions.JMESPathError as jmespatherr:
        logging.exception("LIST JMESPathError exception")
        raise GraviteeioError(str(jmespatherr))
Example #22
0
def load_config(config_file, mqttc):
    """Load config from file, fall back to defaults, and configure MQTT auth.

    Args:
        config_file: path to the YAML configuration file.
        mqttc: MQTT client whose username/password are set from the config.

    Returns:
        (config, mqtt_config) tuple; config["interval"] is converted to seconds.
    """
    try:
        logger.info('Loading config from %s', config_file)
        # Bug fix: use a context manager so the file handle is closed
        # (the original leaked the handle returned by open()).
        with open(config_file) as config_fh:
            config = yaml.load(config_fh, Loader=yaml.SafeLoader)
    except FileNotFoundError:
        logger.info("Config not found. Using default values.")
        config = {
            "interval":
            "90m",
            "hosts": [{
                "hostname": "ping.online.net",
                "ports": [
                    5200,
                    5201,
                    5202,
                ]
            }],
            "mqtt": {
                "host": "127.0.0.1",
                "port": "1883",
                "username": "",
                "password": "",
            }
        }

    # Convert the human-readable interval (e.g. "90m") to seconds.
    config["interval"] = parse(config.get("interval", "60m"))
    mqtt_config = config.get("mqtt", {"username": "", "password": ""})
    logger.info("Using MQTT-Server %s:%s",
                mqtt_config.get("host", "127.0.0.1"),
                mqtt_config.get("port", 1883))
    mqttc.username_pw_set(mqtt_config.get("username", ""),
                          mqtt_config.get("password", ""))

    return (config, mqtt_config)
Example #23
0
def reply(text):
    """Parse a duration from *text*, announce it, then start the countdown
    and the timer for that many seconds."""
    seconds = parse(text)  # renamed: `time` shadowed the stdlib module name
    bot.send_message(chatid, "Timer set on " + str(seconds) + " seconds")
    bot.create_countdown(seconds, notify_progress)
    bot.create_timer(seconds, notify)
Example #24
0
def reply(user_input_in_seconds):
    """Start a countdown with a progress bar and a final timer.

    Parses the user-supplied duration string into seconds, confirms the
    timer to the chat, posts a single combined "time left" + progress-bar
    message, and schedules the per-tick progress updates and the final
    notification.
    """
    user_input_in_seconds = parse(user_input_in_seconds)
    bot.send_message(
        telegram_chat_id,
        "Таймер запущен на {} {}".format(user_input_in_seconds, 'seconds'))
    maxbar = int(user_input_in_seconds)

    # One message carries both the remaining time and the progress bar,
    # separated by a newline, so only a single message has to be updated.
    sep = "\n"
    seq = "Осталось {} {}".format(user_input_in_seconds,
                                  "seconds"), render_progressbar(maxbar, 1)
    message_id = bot.send_message(
        telegram_chat_id,
        sep.join(seq)
    )

    bot.create_countdown(maxbar,
                         notify_progress,
                         message_id=message_id,
                         maxbar=int(user_input_in_seconds))

    bot.create_timer(maxbar, notify_end, user_input_in_seconds)
Example #25
0
    def test(self, d):
        """
        Test, for purposes of type inference, if a value could possibly be valid
        for this column type. This will work with values that are native types
        and values that have been stringified.
        """
        # Nulls and already-parsed timedeltas are trivially acceptable.
        if d is None or isinstance(d, datetime.timedelta):
            return True

        # Anything else must at least be a string to be parseable.
        if not isinstance(d, six.string_types):
            return False

        value = d.strip()

        # Recognised null representations (e.g. "", "null") are valid.
        if value.lower() in self.null_values:
            return True

        # Otherwise the string must parse as a duration.
        return pytimeparse.parse(value) is not None
Example #26
0
    def __call__(self, *args, **kwargs):
        """Cron entry point: re-run every harvester whose reharvesting
        period has elapsed, collecting an HTML log fragment per harvester.
        """
        portal = api.portal.get()
        if portal is None:
            return None
        self.log = []

        # All harvester objects registered in the portal catalog.
        results = api.content.find(**{'portal_type': CT_HARVESTER})

        for brain in results:
            obj = brain.getObject()
            no_run_message = translate(_(u'<p>Nothing to do</p>'),
                                       context=self.request)
            self.log.append(u'<h2>{title}</h2>'.format(title=obj.title))

            if obj.reharvesting_period is None:
                # No period configured: never re-harvest automatically.
                self.log.append(no_run_message)
            elif obj.reharvesting_period and obj.last_run is None:
                # Never run before: harvest immediately.
                self.log += self.real_run(obj)
            else:
                # parse() (pytimeparse-style) turns a duration string
                # into a number of seconds.
                seconds = parse(obj.reharvesting_period)
                delta = timedelta(seconds=seconds)

                # noinspection PyTypeChecker
                current_delta = datetime.now() - obj.last_run

                # Re-run only when the configured period has elapsed.
                if current_delta >= delta:
                    self.log += self.real_run(obj)
                else:
                    self.log.append(no_run_message)

        return super(RealRunCronView, self).__call__(*args, **kwargs)
Example #27
0
 def process_tags(self, message: Message, tags: list[str]) -> bool:
     """Interpret the first tag as a TTL duration and, when it is valid,
     hand the message to the collector.

     Returns True when the message was collected, False otherwise.
     """
     ttl = pytimeparse.parse(tags[0])
     if not ttl or not valid_ttl(ttl):
         return False
     self.collector.add_message(message, int(ttl))
     return True
Example #28
0
    def cast(self, d):
        """
        Cast a single value to :class:`datetime.timedelta`.

        :param d:
            A value to cast.
        :returns:
            :class:`datetime.timedelta` or :code:`None`
        :raises CastError:
            When the value is neither a timedelta, ``None``, a null
            representation, nor a parseable duration string.
        """
        if isinstance(d, datetime.timedelta) or d is None:
            return d
        elif isinstance(d, six.string_types):
            d = d.strip()

            if d.lower() in self.null_values:
                return None
        else:
            raise CastError('Can not parse value "%s" as timedelta.' % d)

        seconds = pytimeparse.parse(d)

        if seconds is None:
            # Fixed garbled message (previously read "to as timedelta").
            raise CastError('Can not parse value "%s" as timedelta.' % d)

        return datetime.timedelta(seconds=seconds)
Example #29
0
    def kick(self):
        """Kick the replied-to user, optionally for a parsed duration.

        Durations outside the (30s, 366d) window are rejected because
        they would result in a ban instead of a kick.
        """
        if not self.message.reply_to_message:
            self.message.reply_text('You have to reply to a users message.')
            return
        user = self.get_user_settings(self.message.reply_to_message.from_user)

        duration = None
        raw_time = self.message.text.replace('/kick', '').strip()
        if raw_time:
            seconds = pytimeparse.parse(raw_time)
            if seconds is None:
                self.message.reply_text(
                    'Could not understand time. Use something like 10m or 1h etc.'
                )
                return
            if not 30 < seconds < 31622400:
                self.message.reply_text(
                    'Duration below 30 sec or above 366 days result in a ban. For that use /ban.'
                )
                return
            duration = timedelta(seconds=seconds)

        if self._kick(user, duration) is False:
            self.message.reply_text(
                f'Could not kick {user.link or user.user_fullname}')
            return
        self.chat.send_message(
            f'Kicked {user.link or user.user_fullname}',
            reply_to_message_id=self.message.reply_to_message.message_id)
Example #30
0
def validate_datetime(ctx, param, value):
    """Click option callback validating *value* as a date/time string.

    :returns: the parsed :class:`datetime.datetime`
    :raises click.BadParameter: when the value cannot be parsed
    """
    try:
        from dateutil.parser import parse

        return parse(value)
    except (ValueError, OverflowError):
        # dateutil raises ParserError (a ValueError subclass) for
        # unparseable input and OverflowError for out-of-range dates;
        # the original only caught ValueError.
        raise click.BadParameter("Invalid date/time")
Example #31
0
def parse_timedelta(obj):
    """
    Attempt to parse `obj` as a ``timedelta`` from a string formatted duration.

    Args:
        obj (str|number|timedelta): Object to parse.

    Returns:
        timedelta

    Raises:
        TypeError: When `obj` is not a string or timedelta.
        ParseError: When `obj` can't be parsed as a timedelta.
    """
    # Already a timedelta: nothing to do.
    if isinstance(obj, timedelta):
        return obj

    if isinstance(obj, str):
        # Duration string, e.g. "1h30m" -> seconds.
        seconds = pytimeparse.parse(obj)
        if seconds is None:
            raise ParseError(
                'Value "{0}" is not a recognized duration format'.format(obj))
        return timedelta(seconds=seconds)

    if isinstance(obj, number_types):
        # Bare numbers are taken to be a count of seconds.
        return timedelta(seconds=obj)

    raise TypeError("Expected string or number type, not {0}".format(
        type(obj).__name__))
Example #32
0
    async def remindme(self, ctx: context.Context, dur: str, *, reminder: str):
        """Send yourself a reminder after a given time gap

        Example usage
        -------------
        !remindme 1h bake the cake

        Parameters
        ----------
        dur : str
            "After when to send the reminder"
        reminder : str
            "What to remind you of"
        """

        now = datetime.datetime.now()
        # pytimeparse returns the duration in seconds, or None when the
        # string is not a recognised duration format.
        delta = pytimeparse.parse(dur)
        if delta is None:
            raise commands.BadArgument(
                "Please give a valid time to remind you! (i.e 1h, 30m)")

        time = now + datetime.timedelta(seconds=delta)
        # A negative duration (e.g. "-1h") would schedule in the past.
        if time < now:
            raise commands.BadArgument("Time has to be in the future >:(")
        # Prevent the reminder text from injecting markdown when echoed back.
        reminder = discord.utils.escape_markdown(reminder)

        ctx.tasks.schedule_reminder(ctx.author.id, reminder, time)
        # Human-readable form of the delay, e.g. "an hour".
        natural_time = humanize.naturaldelta(delta, minimum_unit="seconds")
        embed = discord.Embed(
            title="Reminder set",
            color=discord.Color.random(),
            description=f"We'll remind you in {natural_time} ")
        # Clean up both the command and the confirmation after a short delay.
        await ctx.message.delete(delay=5)
        await ctx.message.reply(embed=embed, delete_after=10)
Example #33
0
def read_worklogs(file: T.IO[str]) -> T.Iterable[Worklog]:
    """Yield :class:`Worklog` entries from a pipe-separated worklog file.

    Entry lines look like ``id | duration | project_id | ticket | desc``.
    A comment line containing a ``YYYY-MM-DD`` date sets the date for the
    entries that follow; other ``#`` comment lines and blank lines are
    skipped.

    :raises ValueError: when an entry appears before any date header.
    """
    last_date: T.Optional[datetime.date] = None
    for line in file:
        # Skip blank / whitespace-only lines.
        if not line.strip():
            continue

        # A comment carrying a date, e.g. "# 2020-01-31".
        match = re.search(r"#.*(\d{4}-\d{2}-\d{2})", line)
        if match:
            last_date = dateutil.parser.parse(match.group(1)).date()
            continue

        if line.startswith("#"):
            continue

        # NOTE: the original also reset last_date on empty lines here,
        # but that branch was unreachable — file iteration never yields
        # an empty string, and whitespace-only lines are skipped above.

        row = [word.strip() for word in line.split("|")]
        if not last_date:
            raise ValueError("unknown date")
        yield Worklog(
            id=int(row[0]) if row[0] else None,
            date=last_date,
            duration=datetime.timedelta(seconds=pytimeparse.parse(row[1])),
            project_id=int(row[2]),
            ticket=row[3].strip() or None,
            description=row[4],
        )
Example #34
0
 def __init__(self, delay='1min'):
     """Store *delay* in seconds.

     A bare number is interpreted as minutes; otherwise the string is
     parsed as a duration (e.g. "90s", "2h").

     :raises ValueError: when the string cannot be parsed either way.
     """
     try:
         seconds = float(delay) * 60
     except ValueError:
         # Not a plain number: fall back to duration parsing.
         seconds = pytimeparse.parse(delay)
     if seconds is None:
         raise ValueError('Could not convert string "%s" to time.' % delay)
     self.delay = seconds
Example #35
0
 def get_seconds(self, section, option, **kwargs):
     """Read a duration option and return it as a number of seconds.

     :param section: config section name
     :param option: option name; its value is e.g. "1 seconds", "4 minutes"
     :param kwargs: forwarded to the underlying value lookup
     :return: the duration in seconds
     """
     raw_value = self._get_value_or_exception(section, option, **kwargs)
     return pytimeparse.parse(raw_value)
Example #36
0
def is_job_run_exceeding_expected_runtime(job_run, job_expected_runtime):
    """Return True when a *running* job has already exceeded its expected
    runtime (in seconds); False otherwise or when no expectation is set."""
    if job_expected_runtime is None:
        return False
    if job_run.get('state', 'unknown') != "running":
        return False
    duration_seconds = pytimeparse.parse(job_run.get('duration', ''))
    # TODO: duration_seconds will be None for a running job if it's root
    # action is waiting for external dependency. Maybe fix by setting
    # job's start_time to run_time when that happens.
    return bool(duration_seconds and duration_seconds > job_expected_runtime)
Example #37
0
def to_timespan(value, allow_le_zero=False):
    """Convert *value* to a numpy ``timedelta64`` with nanosecond resolution.

    :param value: seconds as a number or numeric string, or a duration
        string understood by pytimeparse (e.g. "5m").
    :param allow_le_zero: permit zero or negative timespans.
    :returns: :class:`numpy.timedelta64` in nanoseconds
    :raises ValueError: for ``None``, unparseable, or (unless allowed)
        non-positive input.
    """
    if value is None:
        raise ValueError("Invalid timespan")
    try:
        seconds = float(value)
    except (TypeError, ValueError):
        # Narrowed from a bare ``except Exception``: float() raises only
        # these two for bad input; anything else should propagate.
        seconds = pytimeparse.parse(value)
        if seconds is None:
            raise ValueError("Unable to parse timespan")
    # 10e8 == 1e9: seconds -> nanoseconds.
    seconds = numpy.timedelta64(int(seconds * 10e8), 'ns')
    if not allow_le_zero and seconds <= numpy.timedelta64(0, 'ns'):
        raise ValueError("Timespan must be positive")
    return seconds
Example #38
0
def is_action_run_exceeding_expected_runtime(
    action_run, actions_expected_runtime
):
    """Return True when a *running* action has exceeded its configured
    expected runtime (in seconds); False otherwise.

    :param action_run: dict with at least 'state', 'action_name', 'duration'
    :param actions_expected_runtime: mapping of action name -> seconds
    """
    if action_run.get('state', 'unknown') == 'running':
        action_name = action_run.get('action_name', None)
        if action_name in actions_expected_runtime and actions_expected_runtime[
            action_name
        ] is not None:
            duration_seconds = pytimeparse.parse(
                action_run.get('duration', '')
            )
            # Guard against parse() returning None for a missing or
            # unparseable duration — the original compared None > int,
            # which raises TypeError on Python 3.
            if duration_seconds is not None and \
                    duration_seconds > actions_expected_runtime[action_name]:
                return True
    return False
Example #39
0
    def test(self, d):
        """
        Test, for purposes of type inference, if a string value could possibly
        be valid for this column type.
        """
        value = d.strip()

        # Recognised null representations count as valid.
        if value.lower() in self.null_values:
            return True

        # Otherwise the string must parse as a duration.
        return pytimeparse.parse(value) is not None
Example #40
0
def activate():
    """Endpoint to activate the door system

    Activates the door for two minutes if no argument is provided.
    Otherwise parses the provided time period and activates for that
    period of time.
    """

    requested = request.args.get('time')
    parsed_seconds = pytimeparse.parse(str(requested))

    if parsed_seconds and parsed_seconds > 0:
        flash(u'Door activated!', 'success')
        activation_seconds = parsed_seconds
    else:
        # Missing or unparseable period: fall back to the default.
        activation_seconds = 2 * 60  # Default activation period of 2 minutes
        flash(u'Invalid time period provided', 'danger')

    door.activate(time_seconds=activation_seconds)

    return redirect(url_for('index'))
Example #41
0
    def cast(self, d):
        """
        Cast a single value to :class:`datetime.timedelta`.

        :param d: A value to cast.
        :returns: :class:`datetime.timedelta` or :code:`None`
        :raises CastError: when the value is neither a timedelta, ``None``,
            a null representation, nor a parseable duration string.
        """
        if isinstance(d, datetime.timedelta) or d is None:
            return d

        if not isinstance(d, six.string_types):
            # Previously non-string values fell through to pytimeparse and
            # crashed with an obscure TypeError; fail with a clear error,
            # matching the sibling timedelta cast implementation.
            raise CastError('Can not parse value "%s" as timedelta.' % d)

        d = d.strip()

        if d.lower() in self.null_values:
            return None

        seconds = pytimeparse.parse(d)

        if seconds is None:
            # timedelta(seconds=None) would raise a bare TypeError here.
            raise CastError('Can not parse value "%s" as timedelta.' % d)

        return datetime.timedelta(seconds=seconds)
Example #42
0
async def cmd_seek(self, message, player, leftover_args, seek=None):
    """
    Usage:
        {command_prefix}seek [seconds]

    Seeks the player to a specific time in seconds.
    """

    if player.is_stopped:
        raise CommandError("Can't seek! The player is not playing!", expire_in=20)

    if not seek:
        return Response('A time is required to seek.', reply=True, delete_after=20)

    try:
        original_seek = seek

        # Join all arguments so inputs like "1 min 30 s" parse as one duration.
        combined = ' '.join([seek, *leftover_args])
        target = pytimeparse.parse(combined)

        if not target:
            # Fall back to interpreting the first argument as plain seconds.
            target = int(original_seek)

        if target < 0:
            raise ValueError()
    except (TypeError, ValueError):
        return Response('The time you have given is invalid.', reply=True, delete_after=20)

    try:
        player.seek(target)
    except ValueError as e:
        return Response(str(e), delete_after=20)

    return Response('Seeked video to %s!' % (
        str(timedelta(seconds=target)).lstrip('0').lstrip(':')
    ), delete_after=20)
Example #43
0
    def execute_backup(self):
        """Create a new backup under ``self.args.dst``.

        Performs an incremental rdiff-backup against the latest full
        backup while MAX_INCREMENTAL_COUNT / MAX_INCREMENTAL_SPAN allow
        it, otherwise starts a fresh full backup directory named after
        the current UTC time. If the data is unchanged since the last
        backup, no new backup is created.
        """
        dst = os.path.join(self.root, self.args.dst)
        config = self.config
        backup_dir = self.backup_dir

        # Existing full-backup directories (named by UTC ISO timestamps),
        # sorted so the last entry is the most recent.
        files = sorted(x for x in os.listdir(dst) if fs_backup_re.match(x))

        last = files[-1] if files else None
        last_path = os.path.join(dst, last) if last else None

        full_backup = True
        if last:
            # Prefer an incremental backup while the latest full backup is
            # recent enough and still has room for more incrementals.
            incrementals = get_incrementals_for(last_path)
            # Duration string (e.g. "7d") -> seconds.
            max_span = pytimeparse.parse(config["MAX_INCREMENTAL_SPAN"])
            now = datetime.datetime.utcnow().replace(microsecond=0)
            if len(incrementals) < config["MAX_INCREMENTAL_COUNT"] and \
               now - datetime.datetime.strptime(last, "%Y-%m-%dT%H:%M:%S") \
               < datetime.timedelta(seconds=max_span):
                full_backup = False

        last_incremental = last and get_incrementals_for(last_path, True)[-1]
        if not full_backup:

            # We do this so that we don't start two backups in the same
            # second. It would indeed be a bizarre use of this software to
            # start two backups in the same second but we should check for
            # this eventuality anyway.
            #
            # rdiff-backup also detects the occurrence and fails
            # rather than wait.
            #
            last_incremental = datetime.datetime.strptime(
                last_incremental, "%Y-%m-%dT%H:%M:%S")
            while True:
                now = datetime.datetime.utcnow().replace(microsecond=0)
                if now - last_incremental >= datetime.timedelta(seconds=1):
                    break
                time.sleep(0.5)

            # We don't need to test ``last_path`` here as it must
            # necessarily not be ``None``.
            if self.compare(self.outfile,
                            os.path.join(last_path, self.outfile_base)):
                self.log(now.isoformat() +
                         ": no change in the data to be backed up: "
                         "skipping creation of new incremental backup")
            else:
                # rdiff-backup appears to first test the modification
                # time of a file with a resolution of a second. If the
                # modification time of the file is the same as what is
                # stored in the previous backup, then it is considered
                # "unchanged" and rdiff-backup does not further
                # examine the file... This is a problem for us. If we
                # are here, we've determined that the file is in fact
                # different.
                #
                # So we force the issue by touching the file here.
                os.utime(self.outfile, None)

                self.rdiff_backup(backup_dir, last_path)
                # This path won't have the final "/" unless we add it.
                self.sync_path(os.path.join(self.args.dst, last) + "/")
        else:
            #
            # We do this so that we don't start two backups in the same
            # second. It would indeed be a bizarre use of this software to
            # start two backups in the same second but we should check for
            # this eventuality anyway.
            #
            # We also check the last incremental. There's no hard reason
            # to prevent the next full backup from being on the same
            # second as the last incremental but it does simplify testing
            # a little bit and is consistent with the rest of the
            # software.
            #
            while True:
                now = datetime.datetime.utcnow().replace(microsecond=0)
                new_dir_name = now.isoformat()
                if last is None or (new_dir_name != last
                                    and new_dir_name != last_incremental):
                    break
                time.sleep(0.5)

            # Don't save the full backup unless it is actually different
            # from the previous one.
            if last_path is not None and \
               self.compare(self.outfile,
                            os.path.join(last_path, self.outfile_base)):
                self.log(new_dir_name +
                         ": no change in the data to be backed up: "
                         "skipping creation of new full backup")
            else:
                new_dir_path = os.path.join(dst, new_dir_name)
                os.mkdir(new_dir_path)

                self.rdiff_backup(backup_dir, new_dir_path)
                # This path will never have a "/" at the end unless we
                # add it.
                self.push_path(os.path.join(self.args.dst, new_dir_name) +
                               "/")
Example #44
0
    def parse_pubdate(pubdate, human_time=False, timezone=None, **kwargs):
        """
        Parse publishing date into a datetime object.

        :param pubdate: date and time string
        :param human_time: string uses human slang ("4 hours ago")
        :param timezone: use a different timezone ("US/Eastern")

        :keyword dayfirst: Interpret the first value as the day
        :keyword yearfirst: Interpret the first value as the year
        :keyword fromtimestamp: Interpret ``pubdate`` as a Unix timestamp

        :returns: a datetime object or None
        """
        now_alias = ('right now', 'just now', 'now')

        df = kwargs.pop('dayfirst', False)
        yf = kwargs.pop('yearfirst', False)
        fromtimestamp = kwargs.pop('fromtimestamp', False)

        # This can happen from time to time
        if pubdate is None:
            log.debug('Skipping invalid publishing date.')
            return

        try:
            if human_time:
                if pubdate.lower() in now_alias:
                    seconds = 0
                else:
                    # Split "4 hours" into amount ("4") and unit ("hours").
                    match = re.search(r'(?P<time>[\d.]+\W*)(?P<granularity>\w+)', pubdate)
                    matched_time = match.group('time')
                    matched_granularity = match.group('granularity')

                    # The parse method does not support decimals used with the month,
                    # months, year or years granularities.
                    if matched_granularity and matched_granularity in ('month', 'months', 'year', 'years'):
                        matched_time = int(round(float(matched_time.strip())))

                    # parse() (pytimeparse-style) returns seconds, or None on failure.
                    seconds = parse('{0} {1}'.format(matched_time, matched_granularity))
                    if seconds is None:
                        log.warning('Failed parsing human time: {0} {1}', matched_time, matched_granularity)
                        raise ValueError('Failed parsing human time: {0} {1}'.format(matched_time, matched_granularity))

                # "4 hours ago" means the current local time minus that duration.
                return datetime.now(tz.tzlocal()) - timedelta(seconds=seconds)

            if fromtimestamp:
                dt = datetime.fromtimestamp(int(pubdate), tz=tz.gettz('UTC'))
            else:
                day_offset = 0
                if 'yesterday at' in pubdate.lower() or 'today at' in pubdate.lower():
                    # Extract a time
                    time = re.search(r'(?P<time>[0-9:]+)', pubdate)
                    if time:
                        if 'yesterday' in pubdate:
                            day_offset = 1
                        pubdate = time.group('time').strip()

                dt = parser.parse(pubdate, dayfirst=df, yearfirst=yf, fuzzy=True) - timedelta(days=day_offset)

            # Always make UTC aware if naive
            if dt.tzinfo is None or dt.tzinfo.utcoffset(dt) is None:
                dt = dt.replace(tzinfo=tz.gettz('UTC'))
            if timezone:
                dt = dt.astimezone(tz.gettz(timezone))

            return dt
        except (AttributeError, TypeError, ValueError):
            # AttributeError also covers a failed regex match (match is None).
            log.exception('Failed parsing publishing date: {0}', pubdate)
Example #45
0
File: mongo.py Project: mfiers/mad2
def search(app, args):
    """
    Find files

    Builds a MongoDB query from the command-line arguments and either
    deletes the matching records (``--delete``) or prints them in one of
    three formats: TSV, raw YAML, or a user-supplied format string.
    """

    MONGO_mad = get_mongo_transient_db(app)

    query = {}

    # Map simple string filters directly onto query fields. Note:
    # 'project' appears twice in this list — presumably a copy/paste
    # slip; harmless since the second iteration just re-sets the key.
    for f in ['username', 'backup', 'volume', 'host', 'dirname',
              'sha1sum', 'project', 'project', 'pi', 'category',
              'filename', 'userid']:

        v = getattr(args, f)
        if v is None:
            continue
        elif v == '(none)':
            # Special token: match records that lack the field entirely.
            query[f] = { "$exists": False }
        elif v.startswith('/') and v.endswith('/'):
            # /pattern/ syntax: treat the value as a regular expression.
            rrr = re.compile(v[1:-1])
            query[f] = rrr
        else:
            query[f] = v

    # Human-readable sizes (e.g. "10Mb") become byte thresholds.
    if args.min_filesize:
        query['filesize'] = {"$gt": mad2.util.interpret_humansize(args.min_filesize)}

    if args.max_filesize:
        # Merge with a possible min_filesize constraint on the same key.
        nq = query.get('filesize', {})
        nq["$lt"] = mad2.util.interpret_humansize(args.max_filesize)
        query['filesize'] = nq

    if args.atime_older_than:
        # Duration string (e.g. "30d") -> cutoff datetime in the past.
        delta = datetime.timedelta(seconds=pytimeparse.parse(args.atime_older_than))
        cutoffdate = datetime.datetime.utcnow() - delta
        query['atime'] = {"$lte": cutoffdate}


    if args.delete:
        MONGO_mad.remove(query)
        return

    res = MONGO_mad.find(query)


    if args.sort:
        res = res.sort(args.sort, pymongo.ASCENDING)
    elif args.reverse_sort:
        res = res.sort(args.reverse_sort, pymongo.DESCENDING)

    if args.limit > 0:
        res = res.limit(args.limit)

    if args.tsv:
        # Tab-separated output; a default field set when no custom format.
        if args.format == '{fullpath}':
            fields = 'host fullpath filesize category'.split()
        else:
            fields = args.format.split(',')
        for r in res:
            vals = [r.get(x, 'n.a.') for x in fields]
            print("\t".join(map(str, vals)))
    elif args.raw:
        print(yaml.safe_dump(list(res), default_flow_style=False))
    else:
        #ensure tab characters
        format = args.format.replace(r'\t', '\t')
        for r in res:
            # Retry formatting, back-filling any missing key with a
            # placeholder until the format string succeeds.
            while True:
                try:
                    print(format.format(**r))  # 'fullpath'])
                except KeyError as e:
                    r[e.args[0]] = '(no value)'
                    continue
                break
Example #46
0
    def __init__(self, runs: t.List[dict] = None, append: bool = None, show_report: bool = None):
        """
        Creates an instance and setup everything.

        :param runs: list of dictionaries that represent run program blocks if None Settings()["run/in"] is used
        :param append: append to the old benchmarks if there are any in the result file?
        :param show_report: show a short report after finishing the benchmarking?
        """
        if runs is None:
            typecheck(Settings()["run/in"], ValidYamlFileName())
            with open(Settings()["run/in"], "r") as f:
                runs = yaml.load(f)
        # Validate the structure of each run block before using it.
        typecheck(runs, List(Dict({
            "attributes": Dict(all_keys=False, key_type=Str()),
            "run_config": Dict(all_keys=False)
        })))
        self.runs = runs  # type: t.List[dict]
        """ List of dictionaries that represent run program blocks """
        self.run_blocks = []  # type: t.List[RunProgramBlock]
        """ Run program blocks for each dictionary in ``runs```"""
        for (id, run) in enumerate(runs):
            self.run_blocks.append(RunProgramBlock.from_dict(id, copy.deepcopy(run)))
        self.append = Settings().default(append, "run/append")  # type: bool
        """ Append to the old benchmarks if there are any in the result file? """
        self.show_report = Settings().default(show_report, "run/show_report")  # type: bool
        """  Show a short report after finishing the benchmarking? """
        self.stats_helper = None  # type: RunDataStatsHelper
        """ Used stats helper to help with measurements """
        typecheck(Settings()["run/out"], FileName())
        if self.append:
            # Merge new run data into any existing result file.
            run_data = []
            try:
                if os.path.exists(Settings()["run/out"]):
                    with open(Settings()["run/out"], "r") as f:
                        run_data = yaml.load(f)
                self.stats_helper = RunDataStatsHelper.init_from_dicts(run_data, external=True)
                for run in runs:
                    self.stats_helper.runs.append(RunData(attributes=run["attributes"]))
            except:
                self.teardown()
                raise
        else:
            self.stats_helper = RunDataStatsHelper.init_from_dicts(copy.deepcopy(runs))
        #if Settings()["run/remote"]:
        #    self.pool = RemoteRunWorkerPool(Settings()["run/remote"], Settings()["run/remote_port"])
            # NOTE(review): this removal is indented inside the non-append
            # branch; the commented-out lines above make the indentation
            # easy to misread — confirm the removal is meant to be
            # else-branch-only.
            if os.path.exists(Settings()["run/out"]):
                os.remove(Settings()["run/out"])
        self.pool = None  # type: AbstractRunWorkerPool
        """ Used run worker pool that abstracts the benchmarking """
        if Settings()["run/cpuset/parallel"] == 0:
            self.pool = RunWorkerPool()
        else:
            self.pool = ParallelRunWorkerPool()
        self.run_block_size = Settings()["run/run_block_size"]  # type: int
        """ Number of benchmarking runs that are done together """
        self.discarded_runs = Settings()["run/discarded_runs"]  # type: int
        """ First n runs that are discarded """
        self.max_runs = Settings()["run/max_runs"]  # type: int
        """ Maximum number of benchmarking runs """
        self.min_runs = Settings()["run/min_runs"]  # type: int
        """ Minimum number of benchmarking runs """
        if self.min_runs > self.max_runs:
            logging.warning("min_runs ({}) is bigger than max_runs ({}), therefore they are swapped."
                            .format(self.min_runs, self.max_runs))
            tmp = self.min_runs
            self.min_runs = self.max_runs
            self.max_runs = tmp

        self.shuffle = Settings()["run/shuffle"]  # type: bool
        """ Randomize the order in which the program blocks are benchmarked. """
        self.fixed_runs = Settings()["run/runs"] != -1  # type: bool
        """ Do a fixed number of benchmarking runs? """
        if self.fixed_runs:
            # NOTE(review): self.min_runs is assigned twice in this chained
            # statement — redundant but harmless.
            self.min_runs = self.max_runs = self.min_runs = Settings()["run/runs"]
        self.start_time = round(time.time())  # type: float
        """ Unix time stamp of the start of the benchmarking """
        self.end_time = None  # type: float
        """ Unix time stamp of the point in time that the benchmarking can at most reach """
        try:
            # run/max_time is a duration string (e.g. "2h") -> seconds.
            self.end_time = self.start_time + pytimeparse.parse(Settings()["run/max_time"])
        except:
            self.teardown()
            raise
        self.store_often = Settings()["run/store_often"]  # type: bool
        """ Store the result file after each set of blocks is benchmarked """
        self.block_run_count = 0  # type: int
        """ Number of benchmarked blocks """
        self.erroneous_run_blocks = []  # type: t.List[t.Tuple[int, BenchmarkingResultBlock]]
        """ List of all failing run blocks (id and results till failing) """
Example #47
0
def get_lifetime(environ, role):
    """Look up ``DTUF_<ROLE>_LIFETIME`` in *environ* and return it as a
    timedelta, or None when the variable is absent."""
    duration = environ.get('DTUF_' + role + '_LIFETIME')
    if duration is None:
        return None
    return timedelta(seconds=pytimeparse.parse(duration))
Example #48
0
def parse_duration(input_string):
    """Wrapper around pytimeparse to parse a time duration and return a
    regular timedelta"""
    seconds = pytimeparse.parse(input_string)
    return datetime.timedelta(seconds=seconds)
Example #49
0
File: todo.py Project: jad-b/atomic
def log(orig, delta):
    """Attach a logged amount of work on the todo.

    :param orig: the existing logged time as a ``timedelta``
    :param delta: additional time, as a ``timedelta`` or a duration
        string (e.g. "1h30m") parsed via pytimeparse
    :returns: ``orig + delta``
    :raises TypeError: when *delta* is neither a str nor a timedelta
    """
    if isinstance(delta, str):  # Convert to timedelta
        delta = timedelta(seconds=pytimeparse.parse(delta))
    if not isinstance(delta, timedelta):
        # ``assert`` is stripped under ``python -O``; validate explicitly.
        raise TypeError('delta must be a str or timedelta, got %r' % type(delta))
    return orig + delta