Example #1
def parse_date(s):
    if re.match(r'^(\d+(min|[ymwdhs]))+$', s):
        date = utcnow()
        units = {
            'y': 'years',
            'm': 'months',
            'w': 'weeks',
            'd': 'days',
            'h': 'hours',
            'min': 'minutes',
            's': 'seconds',
        }
        for value, unit in re.findall(r'(\d+)(min|[ymwdhs])', s):
            kw = {units[unit]: -int(value)}
            date += relativedelta(**kw)
    elif re.match(r'^\d\d\d\d$', s):
        date = parsetime(s) + relativedelta(yearday=1)
    elif re.match(r'^\d\d\d\d[-/]\d\d$', s):
        date = parsetime(s) + relativedelta(day=1)
    elif re.match(r'^(\d\d)?\d\d[-/]\d\d[-/]\d\d$', s):
        date = parsetime(s)
    elif re.match(r'^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\d(\+\d\d:\d\d)?$', s):
        try:
            # try converting timezone if one is specified
            date = parsetime(s).astimezone(utc)
        except ValueError:
            # otherwise default to UTC if none is specified
            date = parsetime(s).replace(tzinfo=utc)
    elif s == 'now':
        return utcnow()
    else:
        raise ValueError(f'invalid time value: {s!r}')

    # drop microsecond resolution since we shouldn't need it
    return date.replace(microsecond=0)
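The function above relies on `parsetime`, `utcnow`, `utc`, and `relativedelta` being imported elsewhere in its module. A self-contained sketch of the same relative-offset idea (helper names here are illustrative, not the original project's); note that `min` is listed before `m` in the alternation so minute offsets are not read as months:

import re
from datetime import datetime, timezone

from dateutil.relativedelta import relativedelta

UNITS = {'y': 'years', 'm': 'months', 'w': 'weeks', 'd': 'days',
         'h': 'hours', 'min': 'minutes', 's': 'seconds'}

def parse_offset(s):
    """Turn strings like '1d', '2w3h', or '10min' into a past UTC datetime."""
    if not re.match(r'^(\d+(min|[ymwdhs]))+$', s):
        raise ValueError(f'invalid offset: {s!r}')
    date = datetime.now(timezone.utc)
    for value, unit in re.findall(r'(\d+)(min|[ymwdhs])', s):
        date += relativedelta(**{UNITS[unit]: -int(value)})
    return date.replace(microsecond=0)

print(parse_offset('1d'))       # roughly 24 hours ago, in UTC
print(parse_offset('2w10min'))  # two weeks and ten minutes ago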
Example #2
def parse_date(s):
    today = datetime.datetime.utcnow()
    offset = re.match(r'^(\d+)([ymwdhs]|min)$', s)

    if offset:
        units = {'y': 'years', 'm': 'months', 'w': 'weeks', 'd': 'days',
                 'h': 'hours', 'min': 'minutes', 's': 'seconds'}
        unit = units[offset.group(2)]
        value = -int(offset.group(1))
        kw = {unit: value}
        date = today + relativedelta(**kw)
    elif re.match(r'^\d\d\d\d$', s):
        date = parsetime(s) + relativedelta(yearday=1)
    elif re.match(r'^\d\d\d\d[-/]\d\d$', s):
        date = parsetime(s) + relativedelta(day=1)
    elif re.match(r'^(\d\d)?\d\d[-/]\d\d[-/]\d\d$', s):
        date = parsetime(s)
    elif re.match(r'^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\d(\+\d\d:\d\d)?$', s):
        try:
            # try converting timezone if one is specified
            date = parsetime(s).astimezone(utc)
        except ValueError:
            # otherwise default to UTC if none is specified
            date = parsetime(s).replace(tzinfo=utc)
    else:
        msg = '"{}" is not a valid date argument'.format(s)
        raise argparse.ArgumentTypeError(msg)
    return (s, date.isoformat())
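Since this variant raises argparse.ArgumentTypeError, it is presumably wired up as an argparse `type=` callback. A minimal self-contained sketch of that pattern (simplified to a single date format; the option name is made up):

import argparse
from datetime import datetime

def iso_date(s):
    """argparse 'type' callback: reject bad values with ArgumentTypeError."""
    try:
        return datetime.strptime(s, '%Y-%m-%d')
    except ValueError:
        raise argparse.ArgumentTypeError(f'"{s}" is not a valid date argument')

parser = argparse.ArgumentParser()
parser.add_argument('--created', type=iso_date)
print(parser.parse_args(['--created', '2020-01-01']).created)  # 2020-01-01 00:00:00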
Example #3
    def parse(cls, data):
        l = []
        for i, c in enumerate(data, start=1):
            # don't count creation as a modification
            updated = parsetime(c['updated']) if c['updated'] != c['created'] else None

            l.append(cls(
                id=c['id'], count=i, creator=c['author']['name'],
                created=parsetime(c['created']), modified=updated,
                text=c['body'].strip()))
        return tuple(l)
Example #4
    def __init__(self, **kw):
        self.id = kw['id']
        self.desc = kw['description']
        self.owner, self.name = kw['path_with_namespace'].split('/', 1)
        self.created = parsetime(kw['created_at'])
        self.updated = parsetime(kw['last_activity_at'])
        self.git_repo = kw['http_url_to_repo']
        self.webbase = kw['web_url']
        self.tags = tuple(kw['tag_list'])
        self.stars = kw['star_count']
        self.forks = kw['forks_count']
Example #5
    def __init__(self, get_comments=False, get_attachments=False, get_changes=False, **kw):
        # TODO: add support for parsing changes
        self.changes = None
        self.attachments = None
        self.comments = None

        for k, v in kw.items():
            if k in ('assignee', 'reporter', 'creator', 'status', 'priority'):
                v = v.get('name') if v else None
            elif k in ('updated', 'created'):
                v = parsetime(v)
            elif k == 'votes':
                v = v.get('votes')
            elif k == 'watches':
                v = v.get('watchCount')
            elif k == 'attachment' and get_attachments:
                k = 'attachments'
                v = JiraAttachment.parse(v)
            elif k == 'comment' and get_comments:
                k = 'comments'
                v = JiraComment.parse(v['comments'])
            setattr(self, k, v)

        if get_comments:
            desc = self.description.strip() if self.description else None
            if desc:
                desc = JiraComment(
                    count=0, creator=self.creator, created=self.created, text=desc)
            self.description = desc
            if self.description:
                self.comments = (self.description,) + self.comments
Example #6
    def parse(self, data):
        """Parsing function for the raw HTML pages."""
        try:
            table = data.xpath('//table[@class="listing tickets"]')[0]
        except IndexError:
            # no issues exist
            return ()
        for row in table.xpath('./tbody/tr'):
            cols = row.xpath('./td')
            # no issues exist
            if len(cols) <= 1:
                break
            d = {}
            for c in cols:
                k = c.get('class')
                try:
                    a = c.xpath('./a')[0]
                    if k.endswith('time'):
                        v = parsetime(
                            parse_qs(urlparse(next(a.iterlinks())[2])[4])['from'][0])
                    else:
                        v = a.text
                except IndexError:
                    v = c.text.strip()
                # strip number symbol from IDs if it exists
                if k == 'id' and v[0] == '#':
                    v = v[1:]
                d[k] = v
            yield self.service.item(self.service, get_desc=False, **d)
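The parsetime call above digs a timestamp out of a link's query string via urlparse and parse_qs. A small standalone illustration of that step (the URL is made up; the dateutil import reflects the usual parsetime alias in these examples):

from urllib.parse import urlparse, parse_qs
from dateutil.parser import parse as parsetime  # assumed alias

url = 'https://trac.example.org/timeline?from=2021-03-01T12%3A30%3A00Z&precision=second'
query = urlparse(url)[4]                      # element 4 of the 6-tuple is the query string
print(parse_qs(query)['from'][0])             # 2021-03-01T12:30:00Z
print(parsetime(parse_qs(query)['from'][0]))  # 2021-03-01 12:30:00+00:00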
Example #7
def parseAccelPacket(line):
    t0 = 0
    delimiter = ','
    newline = '\r\n'
    accel_output_line = ''
    axis_sample = 0 # 3 axis samples equals one accel sample
    parsed_time = parsetime(line[0:24])
    if(t0 == 0):
        t0 = parsed_time
    time_delta = parsed_time - t0
    
    # Print timestamp and tick count
    #accel_output_line = str(time_delta) + delimiter     # Timestamp
    #accel_output_line += str(int(line[62:64], 16)) + delimiter        #Tickcount
    holder = []
    for i in range(26, 62, 3):
        # Print tab separated values, each line contains x, y, z sample
        # Need to manually implement 16 bit twos complement
        val = int(line[i:(i+3)], 16)<<4
        if val > 0x7FFF:
            val -= 0x10000
        accel_output_line += str(val)
        axis_sample += 1
        
        #print accel_output_line

        if axis_sample%3 == 0:
            #outputAccelData(accel_output_line)
            #accel_output_line = delimiter + delimiter
            holder.append(accel_output_line)
            accel_output_line = delimiter + delimiter
        else:
            accel_output_line += delimiter
    return holder
Example #8
def parseAltimPacket(line, type):
    delimiter = ','
    newline = '\r\n'
    t0 = 0
    altim_output_line = ''
    parsed_time = parsetime(line[0:24])
    if(t0 == 0):
        t0 = parsed_time
    time_delta = parsed_time - t0
    
    # Write timestamp and tickcount
    #altim_output_line = str(time_delta) + delimiter      #Time
    #altim_output_line += str((int(line[28:30], 16) <<8) + \
    #    int(line[26:28], 16)) + delimiter                   #Tickcount
    if type != 1:
        #print type
        pass
    # Now write data
    if(type == 1):
        # Interpret the altimeter data packet
        d1 = str(bigEndianUint32_t(line[32:40]))
        d2 = str(bigEndianUint32_t(line[40:48]))
        steps = str(bigEndianUint32_t(line[48:56]))

        # Format the data for output file
        altim_output_line += d1 + delimiter     #D1
        altim_output_line += d2 + delimiter     #D2
        altim_output_line += steps              #steps
        
        # Write data to output file
        #print altim_output_line
        #outputAltData(altim_output_line)
        return altim_output_line
    elif(type == 2):
        #global calibration_coefficients
        # Calibration from altimeter, only report it if the CRC is correct
        if int(line[62:64], 16) == 0x11:
            for i in range(32, 60, 4):
                # Print tab separated values, each line contains 7 calibration values
                altim_output_line += str(int(line[i:(i+2)], 16) + (int(line[(i+2):(i+4)], 16)<<8)) + delimiter
            altim_output_line += hex(int(line[62:64], 16))  # CRC
            #outputAltCalData(altim_output_line)
            return altim_output_line
    elif(type == 3):
        return None
        # Calibration from device config, only report it if the CRC is correct
        if int(line[62:64], 16) == 0x11:
            for i in range(32, 60, 4):
                # Print tab separated values, each line contains 7 calibration values
                altim_output_line += str(int(line[i:(i+2)], 16) + (int(line[(i+2):(i+4)], 16)<<8)) + delimiter
            altim_output_line += hex(int(line[62:64], 16))  # CRC
            #outputAltCalData(altim_output_line)
            #print altim_output_line
            return altim_output_line
    else:
        # Unrecognized packet type
        pass
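Neither excerpt defines bigEndianUint32_t. A plausible reading is that it interprets eight hex characters in the order given, i.e. big-endian; here is a sketch of that helper plus the 16-bit two's-complement step done inline in parseAccelPacket (both are assumptions, not the original code):

def bigEndianUint32_t(hexstr):
    """Interpret 8 hex characters as an unsigned 32-bit big-endian value (assumed behavior)."""
    return int(hexstr, 16) & 0xFFFFFFFF

def twos_complement_16(val):
    """Fold an unsigned 16-bit value into a signed one, as parseAccelPacket does inline."""
    return val - 0x10000 if val > 0x7FFF else val

print(bigEndianUint32_t('0001E240'))   # 123456
print(twos_complement_16(0xFFF0))      # -16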
Example #9
    def changes(self, ids, dry_run, creation_time, change_num, fields, output, creator, match, **kw):
        request = self.service.history(ids)

        self.log('Getting changes matching the following options:')
        self.log(request.options)

        if creator is not None and self.service.suffix is not None:
            creator = list(map(self.service._resuffix, creator))

        if dry_run: return
        history = request.send()

        for i in ids:
            changes = next(history)

            if creator is not None:
                changes = (x for x in changes if x.creator in creator)
            if creation_time is not None:
                changes = (x for x in changes if x.date >= parsetime(creation_time[1]).replace(tzinfo=utc))
            if match is not None:
                changes = (event for event in changes
                           for change in event.changes
                           if self._match_change(change=change, fields=match))
            if change_num is not None:
                if len(change_num) == 1 and change_num[0] < 0:
                    changes = list(changes)[change_num[0]:]
                else:
                    changes = (x for x in changes if x.count in change_num)

            if fields and output is None:
                output = ' '.join(['{}' for x in fields])

            if output == '-':
                for change in changes:
                    for field in fields:
                        try:
                            value = getattr(change, field)
                        except AttributeError:
                            raise CliError('"{}" is not a valid bug field'.format(field))
                        if value is None:
                            continue
                        if isinstance(value, list):
                            print('\n'.join(map(str, value)))
                        else:
                            print(value)
            elif fields and output:
                for change in changes:
                    try:
                        values = [getattr(change, field) for field in fields]
                    except AttributeError:
                        raise CliError('"{}" is not a valid bug field'.format(field))
                    self._print_lines(output.format(*values))
            else:
                changes = list(str(x) for x in changes)
                if changes:
                    print(self._header('=', 'Bug: {}'.format(str(i))))
                    self._print_lines(changes)
Example #10
    def parse(cls, data):
        l = []
        for a in data:
            l.append(cls(
                id=a['id'], creator=a['author']['name'],
                created=parsetime(a['created']), size=a['size'],
                filename=a['filename'], mimetype=a['mimeType'],
                url=a['content']))
        return tuple(l)
Example #11
    def _get_datetime(self, section, parameter):
        """
        Read and parse time information as datetime.datetime
        Arguments:
            section: settings file section
            parameter: settings file parameter name
        Returns:
            datetime.datetime instance
        """
        return parsetime(self.read_parameter(section, parameter))
Example #12
    def process(self):
        report = self.receive_message()

        # E-mail contents ...
        raw = utils.base64_decode(report.value("raw"))

        xmlstr = re.search(r"<\?xml .*</Infringement>", raw, re.DOTALL)

        # FIXME: do we need this?
        if xmlstr is None:
            self.logger.error("Failed to extract XML part")
            self.acknowledge_message()
            return

        # XXX: clean up the Infringement tag
        xmlsan = re.sub('<Infringement.*>', '<Infringement>', xmlstr.group(0))

        try:
            root = ET.fromstring(xmlsan)
        except ParseError as e:
            self.logger.error("Parser error")
            self.acknowledge_message()
            return

        # Find and fill required harmonization fields
        ip = root.find(".//Source/IP_Address").text
        port = root.find(".//Source/Port").text
        proto = root.find(".//Source/Type").text

        tm = parsetime(root.find(".//Source/TimeStamp").text)
        timestamp = time.strftime("%Y-%m-%d %H:%M:%S UTC", tm.utctimetuple())

        event = Event(report) 
        event.add('time.source', timestamp, sanitize=True)
        event.add('source.ip', ip, sanitize=True)
        event.add('source.port', port, sanitize=True)
        event.add('classification.identifier', proto, sanitize=True)
        event.add('classification.type', 'piracy')

        items = {}
        content = root.find(".//Content")

        for n, item in enumerate(content):
            items[n] = { e.tag: e.text for e in item }

        event.add('extra', items)

        self.send_message(event)
        self.acknowledge_message()
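The items loop above flattens each child of <Content> into a dict keyed by position. A standalone illustration with a made-up ACNS-style fragment:

import xml.etree.ElementTree as ET

xml_fragment = """
<Infringement>
  <Content>
    <Item><FileName>movie.mkv</FileName><FileSize>733</FileSize></Item>
    <Item><FileName>song.mp3</FileName><FileSize>5</FileSize></Item>
  </Content>
</Infringement>
"""

root = ET.fromstring(xml_fragment)
content = root.find(".//Content")
items = {n: {e.tag: e.text for e in item} for n, item in enumerate(content)}
print(items)
# {0: {'FileName': 'movie.mkv', 'FileSize': '733'}, 1: {'FileName': 'song.mp3', 'FileSize': '5'}}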
Example #13
def asdatetime(dt):
    """
    makes sure the input is a datetime.datetime object

    if it already is, it will be passed through.

    If not, it will attempt to parse a string to make a datetime object.

    None will also be passed through silently.
    """
    if dt is None:
        return dt
    if not isinstance(dt, datetime):
        # assume it's an ISO string, or something that dateutil can parse.
        return parsetime(dt, ignoretz=True)
    else:
        return dt
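asdatetime passes ignoretz=True, which drops any timezone information from the parsed string. Assuming parsetime is dateutil.parser.parse (consistent with the keyword it accepts), a quick illustration:

from dateutil.parser import parse as parsetime  # assumed alias

print(parsetime('2021-03-01T12:30:00+02:00', ignoretz=True))  # 2021-03-01 12:30:00
print(parsetime('2021-03-01 12:30', ignoretz=True))           # 2021-03-01 12:30:00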
Example #14
    def __init__(self, comments=None, attachments=None, **kw):
        # TODO: map out which attrs to save instead of saving all
        for k, v in kw.items():
            if k == 'id':
                continue
            elif k == 'number':
                k = 'id'
            elif k == 'user':
                v = v['login']
            elif k in ('created_at', 'updated_at', 'closed_at') and v:
                v = parsetime(v)
            elif k == 'assignee' and v:
                v = v['login']
            setattr(self, k, v)

        self.attachments = attachments if attachments is not None else ()
        self.comments = comments if comments is not None else ()
Example #16
    def __init__(self, repo=None, comments=None, attachments=None, **kw):
        for k, v in kw.items():
            # Prefix project ID to issue iid depending on the connection type.
            # The 'id' field unique across all issues is essentially useless
            # for us since most API calls only use project IDs and iids.
            # https://docs.gitlab.com/ee/api/README.html#id-vs-iid
            if k == 'id':
                continue
            elif k == 'iid':
                k = 'id'
                if repo is None:
                    v = f"{kw['project_id']}-{v}"
            elif k in ('created_at', 'updated_at', 'closed_at') and v:
                v = parsetime(v)
            elif k in ('author', 'assignee') and v:
                v = v['username']
            setattr(self, k, v)

        self.attachments = attachments if attachments is not None else ()
        self.comments = comments if comments is not None else ()
Example #17
    def _keys(self):
        with self._service.web_session() as session:
            # get the apikeys page
            r = session.get(f'{self._userprefs_url}?tab=apikey')
            self._doc = lxml.html.fromstring(r.text)
            # verify API keys table still has the same id
            table = self._doc.xpath('//table[@id="email_prefs"]')
            if not table:
                raise RequestError('failed to extract API keys table')

            # extract API key info from table
            apikeys = self._doc.xpath('//table[@id="email_prefs"]/tr/td[1]/text()')
            descriptions = self._doc.xpath('//table[@id="email_prefs"]/tr/td[2]/input/@value')
            last_used = self._doc.xpath('//table[@id="email_prefs"]/tr/td[3]//text()')
            revoked = self._doc.xpath('//table[@id="email_prefs"]/tr/td[4]/input')
            revoked = [bool(getattr(x, 'checked', False)) for x in revoked]

            existing_keys = []
            for desc, key, used, revoked in zip(descriptions, apikeys, last_used, revoked):
                if used != 'never used':
                    used = parsetime(used)
                existing_keys.append(self._ApiKey(key, desc, used, revoked))

        return existing_keys
Example #18
    def __new__(cls, timestr):
        return parsetime(timestr)
Example #19
    def parse_search(self, **kw):
        params = {}
        options_log = []
        for k, v in ((k, v) for (k, v) in kw.items() if v):
            if k in self.service.attributes:
                if k == 'creation_time' or k == 'last_change_time':
                    params[k] = v[1]
                    options_log.append('  {}: {} (since {} UTC)'.format(self.service.attributes[k], v[0], parsetime(v[1])))
                elif k == 'status':
                    params[k] = []
                    for status in v:
                        if status.lower() == 'all':
                            params[k].extend(['UNCONFIRMED', 'NEW', 'CONFIRMED', 'ASSIGNED', 'IN_PROGRESS', 'REOPENED', 'RESOLVED', 'VERIFIED'])
                        elif status.lower() == 'open':
                            params[k].extend(['UNCONFIRMED', 'NEW', 'CONFIRMED', 'ASSIGNED', 'IN_PROGRESS', 'REOPENED'])
                        elif status.lower() == 'closed':
                            params[k].extend(['RESOLVED', 'VERIFIED'])
                        else:
                            params[k].append(status)
                    options_log.append('  {}: {}'.format(self.service.attributes[k], ', '.join(params[k])))
                else:
                    params[k] = v
                    options_log.append('  {}: {}'.format(self.service.attributes[k], ', '.join(v)))
            elif k == 'terms':
                params['summary'] = v
                options_log.append('  {}: {}'.format('Summary', ', '.join(v)))
            elif k == 'order':
                if v == 'id':
                    params['order'] = 'Bug Number'
                elif v == 'importance':
                    params['order'] = 'Importance'
                elif v == 'assignee':
                    params['order'] = 'Assignee'
                elif v == 'modified':
                    params['order'] = 'Last Changed'
                options_log.append('  {}: {}'.format('Sort by', v))

        if not params:
            raise RuntimeError('Please specify search terms or options')

        if 'status' not in params:
            params['status'] = ['UNCONFIRMED', 'NEW', 'CONFIRMED', 'ASSIGNED', 'IN_PROGRESS', 'REOPENED']
            options_log.append('  {}: {}'.format('Status', ', '.join(params['status'])))

        if kw['fields'] is None:
            fields = ['id', 'assigned_to', 'summary']
        else:
            fields = kw['fields']

        params['include_fields'] = [','.join(fields)]
        return (options_log, fields, params)
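The status handling above expands the aliases 'all', 'open', and 'closed' inline; a table-driven sketch of the same mapping (not the project's code) that is a little easier to extend:

OPEN = ['UNCONFIRMED', 'NEW', 'CONFIRMED', 'ASSIGNED', 'IN_PROGRESS', 'REOPENED']
CLOSED = ['RESOLVED', 'VERIFIED']
STATUS_ALIASES = {'open': OPEN, 'closed': CLOSED, 'all': OPEN + CLOSED}

def expand_statuses(values):
    """Expand status aliases, passing explicit statuses through untouched."""
    expanded = []
    for status in values:
        expanded.extend(STATUS_ALIASES.get(status.lower(), [status]))
    return expanded

print(expand_statuses(['open']))              # the six open states
print(expand_statuses(['closed', 'CUSTOM']))  # ['RESOLVED', 'VERIFIED', 'CUSTOM']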