def parsetime(text):
    '''
    Parse an interval time string and return a (min,max) tuple.

    Args:
        text (str): A time interval string such as "2017-2018".

    Returns:
        ((int, int)): Min and max times in epoch millis.
    '''
    lowstr, highstr = text.split('-', 1)
    # the max side is parsed relative to the min side so partial
    # strings (e.g. "2017-2018") resolve against the same base
    lowtick = s_time.parse(lowstr)
    hightick = s_time.parse(highstr, base=lowtick)
    return lowtick, hightick
def _normPyStr(self, valu):
    '''
    Normalize a time string into an epoch millis value.

    Args:
        valu (str): A time string: 'now', '?', a self-contained relative
            time expression (e.g. '-1 day'), or a parseable time string.

    Returns:
        (int, dict): The normalized value and an info dict.
    '''
    valu = valu.strip().lower()
    if valu == 'now':
        return self._normPyInt(s_common.now())

    # an unspecified time in the future...
    if valu == '?':
        return self.futsize, {}

    # self contained relative time string
    # we need to be pretty sure this is meant for us, otherwise it might
    # just be a slightly messy time parse
    unitcheck = [u for u in s_time.timeunits.keys() if u in valu]
    # FIX: parentheses are required here.  The original condition
    # `unitcheck and '-' in valu or '+' in valu` parsed as
    # `(unitcheck and '-' in valu) or ('+' in valu)`, so any value
    # containing '+' (such as a timezone offset) was treated as a
    # relative time even when no time unit was present.
    if unitcheck and ('-' in valu or '+' in valu):
        splitter = '+'
        if '-' in valu:
            splitter = '-'

        bgn, end = valu.split(splitter, 1)
        delt = s_time.delta(splitter + end)
        if bgn:
            # recursively normalize the base time portion
            bgn = self._normPyStr(bgn)[0]
        else:
            # no explicit base: the delta is relative to now
            bgn = s_common.now()

        return self._normPyInt(delt + bgn)

    valu = s_time.parse(valu)
    return self._normPyInt(valu)
async def test_model_filebytes_pe(self):
    '''
    Verify that PE metadata props on file:bytes and the related
    file:mime:pe:* forms are well formed (props set, path normalized,
    reprs resolved).
    '''
    # test to make sure pe metadata is well formed
    async with self.getTestCore() as core:
        async with await core.snap() as snap:
            exp_time = '201801010233'
            exp_time_parse = s_time.parse(exp_time)
            props = {
                'mime:pe:imphash': 'e' * 32,
                'mime:pe:pdbpath': r'c:\this\is\my\pdbstring',
                'mime:pe:exports:time': exp_time,
                'mime:pe:exports:libname': 'ohgood',
                'mime:pe:richhdr': 'f' * 64,
            }
            fnode = await snap.addNode('file:bytes', 'a' * 64, props=props)

            # pe props
            self.eq(fnode.get('mime:pe:imphash'), 'e' * 32)
            # backslashes are normalized to forward slashes by the type
            self.eq(fnode.get('mime:pe:pdbpath'), r'c:/this/is/my/pdbstring')
            self.eq(fnode.get('mime:pe:exports:time'), exp_time_parse)
            self.eq(fnode.get('mime:pe:exports:libname'), 'ohgood')
            self.eq(fnode.get('mime:pe:richhdr'), 'f' * 64)

            # pe resource
            rbnode = await snap.addNode('file:bytes', 'd' * 64)
            rnode = await snap.addNode(
                'file:mime:pe:resource', (fnode.ndef[1], 2, 0x409, rbnode.ndef[1]))

            self.eq(rnode.get('langid'), 0x409)
            self.eq(rnode.get('type'), 2)
            # reprs map the numeric langid/type to human readable names
            self.eq(rnode.repr('langid'), 'en-US')
            self.eq(rnode.repr('type'), 'RT_BITMAP')

            # pe section
            s1node = await snap.addNode('file:mime:pe:section', (fnode.ndef[1], 'foo', 'b' * 64))
            self.eq(s1node.get('name'), 'foo')
            self.eq(s1node.get('sha256'), 'b' * 64)

            # pe export
            enode = await snap.addNode('file:mime:pe:export', (fnode.ndef[1], 'myexport'))
            self.eq(enode.get('file'), fnode.ndef[1])
            self.eq(enode.get('name'), 'myexport')

            # vsversion
            vskvnode = await snap.addNode('file:mime:pe:vsvers:keyval', ('foo', 'bar'))
            self.eq(vskvnode.get('name'), 'foo')
            self.eq(vskvnode.get('value'), 'bar')

            vsnode = await snap.addNode('file:mime:pe:vsvers:info', (fnode.ndef[1], vskvnode.ndef[1]))
            self.eq(vsnode.get('file'), fnode.ndef[1])
            self.eq(vsnode.get('keyval'), vskvnode.ndef[1])
def test_time_parse(self):
    '''
    Verify s_time.parse() at each input precision from year down to millis.
    '''
    cases = (
        ('2050', 2524608000000),
        ('205012', 2553465600000),
        ('20501217', 2554848000000),
        ('2050121703', 2554858800000),
        ('205012170304', 2554859040000),
        ('20501217030432', 2554859072000),
        ('20501217030432101', 2554859072101),
    )
    for text, expected in cases:
        self.eq(s_time.parse(text), expected)
def test_time_parse(self):
    '''
    Verify s_time.parse() at increasing input precision, year through millis.
    '''
    for timestr, millis in (
            ('2050', 2524608000000),
            ('205012', 2553465600000),
            ('20501217', 2554848000000),
            ('2050121703', 2554858800000),
            ('205012170304', 2554859040000),
            ('20501217030432', 2554859072000),
            ('20501217030432101', 2554859072101)):
        self.eq(s_time.parse(timestr), millis)
def parse_macro_filt(text, off=0, trim=True, mode='must'): _, off = nom(text, off, whites) # special + #tag (without prop) based filter syntax if nextchar(text, off, '#'): _, off = nom(text, off, whites) prop, off = nom(text, off, tagfilt, trim=True) parts = prop.split('@', 1) if len(parts) == 1: inst = ('filt', {'cmp': 'tag', 'mode': mode, 'valu': prop[1:]}) return inst, off prop, istr = parts if istr.find('-') == -1: tick = s_time.parse(istr) inst = ('filt', { 'cmp': 'ival', 'mode': mode, 'valu': (prop, tick) }) return inst, off ival = s_interval.parsetime(istr) inst = ('filt', { 'cmp': 'ivalival', 'mode': mode, 'valu': (prop, ival) }) return inst, off # check for non-macro syntax name, xoff = nom(text, off, varset) _, xoff = nom(text, xoff, whites) if nextchar(text, xoff, '('): inst, off = parse_oper(text, off) opfo = {'cmp': inst[0], 'mode': mode} opfo['args'] = inst[1].get('args', ()) opfo['kwlist'] = inst[1].get('kwlist', ()) return ('filt', opfo), off ques, off = parse_ques(text, off, trim=trim) ques['mode'] = mode return ('filt', ques), off
async def test_model_filebytes_pe(self):
    '''
    Check that PE metadata props on file:bytes and the associated
    file:mime:pe:* secondary forms normalize and repr correctly.
    '''
    # test to make sure pe metadata is well formed
    async with self.getTestCore() as core:
        async with await core.snap() as snap:
            exp_time = '201801010233'
            exp_time_parse = s_time.parse(exp_time)
            props = {
                'mime:pe:imphash': 'e' * 32,
                'mime:pe:pdbpath': r'c:\this\is\my\pdbstring',
                'mime:pe:exports:time': exp_time,
                'mime:pe:exports:libname': 'ohgood',
                'mime:pe:richhdr': 'f' * 64,
            }
            fnode = await snap.addNode('file:bytes', 'a' * 64, props=props)

            # pe props
            self.eq(fnode.get('mime:pe:imphash'), 'e' * 32)
            # pdbpath backslashes are normalized to forward slashes
            self.eq(fnode.get('mime:pe:pdbpath'), r'c:/this/is/my/pdbstring')
            self.eq(fnode.get('mime:pe:exports:time'), exp_time_parse)
            self.eq(fnode.get('mime:pe:exports:libname'), 'ohgood')
            self.eq(fnode.get('mime:pe:richhdr'), 'f' * 64)

            # pe resource
            rbnode = await snap.addNode('file:bytes', 'd' * 64)
            rnode = await snap.addNode('file:mime:pe:resource', (fnode.ndef[1], 2, 0x409, rbnode.ndef[1]))
            self.eq(rnode.get('langid'), 0x409)
            self.eq(rnode.get('type'), 2)
            # reprs resolve the numeric langid/type to readable names
            self.eq(rnode.repr('langid'), 'en-US')
            self.eq(rnode.repr('type'), 'RT_BITMAP')

            # pe section
            s1node = await snap.addNode('file:mime:pe:section', (fnode.ndef[1], 'foo', 'b' * 64))
            self.eq(s1node.get('name'), 'foo')
            self.eq(s1node.get('sha256'), 'b' * 64)

            # pe export
            enode = await snap.addNode('file:mime:pe:export', (fnode.ndef[1], 'myexport'))
            self.eq(enode.get('file'), fnode.ndef[1])
            self.eq(enode.get('name'), 'myexport')

            # vsversion
            vskvnode = await snap.addNode('file:mime:pe:vsvers:keyval', ('foo', 'bar'))
            self.eq(vskvnode.get('name'), 'foo')
            self.eq(vskvnode.get('value'), 'bar')

            vsnode = await snap.addNode('file:mime:pe:vsvers:info', (fnode.ndef[1], vskvnode.ndef[1]))
            self.eq(vsnode.get('file'), fnode.ndef[1])
            self.eq(vsnode.get('keyval'), vskvnode.ndef[1])
async def test_layer_nodeedits_created(self):
    '''
    Verify how the .created universal prop is populated when node edits
    are stored directly into a layer with and without meta time info.
    '''
    async with self.getTestCore() as core:
        nodes = await core.nodes('[ test:int=1 :loc=us ]')
        created00 = nodes[0].get('.created')

        layr = core.getLayer()
        editlist00 = [nes async for nes in layr.iterLayerNodeEdits()]

        await core.nodes('test:int=1 | delnode')
        self.len(0, await core.nodes('test:int'))

        # Simulate a nexus edit list (no .created)
        nexslist00 = [(ne[0], ne[1], [e for e in ne[2] if e[1][0] != '.created']) for ne in editlist00]

        # meta used for .created
        # sleeps ensure "now" has advanced past the captured timestamps
        await asyncio.sleep(0.01)
        await layr.storNodeEdits(nexslist00, {'time': created00})

        nodes = await core.nodes('test:int')
        self.len(1, nodes)
        self.eq(created00, nodes[0].get('.created'))

        await core.nodes('test:int=1 | delnode')
        self.len(0, await core.nodes('test:int'))

        # If meta is not specified .created gets populated to now
        await asyncio.sleep(0.01)
        await layr.storNodeEdits(nexslist00, None)

        nodes = await core.nodes('test:int')
        self.len(1, nodes)
        created01 = nodes[0].get('.created')
        self.gt(created01, created00)

        # edits with the same node has the same .created
        await asyncio.sleep(0.01)
        nodes = await core.nodes('[ test:int=1 ]')
        self.eq(created01, nodes[0].get('.created'))

        nodes = await core.nodes('[ test:int=1 :loc=us +#foo]')
        self.eq(created01, nodes[0].get('.created'))

        await core.nodes('test:int=1 | delnode')
        self.len(0, await core.nodes('test:int'))

        # Tests for behavior of storing nodeedits directly prior to
        # using meta (i.e. meta['time'] != .created)

        # .created is a MINTIME therefore earlier value wins, which is typically meta
        created02 = s_time.parse('1990-10-10 12:30')
        await layr.storNodeEdits(editlist00, {'time': created02})

        nodes = await core.nodes('test:int')
        self.len(1, nodes)
        self.eq(created02, nodes[0].get('.created'))

        await core.nodes('test:int=1 | delnode')
        self.len(0, await core.nodes('test:int'))

        # meta could be after .created for manual store operations
        created03 = s_time.parse('2050-10-10 12:30')
        await layr.storNodeEdits(editlist00, {'time': created03})

        nodes = await core.nodes('test:int')
        self.len(1, nodes)
        # the edit list's embedded .created (created00) is earlier than
        # meta (created03), so MINTIME keeps created00
        self.eq(created00, nodes[0].get('.created'))
async def runCmdOpts(self, opts):
    '''
    Parse a cron "at" style command line into one-shot time requirements
    and create the cron job on the remote cortex.

    Args:
        opts (dict): Cmdr options dict; 'line' holds the raw command line.
    '''
    line = opts.get('line')
    if line is None:
        self.printf(self.__doc__)
        return
    core = self.getCmdItem()
    parseinfo = await s_syntax.getRemoteParseInfo(core)
    argv = s_syntax.Parser(parseinfo, line).stormcmd()
    # Currently, using an argparser is overkill for this command.  Using for future extensibility (and help).
    try:
        opts = self._make_argparser().parse_args(argv)
    except s_exc.ParserExit:
        return
    query = None
    # set when a '+N' delta consumes the following arg as its unit
    consumed_next = False
    tslist = []
    # TODO: retrieve time from cortex in case of wrong cmdr time
    now = time.time()
    for pos, arg in enumerate(opts.args):
        try:
            if consumed_next:
                consumed_next = False
                continue
            if arg.startswith('{'):
                if query is not None:
                    self.printf('Error: only a single query is allowed')
                    return
                # NOTE(review): assumes the arg also ends with '}' -- confirm
                query = arg[1:-1]
                continue
            if arg.startswith('+'):
                # bare '+N': the unit is expected as the next argument
                if arg[-1].isdigit():
                    if pos == len(opts.args) - 1:
                        self.printf('Time delta missing unit')
                        return
                    arg = f'{arg} {opts.args[pos + 1]}'
                    consumed_next = True
                # delta is millis; convert to float epoch seconds
                ts = now + s_time.delta(arg) / 1000.0
                tslist.append(ts)
                continue
            ts = s_time.parse(arg) / 1000.0
            tslist.append(ts)
        except (ValueError, s_exc.BadTypeValu):
            self.printf(f'Error: Trouble parsing "{arg}"')
            return
    if query is None:
        self.printf('Error: Missing query argument')
        return

    def _ts_to_reqdict(ts):
        # convert an epoch-seconds float into a cron requirement dict
        dt = datetime.datetime.fromtimestamp(ts, datetime.timezone.utc)
        return {
            'minute': dt.minute,
            'hour': dt.hour,
            'dayofmonth': dt.day,
            'month': dt.month,
            'year': dt.year
        }

    if not tslist:
        self.printf('Error: at least one requirement must be provided')
        return
    reqdicts = [_ts_to_reqdict(ts) for ts in tslist]
    iden = await core.addCronJob(query, reqdicts, None, None)
    self.printf(f'Created cron job {s_common.ehex(iden)}')
def _norm_str(self, text, oldval=None):
    '''
    Normalize a time string into an epoch millis integer.

    Args:
        text (str): The time string to parse.
        oldval: Unused; kept for norm callback signature compatibility.

    Returns:
        (int, dict): The epoch millis value and an empty subprops dict.
    '''
    valu = s_time.parse(text)
    return valu, {}
def _norm_str(self, text, oldval=None):
    '''
    Normalize a time string into epoch millis, honoring the special
    value 'now' (case/whitespace insensitive).

    Args:
        text (str): The time string to parse.
        oldval: Unused; kept for norm callback signature compatibility.

    Returns:
        (int, dict): The epoch millis value and an empty subprops dict.
    '''
    cleaned = text.strip().lower()
    if cleaned == 'now':
        return s_common.now(), {}

    # note: the original (unstripped) text is handed to the parser
    valu = s_time.parse(text)
    return valu, {}
def parse_time(text, off):
    '''
    Consume a time literal from text at off.

    Args:
        text (str): The text being parsed.
        off (int): Offset at which the time literal begins.

    Returns:
        (int, int): The parsed epoch millis value and the new offset.
    '''
    timestr, newoff = nom(text, off, timeset)
    return s_time.parse(timestr), newoff
async def runCmdOpts(self, opts):
    '''
    Parse a cron "at" style command line into one-shot time requirements
    and create the cron job on the cortex.

    Args:
        opts (dict): Cmdr options dict; 'line' holds the raw command line.
    '''
    line = opts.get('line')
    if line is None:
        self.printf(self.__doc__)
        return
    core = self.getCmdItem()
    argv = s_syntax.Parser(line).stormcmd()
    # Currently, using an argparser is overkill for this command.  Using for future extensibility (and help).
    try:
        opts = self._make_argparser().parse_args(argv)
    except s_exc.ParserExit:
        return
    query = None
    # set when a '+N' delta consumes the following arg as its unit
    consumed_next = False
    tslist = []
    # TODO: retrieve time from cortex in case of wrong cmdr time
    now = time.time()
    for pos, arg in enumerate(opts.args):
        try:
            if consumed_next:
                consumed_next = False
                continue
            if arg.startswith('{'):
                if query is not None:
                    self.printf('Error: only a single query is allowed')
                    return
                # NOTE(review): assumes the arg also ends with '}' -- confirm
                query = arg[1:-1]
                continue
            if arg.startswith('+'):
                # bare '+N': the unit is expected as the next argument
                if arg[-1].isdigit():
                    if pos == len(opts.args) - 1:
                        self.printf('Time delta missing unit')
                        return
                    arg = f'{arg} {opts.args[pos + 1]}'
                    consumed_next = True
                # delta is millis; convert to float epoch seconds
                ts = now + s_time.delta(arg) / 1000.0
                tslist.append(ts)
                continue
            ts = s_time.parse(arg) / 1000.0
            tslist.append(ts)
        except (ValueError, s_exc.BadTypeValu):
            self.printf(f'Error: Trouble parsing "{arg}"')
            return
    if query is None:
        self.printf('Error: Missing query argument')
        return

    def _ts_to_reqdict(ts):
        # convert an epoch-seconds float into a cron requirement dict
        dt = datetime.datetime.fromtimestamp(ts, datetime.timezone.utc)
        return {
            'minute': dt.minute,
            'hour': dt.hour,
            'dayofmonth': dt.day,
            'month': dt.month,
            'year': dt.year
        }

    if not tslist:
        self.printf('Error: at least one requirement must be provided')
        return
    reqdicts = [_ts_to_reqdict(ts) for ts in tslist]
    iden = await core.addCronJob(query, reqdicts, None, None)
    self.printf(f'Created cron job {iden}')
def test_time_parse_tz(self):
    '''
    Verify timezone offset handling in s_time.parse().
    '''
    # explicit iso8601
    iso_cases = (
        ('2020-07-07T16:29:53Z', 1594139393000),
        ('2020-07-07T16:29:53.234Z', 1594139393234),
        ('2020-07-07T16:29:53.234567Z', 1594139393234),
        ('2020-07-07T16:29:53+00:00', 1594139393000),
        ('2020-07-07T16:29:53-04:00', 1594153793000),
        ('2020-07-07T16:29:53-04:30', 1594155593000),
        ('2020-07-07T16:29:53+02:00', 1594132193000),
        ('2020-07-07T16:29:53.234+02:00', 1594132193234),
        ('2020-07-07T16:29:53.234567+02:00', 1594132193234),
        ('2020-07-07T16:29:53.234567+10:00', 1594103393234),
    )
    for timestr, millis in iso_cases:
        self.eq(s_time.parse(timestr), millis)

    # shorthand
    utc = s_time.parse('2020-07')
    self.eq(s_time.parse('2020-07-04:00'), utc + 4 * s_time.onehour)

    utc = s_time.parse('2020-07-07')
    self.eq(s_time.parse('2020-07-07 +4:00'), utc - 4 * s_time.onehour)
    self.eq(s_time.parse('2020-07-07 +04:00'), utc - 4 * s_time.onehour)

    utc = s_time.parse('2020-07-07 16:29')
    self.eq(s_time.parse('2020-07-07 16:29-06:00'), utc + 6 * s_time.onehour)

    self.eq(s_time.parse('20200707162953+00:00'), 1594139393000)
    self.eq(s_time.parse('20200707162953-04:00'), 1594153793000)

    # A malformed timezone can still be parsed incorrectly
    self.eq(s_time.parse('202007+04'), s_time.parse('20200704'))
    self.eq(s_time.parse('20200707162953+04'), 1594139393040)
    self.eq(s_time.parse('20200707162953+423'), 1594139393423)

    # invalid
    self.raises(s_exc.BadTypeValu, s_time.parse, '2020-07-07T16:29:53+36:00')
def test_time_parse(self):
    '''
    Verify s_time.parse() precision handling, truncation of extra digits,
    overlong input rejection, and lenient parsing of messy strings.
    '''
    cases = (
        ('2050', 2524608000000),
        ('205012', 2553465600000),
        ('20501217', 2554848000000),
        ('2050121703', 2554858800000),
        ('205012170304', 2554859040000),
        ('20501217030432', 2554859072000),
        ('20501217030432101', 2554859072101),
        # digits beyond millis precision are dropped
        ('205012170304321015', 2554859072101),
        ('20501217030432101567', 2554859072101),
    )
    for timestr, millis in cases:
        self.eq(s_time.parse(timestr), millis)

    self.raises(s_exc.BadTypeValu, s_time.parse, '2050121703043210156789')

    # malformed times that can still be parsed
    self.eq(s_time.parse('2020 jun 10 12:14:34'), s_time.parse('2020-10-12 14:34'))