def test_3(self):
    """Merging an older timestamp into a newer one must not regress the value."""
    older = Timestamp({"id": 0})
    newer = Timestamp({"id": 1})
    newer.merge(older)
    self.assertNotEqual(newer.replicas["id"], 0)
def test_1(self):
    """compare() yields the replica ids on which the two timestamps differ."""
    a = Timestamp({"id": 0})
    b = Timestamp({"id": 1})
    differing = list(a.compare(b))
    self.assertEqual(differing[0], "id")
def range(self, start=None, end=None):
    """Query the server's /range endpoint.

    With an end (or just a start) the JSON response is parsed into a list
    of Moment objects; with neither, the raw response body is returned.
    """
    if end:
        first = Timestamp(start)
        last = Timestamp(end)
        url = '%s/range/%s/%s' % (self.source, first.compact(), last.compact())
        parse_entries = True
    elif start:
        first = Timestamp(start)
        url = '%s/range/%s' % (self.source, first.compact())
        parse_entries = True
    else:
        url = '%s/range' % (self.source)
        parse_entries = False

    req = urllib2.urlopen(url)
    response = req.read()
    req.close()

    if not parse_entries:
        return response

    result = json.loads(response)
    moments = []
    for entry in result['entries']:
        moments.append(Moment(data=entry['data'], tags=entry['tags'],
                              created=entry['created'], path=entry['path']))
    return moments
def test_2(self):
    """Merging pulls a replica entry unknown to an empty timestamp."""
    empty = Timestamp()
    other = Timestamp({"id": 1})
    empty.merge(other)
    self.assertEqual(empty.replicas["id"], 1)
def validate(data, tags, created):
    """Split the tag string and normalise `created` into a Timestamp.

    A `created` value containing a '.' is treated as a full timestamp
    string; anything else is assumed to be a compact stamp.
    """
    tag_list = tags.split(' ')
    if re.search(r'\.', created):
        stamp = Timestamp(created)
    else:
        stamp = Timestamp(compact=created)
    return [data, tag_list, stamp]
def test_parse(self):
    """Timestamps built from ms ints and ms strings are equivalent and round-trip."""
    from_int = Timestamp(ms_int=10)
    self.assertEqual(from_int.ms_int(), 10)
    self.assertEqual(str(from_int), "10")
    from_str = Timestamp(ms_str="10")
    self.assertEqual(from_str.ms_int(), 10)
    self.assertEqual(str(from_str), "10")
    again_int = Timestamp(ms_int=10)
    self.assertEqual(from_str.ms_int(), again_int.ms_int())
    round_trip = Timestamp(ms_str=str(from_str))
    self.assertEqual(from_str.ms_int(), round_trip.ms_int())
def range(self, start=None, end=None):
    """Return entries of the loaded journal within a time range.

    With neither start nor end: return a Timerange spanning the
    journal's earliest and latest date keys.
    With only start: expand it to the range implied by its accuracy
    (e.g. one day) and return matching entries.
    With both: return all entries whose stamp falls in [start, end].

    start/end may be strings, datetime objects, or Timestamp objects.
    """
    if start is None and end is None:
        # Python 2: .keys() returns a list we can sort in place.
        dates = self._dates.keys()
        dates.sort()
        start = dates[0]
        end = dates[-1]
        # Entries with no timestamp sort first as a None key; skip it.
        if start is None:
            start = dates[1]
        print start, end
        return Timerange(start=start, end=end)
    else:
        # Normalise whatever was passed in to Timestamp objects.
        start = Timestamp(start)
        if end:
            end = Timestamp(end)
        else:
            # No explicit end: use the natural end of start's accuracy span.
            relative = Timerange(start)
            end = relative.end
        times = self._dates.keys()
        times.sort()
        matches = []
        for t in times:
            # NOTE(review): comparison uses .datetime rather than the raw
            # key; a None key (no timestamp) can't be ranged, so skip it.
            if t:
                pytime = Timestamp(t).datetime
                if (pytime >= start.datetime) and (pytime <= end.datetime):
                    matches.extend(self._dates[t])
        return matches
def test_comparison(self):
    """A timestamp equals itself; one created later compares strictly greater."""
    first = Timestamp()
    self.assertTrue(first == first)
    self.assertTrue(not first != first)
    self.assertTrue(not first > first)
    self.assertTrue(not first < first)
    self.assertTrue(first <= first)
    self.assertTrue(first >= first)
    # Constructed later, so it must order after `first`.
    second = Timestamp()
    self.assertTrue(not first == second)
    self.assertTrue(first != second)
    self.assertTrue(first < second)
    self.assertTrue(second > first)
    self.assertTrue(first <= second)
    self.assertTrue(second >= first)
def testSelfDecrement(self):
    """In-place tick subtraction lands back on ts1's local tick count."""
    shifted = self.ts1.getLocalTicks() + 30000
    self.ts2 = Timestamp(shifted)
    self.ts2 -= 30000
    assertEquals('Ticks', self.ts2.getLocalTicks(), self.ts1.getLocalTicks())
def __init__(self, kind=None, nick_full=None, chan=None, msg=None, time=None, serialized_str=None):
    """Build a Message from fields, or from a serialized string if given.

    When serialized_str is provided the other arguments are ignored and
    state is restored via deserialize(); otherwise kind must be 'wa' or
    'irc', and a "target: message" prefix in msg is split off into
    self.target (URL-ish prefixes like http/https/image are exempt).
    """
    if serialized_str is not None:
        self.deserialize(serialized_str)
        return
    self.time = Timestamp() if time is None else time
    if kind not in ["wa", "irc"]:
        raise Exception("Message kind must be 'wa' or 'irc'")
    self.kind = kind
    self.nick_full = nick_full
    self.chan = chan
    self.msg = msg
    self.target = None
    try:
        pieces = msg.split(":", 1)
        if len(pieces) == 2 and not pieces[0].endswith(("http", "https", "image")):
            self.target = pieces[0]
            self.msg = pieces[1].lstrip()
    except IndexError:
        pass
def get_times(self, box_file):
    """Replace self.times with one Timestamp object per line of box_file."""
    del self.times[:]  # empty the existing list in place
    with open(box_file) as fh:
        for raw_line in fh:
            self.times.append(Timestamp(raw_line))
def test_basic(self):
    """A default Timestamp exposes a millisecond int and a string form."""
    stamp = Timestamp()
    self.assertTrue(stamp is not None)
    self.assertTrue(stamp.ms_int() is not None)
    self.assertTrue(str(stamp) is not None)
def deserialize(self, string):
    """Rebuild this message's state from a ' @@@ '-separated string."""
    fields = string.split(" @@@ ")
    stamp = Timestamp(ms_str=fields[0])
    # Re-run __init__ with the recovered fields (kind, nick, chan, msg, time).
    self.__init__(fields[1], fields[2], fields[3], fields[4], stamp,
                  serialized_str=None)
def onGroup_MessageReceived(self, messageId, jid, author, messageContent, timestamp, wantsReceipt, pushName):
    """Wrap an incoming WhatsApp group message, dispatch it, and ack it."""
    incoming = Message(kind="wa", nick_full=author, chan=jid, msg=messageContent)
    # x1000: ms_int takes milliseconds (timestamp presumably arrives in
    # seconds — confirm against the WhatsApp layer).
    incoming.time = Timestamp(ms_int=timestamp * 1000)
    self.msg_handler(incoming)
    send_receipts = True
    if wantsReceipt and send_receipts:
        self.wait_connected()
        self.methodsInterface.call("message_ack", (jid, messageId))
def ptp_data():
    """Return the current UTC time as a timespec, round-tripped through Timestamp."""
    spec = timespec()
    now = time.time()
    stamp = Timestamp(int(now), int((now % 1) * 1e9))
    total_ns = stamp.to_nanosec()
    # Split total nanoseconds back into whole seconds + remainder.
    spec.tv_sec = int(total_ns * 1e-9)
    spec.tv_nsec = int(total_ns - (spec.tv_sec * 1e9))
    return spec
def __init__(self): self._id = "replica-" + str(uuid.uuid4()) # This RM's ID # Represents updates currently reflected in the value. Contains one entry for every replica manager, and is # updated whenever an update operation is applied to the value. self.value_timestamp = Timestamp({self.id: 0}) # Represents those updates that have been accepted by the RM (placed in the RM's update log). Differs from the # value timestamp because not all updates in the log are stable. self._replica_timestamp = Timestamp({self.id: 0}) # The Pyro name server. Storing it locally removes the overhead of re-locating it every time this RM gets # replicas_with_updates. self.ns = Pyro4.locateNS() # This RM's update log, containing Records. self._update_log = Log()
def test_basic(self):
    """A freshly built Message carries its fields and a creation time."""
    m = Message("irc", "complete_nick!foobar", "channel_name", "message contents")
    t = Timestamp()
    # The message's timestamp was taken at construction, strictly before t.
    self.assertTrue(m.time < t)
    # Fix: the original asserted nick_full twice; one assertion suffices.
    self.assertEquals(m.nick_full, "complete_nick!foobar")
    self.assertEquals(m.get_nick(), "complete_nick")
    self.assertEquals(m.chan, "channel_name")
    self.assertEquals(m.msg, "message contents")
    self.assertTrue(m.target is None)
def __init__(self, data=u'', tags=[], created='', path=u'', now=False): self.data = data self.tags = Tags(tags) #could rename this to path potentially #self.source_file = None #*2011.06.21 09:59:10 #now wishing it was just self.source #maybe both should be available? self.path = path self.source = path #*2011.08.14 18:56:17 #path implies a source and destination #self.created = '' #*2011.07.06 08:24:43 #this may closely mimic the way Timestamp initializes #may want to leverage that #or just pass created and now values in to there if now: self.created = Timestamp() #elif type(created) == type(now): elif isinstance(created, datetime): self.created = Timestamp(created) #passed in an actual Timestamp here: elif isinstance(created, Timestamp): self.created = created elif isinstance(created, str) or isinstance(created, unicode): if created: self.created = Timestamp(created) else: self.created = created else: raise TypeError, "Unknown time format for moment created value: %s type: %s" % ( created, type(created))
def testSubtract(self):
    """Subtracting ticks yields a new Timestamp and leaves the operand alone."""
    # Capture the baseline before it is shifted; the original compared
    # against an undefined name `ticks1`, which raised NameError.
    base = self.ts1.getLocalTicks()
    ticks = base + 30000
    self.ts2 = Timestamp(ticks)
    self.ts1 = self.ts2 - 30000
    assertEquals('Ticks', self.ts1.getLocalTicks(), base)
    # Make sure ts2 was unchanged by the operation.
    assertEquals('Ticks', self.ts2.getLocalTicks(), ticks)
def __init__(self):
    """Set up a front end: unique ID, empty vector timestamp, name server."""
    self.id = "frontend-" + str(uuid.uuid4())
    # Vector timestamp of the replicated data as observed via this FE.
    # Sent with every query/update; each timestamp returned by a RM is
    # merged into it so the FE tracks the newest version its client saw.
    self.prev = Timestamp()
    # The Pyro name server, located once and cached.
    self.ns = Pyro4.locateNS()
def __init__(self, message_repository):
    """Create an empty order book backed by the given MessageRepository."""
    self._logger = logging.getLogger(self.__class__.__name__)
    assert isinstance(message_repository, MessageRepository), type(message_repository)
    self.message_repository = message_repository
    self._trades = deque(maxlen=100)  # most recent trades, capped at 100
    self._bids = Side()
    self._asks = Side()
    # Last message processed by this book, and when it was processed.
    self._last_message = None
    self._last_timestamp = Timestamp(0.0)
def load_clean(self, line, sep='\x01'): items = self.split_line(line, sep) try: self.tid = long(items[0]) self.uid = long(items[1]) self.location = Location(float(items[2]), float(items[3])) self.timestamp = Timestamp(items[4]) self.timestamp.timestamp = long(float(items[5])) self.message = Message(items[7]) self.message.words = items[6].strip().split(' ') except: print 'Error when loading clean tweets' print line sys.exit(0)
def __init__(self, kind=None, nick_full=None, chan=None, msg=None, time=None, serialized_str=None):
    """Build a Message from fields, or restore one from a serialized string."""
    if serialized_str is not None:
        self.deserialize(serialized_str)
        return
    self.time = Timestamp() if time is None else time
    if kind not in ["wa", "irc"]:
        raise Exception("Message kind must be 'wa' or 'irc'")
    self.kind = kind
    self.nick_full = nick_full
    self.chan = chan
    # split_nick separates an addressed "target: message" form.
    self.target, self.msg = split_nick(msg)
def blog():
    """Render the blog index, one user's posts, or a single post.

    ?user=<id> -> that user's posts, newest first
    ?id=<id>   -> a single post
    otherwise  -> all posts, newest first
    """

    def _formatted(posts):
        # One formatted stamp per post, in the same order as `posts`.
        # (Replaces the original index-based range(len(...)) loops.)
        return [Timestamp(p.timestamp).timestampformatter() for p in posts]

    user_id = request.args.get('user')
    if user_id is not None:
        owner = User.query.filter_by(id=user_id).first()
        posts = Post.query.filter_by(owner=owner).all()
        posts.sort(key=lambda post: post.timestamp, reverse=True)
        return render_template('blog.html', title="Blogz",
                               heading=owner.username + "'s Posts",
                               posts=posts, timestamps=_formatted(posts),
                               individual_post=False)

    post_id = request.args.get('id')
    if post_id is not None:
        post = Post.query.filter_by(id=post_id)
        timestamp = Timestamp(post[0].timestamp).timestampformatter()
        return render_template('blog.html', title=post[0].name,
                               posts=post[0], timestamp=timestamp,
                               individual_post=True)

    posts = Post.query.all()
    posts.sort(key=lambda post: post.timestamp, reverse=True)
    return render_template('blog.html', title="Blogz", heading="Blogz",
                           posts=posts, timestamps=_formatted(posts),
                           individual_post=False)
def __init__(self, n_days, n_steps_per_day, asset_config_path, agent_config_path):
    """Build the simulated market: companies, stocks, agents and exchange."""
    self.n_days = n_days
    self.n_steps_per_day = n_steps_per_day
    initial_shares_per_agent = 10
    n_agents = AgentGenerator.n_agents(agent_config_path)
    print(n_agents)
    self.companies = CompanyGenerator.generate_companies(n_companies=10, n_sectors=3)
    self.tickers = [company.ticker for company in self.companies]
    self.assets = [Stock(company) for company in self.companies]
    self.agents = AgentGenerator.generate(agent_config_path, self.assets, verbose=True)
    n_agents = len(self.agents)
    # Seed every agent with the same starting position in every stock.
    for agent in self.agents:
        for stock in self.assets:
            agent.portfolio.assets[stock] = initial_shares_per_agent
    self.cov_matrix = test.generate_corr_matrix(len(self.tickers), 2, 0.2)
    # Equal weights across all tickers.
    self.weights = np.ones(len(self.tickers))
    self.weights = self.weights / self.weights.size
    self.book = Orderbook([], [])
    self.timestamp = Timestamp(0, 0)
    self.market_portfolio = Portfolio.empty_portfolio(self.assets)
    self.exchange = Exchange(self.agents, self.market_portfolio, n_steps_per_day)
    self.history = []
def log(text, timestamp=None):
    """Print and append a log line tagged with time, level and call site.

    The level code (II, EE, ...) is derived from the name of the wrapper
    function that called us (info, error, ...); the file/line come from
    that wrapper's own caller. Writes to stdout and the global `logfile`.
    """
    def path_leaf(path):
        # Basename that also works when the path ends in a separator.
        head, tail = ntpath.split(path)
        return tail or ntpath.basename(head)
    # Frame [2]: the real call site (the code that called the wrapper).
    frame, filename, line_number, function_name, lines, index = inspect.getouterframes(
        inspect.currentframe())[2]  # log caller stack
    filename = path_leaf(filename)
    # Frame [1]: the wrapper itself (info, error, ...); its name gives the level.
    _, _, _, log_type, _, _ = inspect.getouterframes(
        inspect.currentframe())[1]  # log function (info, error...)
    log_type = log_type[0].upper() * 2  # II for info, EE for error...
    if timestamp is None:
        timestamp = Timestamp()
    text = "%s %s %s:%s: %s" % (timestamp.to_human_str(), log_type,
                                filename, line_number, text)
    print text
    logfile.write("\n%s" % text)
def dates(self):
    """Map every date key to the number of entries stored under it.

    Keys holding a timestamp are normalised through Timestamp.compact();
    blank keys (entries without a timestamp) are kept unchanged.
    """
    counts = {}
    for key in self._dates.keys():
        if not key:
            # No timestamp recorded for these entries.
            counts[key] = len(self._dates[key])
            continue
        stamp = Timestamp(compact=key)
        counts[stamp.compact()] = len(self._dates[key])
    return counts
def blog():
    """Render either a single post (?id=...) or the newest-first index."""
    post_id = request.args.get('id')
    if post_id is not None:
        post = Post.query.filter_by(id=post_id).all()
        timestamp = Timestamp(post[0].timestamp).timestampformatter()
        return render_template('blog.html', title=post[0].name, posts=post[0],
                               timestamp=timestamp, individual_post=True)
    posts = Post.query.all()
    # Newest first.
    posts.sort(key=lambda post: post.timestamp, reverse=True)
    return render_template('blog.html', title="Build A Blog", posts=posts,
                           individual_post=False)
def date(self, date_key=None):
    """Return the entries filed under date_key (compact stamp or Timestamp).

    Coarse stamps (accuracy broader than a second) are expanded to their
    default Timerange and answered via self.range(); exact stamps are
    looked up directly. Missing or unknown keys yield an empty list.
    """
    if not date_key:
        return []
    if isinstance(date_key, Timestamp):
        ts = date_key
    else:
        ts = Timestamp(compact=date_key)
    if ts.accuracy and ts.accuracy != "second":
        # Broad stamp (e.g. a whole day): expand and collect the range.
        span = Timerange(ts).default()
        return self.range(span.start, span.end)
    if ts.compact() in self._dates:
        return self._dates[ts.compact()]
    return []
def date(self, date_key=''):
    """Fetch the server's entries for one date.

    date_key may be a compact stamp string or a Timestamp.
    Returns {compact_stamp: [Moment, ...]}.
    """
    # Fix: removed `values`/`params` — urllib.urlencode() was computed
    # but its result was never used.
    if isinstance(date_key, Timestamp):
        ts = date_key
    else:
        ts = Timestamp(compact=date_key)
    url = '%s/date/%s' % (self.source, ts.compact())
    # Plain GET; the body is a JSON object keyed by the compact stamp.
    req = urllib2.urlopen(url)
    json_raw = req.read()
    req.close()
    result = json.loads(json_raw)
    moments = []
    for e in result[ts.compact()]:
        moments.append(Moment(data=e['data'], tags=e['tags'],
                              created=e['created'], path=e['path']))
    return {ts.compact(): moments}