def timestamp(self):
    """Render self.time in the user's configured timezone.

    Returns:
        str: the localized time formatted as 'YYYY-MM-DD HH:MM:SS'.
    """
    # Imported lazily to avoid a module-level dependency cycle with util.
    from util import Clock

    localized = Clock().from_time(self.time)
    return localized.strftime('%Y-%m-%d %H:%M:%S')
def timestamp(self):
    """Non-static timestamp that changes based on the user's defined timezone.

    Returns:
        str: self.time localized via util.Clock, as 'YYYY-MM-DD HH:MM:SS'.
    """
    # Local import keeps util out of this module's import-time dependencies.
    from util import Clock

    clock = Clock()
    local_time = clock.from_time(self.time)
    return local_time.strftime('%Y-%m-%d %H:%M:%S')
def can_process_event(event, scenario=False):
    """Decide whether an event should be processed right now.

    Non-scenario events arriving at night are deferred (return False)
    when their magnitude is below the configured nighttime cutoff.

    Args:
        event: event with a ``magnitude`` attribute.
        scenario (bool): True for manually triggered scenario runs,
            which are always processed regardless of time of day.

    Returns:
        bool: True if the event should be processed now.
    """
    clock = Clock()
    sc = SC()
    # check if we should wait until daytime to process
    if (clock.nighttime() is True) and (scenario is False):
        return event.magnitude >= sc.night_eq_mag_cutoff
    return True
def __init__(self, gui):
    """Create a new pymodoro object, initialising variables."""
    # Session counters for completed pomodoros and breaks.
    self.num_pomodoros = 0
    self.num_breaks = 0
    self.num_big_breaks = 0
    # State flags: at most one should be True at any time.
    self.is_working = False
    self.is_break = False
    self.is_big_break = False
    # One clock per timer type, each reporting back to this controller.
    self.work_clock = Clock(self)
    self.break_clock = Clock(self)
    self.big_break_clock = Clock(self)
    self.gui = gui
    # The work clock is active until a break is started.
    self.current_clock = self.work_clock
def main(config=None, wifi=None, console=None):
    """Run the authorization micro-server.

    Binds port 80 on this node's IP and answers every incoming
    connection with a plain-text HTTP response: the main server's IP
    gets 'True' (password check stubbed out), any other client gets a
    'Password added to the list' acknowledgement.

    NOTE(review): the request body is never read from `cl`, and neither
    branch stores anything yet — see the TODO below; presumably still a
    work in progress.
    """
    # Ensure messages from Boot.py are visible for at least 1 sec
    time.sleep(5)
    if config is None:
        # Fallback config: empty AUTH_SERVER_IP makes the guard below return.
        config = ujson.loads('{"SECRET_SALT": "", "AUTH_SERVER_IP": ""}')
    ip = wifi.ip
    if not ip == config['AUTH_SERVER_IP']:
        console.error(['Invalid', 'Wifi or IP', ip, config['AUTH_SERVER_IP']])
        return
    # Socket listen to my current IP on port 80
    addr = socket.getaddrinfo(ip, 80)[0][-1]
    s = socket.socket()
    s.bind(addr)
    s.listen(1)
    console.log(['Init Auth Server', addr])
    # Ensure messages is displayed for at least 2 sec
    time.sleep(2)
    while True:
        cl, addr = s.accept()
        current_time = Clock.get_time()
        client_ip = addr[0]
        console.log([current_time, 'Request from', 'Client IP', client_ip])
        # TODO to be continued
        # NOTE(review): 'MAIN_SERVER_IP' is absent from the fallback config
        # above — a KeyError if this is ever reached with config=None.
        # Confirm callers always pass a complete config dict.
        if client_ip == config['MAIN_SERVER_IP']:
            # Compare password to the list return True if authorize other wise return False
            cl.send('HTTP/1.0 200 OK\r\nContent-Type: text/plain\r\n\r\n')
            cl.send('True')
            cl.close()
        else:
            # Add password_hash to the list use the client_ip_hash to map
            cl.send('HTTP/1.0 200 OK\r\nContent-Type: text/plain\r\n\r\n')
            cl.send('Password added to the list')
            cl.close()
class Pymodoro:
    """Pomodoro timer state machine.

    Cycles between work sessions, short breaks, and an extended break
    after every fourth completed pomodoro, asking the GUI to notify the
    user whenever the active timer expires.
    """

    # Timer durations in seconds.
    WORK_SECONDS = 1500       # 25 minutes
    BREAK_SECONDS = 300       # 5 minutes
    BIG_BREAK_SECONDS = 1800  # 30 minutes

    def __init__(self, gui):
        """Create a new pymodoro object, initialising variables."""
        self.num_pomodoros = 0
        self.num_breaks = 0
        self.num_big_breaks = 0
        self.work_clock = Clock(self)
        self.break_clock = Clock(self)
        self.big_break_clock = Clock(self)
        # At most one of these flags is True while a timer is running.
        self.is_working = False
        self.is_break = False
        self.is_big_break = False
        self.gui = gui
        self.current_clock = self.work_clock

    def stop(self, widget=None):
        """Stop all timers and reset them to zero."""
        self.is_working = False
        self.is_break = False
        self.is_big_break = False
        self.work_clock.reset(0)
        self.break_clock.reset(0)
        self.big_break_clock.reset(0)

    def update_clocks(self):
        """Tick whichever clock matches the current state.

        Returns True so it can be re-scheduled by a GUI timeout source.
        """
        if self.is_working:
            self.work_clock.tick()
        elif self.is_break:
            self.break_clock.tick()
        elif self.is_big_break:
            self.big_break_clock.tick()
        return True

    def start_pomodoro(self, widget=None):
        """Start a work session and make the work clock current."""
        self.is_working = True
        self.work_clock.reset(self.WORK_SECONDS)
        self.current_clock = self.work_clock

    def start_break(self):
        """Start a short break and make the break clock current."""
        self.is_break = True
        self.break_clock.reset(self.BREAK_SECONDS)
        self.current_clock = self.break_clock

    def start_extended_break(self):
        """Start an extended break and make the big-break clock current."""
        self.is_big_break = True
        self.big_break_clock.reset(self.BIG_BREAK_SECONDS)
        self.current_clock = self.big_break_clock

    def clock_expired(self):
        """Advance the work/break cycle when the active timer expires.

        Every fourth completed pomodoro earns an extended break;
        otherwise a short break follows, and any break leads back to a
        new pomodoro. Returns True.
        """
        if self.is_working:
            self.is_working = False
            self.gui.notify_expiry("Pomodoro complete", "Time for a break!")
            self.num_pomodoros += 1
            # Parenthesized print works identically on Python 2 and 3
            # (the original `print x` statement is Python-2-only syntax).
            print(self.num_pomodoros)
            if (self.num_pomodoros % 4) != 0:
                self.start_break()
            else:
                self.start_extended_break()
        elif self.is_break:
            self.is_break = False
            self.gui.notify_expiry("Break over", "Time to get back to work!")
            self.start_pomodoro()
        elif self.is_big_break:
            self.is_big_break = False
            self.gui.notify_expiry("Extended break over", "Time to get back to work!")
            self.start_pomodoro()
        return True
def process_events(events=None, session=None, scenario=False):
    '''
    Process or reprocess events passed into the function. Will
    send NEW_EVENT and UPDATE emails

    Args:
        new_events (list): List of Event objects to process
        session (Session()): SQLAlchemy session
        scenario (bool): True for manually triggered scenario runs

    Returns:
        dict: a dictionary that contains information about the function run
        ::
            data = {'status': either 'finished' or 'failed',
                    'message': message to be returned to the UI,
                    'log': message to be added to ShakeCast log
                           and should contain info on error}
    '''
    clock = Clock()
    sc = SC()
    groups_affected = []
    all_groups_affected = set()
    for event in events:
        # check if we should wait until daytime to process
        if (clock.nighttime() is True) and (scenario is False):
            if event.magnitude < sc.night_eq_mag_cutoff:
                continue

        if scenario is True:
            in_region = (session.query(Group)
                                .filter(Group.point_inside(event))
                                .all())
            groups_affected = [group for group in in_region
                               if group.gets_notification('new_event', scenario=True)]
            all_groups_affected.update(groups_affected)
        elif event.event_id != 'heartbeat':
            groups_affected = (session.query(Group)
                                      .filter(Group.point_inside(event))
                                      .all())
            filtered_groups = [group for group in groups_affected
                               if group.gets_notification('new_event')]
            all_groups_affected.update(filtered_groups)
        else:
            # heartbeat events go to every group that opted in
            all_groups = session.query(Group).all()
            groups_affected = [group for group in all_groups
                               if group.gets_notification('new_event', heartbeat=True)]
            all_groups_affected.update(groups_affected)

        if not groups_affected:
            event.status = 'processed - no groups'
            session.commit()
        else:
            event.status = 'processing_started'

        for group in all_groups_affected:
            # check new_event magnitude to make sure the group wants a
            # notification
            event_spec = group.get_new_event_spec(scenario=scenario)
            if (event_spec is None or
                    event_spec.minimum_magnitude > event.magnitude):
                continue
            notification = Notification(group=group,
                                        event=event,
                                        notification_type='NEW_EVENT',
                                        status='created')
            session.add(notification)
        session.commit()

    if all_groups_affected:
        for group in all_groups_affected:
            # get new notifications
            nots = (session.query(Notification)
                           .filter(Notification.notification_type == 'NEW_EVENT')
                           .filter(Notification.status == 'created')
                           .filter(Notification.group_id == group.shakecast_id)
                           .all())
            # NOTE(review): named last_day but this is 5 hours, not 24 —
            # confirm which window is intended.
            last_day = time.time() - 60 * 60 * 5
            # List comprehension instead of filter(): identical on Python 2,
            # and len() below would fail on Python 3's lazy filter object.
            filter_nots = [x for x in nots
                           if x.event is not None and
                           (x.event.time > last_day or scenario is True)]
            if len(filter_nots) > 0:
                new_event_notification(notifications=filter_nots,
                                       scenario=scenario)
                processed_events = [n.event for n in filter_nots]
                for e in processed_events:
                    e.status = 'processed'

    if scenario is True:
        for event in events:
            event.status = 'scenario'
def process_shakemaps(shakemaps=None, session=None, scenario=False):
    '''
    Process or reprocess the shakemaps passed into the function

    Args:
        shakemaps (list): List of ShakeMap objects to process
        session (Session()): SQLAlchemy session
        scenario (boolean): True for manually triggered events

    Returns:
        dict: a dictionary that contains information about the function run
        ::
            data = {'status': either 'finished' or 'failed',
                    'message': message to be returned to the UI,
                    'log': message to be added to ShakeCast log
                           and should contain info on error}
    '''
    clock = Clock()
    sc = SC()
    for shakemap in shakemaps:
        # check if we should wait until daytime to process
        if (clock.nighttime()) is True and scenario is False:
            if shakemap.event.magnitude < sc.night_eq_mag_cutoff:
                continue

        shakemap.status = 'processing_started'

        # open the grid.xml file and find groups affected by event
        grid = create_grid(shakemap)
        if scenario is True:
            in_region = (session.query(Group)
                                .filter(Group.in_grid(grid))
                                .all())
            groups_affected = [group for group in in_region
                               if group.gets_notification('damage', scenario=True)]
        else:
            in_region = (session.query(Group)
                                .filter(Group.in_grid(grid))
                                .all())
            groups_affected = [group for group in in_region
                               if group.gets_notification('damage')]

        if not groups_affected:
            shakemap.status = 'processed - no groups'
            session.commit()
            continue

        # send out new events and create inspection notifications
        for group in groups_affected:
            notification = Notification(group=group,
                                        shakemap=shakemap,
                                        event=shakemap.event,
                                        notification_type='DAMAGE',
                                        status='created')
            session.add(notification)
        session.commit()

        notifications = (session.query(Notification)
                                .filter(Notification.shakemap == shakemap)
                                .filter(Notification.notification_type == 'DAMAGE')
                                .filter(Notification.status != 'sent')
                                .all())

        # get a set of all affected facilities
        # BUG FIX: the per-group query must filter on g.shakecast_id, the
        # comprehension's own loop variable — the original used `group`,
        # the stale variable from the notification loop above, so every
        # sub-query matched only the LAST group's facilities.
        affected_facilities = set(itertools
                                  .chain
                                  .from_iterable(
                                      [(session.query(Facility)
                                                .filter(Facility.in_grid(grid))
                                                .filter(Facility.groups
                                                        .any(Group.shakecast_id ==
                                                             g.shakecast_id))
                                                .all())
                                       for g in groups_affected]))

        geoJSON = {'type': 'FeatureCollection',
                   'features': [None] * len(affected_facilities),
                   'properties': {}}

        if affected_facilities:
            fac_shaking_lst = [None] * len(affected_facilities)
            f_count = 0
            for facility in affected_facilities:
                fac_shaking = make_inspection_priority(facility=facility,
                                                       shakemap=shakemap,
                                                       grid=grid)
                if fac_shaking is False:
                    continue
                fac_shaking_lst[f_count] = FacilityShaking(**fac_shaking)
                geoJSON['features'][f_count] = makeImpactGeoJSONDict(facility, fac_shaking)
                f_count += 1

            # Drop unused None placeholders left by skipped facilities so
            # they don't reach bulk_save_objects or the GeoJSON output.
            del fac_shaking_lst[f_count:]
            del geoJSON['features'][f_count:]

            # Remove all old shaking and add all fac_shaking_lst
            shakemap.facility_shaking = []
            session.commit()
            session.bulk_save_objects(fac_shaking_lst)
            session.commit()

            geoJSON['properties']['impact-summary'] = get_event_impact(shakemap)
            saveImpactGeoJson(shakemap, geoJSON)

            # get and attach pdf
            pdf.generate_impact_pdf(shakemap, save=True)
            shakemap.status = 'processed'
        else:
            shakemap.status = 'processed - no facs'

        if scenario is True:
            shakemap.status = 'scenario'

        if notifications:
            # send inspection notifications for the shaking levels we
            # just computed
            for n in notifications:
                inspection_notification(notification=n,
                                        scenario=scenario,
                                        session=session)
        session.commit()
# Boot sequence: bring up LED, display, Wifi, clock and sensor in order.
# Status LED — off while booting, back on once boot completes.
led = Pin(2, Pin.OUT)
led.off()
# I2C bus shared by the Console display and the BME sensor.
# NOTE(review): pins 5/4 look like ESP8266 SCL/SDA defaults — confirm board.
i2c = I2C(-1, Pin(5), Pin(4))
console = Console(i2c)
# Clear Serial Monitor
console.log(['', '', ''])
console.log('Hello from boot!')
console.log(' Init Wifi', console.y)
wifi = Wifi()
wifi.connect(config['SSID'], config['PASSWORD'])
console.log('IP: ' + wifi.ip, console.y)
# This node acts as the main server when its assigned IP matches the config.
IS_MAIN_SERVER = config['MAIN_SERVER_IP'] == wifi.ip
console.log('Is main: ' + str(IS_MAIN_SERVER), console.y)
console.log(' Init Clock', console.y)
# Fetch network time, then program the local clock with it.
Clock.fetch_time()
Clock.set_time()
current_time = Clock.get_time()
console.log(current_time, console.y)
# Environment sensor on the same I2C bus.
bme = BME(i2c)
# Boot finished — LED on.
led.on()
# Reclaim memory allocated during boot (MicroPython convention).
gc.collect()
def main(config=None, wifi=None, console=None, bme=None):
    """Run the main weather server.

    Accepts HTTP connections on port 80, checks the client against the
    auth server, then replies with a JSON payload of the current time
    and BME sensor readings.
    """
    # Ensure messages from Boot.py are visible for at least 1 sec
    time.sleep(5)
    if config is None:
        # Fallback config: empty MAIN_SERVER_IP makes the guard below return.
        config = ujson.loads('{"SECRET_SALT": "", "MAIN_SERVER_IP": ""}')
    ip = wifi.ip
    if not ip == config['MAIN_SERVER_IP']:
        console.error(['Invalid', 'Wifi or IP', ip, config['MAIN_SERVER_IP']])
        return
    # Socket listen to my current IP on port 80
    addr = socket.getaddrinfo(ip, 80)[0][-1]
    s = socket.socket()
    s.bind(addr)
    s.listen(1)
    console.log(['Init Main Server', addr])
    # Ensure messages is displayed for at least 2 sec
    time.sleep(2)
    while True:
        cl, addr = s.accept()
        data = cl.recv(4096)
        data = data.decode('utf-8')
        data = data.split('\n')
        # Keep only the HTTP request line (e.g. "GET /<password> HTTP/1.1").
        data = data[0]
        # NOTE(review): `data` is now a string, so data[0] is its FIRST
        # CHARACTER — this splits "G" from "GET", not the request path.
        # Probably meant data.split('/'); confirm the intended request format.
        password = data[0].split('/')
        # Validate password
        try:
            client_ip = addr[0]
            # client_ip_hash = Hash.encrypt(client_ip)
            # password_hash = Hash.encrypt(password)
            # NOTE(review): the response is discarded (see commented check
            # below), so this only rejects clients on a network error, not
            # on a denied password.
            urequests.get('http://' + config['AUTH_SERVER_IP'] + '/' + client_ip)
            # if not response.text == 'True':
            #     cl.send('HTTP/1.0 401 OK\r\nContent-Type: text/plain\r\n\r\n')
            #     cl.send('401 Unauthorized')
            #     cl.close()
            #     continue
        except:
            # NOTE(review): bare except — any failure (DNS, timeout, ...)
            # is treated as unauthorized.
            cl.send('HTTP/1.0 401 OK\r\nContent-Type: text/plain\r\n\r\n')
            cl.send('401 Unauthorized')
            cl.close()
            continue
        current_time = Clock.get_time()
        temperature = bme.temperature
        pressure = bme.pressure
        humidity = bme.humidity
        # NOTE(review): string concatenation implies bme.humidity is a str
        # while temperature/pressure are logged raw — confirm driver types.
        console.log([
            current_time,
            'Temperature', temperature,
            'Pressure', pressure,
            'Humidity: ' + humidity
        ])
        response = ujson.dumps({
            'client_ip': client_ip,
            'password': password,
            'current_time': current_time,
            'temperature': temperature,
            'pressure': pressure,
            'humidity': humidity,
        })
        cl.send('HTTP/1.0 200 OK\r\nContent-Type: text/plain\r\n\r\n')
        cl.send(response)
        cl.close()
# python psync.py source dest if __name__ == "__main__": source_root, dest_root = sys.argv[1:] # TODO: What else can we use for peerids when there are no peers? source_peerid = source_root dest_peerid = dest_root # TODO: implement reading .psync. source_groupids = Groupids({"": source_root}) dest_groupids = Groupids({"": dest_root}) conf = Config() clock = Clock() slog = StatusLog(clock) fs = FileSystem(slog) source_db_path = os.path.join(source_root, conf.db_path) dest_db_path = os.path.join(dest_root, conf.db_path) revisions_root = os.path.join(dest_root, conf.revisions_path) fs.create_parent_dirs(source_db_path) fs.create_parent_dirs(dest_db_path) with sqlite3.connect(source_db_path) as source_db: with sqlite3.connect(dest_db_path) as dest_db: source_history_store = HistoryStore(SqlDb(source_db), slog) dest_history_store = HistoryStore(SqlDb(dest_db), slog) revisions = RevisionStore(fs, revisions_root) merge_log = MergeLog(SqlDb(source_db), clock)