def update_monster_move(self, monster, update_dict):
    """Try to move a monster one step and return the resulting events.

    Returns a list of Event objects (possibly empty). When update_dict is
    true, the monsters position index is kept in sync with the move.
    """
    # Save old index
    oi = self.index(monster.x, monster.y)
    # Try to move
    if self.update_movable_entity(monster):
        ev = []
        # Check for collision with explosion
        expl = self.explosion_at(monster.x, monster.y)
        if expl:
            ev.append(Event(Event.BOMB_HIT_MONSTER, expl.owner, monster))
            if update_dict:
                # Remove monster
                self.monsters[oi].remove(monster)
            return ev
        # Otherwise, the monster can walk safely
        if update_dict:
            # Remove monster from previous position
            self.monsters[oi].remove(monster)
            # Put monster in new position
            ni = self.index(monster.x, monster.y)
            np = self.monsters.get(ni, [])
            np.append(monster)
            self.monsters[ni] = np
        # Check for collisions with characters
        characters = self.characters_at(monster.x, monster.y)
        if characters:
            for c in characters:
                ev.append(
                    Event(Event.CHARACTER_KILLED_BY_MONSTER, c, monster))
        return ev
    # Move failed: nothing happened.
    return []
def boot(self, view: View, event: Event):
    """Share common template helpers globally and register the sign-up listener."""
    view.share({
        'show_if': self._show_if,
        # Expose environment-variable lookup directly to templates.
        'env': os.getenv,
        'DEBUG': application.DEBUG
    })
    # Subscribe the UserSignedUp listener on the event bus.
    event.subscribe(UserSignedUp)
def post(self):
    """CORS-enabled POST handler that deletes the event named in the JSON body."""
    self.response.headers.add_header("Access-Control-Allow-Origin", "*")
    payload = json.loads(self.request.body)
    Event.removeEvent(payload["eventId"])
    self.response.write("Clean")
def update_character_move(self, character, update_dict):
    """Try to move a character one step and return the resulting events.

    Returns a list of Event objects (possibly empty). When update_dict is
    true, the characters position index is kept in sync with the move.
    """
    # Save old index
    oi = self.index(character.x, character.y)
    # Try to move
    if self.update_movable_entity(character):
        ev = []
        # Check for collision with explosion
        expl = self.explosion_at(character.x, character.y)
        if expl:
            ev.append(
                Event(Event.BOMB_HIT_CHARACTER, expl.owner, character))
            if update_dict:
                # Remove character
                self.characters[oi].remove(character)
            return ev
        # Otherwise, the character can walk
        if update_dict:
            # Remove character from previous position
            self.characters[oi].remove(character)
            # Put character in new position
            ni = self.index(character.x, character.y)
            np = self.characters.get(ni, [])
            np.append(character)
            self.characters[ni] = np
        # Check for collision with monster
        monsters = self.monsters_at(character.x, character.y)
        if monsters:
            return [
                Event(Event.CHARACTER_KILLED_BY_MONSTER, character,
                      monsters[0])
            ]
        # Check for exit cell
        if self.exitcell == (character.x, character.y):
            return [Event(Event.CHARACTER_FOUND_EXIT, character)]
    # Move failed, or moved with nothing notable happening.
    return []
def onboard_user():
    """Admin endpoint: create a new user with a caller-supplied ACL.

    Requires a valid session whose user holds the "onboard-users"
    permission; otherwise aborts with 403. Aborts 500 if the DB insert
    fails.
    """
    session_token = request.form["session_token"]
    db = database.Database()
    user_id = db.validate_session(session_token)
    if user_id:
        user_ctx = UserContext(user_id, db, current_app.logger)
        auth = user_ctx.check_acl("onboard-users")
        if auth:
            acl = json.loads(request.form["acl"])
            # TODO make sure the user is not issuing permissions they don't have themselves
            full_name = request.form["full_name"]
            email = request.form["email_address"]
            # NOTE(review): the password hash arrives pre-computed from the
            # client here — confirm that is the intended trust model.
            pw_hash = request.form["pw_hash"]
            # Last hop in access_route is the originating client address.
            ip_addr = request.access_route[-1]
            new_user_id = db.onboard_user(full_name, email, pw_hash,
                                          json.dumps(acl), ip_addr)
            if new_user_id:
                new_user_context = UserContext(new_user_id, db,
                                               current_app.logger)
                # Audit-log the creation before applying permissions.
                new_user_event = Event("Users Create User", db,
                                       current_app.logger)
                new_user_event.log_event(user_id,
                                         {"user_id": new_user_id,
                                          "email": email,
                                          "full_name": full_name,
                                          "acl": json.dumps(acl)})
                db.update_user_permissions(new_user_id, json.dumps(acl))
                message = "Successfully created new user " + \
                    new_user_context.user_info["email_address"]
                return render_template("admin/admin_confirmation.jinja2",
                                       session_token=session_token,
                                       confirmation_type="onboarded_new_user",
                                       confirmation_title="Created New User",
                                       confirmation_message=message,
                                       default_choice="Create Another User",
                                       choices=["Administration"])
            else:
                abort(500)
    # Invalid session or missing permission.
    abort(403)
def __init__(self, uuid, dir_path, index_name='.syncall_index',
             load_index=True, temp_dir_name='.syncall_temp',
             create_temp_dir=False):
    """Track a synced directory: its index file, temp dir, locks and events."""
    self.logger = logging.getLogger(__name__)

    self.uuid = uuid
    self.dir_path = dir_path
    self.index_name = index_name
    self.index_path = os.path.join(self.dir_path, self.index_name)
    self.temp_dir = os.path.join(self.dir_path, temp_dir_name)
    # Timestamp (seconds since epoch) of the most recent update.
    self.last_update = datetime.now().timestamp()

    if create_temp_dir and not os.path.exists(self.temp_dir):
        os.mkdir(self.temp_dir)

    # Serialize filesystem and temp-dir access across threads.
    self.fs_access_lock = threading.Lock()
    self.temp_dir_lock = threading.Lock()
    self.temp_files = set()

    self.transfer_manager = syncall.TransferManager(self)

    self.index_updated = Event()
    # Contains tuple(uuid, file_name, file_index) as data
    self.transfer_finalized = Event()

    if load_index:
        self.load_index()
    else:
        self._index = dict()
def test_divide_into_blocks():
    """A 4-hour schedulable event splits into four 1-hour open slots."""
    source = Event(start_time=datetime(2020, 4, 1, 11, 0),
                   end_time=datetime(2020, 4, 1, 15, 0),
                   type='schedulable')
    blocks = list(divide_into_blocks(source))
    print('\nblocks:')
    for block in blocks:
        print(' ', block)
    assert len(blocks) == 4
    expected = [
        Event(datetime(2020, 4, 1, 11 + hour, 0),
              datetime(2020, 4, 1, 12 + hour, 0),
              name='Available', type='open slot')
        for hour in range(4)
    ]
    assert blocks == expected
def review_progress(self, event):
    """Check progress toward the destination waypoint.

    Publishes `arrived` when within range; otherwise re-steers toward the
    waypoint and schedules the next review.
    """
    current_position = self.sensors.position
    if self._arrived(current_position, self.destination_waypoint):
        self.logger.info('Navigator, arrived at {:+f},{:+f}'.format(
            self.destination_waypoint.latitude,
            self.destination_waypoint.longitude))
        self.exchange.publish(
            Event(EventName.arrived, self.destination_waypoint))
    else:
        time_to_next_review = self._time_to_review(
            current_position, self.destination_waypoint)
        bearing = self.globe.bearing(current_position,
                                     self.destination_waypoint.position)
        if isNaN(bearing):
            # No usable fix: keep the current course until the next review.
            self.logger.warn(
                'Navigator, no information from sensors, continuing on current course'
            )
        else:
            self.logger.info(
                'Navigator, steering to {:+f},{:+f}, bearing {:5.1f}, distance {:.1f}m, review after {:.0f}s'
                .format(
                    self.destination_waypoint.latitude,
                    self.destination_waypoint.longitude, bearing,
                    self._distance(current_position,
                                   self.destination_waypoint),
                    time_to_next_review))
            self.exchange.publish(
                Event(EventName.set_course, heading=bearing))
        # Schedule the next review regardless of whether a new course was
        # set. NOTE(review): original indentation was ambiguous here —
        # confirm this publish is intended to run on the NaN path too.
        self.exchange.publish(
            Event(EventName.after,
                  seconds=time_to_next_review,
                  next_event=Event(EventName.navigate_review)))
def collect_events():
    """Group detections read from file into scene events.

    Detections whose positions are within 125 frames (5 s at 25 fps) of the
    current event's start are grouped into that event; a larger gap starts
    a new event.

    Returns:
        list of Event objects, each with `detections` populated and
        `detections_map` keyed by detection position.
    """
    events = []
    step = None
    event = None
    for detection in read_file():
        if step is None:
            # First detection ever: open the first event.
            step = detection.pos
            event = Event(detection.pos)
            event.detections.append(detection)
            continue
        # Detections within 125 frames (5 s * 25 fps) belong to one scene.
        # NOTE(review): gap is measured from the event's first detection
        # (`step` only advances at scene boundaries) — confirm that is
        # intended rather than measuring from the previous detection.
        if detection.pos - step <= 125:
            event.detections.append(detection)
        else:
            events.append(event)
            event = Event(detection.pos)
            event.detections.append(detection)
            step = detection.pos
    # BUG FIX: the final in-progress event was never flushed, so the last
    # scene was silently dropped.
    if event is not None:
        events.append(event)
    # Index every detection by its position within its event.
    for event in events:
        for det in event.detections:
            event.detections_map.setdefault(det.pos, []).append(det)
    return events
def post(self):
    """CORS-enabled POST handler that creates an event for the given user."""
    self.response.headers.add_header("Access-Control-Allow-Origin", "*")
    payload = json.loads(self.request.body)
    user_id = payload["userId"]
    del payload["userId"]
    Event.createEvent(user_id, payload)
    self.response.write('OK')
def __init__( self, simulation_manager: SimulationStateManager, entity_component_manager: EntityComponentManager, status_effect_manager: StatusEffectManager, being_factory, human_behavior_factory, ai_behavior_factory, being_fsm_factory, being_component_factory, ): super(CharacterCreator, self).__init__() # TODO: configuration for different aspects of the character creator. # e.g. # - rules on what skills, advantages, items, etc are allowed. # - contextual rules on what selections can be made together (exclusivity, etc) # - costs for different stats, skills, adv/dis, etc. self.simulation_manager = simulation_manager self.entity_component_manager = entity_component_manager self.status_effect_manager = status_effect_manager self.being_factory = being_factory self.human_behavior_factory = human_behavior_factory self.ai_behavior_factory = ai_behavior_factory self.being_fsm_factory = being_fsm_factory self.being_component_factory = being_component_factory Event.register("generate_random_character", self, self.generate_random_character)
def change_user_acl():
    """Admin endpoint: replace a user's ACL.

    Requires a valid session whose user holds "change-permissions";
    otherwise aborts 403. Aborts 500 if the DB update fails.
    """
    session_token = request.form['session_token']
    acl_data = request.form['acl']
    db = database.Database()
    auth_user_id = db.validate_session(session_token)
    if auth_user_id:
        auth_user_ctx = UserContext(auth_user_id, db, current_app.logger)
        if auth_user_ctx.check_acl("change-permissions"):
            user_id = int(request.form['user_id'])
            user_ctx = UserContext(user_id, db, current_app.logger)
            # Audit-log the permission change before applying it.
            user_event = Event("Users Changed Permissions", db,
                               current_app.logger)
            event_data = {"email_address": user_ctx.user_info["email_address"],
                          "user_id": user_id,
                          "new_acl_data": acl_data}
            user_event.log_event(auth_user_id, event_data)
            result = db.update_user_permissions(user_id,
                                                json.loads(acl_data))
            if result:
                message = "Access Control List updated for " + \
                    user_ctx.user_info['email_address']
                return render_template("admin/admin_confirmation.jinja2",
                                       session_token=session_token,
                                       title="ACL Updated",
                                       confirmation_title="ACL Updated",
                                       confirmation_message=message,
                                       confirmation_type="acl_updated",
                                       default_choice="OK")
            else:
                abort(500)
    # Invalid session or missing permission.
    abort(403)
def _mouseMoveEvent(self, event):
    """Dispatch a mouse-move event to child widgets, then to this widget.

    Returns False when propagation should stop, True when it may continue.
    NOTE(review): returns None when onMouseMovedPropagateRule is neither
    SingleDeny nor SingleAccept — confirm whether that is intended.
    """
    # BUG FIX: widgetsAt was called twice with the same arguments (once for
    # the None check, once to iterate), doing the hit-test work twice and
    # risking inconsistent results; call it once and reuse the result.
    widgets = self.widgetsAt(event.x, event.y)
    if widgets is not None:  # if there are widgets at the cursor
        for widget in widgets:
            # Translate coordinates into the widget's local space.
            event.x -= widget.x()
            event.y -= widget.y()
            if not Event.shouldPropagate(widget._mouseMoveEvent(event)):
                event.x += widget.x()  # reset coordinates
                event.y += widget.y()
                # Child consumed the event: stop propagation.
                return False
            event.x += widget.x()  # reset coordinates
            event.y += widget.y()
    if self.onMouseMovedPropagateRule == Event.SingleDeny:
        # A single denial from the handler or any registered callback
        # blocks propagation.
        if not Event.shouldPropagate(self.mouseMoveEvent(event)) or \
                any(not Event.shouldPropagate(r(event))
                    for r in self._onMouseMovedFuncs):
            return False
        return True
    if self.onMouseMovedPropagateRule == Event.SingleAccept:
        # A single acceptance from the handler or any registered callback
        # allows propagation.
        if Event.shouldPropagate(self.mouseMoveEvent(event)) or \
                any(Event.shouldPropagate(r(event)) is True
                    for r in self._onMouseMovedFuncs):
            return True
        return False
def __init__(self):
    """Create a non-blocking TCP dispatcher registered in protocol_map."""
    asyncore.dispatcher.__init__(self, map=protocol_map)
    self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
    # Raw I/O buffers for the length-prefixed packet framing.
    self.recv_buffer = ""
    self.send_buffer = ""
    # Notification hooks for connection establishment and decoded packets.
    self.on_connect = Event()
    self.on_packet = Event()
def on_mousedown(self, evt):
    """Begin a window resize when the press lands on a resize edge/corner."""
    x, y = self.region.offset
    width, height = self.size
    on_right_edge = evt.x == x + width - 1
    on_bottom_edge = evt.y == y + height - 1
    if on_right_edge and on_bottom_edge:
        # Bottom-right corner: resize in both axes.
        fire_event(
            self.window,
            Event("window_resize_start",
                  window=self.window,
                  direction="both",
                  x=evt.x,
                  y=evt.y))
    elif on_right_edge:
        fire_event(
            self.window,
            Event("window_resize_start",
                  window=self.window,
                  direction="x",
                  x=evt.x))
    elif on_bottom_edge:
        fire_event(
            self.window,
            Event("window_resize_start",
                  window=self.window,
                  direction="y",
                  y=evt.y))
def _handle_yomi(self):
    """Resolve the yomi (order read) phase for both armies.

    Applies formation-dependent morale costs for the chosen orders, works
    out which side's order beats the other's, records the result, and
    fires the corresponding events.
    """
    for i in [0, 1]:
        formation = self.formations[i]
        # Morale cost of issuing this order from this formation.
        cost = rps.formation_info(str(formation),
                                  "morale_cost")[str(self.armies[i].order)]
        if cost:
            # self.armies[i].bet_morale_change = cost
            self.armies[i].commitment_bonus = False
            Event(
                self, "order_change",
                Context({
                    "ctarget_army": self.armies[i],
                    "morale_cost": cost
                })).activate()
        else:
            # self.armies[i].bet_morale_change = 0
            self.armies[i].commitment_bonus = True
    self.yomi_winner_id = rps.orders_to_winning_armyid(
        self.orders)  # -1 if None
    # Which order beats which, in both directions.
    self.yomis = (rps.beats(self.orders[0], self.orders[1]),
                  rps.beats(self.orders[1], self.orders[0]))
    self.yomi_list.append(self.yomis)
    if self.yomi_winner_id in [0, 1]:
        Event(self, "order_yomi_win",
              Context({})).activate(self.armies[self.yomi_winner_id])
    Event(self, "order_yomi_completed",
          Context({})).activate(self.yomi_winner_id)
def __init__(self, gps, windsensor, compass, time_fn, exchange, logger,
             config):
    """Aggregate raw sensor readings and keep rolling averages updated."""
    self.gps = gps
    self.exchange = exchange
    self.windsensor = windsensor
    self.compass = compass
    self.logger = logger
    # Snapshot the GPS fix at construction time.
    position = gps.position
    self._position = Position(position.latitude, position.longitude)
    self.config = config
    self._wind_relative_avg = 0.0
    self._compass_avg = 0.0
    self.system_time = time_fn
    self._compass_smoothed = 0
    self._previous_compass_smoothed = 0
    # Seed the previous tick one second in the past so the first
    # rate-of-turn calculation has a non-zero time delta.
    self._previous_time = self.system_time() - 1
    self._rate_of_turn = 0
    self._rate_of_turn_average = 0
    exchange.subscribe(EventName.update_averages, self.update_averages)
    exchange.subscribe(EventName.log_position, self.log_values)
    exchange.subscribe(EventName.tick, self.update_compass_smoothed)
    # Schedule the recurring logging and averaging events.
    exchange.publish(
        Event(EventName.every,
              seconds=config['log interval'],
              next_event=Event(EventName.log_position)))
    exchange.publish(
        Event(EventName.every,
              seconds=config['update averages interval'],
              next_event=Event(EventName.update_averages)))
def _get_formations(self):
    """Run scouting, then let each army's intelligence pick a formation."""
    drawn_cards, scouted_cards = self._draw_and_scout()
    Event(self, "scout_completed",
          Context({})).activate(drawn_cards, scouted_cards)
    for side in [0, 1]:
        army = self.armies[side]
        army.formation = army.intelligence.get_formation(self)
    Event(self, "formation_completed", Context({})).activate()
def test_should_not_fire_event_on_tick_if_its_not_yet_time(self):
    """A tick before the deadline must not trigger the deferred event."""
    self.listen(EventName.start)
    clock = Mock(side_effect=[145.6, 146.4])
    # Keep a reference: TimeShift subscribes itself to the exchange.
    shifter = TimeShift(self.exchange, clock)
    self.exchange.publish(
        Event(EventName.after, seconds=3,
              next_event=Event(EventName.start)))
    self.exchange.publish(Event(EventName.tick))
    self.assertEqual(self.event_count(EventName.start), 0,
                     "start should not have been called")
def rotate_boat(self, rudder_effect, jitter):
    """Simulate one 0.2s step of rotation, then publish tick/average events."""
    wobble = randint(-jitter, jitter)
    steering = self.rudder_servo.get_position() * rudder_effect
    self.compass.bearing = to_360(self.compass.bearing + wobble - steering)
    self.time += 0.2
    self.exchange.publish(Event(EventName.tick))  # for compass smoothing!
    self.exchange.publish(Event(EventName.update_averages))
def post(self):
    """CORS-enabled POST handler that records a user's good dates for an event."""
    self.response.headers.add_header("Access-Control-Allow-Origin", "*")
    payload = json.loads(self.request.body)
    event_id = payload["eventId"]
    good_dates = payload["goodDates"]
    user_id = payload["userId"]
    Event.confirmDates(event_id, user_id, good_dates)
    self.response.write("ok")
def admin_create_user_acl():
    """Admin endpoint: validate input, check permission, create a user.

    Returns the create-user page with an error/success message, or aborts
    403 when the session is invalid or lacks "onboard-users".
    """
    session_token = request.form["session_token"]
    full_name = request.form["full_name"]
    email_address = request.form["email_address"]
    password = request.form["password"]
    password_repeat = request.form["password_repeat"]
    if EMAIL_REGEX.match(email_address) is None:
        return render_template("admin/admin_create_user.jinja2",
                               session_token=session_token,
                               create_user_error="Invalid e-mail address.")
    if len(password) < 8:
        return render_template("admin/admin_create_user.jinja2",
                               session_token=session_token,
                               create_user_error="Password must be at least 8 characters in length.")
    if password == password_repeat:
        data = email_address + password
        # NOTE(review): pw_hash is computed but never used below —
        # db.create_user() receives the raw password. Confirm which
        # hashing scheme is intended before removing either path.
        pw_hash = sha256(data.encode("utf-8")).hexdigest()
    else:
        return render_template("admin/admin_create_user.jinja2",
                               session_token=session_token,
                               create_user_error="Password must match both times.")
    db = database.Database(current_app.logger)
    user_id = db.validate_session(session_token)
    # NOTE(review): ctx is built before user_id is checked — confirm
    # UserContext tolerates a falsy user_id.
    ctx = UserContext(user_id, db, current_app.logger)
    if user_id:
        authorized = db.validate_permission(user_id, "onboard-users")
        if authorized:
            ip_addr = request.access_route[-1]
            result = db.create_user(full_name, email_address, password,
                                    ip_addr)
            if result:
                # result[0] == -1 signals a duplicate e-mail address.
                if result[0] == -1:
                    return render_template("admin/admin_create_user.jinja2",
                                           session_token=session_token,
                                           create_user_error="E-mail address already exists in the database.")
                else:
                    user_ctx = UserContext(result[0], db, current_app.logger)
                    # default permissions
                    user_ctx.add_permission("own-any-token")
                    user_ctx.add_permission("transfer-owned-token")
                    db.update_user_permissions(result[0], user_ctx.acl())
                    # Audit-log the creation.
                    create_user_event = Event("Users Create User", db,
                                              logger=current_app.logger)
                    metadata = {"ip_addr": ip_addr,
                                "created_by": ctx.user_info['email_address'],
                                "new_user_email_address": email_address,
                                "new_user_id": result[0]}
                    # NOTE(review): metadata is passed JSON-encoded here,
                    # while sibling endpoints pass a dict — confirm which
                    # form log_event expects.
                    create_user_event.log_event(user_id, json.dumps(metadata))
                    return render_template("admin/admin_create_user.jinja2",
                                           session_token=session_token,
                                           create_user_error="User created successfully.")
    abort(403)
class NetworkDiscovery:
    """
    Discovers remote SyncAll instances on the same network.
    """

    def __init__(self, port, version, uuid):
        self.logger = logging.getLogger(__name__)
        # Store UUID based on the hostname and current time to check
        # if the received broadcast packet is from self
        self.uuid = uuid
        # Fired with the remote's data whenever a discovery packet arrives.
        self.client_discovered = Event()
        self.port = port
        self.version = version
        self.__init_network_objects()

    def __init_network_objects(self):
        # Listener receives broadcasts; the UDP socket is used to send them.
        self.listener = BroadcastListener(self.port)
        self.listener.packet_received += self.__receive_packet
        self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)

    def start_listening(self):
        self.listener.start()

    def shutdown(self):
        self.listener.shutdown()
        try:
            self.socket.shutdown(socket.SHUT_RDWR)
        except:
            # NOTE(review): bare except — the socket may simply already be
            # closed; consider narrowing to OSError.
            pass

    def request(self):
        """
        Sends a discovery request to all hosts on the LAN
        """
        self.__broadcast({
            'version': self.version,
            'uuid': self.uuid
        }, self.port)

    def __broadcast(self, data, port):
        # msgpack-encode and send to the broadcast address.
        self.socket.sendto(msgpack.packb(data), (BROADCAST_ADDRESS, port))

    def __receive_packet(self, data):
        # Ignore the echo of our own broadcast.
        if self.__is_self(data):
            return

        self.logger.debug("Received discovery request from {}"
                          .format(data['source']))
        del data['server']
        self.client_discovered.notify(data)

    def __is_self(self, data):
        return data['data']['uuid'] == self.uuid
def __init__(self, port):
    """Listen for broadcast packets on *port*.

    Exposes packet_received / packet_received_error notification events.
    """
    # BUG FIX: allow_reuse_address must be set BEFORE UDPServer.__init__,
    # because the base constructor binds the socket and consults this flag
    # inside server_bind(); assigning it afterwards had no effect.
    self.allow_reuse_address = 1
    UDPServer.__init__(self, ('', port), BroadcastEventNotifierHandler)
    Thread.__init__(self)
    # self.daemon = True
    self.packet_received = Event()
    self.packet_received_error = Event()
class NetworkDiscovery:
    """
    Discovers remote SyncAll instances on the same network.
    """

    def __init__(self, port, version, uuid):
        self.logger = logging.getLogger(__name__)
        # Store UUID based on the hostname and current time to check
        # if the received broadcast packet is from self
        self.uuid = uuid
        # Fired with the remote's data whenever a discovery packet arrives.
        self.client_discovered = Event()
        self.port = port
        self.version = version
        self.__init_network_objects()

    def __init_network_objects(self):
        # Listener receives broadcasts; the UDP socket is used to send them.
        self.listener = BroadcastListener(self.port)
        self.listener.packet_received += self.__receive_packet
        self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)

    def start_listening(self):
        self.listener.start()

    def shutdown(self):
        self.listener.shutdown()
        try:
            self.socket.shutdown(socket.SHUT_RDWR)
        except:
            # NOTE(review): bare except — the socket may simply already be
            # closed; consider narrowing to OSError.
            pass

    def request(self):
        """
        Sends a discovery request to all hosts on the LAN
        """
        self.__broadcast({
            'version': self.version,
            'uuid': self.uuid
        }, self.port)

    def __broadcast(self, data, port):
        # msgpack-encode and send to the broadcast address.
        self.socket.sendto(msgpack.packb(data), (BROADCAST_ADDRESS, port))

    def __receive_packet(self, data):
        # Ignore the echo of our own broadcast.
        if self.__is_self(data):
            return

        self.logger.debug("Received discovery request from {}".format(
            data['source']))
        del data['server']
        self.client_discovered.notify(data)

    def __is_self(self, data):
        return data['data']['uuid'] == self.uuid
def test_getAllEvents_twoEvent_connected(self):
    """Two chained events are returned in insertion order."""
    db = get_db()
    first = Event(sx=10, sy=15, ex=20, ey=25, c_flag=10)
    second = Event(sx=20, sy=25, ex=50, ey=75, c_flag=10)
    for stored in (first, second):
        db.addEvent(stored)
    events = list(db.getAllEvents(500))
    db.close()
    self.assertEqual(first, events[0])
    self.assertEqual(second, events[1])
def submit_order(self, order):
    """Publish lifecycle events for a new order and hand it to matching."""
    account = self._env.get_account(order.order_book_id)
    self._env.event_bus.publish_event(
        Event(EVENT.ORDER_PENDING_NEW, account=account, order=order))
    if order.is_final():
        # Order reached a final state while PENDING_NEW was being handled
        # (presumably rejected by a listener) — nothing more to do.
        return
    order.active()
    self._env.event_bus.publish_event(
        Event(EVENT.ORDER_CREATION_PASS, account=account, order=order))
    self._match(account, order)
def __init__(self, event_types):
    """Track an instance count per event type and bind a handler for each."""
    super(InteractionEventManager, self).__init__()
    self.event_instances = dict()
    # Register one partially-applied handler per event type.
    for event_type in event_types:
        self.event_instances[event_type] = 0
        Event.register(event_type, self, partial(self.on_event, event_type))
def test_db_get_tail_two_event_disconnected(self):
    """get_tail returns the endpoint of the most recent (disconnected) event."""
    db = get_db()
    earlier = Event(sx=5, sy=10, ex=100, ey=150, c_flag=10)
    later = Event(sx=120, sy=130, ex=200, ey=300, c_flag=10)
    expected = Link(x=200, y=300, c_flag=10)
    db.addEvent(earlier)
    db.addEvent(later)
    result = db.get_tail(count=1)[0]
    db.close()
    self.assertEqual(expected, result)
def test_should_steer_following_set_course_event(self):
    """Steerer is invoked on set_course, then again on check_course with
    the updated heading and rate of turn."""
    # Tracking 204 with heading 200: deviation of -4 from the new course.
    self.averagely_tracking(204, 200)
    # Stay off-course for both steering attempts.
    self.steerer.on_course.side_effect = [False, False]
    self.exchange.publish(Event(EventName.set_course, heading=196))
    self.steerer.steer.assert_called_with(196, 200, -4)
    # New tracking values and rate of turn before the periodic re-check.
    self.averagely_tracking(200, 202)
    self.sensors.rate_of_turn_average = -20
    self.exchange.publish(Event(EventName.check_course))
    self.steerer.steer.assert_called_with(196, 202, -20)
def test_serialization(self):
    """Plain events and Event subclasses must round-trip through JSON."""
    plain = Event("MyEvent", a=1, b=2)
    self.assertEqual(from_json(plain.toJson()), plain)
    subclassed = PlayerJoined("Jeb")
    self.assertEqual(from_json(subclassed.toJson()), subclassed)
def test_should_fire_event_on_tick_if_its_past_its_time(self):
    """A tick after the deadline must trigger the deferred event."""
    self.listen(EventName.start)
    clock = Mock(side_effect=[145.6, 155.4])
    # Keep a reference: TimeShift subscribes itself to the exchange.
    shifter = TimeShift(self.exchange, clock)
    self.exchange.publish(
        Event(EventName.after, seconds=5,
              next_event=Event(EventName.start)))
    self.exchange.publish(Event(EventName.tick))
    self.assertEqual(self.event_count(EventName.start), 1,
                     "start should have been called")
    self.assertEqual(self.last_event.name, EventName.start)
def enterPlayAreaLoad(self, *args):
    """
    Loading the play area configures containers with the data required for
    the simulation. An alternative to loading the static play area as seen
    here, would be to load it from a configuration source and fill in all
    the data here (loading a saved simulation).
    :param args: unused FSM transition arguments
    :return: None
    """
    self.logger.info("[PlayAreaLoad] entering")

    # TODO: actor setup system (character creation)
    # Initialize all players.
    # food = BasicMealComponentContainer.component()
    # self.entity_component_manager[food.id] = food
    # food.load()
    # self.grid_model.insert((0,0), food.id)

    # TODO: Wire character creator, and generate characters here.
    # for _ in range(3):
    #     stat_set = self.character_creator.generate_stats_via_normals(0.5)
    #     entity_id = BeingModelContainer.entity_id()
    #     actor_model = self.character_creator.generate_full_character(entity_id, modified_stats=stat_set)
    #     # TODO: remove
    #     # actor_model = self.character_creator.generate_character_via_normals(0.5)
    #
    #     # TODO: have behavior come from container. wire managers, id, etc.
    #     behavior = HumanPlayerBehavior(actor_model.entity_id)
    #     actor_fsm = ActorFsmContainer.fsm(data_model=actor_model,
    #                                       behavior=behavior,
    #                                       action_resolver=self.action_resolver)
    #     actor = ActorComponentContainer.component(data_model=actor_model, fsm=actor_fsm)
    #
    #     self.entity_component_manager[actor.id] = actor
    #
    #     # TODO: move this to a different location (a place that manages what is displayed at what time)
    #     actor.load()
    #
    #     # Signal a stats change.
    #     RefreshStats.signal(actor.id)
    #
    #     actor_fsm.request('WaitForTurn')
    #
    #     loc = (random.randint(0,9), random.randint(0,9))
    #     self.grid_model.insert(loc, actor.id)

    # Spawn three AI-controlled characters through the event bus.
    for _ in range(3):
        Event.signal("generate_random_character", AiBehavior)
    # Let grid listeners redraw now that entities exist.
    Event.signal("notify_grid_update")

    # Trigger the turn management object to bootstrap
    self.turn_management_fsm.request("ManagerSetup")
def test_should_return_wind_relative_average_after_several_update_averages_events(
        self):
    """Two update_averages events fold 10.0 then 20.0 into the running
    average, starting from 0.0."""
    mock_angle = PropertyMock(side_effect=[10.0, 20.0])
    windsensor = Mock()
    type(windsensor).angle = mock_angle
    sensors = Sensors(StubGPS(), windsensor, self.compass, self.mock_time,
                      self.exchange, self.logger, DEFAULT_CONFIG)
    self.exchange.publish(Event(EventName.update_averages))
    self.exchange.publish(Event(EventName.update_averages))
    # NOTE(review): the trailing `, 0` is assertEqual's *msg* argument,
    # not a rounding digit — likely intended round(value, 0); confirm.
    self.assertEqual(sensors.wind_direction_relative_average,
                     round(((0.0 + 10) / 2 + 20) / 2), 0)
def test_should_only_fire_after_events_once(self):
    """A deferred event must fire on exactly one tick, never again."""
    self.listen(EventName.start)
    clock = Mock(side_effect=[145.6, 155.4, 166.2])
    # Keep a reference: TimeShift subscribes itself to the exchange.
    shifter = TimeShift(self.exchange, clock)
    self.exchange.publish(
        Event(EventName.after, seconds=5,
              next_event=Event(EventName.start)))
    self.exchange.publish(Event(EventName.tick))
    self.exchange.publish(Event(EventName.tick))
    fired = self.event_count(EventName.start)
    self.assertEqual(
        fired, 1,
        "start should have been called once, but was called {0} times".format(fired))
def convert_event(event):
    """Translate a pygame event into a (handled, internal Event) pair.

    Unrecognised event types yield (False, '').
    """
    kind = event.type
    if kind == pygame.QUIT:
        return True, Event(0, 0)
    if kind == pygame.KEYDOWN:
        return True, Event(1, event.key)
    if kind == pygame.MOUSEBUTTONUP:
        return True, Event(2, (event.button, event.pos))
    if kind >= pygame.USEREVENT:
        return True, Event(3, event.channel)
    return False, ''
def __init__(self, socket, address, my_uuid, remote_uuid): super().__init__() # self.daemon = True self.logger = logging.getLogger(__name__) self.packet_received = Event() self.disconnected = Event() self.address = address self.socket = socket self.my_uuid = my_uuid self.remote_uuid = remote_uuid self.__unpacker = msgpack.Unpacker()
def get(self):
    """Render the calendar page with all project events serialized for
    calendar.js."""
    proj_handler = ProjectsHandler(self.request, self.response)
    projects = ProjectsHandler.get_project_list(proj_handler)
    result = []
    if projects:
        colors = ["event-important", "event-success", "event-warning",
                  "event-info", "event-inverse", "event-special"]
        for proj in projects:
            # One random color per project.
            color = colors[randint(0, len(colors)-1)]
            events = Event.query(ancestor=proj.key).fetch()
            if events:
                for event in events:
                    json_event = {}
                    json_event['id'] = proj.key.urlsafe()
                    json_event['title'] = str(event.title)
                    json_event['url'] = '/projects/' + proj.key.urlsafe()
                    json_event['class'] = color
                    ''' Calendar.js sets date in utc format only, so i have to
                        substract millisec of 2 hours [(60*60*2 = 7200)*1000]
                        in order to be consistent with application time'''
                    json_event['start'] = str(int(time.mktime(event.start_time.timetuple()) * 1000) - 7200000)
                    json_event['end'] = str(int(time.mktime(event.end_time.timetuple()) * 1000) - 7200000)
                    result.append(json_event)
    # NOTE(review): original indentation was ambiguous — confirm the dump
    # is intended to run even when `projects` is empty (template then gets
    # the string "[]" rather than a bare list).
    result = json.dumps(result)
    self.render_template("calendar.html", {'events_list': result})
class FirefoxDevtoolsProtocol(asyncore.dispatcher):
    """asyncore client speaking the Firefox DevTools wire protocol:
    length-prefixed JSON packets of the form "<length>:<json>".

    NOTE(review): Python 2 code (print statements, str buffers).
    """

    def __init__(self):
        asyncore.dispatcher.__init__(self, map=protocol_map)
        self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
        # Raw I/O buffers for the length-prefixed framing.
        self.recv_buffer = ""
        self.send_buffer = ""
        self.on_connect = Event()
        self.on_packet = Event()

    def handle_connect(self):
        self.on_connect.emit()
        pass

    def handle_close(self):
        self.close()

    def handle_read(self):
        self.recv_buffer += self.recv(8192)
        try:
            # Packets look like "<length>:<payload>".
            length, remaining = self.recv_buffer.split(":", 1)
        except ValueError:
            # No ":" received yet — wait for more data.
            return
        length = int(length)
        if len(remaining) < length:
            # Partial payload — wait for more data.
            return
        packet = remaining[0:length]
        self.recv_buffer = remaining[length:]
        print "got " + packet
        self.on_packet.emit(json.loads(packet))

    def writable(self):
        # Only ask for write events while there is data queued.
        return len(self.send_buffer) > 0

    def handle_write(self):
        sent = self.send(self.send_buffer)
        self.send_buffer = self.send_buffer[sent:]

    def send_packet(self, packet):
        """Queue *packet* (any JSON-serializable object) for sending."""
        data = json.dumps(packet)
        print "sending " + data
        self.send_buffer += str(len(data)) + ":" + data
def __init__(self, port, version, uuid): self.logger = logging.getLogger(__name__) # Store UUID based on the hostname and current time to check # if the received broadcast packet is from self self.uuid = uuid self.client_discovered = Event() self.port = port self.version = version self.__init_network_objects()
def __init__(self, directory, messanger, file_name=None, block_size=4098):
    """Set up a file transfer in either direction.

    file_name is None  -> receiving from remote (FROM_REMOTE);
    otherwise          -> sending file_name to the remote (TO_REMOTE).
    """
    super().__init__()
    self.logger = logging.getLogger(__name__)

    if file_name is None:
        self.type = self.FROM_REMOTE
    else:
        self.type = self.TO_REMOTE

    self.directory = directory
    self.messanger = messanger
    self.timestamp = None

    self.file_name = file_name
    if file_name is not None:
        # Sending: look up our own index entry for the file.
        self.file_data = self.directory.get_index(self.file_name)
    else:
        self.file_data = None

    # NOTE(review): 4098 is an unusual default; 4096 may have been
    # intended — confirm before changing.
    self.block_size = block_size
    self.remote_file_data = None
    self.remote_checksums = None

    self.messanger.packet_received += self.__packet_received
    self.messanger.disconnected += self.__disconnected

    # Transfer lifecycle flags.
    self.__transfer_started = False
    self.__transfer_completed = False
    self.__transfer_cancelled = False

    # Handles/paths used while assembling the received file.
    self.__temp_file_name = None
    self.__temp_file_handle = None
    self.__file_handle = None

    # Notification events for observers.
    self.transfer_started = Event()
    self.transfer_completed = Event()
    self.transfer_failed = Event()
    self.transfer_cancelled = Event()
def __init__(self, messanger, directory):
    """Pair a remote messanger with a local directory and wire callbacks."""
    self.logger = logging.getLogger(__name__)

    self.messanger = messanger
    self.directory = directory
    self.directory.transfer_finalized += self.__transfer_finalized

    # Timestamp of the last index snapshot we sent to the remote.
    self.my_index_last_updated = 0
    self.remote_index = None

    self.address = self.messanger.address[0]
    self.my_uuid = self.messanger.my_uuid
    self.uuid = self.messanger.remote_uuid

    self.disconnected = Event()

    self.messanger.disconnected += self.__disconnected
    self.messanger.packet_received += self._packet_received
class Messanger(Thread):
    """ Delivers and receives packets to/from remote instances using TCP. """

    BUFFER_SIZE = 1024 * 1024
    CONNECT_TIMEOUT = 5

    def __init__(self, socket, address, my_uuid, remote_uuid):
        """Wrap an established socket; `socket` shadows the module here."""
        super().__init__()
        # self.daemon = True
        self.logger = logging.getLogger(__name__)
        # Fired per decoded packet / once on connection loss.
        self.packet_received = Event()
        self.disconnected = Event()
        self.address = address
        self.socket = socket
        self.my_uuid = my_uuid
        self.remote_uuid = remote_uuid
        # Streaming msgpack unpacker: fed raw bytes, yields packets.
        self.__unpacker = msgpack.Unpacker()

    @staticmethod
    def connect(address, my_uuid, remote_uuid):
        """Open a TCP connection and introduce ourselves with our UUID."""
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.settimeout(Messanger.CONNECT_TIMEOUT)
        sock.connect(address)
        sock.sendall(uuid.UUID(my_uuid).bytes)
        sock.settimeout(None)

        return Messanger(sock, address, my_uuid, remote_uuid)

    def disconnect(self):
        try:
            self.socket.shutdown(socket.SHUT_RDWR)
        except:
            pass

    def start_receiving(self):
        self.start()

    def run(self):
        """Receive loop: read, decode and dispatch packets until EOF/error."""
        with self.socket:
            while True:
                try:
                    data = self.socket.recv(self.BUFFER_SIZE)
                except:
                    break

                if not data:
                    # Remote closed the connection.
                    break

                processed = self.__handle_received_data(data)
                if not processed:
                    self.disconnect()
                    break

        # Notify observers once, then drop all handler references.
        self.disconnected.notify()
        self.disconnected.clear_handlers()
        self.packet_received.clear_handlers()

    def send(self, data):
        """msgpack-encode *data* and transmit it; disconnect on failure."""
        packet = msgpack.packb(data)

        try:
            # BUG FIX: socket.send() may transmit only part of the buffer,
            # which would corrupt the msgpack stream for the receiver;
            # sendall() guarantees the whole packet is sent (or raises).
            self.socket.sendall(packet)
        except Exception as ex:
            self.logger.error(
                "Couldn't send data to {}"
                .format(self.address[0])
            )
            self.logger.exception(ex)
            self.disconnect()

    def __handle_received_data(self, data):
        """Feed raw bytes to the unpacker and dispatch complete packets.

        Returns False when decoding or a handler fails (caller then drops
        the connection), True otherwise.
        """
        self.__unpacker.feed(data)

        for packet in self.__unpacker:
            try:
                unpacked_packet = bintools.decode_object(
                    packet,
                    except_keys=('hash', 'binary_data', 'checksums')
                )
            except Exception as ex:
                self.logger.error(
                    "Error trying to decode strings to utf-8 in packet from {}"
                    .format(self.address[0])
                )
                self.logger.exception(ex)
                return False
            else:
                try:
                    self.packet_received.notify(unpacked_packet)
                except Exception as ex:
                    self.logger.error("Error processing packet from {}"
                                      .format(self.address[0]))
                    self.logger.exception(ex)
                    return False

        return True
def __init__(self, conn):
    """Hold the connection and forward its packets to our handler."""
    # Root object is resolved later from the connection.
    self.root = None
    self.conn = conn
    self.conn.on_packet += self.on_packet
    self.on_connected = Event()
    self.pools = set()
class FileTransfer(threading.Thread):
    """One rsync-style file transfer (send or receive) over a Messanger.

    The sender computes a pyrsync2 delta against the receiver's block
    checksums and streams it; the receiver patches its local copy into a
    temp file. See __packet_received for the full message sequence.
    """

    # Transfer types
    FROM_REMOTE = 0
    TO_REMOTE = 1

    # Message types
    MSG_INIT = 0
    MSG_INIT_ACCEPT = 1
    MSG_CANCEL = 2
    MSG_BLOCK_DATA = 3
    MSG_DONE = 4
    MSG_DONE_ACCEPT = 5

    def __init__(self, directory, messanger, file_name=None, block_size=4098):
        """Create a transfer.

        directory -- the local Directory being synced.
        messanger -- dedicated Messanger for this transfer's traffic.
        file_name -- name of the file to SEND; None means we are the
            receiving side (type becomes FROM_REMOTE).
        block_size -- rsync block size in bytes.
        """
        super().__init__()
        self.logger = logging.getLogger(__name__)
        # The presence of file_name decides the direction of the transfer.
        if file_name is None:
            self.type = self.FROM_REMOTE
        else:
            self.type = self.TO_REMOTE
        self.directory = directory
        self.messanger = messanger
        self.timestamp = None  # sync time agreed in MSG_DONE_ACCEPT
        self.file_name = file_name
        if file_name is not None:
            self.file_data = self.directory.get_index(self.file_name)
        else:
            self.file_data = None
        self.block_size = block_size
        self.remote_file_data = None   # index entry sent by the remote
        self.remote_checksums = None   # receiver's block checksums (sender side)
        self.messanger.packet_received += self.__packet_received
        self.messanger.disconnected += self.__disconnected
        self.__transfer_started = False
        self.__transfer_completed = False
        self.__transfer_cancelled = False
        # Receiver-side file handles: patched output goes to the temp
        # file; the existing local file (or an empty BytesIO) is the base.
        self.__temp_file_name = None
        self.__temp_file_handle = None
        self.__file_handle = None
        # Observable lifecycle events.
        self.transfer_started = Event()
        self.transfer_completed = Event()
        self.transfer_failed = Event()
        self.transfer_cancelled = Event()

    def get_temp_path(self):
        # Path of the receiver-side temp file (None on the sender side).
        return self.__temp_file_name

    def initialize(self):
        # Begin reading packets on the dedicated connection.
        self.messanger.start_receiving()

    def is_done(self):
        return self.__transfer_cancelled or self.__transfer_completed

    def has_started(self):
        return self.__transfer_started

    def get_remote_uuid(self):
        return self.messanger.remote_uuid

    def shutdown(self):
        """Cancel the transfer from our side and release everything."""
        self.__transfer_cancelled = True
        self.transfer_cancelled.notify(self)
        self.messanger.send({
            "type": self.MSG_CANCEL
        })
        self.messanger.disconnect()
        self.__release_resources()

    def terminate(self):
        """Drop the connection and release resources without sending
        MSG_CANCEL (used when the other side already knows)."""
        self.messanger.disconnect()
        self.__release_resources()

    def __release_resources(self):
        # Close handles and delete the temp file; all steps are
        # idempotent so this can be called more than once.
        if self.__temp_file_handle is not None:
            self.__temp_file_handle.close()
            self.__temp_file_handle = None
        if self.__file_handle is not None:
            self.__file_handle.close()
            self.__file_handle = None
        if self.__temp_file_name is not None:
            self.directory.release_temp_file(self.__temp_file_name)
            self.__temp_file_name = None

    def start(self):
        """ Transfer a file to the remote end.

        Do not call this if a transfer request should be handled.
        Sends MSG_INIT; the actual data transfer begins when
        MSG_INIT_ACCEPT arrives (see __packet_received).
        """
        if self.type != self.TO_REMOTE:
            raise ValueError("Transfer was not created as TO_REMOTE type")
        self.__transfer_started = True
        self.transfer_started.notify(self)
        self.messanger.send({
            "type": self.MSG_INIT,
            "name": self.file_name,
            "data": self.file_data
        })

    def __transfer_file(self, remote_checksums, block_size):
        """Record the receiver's parameters and start the worker thread
        (run() below) that streams the delta."""
        self.logger.debug(
            "Started transferring file {} to remote {}"
            .format(self.file_name, self.messanger.address[0])
        )
        self.block_size = block_size
        self.remote_checksums = remote_checksums
        # threading.Thread.start — bypasses our overridden start() above.
        super().start()

    def run(self):
        """ Send the delta data to the remote side. """
        try:
            with open(self.directory.get_file_path(self.file_name), 'rb') \
                    as file:
                delta_generator = pyrsync2.rsyncdelta(
                    file,
                    self.remote_checksums,
                    blocksize=self.block_size,
                    max_buffer=self.block_size
                )
                # Actual transfer of data
                for block in delta_generator:
                    self.messanger.send({
                        "type": self.MSG_BLOCK_DATA,
                        "binary_data": block
                    })
        except Exception as ex:
            self.logger.exception(ex)
            self.logger.error(
                "File {} couldn't be read transferred to {}. Maybe it changed."
                .format(self.file_name, self.messanger.address[0])
            )
            self.shutdown()
        else:
            self.messanger.send({
                "type": self.MSG_DONE
            })

    def is_delete(self):
        """True when this transfer propagates a deletion rather than data.

        Looks at our own index entry when sending, and at the remote's
        entry when receiving.
        """
        if self.type == self.TO_REMOTE:
            return 'deleted' in self.file_data and self.file_data['deleted']
        else:
            return 'deleted' in self.remote_file_data and \
                self.remote_file_data['deleted']

    def __accept_file(self, file_name, file_data):
        """ Make sure the file needs to be transferred and accept it if
        it does.

        Opens the temp/base file handles (unless it's a delete) and
        replies MSG_INIT_ACCEPT with our block checksums; replies with
        shutdown() (MSG_CANCEL) when our copy is already up to date.
        """
        file_status = syncall.IndexDiff.compare_file(
            file_data,
            self.directory.get_index().get(file_name, None)
        )
        if file_status == syncall.index.NEEDS_UPDATE:
            self.file_name = file_name
            self.file_data = self.directory.get_index(self.file_name)
            self.remote_file_data = file_data
            if not self.is_delete():
                self.__temp_file_name = self.directory.get_temp_path(
                    self.file_name
                )
                self.__temp_file_handle = open(self.__temp_file_name, 'wb')
                # Base for patching: the current local file if present,
                # otherwise an empty in-memory stream.
                if os.path.exists(
                    self.directory.get_file_path(self.file_name)
                ):
                    self.__file_handle = open(
                        self.directory.get_file_path(self.file_name), 'rb'
                    )
                else:
                    self.__file_handle = BytesIO()
            self.__transfer_started = True
            self.transfer_started.notify(self)
            if self.is_delete():
                self.messanger.send({
                    "type": self.MSG_INIT_ACCEPT
                })
                self.logger.debug(
                    "Accepted a file delete request for {} from {}"
                    .format(file_name, self.messanger.address[0])
                )
            else:
                self.messanger.send({
                    "type": self.MSG_INIT_ACCEPT,
                    "block_size": self.block_size,
                    "checksums": self.directory.get_block_checksums(
                        self.file_name, self.block_size
                    )
                })
                self.logger.debug(
                    "Accepted a file transfer request for {} from {}"
                    .format(file_name, self.messanger.address[0])
                )
        else:
            self.logger.error(
                "File transfer requested for {} from {} shouldn't be updated"
                .format(file_name, self.messanger.address[0])
            )
            self.shutdown()

    def __packet_received(self, data):
        """ Message sequence should be:
        1. MSG_INIT | sender -> receiver
           - Contains file_name and file_data (index data)
        2. MSG_INIT_ACCEPT or MSG_CANCEL | receiver -> sender
           - Contains block_size and block checksums
        3. Multiple MSG_BLOCK_DATA | sender -> receiver
           - Contains the delta data for each block, in sequence
        4. MSG_DONE | sender -> receiver
           - No other data is going to be transfered
             (no more MSG_BLOCK_DATA)
        5. MSG_DONE_ACCEPT | receiver -> sender
           - The receiver successfuly received and processed the data
             and the file index for the file should be updated on both
             ends to reflect the sync time.
           - Contains `time` field with the current timestamp on the
             receiver machine. It's used to update both indexes to
             handle time offsets between the two machines.
           - The sender should close the connection after receiving
             this packet.

        If the transfer is supposed to delete a file then step 3 is
        skipped and the sender should send MSG_DONE immedeately after
        MSG_INIT_ACCEPT. The file itself should be deleted on the
        receiver after the MSG_DONE message and MSG_DONE_ACCEPT is sent
        if the delete is successful.

        MSG_CANCEL can be sent at any time from the receiver or the
        sender and the one that receives it should close the connection.

        If no MSG_CANCEL or MSG_DONE_ACCEPT message is received then the
        connection is regarded as closed unexpectedly and the transfer
        is considered failed.
        """
        if data['type'] == self.MSG_INIT:
            self.__accept_file(data['name'], data['data'])
        elif data['type'] == self.MSG_INIT_ACCEPT:
            if self.is_delete():
                self.logger.debug(
                    "Transferring delete of {} to {}"
                    .format(self.file_name, self.messanger.address[0])
                )
                # Deletes carry no data: go straight to MSG_DONE.
                self.messanger.send({
                    "type": self.MSG_DONE
                })
            else:
                self.__transfer_file(data['checksums'], data['block_size'])
        elif data['type'] == self.MSG_CANCEL:
            self.__transfer_cancelled = True
            self.terminate()
            self.transfer_cancelled.notify(self)
        elif data['type'] == self.MSG_BLOCK_DATA:
            if not self.__transfer_started:
                # Protocol violation: data before MSG_INIT was accepted.
                self.logger.error(
                    "Received data from {} for {}, but transfer not started"
                    .format(self.messanger.address[0], self.file_name)
                )
                self.terminate()
                return
            self.__data_received(data['binary_data'])
        elif data['type'] == self.MSG_DONE:
            self.__complete_transfer()
        elif data['type'] == self.MSG_DONE_ACCEPT:
            self.__transfer_completed = True
            # Adopt the receiver's timestamp so both indexes agree.
            self.timestamp = data['time']
            self.terminate()
            self.transfer_completed.notify(self)
        else:
            self.logger.error("Unknown packet from {}: {}".format(
                self.messanger.address[0],
                data['type']
            ))

    def __data_received(self, block):
        """Apply one delta block to the temp file; cancel on failure."""
        try:
            pyrsync2.patchstream_block(
                self.__file_handle,
                self.__temp_file_handle,
                block,
                blocksize=self.block_size
            )
        except Exception as ex:
            self.logger.exception(ex)
            self.logger.error(
                "Block couldn't be applied to temp file of {}. Remote: {}"
                .format(self.file_name, self.messanger.address[0])
            )
            self.shutdown()

    def __complete_transfer(self):
        """Receiver side: flush handles, mark completed, send
        MSG_DONE_ACCEPT with our timestamp."""
        self.timestamp = int(datetime.now().timestamp())
        if not self.is_delete():
            # Flush the file contents
            self.__file_handle.close()
            self.__file_handle = None
            self.__temp_file_handle.close()
            self.__temp_file_handle = None
        # Remote side should disconnect after MSG_DONE_ACCEPT
        self.__transfer_completed = True
        self.transfer_completed.notify(self)
        self.messanger.send({
            'type': self.MSG_DONE_ACCEPT,
            'time': self.timestamp
        })

    def __disconnected(self, data):
        # Connection dropped: if we neither cancelled nor completed,
        # the transfer failed unexpectedly.
        self.__release_resources()
        if not self.__transfer_cancelled and not self.__transfer_completed:
            self.transfer_failed.notify(self)
def __init__(self, agentIndex, prevId=None): Event.__init__(self, prevId) self.index = agentIndex
def getCalEvents(settings, service): """ Get list of events for our calendar. The Events are structured into a dict containing the following keys: date: Date in yyyy-mm-dd format start: Start time in hh:mm format - adjusted to UTC dur: Duration in hh:mm (optional) summary description (optional) location """ events = [] pageToken = None while True: # for each page e = service.events().list(calendarId=settings.calendarID, pageToken=pageToken).execute() for event in e['items']: # for all events this page # Unpack the event if ('extendedProperties' in event) and \ ('shared' in event['extendedProperties'] ) and \ ('X-MOZ-CATEGORIES' in event['extendedProperties']['shared']) and \ (event['extendedProperties']['shared']['X-MOZ-CATEGORIES'] == 'autoGen'): # Only pay attention to events that we automatically generated. These are # flagged with "autoGen" as shown above. s = event['start'] if 'date' in s: start = parseDateTime(s['date']) else: start = parseDateTime(s['dateTime']) # Adjust to meeting timezone start += timedelta(hours=settings.timeZoneOffset) s = event['end'] if 'date' in s: end = None else: end = parseDateTime(s['dateTime']) # Adjust to meeting timezone end += timedelta(hours=settings.timeZoneOffset) if 'summary' in event: summary = event['summary'] else: summary = '' if 'location' in event: location = event['location'] else: location = '' calEvent = Event(settings, start, end, summary, location, False, "") calEvent.id = event['id'] events.append(calEvent) pageToken = e.get('nextPageToken') if not pageToken: break return events
methods=["GET", "POST"]) app.add_url_rule('/videos/', view_func=Videos.as_view('videos'), methods=["GET", "POST"]) app.add_url_rule('/display_videos/', view_func=vidDisplay.as_view('display_videos'), methods=["GET", "POST"]) app.add_url_rule('/contact/', view_func=Contact.as_view('contact'), methods=["GET", "POST"]) app.add_url_rule('/donate/', view_func=Sponsors.as_view('support'), methods=["GET"]) app.add_url_rule('/members/', view_func=Members.as_view('members'), methods=["GET", "POST"]) app.add_url_rule('/events/', view_func=Event.as_view('events'), methods=["GET", "POST"]) @app.errorhandler(404) def page_not_found(error): return flask.render_template('404.html'), 404 app.debug = True if __name__ == "__main__": init_db() init_db_members() init_db_events() app.run()
def post(self): self.response.headers.add_header("Access-Control-Allow-Origin", "*") body = json.loads(self.request.body) userId = body["userId"] self.response.write(json.dumps(Event.getUserEvents(userId)))
def __str__(self): ss = Event.__str__(self) ss += '\n Frame packet: ' + str(self.frmpkt) ss += '\n Payload: ' + repr(self.payload) return ss
new_paths.append([node.label] + path) return new_paths leaf_categ_to_categ_path = {'untracked': ['untracked']} for path in onto_tree.transform(func): assert path[-1] not in leaf_categ_to_categ_path leaf_categ_to_categ_path[path[-1]] = path def event_to_dict(event): data = { 'begin': event.begin.isoformat(), 'end': event.end.isoformat(), 'category': leaf_categ_to_categ_path[event.category], } if event.comment: data['comment'] = event.comment return data event_dicts = [] for i, segment in enumerate(segments): # Add "untracked" event between segments. if i > 0: event = Event() event.begin = segments[i - 1][-1].end event.end = segment[0].begin event.category = 'untracked' event_dicts.append(event_to_dict(event)) for event in segment: event_dicts.append(event_to_dict(event)) with open("out.json", "w") as f: json.dump(event_dicts, f, indent=4)
class RemoteStore:
    """ Manages communication to a single remote SyncAll instance. """

    def __init__(self, messanger, directory):
        """Attach to a connected Messanger and the local Directory.

        messanger -- established Messanger to the remote instance.
        directory -- the local Directory whose index is being synced.
        """
        self.logger = logging.getLogger(__name__)
        self.messanger = messanger
        self.directory = directory
        # Keep our cached copy of the remote index fresh when a
        # transfer for this remote completes.
        self.directory.transfer_finalized += self.__transfer_finalized
        # Timestamp of the local index we last sent (0 = never sent).
        self.my_index_last_updated = 0
        self.remote_index = None  # remote's index; None until received
        self.address = self.messanger.address[0]
        self.my_uuid = self.messanger.my_uuid
        self.uuid = self.messanger.remote_uuid
        self.disconnected = Event()
        self.messanger.disconnected += self.__disconnected
        self.messanger.packet_received += self._packet_received

    def __transfer_finalized(self, data):
        """Directory callback: record the finalized file's index entry in
        our cached remote index (only for transfers to/from this remote)."""
        remote_uuid, file_name, file_data = data
        if self.uuid != remote_uuid:
            return
        if self.remote_index is not None:
            self.remote_index[file_name] = file_data

    def request_transfer(self, transfer_messanger):
        # Pass the transfer request to the transfer manager
        self.directory.transfer_manager.process_transfer(
            self,
            transfer_messanger
        )

    def index_received(self):
        return self.remote_index is not None

    def start_receiving(self):
        """Start the receive loop and push our index (without requesting
        one back — the remote does the same on its side)."""
        self.messanger.start_receiving()
        self.send_index(request=False)

    def send_index(self, request=True, force=False):
        """Send the full local index; optionally request theirs.

        When our index hasn't changed since the last send (and not
        forced), only MSG_INDEX_NO_CHANGE is sent.
        """
        if not force and \
                self.my_index_last_updated == self.directory.get_last_update():
            # Nothing to do here, index is already up-to-date
            # self.logger.debug(
            #     "Index update requested but there are no changes"
            # )
            self.messanger.send({
                'type': MSG_INDEX_NO_CHANGE
            })
            return
        self.my_index_last_updated = self.directory.get_last_update()
        self.messanger.send({
            'type': MSG_INDEX,
            'index': self.directory.get_index()
        })
        if request:
            self.messanger.send({
                'type': MSG_REQUEST_INDEX
            })

    def send_index_delta(self, changes, request=True):
        """ Send only the changed files (`changes`) index data to the
        remote.

        Use ONLY when ALL changed files are sent this way.
        Note: `changes` is mutated — entries the remote already has
        up to date are removed.
        """
        self.my_index_last_updated = self.directory.get_last_update()
        index = self.directory.get_index()
        if self.remote_index is not None:
            # Drop files the remote already has identical data for.
            for file_name in list(changes):
                if file_name in self.remote_index and \
                        index[file_name] == self.remote_index[file_name]:
                    changes.remove(file_name)
        if len(changes) == 0:
            return
        self.messanger.send({
            'type': MSG_INDEX_DELTA,
            'index': {file_name: index[file_name] for file_name in changes}
        })
        if request:
            self.messanger.send({
                'type': MSG_REQUEST_INDEX
            })

    def __disconnected(self, no_data):
        self.directory.transfer_manager.remote_disconnect(self)
        self.disconnected.notify(self)

    def disconnect(self):
        self.messanger.disconnect()

    def _packet_received(self, packet):
        """Dispatch an index-protocol packet from the remote."""
        if 'type' not in packet:
            self.logger.error("Received packet with no type from {}".format(
                self.address
            ))
            return
        # self.logger.debug("Received packet from {}: {}".format(
        #     self.address,
        #     packet['type']
        # ))
        if packet['type'] == MSG_INDEX:
            self.remote_index = packet['index']
            self.__remote_index_updated()
        elif packet['type'] == MSG_INDEX_DELTA:
            updates = False
            for file_name, file_data in packet['index'].items():
                if self.remote_index is None or \
                        file_name not in self.remote_index or \
                        self.remote_index[file_name] != file_data:
                    updates = True
                    # NOTE(review): if remote_index is still None this
                    # assignment raises — confirm a delta can't arrive
                    # before the full index.
                    self.remote_index[file_name] = file_data
            if updates:
                self.__remote_index_updated()
        elif packet['type'] == MSG_REQUEST_INDEX:
            self.send_index(request=False)
        elif packet['type'] == MSG_INDEX_NO_CHANGE:
            self.__remote_index_updated()
        else:
            self.logger.error("Unknown packet from {}: {}".format(
                self.address,
                packet['type']
            ))

    def __remote_index_updated(self):
        """Diff local vs remote index and schedule the needed transfers."""
        # self.logger.debug("{}'s index updated".format(self.address))
        diff = self.directory.diff(self.remote_index)
        if diff[2]:
            self.logger.debug(
                "File conflicts with {}: {}"
                .format(self.uuid, diff[2])
            )
        # TODO: Handle conflicted files
        # diff[0]: files to push; diff[1]: files to pull.
        self.directory.transfer_manager.sync_files(self, diff[0])
        self.directory.transfer_manager.sync_files(self, diff[1])
class FirefoxDevtoolsClient(object): def __init__(self, conn): self.root = None self.conn = conn self.conn.on_packet += self.on_packet self.on_connected = Event() self.pools = set() def add_pool(self, pool): self.pools.add(pool) def remove_pool(self, pool): self.pools.discard(pool) def pool_for(self, actor_id): for pool in self.pools: if pool.has_front(actor_id): return pool return None def get_front(self, actor_id): pool = self.pool_for(actor_id) if not pool: return None return pool.get_front(actor_id) def on_packet(self, packet): if not self.root: # Yeah these should be runtime errors. assert packet["from"] == "root" assert "applicationType" in packet from fronts import RootFront self.root = RootFront(self, packet) d = self.root.protocol_description() d.addCallback(self.register_actor_descriptions) d.addErrback(self.describe_failed) return if packet["from"] == "root": front = self.root else: front = self.get_front(packet["from"]) front.on_packet(packet) def describe_failed(self, e): print "Error listing actor descriptions: %s" % (e,) import protodesc #self.register_actor_descriptions(protodesc.actor_descriptions) def register_actor_descriptions(self, descriptions): for desc in descriptions["types"].values(): type_name = desc["typeName"] category = desc["category"] if category == "actor": t = get_type(desc["typeName"]) if isinstance(t, ActorType): concrete = t.cls elif isinstance(t, PlaceholderType) and t.concrete: concrete = t.concrete.cls else: concrete = type( str(type_name), (Front,), {"typeName": type_name}) concrete.implement_actor(desc) continue if type_exists(type_name): continue if category == "dict": add_type(DictType(type_name, desc["specializations"])) self.on_connected.emit(self) def send_packet(self, packet): self.conn.send_packet(packet)
class Directory:
    """
    Listens for file system changes in specific directory and applies
    changes from different sources.
    """

    # Regex of paths to skip when scanning (our own index/temp artifacts).
    IGNORE_PATTERNS = r'\.syncall_.*'

    def __init__(self, uuid, dir_path, index_name='.syncall_index',
                 load_index=True, temp_dir_name='.syncall_temp',
                 create_temp_dir=False):
        """Set up paths, locks and events; optionally load the saved index.

        uuid -- UUID string identifying the local instance.
        dir_path -- directory being synced.
        """
        self.logger = logging.getLogger(__name__)
        self.uuid = uuid
        self.dir_path = dir_path
        self.index_name = index_name
        self.index_path = os.path.join(self.dir_path, self.index_name)
        self.temp_dir = os.path.join(self.dir_path, temp_dir_name)
        self.last_update = datetime.now().timestamp()
        if create_temp_dir and not os.path.exists(self.temp_dir):
            os.mkdir(self.temp_dir)
        # Serializes index + file system access; NOT reentrant, so
        # methods must not call each other while holding it.
        self.fs_access_lock = threading.Lock()
        self.temp_dir_lock = threading.Lock()
        self.temp_files = set()
        self.transfer_manager = syncall.TransferManager(self)
        self.index_updated = Event()
        # Contains tuple(uuid, file_name, file_index) as data
        self.transfer_finalized = Event()
        if load_index:
            self.load_index()
        else:
            self._index = dict()

    def get_last_update(self):
        # Timestamp of the last local index change.
        return self.last_update

    def get_temp_path(self, proposed_name):
        """
        Return a path to a temp file that can be written to.
        Use `proposed_name` if it's available or modify it so it is.
        """
        proposed_name = os.path.basename(proposed_name)
        name = proposed_name
        file_suffix = 0
        with self.temp_dir_lock:
            # Find a free name by appending an increasing suffix.
            while os.path.isfile(os.path.join(self.temp_dir, name)):
                file_suffix += 1
                name = "{}-{}".format(proposed_name, file_suffix)
            file_path = os.path.join(self.temp_dir, name)
            # Create the file to avoid possible race conditions
            # after the with block
            with open(file_path, 'a+'):
                pass
            self.temp_files.add(file_path)
        return file_path

    def release_temp_file(self, path):
        """
        Remove a temp file created using `get_temp_path`.
        Best-effort: failures to delete are ignored.
        """
        if path in self.temp_files:
            try:
                os.remove(path)
            except:
                pass

    def clear_temp_dir(self):
        # Best-effort removal of every temp file we created.
        for path in self.temp_files:
            self.release_temp_file(path)

    def get_file_path(self, file_name):
        # Absolute path of a file tracked by this directory.
        return os.path.join(self.dir_path, file_name)

    def get_block_checksums(self, file_name, block_size):
        """Return pyrsync2 block checksums for a local file.

        Returns [] when the file is unknown or marked deleted.
        """
        with self.fs_access_lock:
            if file_name not in self._index:
                return []
            file_data = self._get_index_unsafe(file_name)
            if 'deleted' in file_data and file_data['deleted']:
                return []
            with open(self.get_file_path(file_name), 'rb') as file:
                block_checksums = list(pyrsync2.blockchecksums(
                    file,
                    blocksize=block_size
                ))
        return block_checksums

    def load_index(self):
        """Load the msgpack index from disk (empty dict if missing)."""
        with self.fs_access_lock:
            if os.path.isfile(self.index_path):
                with open(self.index_path, 'rb') as index_file:
                    index = msgpack.unpackb(index_file.read())
                # Decode the object to utf strings except the 'hash' values
                self._index = bintools.decode_object(
                    index,
                    except_keys=('hash',)
                )
            else:
                self._index = dict()
            self.last_update = datetime.now().timestamp()

    def get_index(self, file_name=None):
        # Thread-safe wrapper around _get_index_unsafe.
        with self.fs_access_lock:
            return self._get_index_unsafe(file_name=file_name)

    def _get_index_unsafe(self, file_name=None):
        # Caller must hold fs_access_lock.
        if file_name is None:
            return self._index
        elif file_name not in self._index:
            return None
        else:
            return self._index[file_name]

    def save_index(self):
        """Persist the index to disk (takes fs_access_lock)."""
        with self.fs_access_lock:
            index = msgpack.packb(self._index)
            with open(self.index_path, 'wb') as index_file:
                index_file.write(index)

    def update_index(self, save_index=True, force=False):
        """
        Update self._index (use the get_index() method to get it).
        Return True if index changed, False otherwise.
        NOTE(review): the docstring above claims a boolean return but
        the body has no return statement — confirm callers don't rely
        on the return value.

        The index structure is:

        <index> ::= {
            <file_name>: <file_info>,
            ...
        }
        <file_name> ::= file path relative to directory top
        <file_info> ::= {
            'sync_log': {
                <remote_uuid (as string)>: <timestamp>,
                ...
            },
            'last_update_location': <remote_uuid (or the local UUID) (str)>
            'last_update': <timestamp>,
            'hash': <md5 byte-string>,
            [optional 'deleted': (True|False)]
        }
        <timestamp> ::= Datetime in unix timestamp (seconds).
            Depends on the os time on the system on which the change
            happened.
        """
        changes = set()
        with self.fs_access_lock:
            # Mark everything as missing; the walk below clears the flag
            # (in _update_file_index) for files still present on disk.
            for file_data in self._index.values():
                file_data['not_found'] = True
            for dirpath, dirnames, filenames in os.walk(self.dir_path):
                for name in filenames:
                    file_path = pathext.normalize(
                        os.path.join(dirpath, name))
                    if not re.search(self.IGNORE_PATTERNS, file_path):
                        self._update_file_index(file_path, changes)
            # Mark each deleted file with the current timestamp
            # and UUID to avoid conflicts and to propagate properly
            timestamp = datetime.now().timestamp()
            for file_name, file_data in self._index.items():
                if 'not_found' in file_data:
                    del file_data['not_found']
                    if 'deleted' in file_data and file_data['deleted']:
                        # File has been deleted some time ago...
                        continue
                    # File has been deleted now
                    file_data['deleted'] = True
                    file_data['last_update'] = timestamp
                    file_data['last_update_location'] = self.uuid
                    file_data['hash'] = b''
                    sync_log = file_data.setdefault('sync_log', dict())
                    sync_log[self.uuid] = timestamp
                    changes.add(file_name)
        if changes:
            self.last_update = datetime.now().timestamp()
        # save_index() acquires fs_access_lock itself, so it must run
        # outside the with block (the lock is not reentrant).
        if save_index and changes:
            self.save_index()
        if force:
            self.index_updated.notify(None)
        elif changes:
            self.index_updated.notify(changes)

    def _update_file_index(self, file_path, changes):
        """Refresh the index entry for one on-disk file.

        Adds new files, re-hashes files whose mtime moved forward,
        un-deletes entries for files that reappeared, and clears the
        scan-time 'not_found' marker. Caller holds fs_access_lock.
        """
        relative_path = pathext.normalize(
            os.path.relpath(file_path, self.dir_path)
        )
        file_data = self._index.setdefault(relative_path, dict())
        if not file_data:
            # New file
            file_hash = bintools.hash_file(file_path)
            file_data['last_update'] = int(os.path.getmtime(file_path))
            file_data['hash'] = file_hash
            file_data['last_update_location'] = self.uuid
            sync_log = file_data.setdefault('sync_log', dict())
            sync_log[self.uuid] = file_data['last_update']
            changes.add(relative_path)
        elif int(os.path.getmtime(file_path)) > file_data['last_update']:
            # Check if file is actually changed or the system time is off
            file_hash = bintools.hash_file(file_path)
            if file_data['hash'] != file_hash:
                # File modified locally (since last sync)
                file_data['last_update'] = int(os.path.getmtime(file_path))
                file_data['hash'] = file_hash
                file_data['last_update_location'] = self.uuid
                sync_log = file_data.setdefault('sync_log', dict())
                sync_log[self.uuid] = file_data['last_update']
                changes.add(relative_path)
        if 'deleted' in file_data:
            # Entry was marked deleted but the file exists: it reappeared.
            file_data['last_update'] = datetime.now().timestamp()
            file_data['hash'] = bintools.hash_file(file_path)
            file_data['last_update_location'] = self.uuid
            sync_log = file_data.setdefault('sync_log', dict())
            sync_log[self.uuid] = file_data['last_update']
            changes.add(relative_path)
            del file_data['deleted']
        if 'not_found' in file_data:
            # File exists on disk; clear the scan marker.
            del file_data['not_found']

    def diff(self, remote_index):
        # (to_push, to_pull, conflicts) against a remote index.
        return IndexDiff.diff(self._index, remote_index)

    def finalize_transfer(self, transfer):
        """Apply a completed FileTransfer to the index (and, for
        incoming transfers, to the file system), then persist."""
        if transfer.type == syncall.transfers.FileTransfer.TO_REMOTE:
            self.__finalize_transfer_to_remote(transfer)
        else:
            self.__finalize_transfer_from_remote(transfer)
        self.save_index()

    def __finalize_transfer_to_remote(self, transfer):
        # We sent a file: record the remote's sync time in our index.
        with self.fs_access_lock:
            self.__update_index_after_transfer(
                transfer.file_name,
                self._get_index_unsafe(transfer.file_name),
                transfer.get_remote_uuid(),
                transfer.timestamp
            )
        self.index_updated.notify({transfer.file_name})

    def __finalize_transfer_from_remote(self, transfer):
        """We received a file: move the temp file into place (or delete
        the local file), unless our copy became newer meanwhile."""
        updated = False
        with self.fs_access_lock:
            diff = IndexDiff.compare_file(
                transfer.remote_file_data,
                self._get_index_unsafe(transfer.file_name)
            )
            if diff == NEEDS_UPDATE:
                if 'deleted' in transfer.remote_file_data and \
                        transfer.remote_file_data['deleted']:
                    try:
                        os.remove(self.get_file_path(transfer.file_name))
                    except:
                        # Already gone; nothing to do.
                        pass
                else:
                    try:
                        os.makedirs(
                            os.path.dirname(
                                self.get_file_path(transfer.file_name)
                            )
                        )
                    except:
                        # Parent directory already exists.
                        pass
                    # Update the actual file
                    shutil.move(
                        transfer.get_temp_path(),
                        self.get_file_path(transfer.file_name)
                    )
                # Update the file index
                self.__update_index_after_transfer(
                    transfer.file_name,
                    transfer.remote_file_data,
                    transfer.messanger.my_uuid,
                    transfer.timestamp
                )
                updated = True
            else:
                self.logger.debug(
                    "Skipping update of outdated file {} from {}"
                    .format(transfer.file_name, transfer.get_remote_uuid())
                )
        if updated:
            self.index_updated.notify({transfer.file_name})

    def __update_index_after_transfer(self, file_name, file_index, uuid,
                                      time):
        # Caller holds fs_access_lock.
        file_index['sync_log'][uuid] = time
        self._index[file_name] = file_index
        self.last_update = datetime.now().timestamp()
        self.transfer_finalized.notify((uuid, file_name, file_index))
def post(self):
    """Handle an incoming Telegram webhook update (GAE handler).

    Parses the JSON update Telegram POSTs to us, pulls out the message
    fields, and dispatches on the leading '/command' of the text.  All
    outgoing replies go through the nested helpers, which close over
    chat_id / message_id of the triggering request.

    Fixes vs. previous revision:
      * proxy_announce used "msg is not ''" identity checks on string
        literals, which are interning-dependent; replaced with equality.
      * /help built its reply via += in a loop; now ''.join.
      * text.lower() was recomputed per branch; hoisted into `cmd`.
    """
    urlfetch.set_default_fetch_deadline(60)
    body = json.loads(self.request.body)
    logging.info('request body:')
    logging.info(body)
    self.response.write(json.dumps(body))

    update_id = body['update_id']  # unused, but validates payload shape
    message = body['message']
    message_id = message.get('message_id')
    date = message.get('date')  # currently unused
    text = message.get('text')
    fr = message.get('from')
    chat = message['chat']
    chat_id = chat['id']

    if not text:
        logging.info('no text')
        return

    def proxy_announce(msg=None):
        # Announce to the fixed group chat, skipping empty/blank messages.
        if msg is not None and msg != '' and msg != ' ':
            resp = urllib2.urlopen(BASE_URL + 'sendMessage', urllib.urlencode({
                'chat_id': -35532931,
                'text': msg.encode('utf-8'),
                'disable_web_page_preview': 'true',
            })).read()

    def broadcast(msg=None):
        # Send a plain (non-reply) message to the current chat.
        resp = urllib2.urlopen(BASE_URL + 'sendMessage', urllib.urlencode({
            'chat_id': str(chat_id),
            'text': msg.encode('utf-8'),
            'disable_web_page_preview': 'true'
        })).read()

    def reply(msg=None, img=None):
        # Reply to the triggering message with text (msg) or a photo (img).
        if msg:
            resp = urllib2.urlopen(BASE_URL + 'sendMessage', urllib.urlencode({
                'chat_id': str(chat_id),
                'text': msg.encode('utf-8'),
                'disable_web_page_preview': 'true',
                'reply_to_message_id': str(message_id),
            })).read()
        elif img:
            resp = multipart.post_multipart(BASE_URL + 'sendPhoto', [
                ('chat_id', str(chat_id)),
                ('reply_to_message_id', str(message_id)),
            ], [
                ('photo', 'image.jpg', img),
            ])
        else:
            logging.error('No message or image specified')
            resp = None
        logging.info('send response:')
        logging.info(resp)

    # Only bot commands (leading '/') are handled; everything else is ignored.
    if not text.startswith('/'):
        return

    cmd = text.lower()  # commands are matched case-insensitively

    if cmd == '/start':
        reply('Bot enabled')
        setEnabled(chat_id, True)
    elif cmd == '/stop':
        reply('Bot disabled')
        setEnabled(chat_id, False)

    # Help command
    elif cmd == '/help':
        help_msgs = [
            '/reg or /register - Registers your username into the system\n',
            '/users - Displays all the registered users of the system\n',
            '/bus - Shows you a list of bus stops and the arrival timings for the buses\n',
            '/events - Shows you a list of current / upcoming events in Tembusu College',
        ]
        reply(''.join(help_msgs))

    ########## For registration related cases
    # To register a new user
    elif cmd == '/register' or cmd == '/reg':
        CONST_ERROR_ALR_REG = 0
        CONST_OPTION_REG_SUCCESS = 1
        CONST_RESPONSE = Register.signUp(fr['first_name'], fr['id'])
        if CONST_RESPONSE == CONST_ERROR_ALR_REG:
            reply('You are already registered!')
            return
        elif CONST_RESPONSE == CONST_OPTION_REG_SUCCESS:
            reply('Thank you ' + str(fr['first_name']) + ' for registering!')
            return
        else:
            reply('There was an error trying to register you. Please try again, or contact the admin.')
            return

    # To check for users that have already registered
    elif cmd == '/users' or cmd == '/user':
        list_of_users = Register.getAllUsers()
        reply(list_of_users)
    ########## End of registration related cases

    ########## For bus-related cases
    elif cmd == '/bus':
        bot_msg = 'Please select a bus stop:'
        bus_stops = ['/newtownsecsch', '/oppbuona']
        Keyboard.one_time_btn_keyboard(Utils.arrSeparator(bus_stops), chat_id, message_id, bot_msg)
        return
    elif cmd == '/newtownsecsch':
        path = 'ltaodataservice/BusArrival?BusStopID=19051&SST=True'
        buses_at_stop = Bus.getBusNums(path, '19051', fr['id'])
        output_msg = 'Please select a bus:'
        Keyboard.one_time_btn_keyboard(Utils.arrSeparator(buses_at_stop), chat_id, message_id, output_msg)
        return
    elif cmd == '/oppbuona':
        path = 'ltaodataservice/BusArrival?BusStopID=11369&SST=True'
        buses_at_stop = Bus.getBusNums(path, '11369', fr['id'])
        output_msg = 'Please select a bus:'
        Keyboard.one_time_btn_keyboard(Utils.arrSeparator(buses_at_stop), chat_id, message_id, output_msg)
        return
    elif '/bus' in cmd:
        # Free-form '/bus ...' queries; Bus.busTextParser classifies them.
        CONST_ERROR_BUS_STOP_NAN = 0  # Bus stop number is NaN
        # CONST_ERROR_BUS_NUM_NAN = 2 removed: bus numbers may contain
        # alphabets, e.g. 70M.
        CONST_OPTION_GET_BUSES = 1  # List buses for a given bus stop
        CONST_OPTION_GET_BUS_TIMING = 3  # Arrival timing for bus at a stop
        CONST_OPTION_GET_SHORTCUT = 4  # Timings for favourited buses

        option_id = Bus.busTextParser(cmd)
        if option_id == CONST_ERROR_BUS_STOP_NAN:
            error_msg_reply = 'Please enter a bus stop NUMBER.'
            Keyboard.norm_keyboard_reply(chat_id, message_id, error_msg_reply)
            return
        elif option_id == CONST_OPTION_GET_SHORTCUT:
            user_query = (User.query()).fetch()
            for q in user_query:
                if q.user_id == fr['id']:
                    # Found the user; which stop were they last asking about?
                    bus_stop_num = q.user_bus_stop_reply
                    if bus_stop_num != 0:
                        bus_num = cmd.split(' ', 1)[1]
                        path = '/ltaodataservice/BusArrival?BusStopID=' + str(bus_stop_num) + '&ServiceNo=' + bus_num + '&SST=True'
                        bus_arr_timing_msg = Bus.getBusTiming(path)
                        Keyboard.norm_keyboard_reply(chat_id, message_id, bus_arr_timing_msg)
                        # Reset the stored "current bus stop" and persist.
                        q.user_bus_stop_reply = 0
                        q.put()
                        return
                    else:
                        # No stop on record for this shortcut.
                        Keyboard.norm_keyboard_reply(chat_id, message_id, 'Tembotsu does not know which bus stop you are referring to, check your bus timing again.')
                        return
            # User not in the datastore: prompt them to register first.
            str_reply = 'I\'m sorry but I can\'t find you in our database! Please type /reg to register before using the rest of the bot!'
            Keyboard.norm_keyboard_reply(chat_id, message_id, str_reply)
            return
        elif option_id == CONST_OPTION_GET_BUSES:
            bus_stop_num = cmd.split(' ')[1]
            path = '/ltaodataservice/BusArrival?BusStopID=' + bus_stop_num + '&SST=True'
            buses_at_stop = Bus.getBusNums(path, bus_stop_num, fr['id'])
            if len(buses_at_stop) == 0:
                # No buses serve this stop.
                str_reply = 'There are no buses for bus stop number: ' + str(bus_stop_num)
                Keyboard.norm_keyboard_reply(chat_id, message_id, str_reply)
                return
            else:
                output_msg = 'Please select a bus:'
                Keyboard.one_time_btn_keyboard(Utils.arrSeparator(buses_at_stop), chat_id, message_id, output_msg)
                return
        elif option_id == CONST_OPTION_GET_BUS_TIMING:
            bus_stop_num = cmd.split(' ')[1]
            bus_num = cmd.split(' ')[2]
            path = '/ltaodataservice/BusArrival?BusStopID=' + bus_stop_num + '&ServiceNo=' + bus_num + '&SST=True'
            bus_arr_timing_msg = Bus.getBusTiming(path)
            Keyboard.norm_keyboard_reply(chat_id, message_id, bus_arr_timing_msg)
            return
    ########## End of bus-related cases

    ########## Start of Tembusu event-scraping cases
    elif cmd == '/events':
        all_evts = (Event.query().order(Event.event_date)).fetch()
        if len(all_evts) != 0:
            # There are events to show, one block per event.
            str_reply = 'These are the current / upcoming events in Tembusu College:\n\n'
            for q in all_evts:
                str_reply += (q.event_name + '\n' + q.event_date + '\n' + q.event_link + '\n\n')
            Keyboard.norm_keyboard_reply(chat_id, message_id, str_reply)
            return
        else:
            str_reply = 'There are currently no ongoing / upcoming events in Tembusu College.'
            Keyboard.norm_keyboard_reply(chat_id, message_id, str_reply)
            return
    ########## End of Tembusu event-scraping cases

    ########## Start of bot proxy announcement cases
    # '/a <message>' relays <message> to the announcement group chat,
    # admin-only (hard-coded Telegram user id).
    elif cmd.split(" ", 1)[0] == '/a':
        if fr['id'] == 22595307:
            if len(cmd.split(" ", 1)) > 1:
                # Forward the original-case text, not the lowered copy.
                to_announce = text.split(" ", 1)[1]
                proxy_announce(to_announce)
                return
    ########## End of bot proxy announcement cases

    else:
        reply('Please enter a valid command. Type /help for more information.')
        return