def __init__(
    self,
    config,
    name,
    onAccount=None,
    onOrderMatched=None,
    onOrderPlaced=None,
    onMarketUpdate=None,
    onUpdateCallOrder=None,
    ontick=None,
    bitshares_instance=None,
    *args,
    **kwargs
):
    """Initialise one configured bot: storage, state machine and events.

    :param config: full configuration dict; this bot's section is read
        from ``config["bots"][name]``
    :param name: key of this bot inside ``config["bots"]``
    :param onAccount..ontick: optional callables; each is appended to the
        matching event hook when given
    :param bitshares_instance: explicit BitShares connection; falls back
        to the shared singleton
    """
    # BitShares instance (shared singleton unless one is injected)
    self.bitshares = bitshares_instance or shared_bitshares_instance()

    # Storage
    Storage.__init__(self, name)

    # Statemachine
    StateMachine.__init__(self, name)

    # Events
    Events.__init__(self)

    # Attach any caller-supplied listeners to the event hooks.
    if ontick:
        self.ontick += ontick
    if onMarketUpdate:
        self.onMarketUpdate += onMarketUpdate
    if onAccount:
        self.onAccount += onAccount
    if onOrderMatched:
        self.onOrderMatched += onOrderMatched
    if onOrderPlaced:
        self.onOrderPlaced += onOrderPlaced
    if onUpdateCallOrder:
        self.onUpdateCallOrder += onUpdateCallOrder

    # Redirect this event to also call order placed and order matched
    self.onMarketUpdate += self._callbackPlaceFillOrders

    self.config = config
    self.bot = config["bots"][name]
    self._account = Account(
        self.bot["account"], full=True, bitshares_instance=self.bitshares
    )
    self._market = Market(
        config["bots"][name]["market"], bitshares_instance=self.bitshares
    )

    # Settings for bitshares instance
    self.bitshares.bundle = bool(self.bot.get("bundle", False))

    # disabled flag - this flag can be flipped to True by a bot and
    # will be reset to False after reset only
    self.disabled = False
class EventsTest(unittest.TestCase):
    """Unit tests for the Events publish/subscribe helper."""

    def setUp(self):
        self._events = Events()

    def test_add_listener(self):
        # Two subscribers on the same event both receive the payload.
        first = Subscriber()
        second = Subscriber()
        for subscriber in (first, second):
            self._events.add_listener("test", subscriber)
        self._events.fire_event("test", y=2)
        self.assertEqual(2, first.x)
        self.assertEqual(2, second.x)

    def test_remove_listener(self):
        # A removed subscriber no longer receives events.
        subscriber = Subscriber()
        self._events.add_listener("test", subscriber)
        self._events.remove_listener("test", subscriber)
        self._events.fire_event("test", y=2)
        self.assertEqual(0, subscriber.x)

    def test_decorator(self):
        # register_listener wires a plain function; fire_event invokes it.
        self.x = 0

        @register_listener('test')
        def test():
            self.x = 1

        fire_event('test')
        self.assertEqual(1, self.x)
def __init__(self, app_id, app_key):
    """Create an API client authenticated with *app_id* / *app_key*."""
    self._app_id = app_id
    self._app_key = app_key
    # Per-application persisted settings, keyed by the application id.
    self._settings = Settings(self._app_id)
    # One shared HTTP session; Auth is attached so every request signs itself.
    self._session = requests.Session()
    self._session.auth = Auth(self, self._settings)
    self._last_location = None
    # Event hooks callers can subscribe to.
    self._service_exception = Events()
    self._authentication_needed = Events()
    self._connection_changed = Events()
    # Background retry thread (created here, not started in this method).
    self._connection_retry = Thread(target=self.__connection_retry_method)
    self._connection_level = Connection.on
def run(db_session, strain): users = Users(db_session) events = Events(db_session) while (1): # Very slow poppin' from the DB so # we build a nice queue of events event = events.pop(strain) if event == None: continue print "ENGINE: Event Strain: '{strain}'".format(strain=event.strain) if event.strain == "request_user": user = event.payload_dec users.create(user['name'], user['email']) events.push("created_user", user) print "ENGINE: User '{email}' created.".format(email=user['email']) elif event.strain == "destroy_user": user = event.payload_dec users.destroy(user['id']) events.push("destroyed_user", user) print "ENGINE: User '{id}' destroyed.".format(id=user['id']) elif event.strain == "update_user": user = event.payload_dec users.update(user) events.push("updated_user", user) print "ENGINE: User '{email}' updated.".format(email=user['email']) sleep(5)
class Counter:
    """Accumulate numbers and fire ``on_change`` once the total crosses
    a threshold.

    The threshold was previously hard-coded to 10; it is now a
    constructor parameter defaulting to 10, so existing callers are
    unaffected.
    """

    def __init__(self, threshold=10):
        """:param threshold: total above which threshReached() fires."""
        self.total = 0
        self.threshold = threshold
        self.events = Events()

    def add(self, x):
        """Add *x* to the running total; fire the event when above threshold."""
        self.total += x
        if self.total > self.threshold:
            self.threshReached()

    def threshReached(self):
        # Notify all subscribers with the current total.
        self.events.on_change(self.total)

    def addCallBack(self, fn):
        """Subscribe *fn* to the threshold event."""
        self.events.on_change += fn
def __init__(self, name=None):
    """Initialise a machine with a (possibly random) name.

    :param name: machine identifier; a random 8-char string when omitted
    """
    # Color used for this machine's log output.
    self.log_color = 'red'
    self.name = name or random_string(8)
    self.events = Events()
    # NOTE(review): publishes this instance's events on the module-level
    # global `events.e` -- later machines overwrite earlier ones; confirm
    # this shared mutation is intended.
    events.e = self.events
    self.nodes = Nodes()
    self.register = Register(machine=self, events=self.events)
def __init__(self):
    """Boot the game: graphics, sound, menus, clock, and the menu loop."""
    Events.__init__(self)
    # Constant downward acceleration applied each physics step.
    self.gravity = Vector(0.0, 0.1)
    self.init_graphics()
    self.init_sound()
    Menu.init_menus(self)
    self.current_menu = "welcome_menu"
    self.camera = 0
    self.clock = pygame.time.Clock()
    self.time = pygame.time
    self.last_time = 0
    self.take_screenshot = False
    # Not in a running game until the player starts one.
    self.ingame = False
    SoundEffect.play_music("menu.mp3")
    # Per-frame handler; starts on the menu screen.
    self.next_step = self.menu_handler
    self.load_background("welcome")
def __init__(self, key, access, cluster):
    """Wire a cluster node to Redis and CloudWatch using EC2 metadata.

    :param key: AWS access key id
    :param access: AWS secret access key
    :param cluster: name (and identity) of the cluster (the master)
    """
    try:
        url = "http://169.254.169.254/latest/meta-data/"
        # Fetched to verify metadata access; the value itself is unused.
        public_hostname = urlopen(url + "public-hostname").read()
        zone = urlopen(url + "placement/availability-zone").read()
        # Availability zone minus its trailing letter is the region name.
        region = zone[:-1]
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are not swallowed; outside EC2 the metadata service is absent.
        sys.exit("We should be getting user-data here...")

    # the name (and identity) of the cluster (the master)
    self.cluster = cluster

    self.redis = redis.StrictRedis(host='localhost', port=6379)

    endpoint = "monitoring.{0}.amazonaws.com".format(region)
    region_info = RegionInfo(name=region, endpoint=endpoint)
    self.cloudwatch = CloudWatchConnection(key, access, region=region_info)
    self.namespace = '9apps/redis'

    self.events = Events(key, access, cluster)

    # get the host, but without the logging
    self.host = Host(cluster)
    self.node = self.host.get_node()
class Pynsour:
    """Top-level IRC bot application: loads config, wires events, runs."""

    def __init__(self, config_file):
        """Load *config_file* into a fresh bot; exit if the file is missing."""
        if not os.path.exists(config_file):
            # A missing config should raise an exception, but for the time
            # being we keep the original behaviour: report and exit.
            print("Config file '%s' does not exist!" % config_file)
            sys.exit(1)
        # Build the bot and push the parsed configuration into it.
        self.bot = bot.Bot()
        configuration = Config(config_file)
        configuration.push(self.bot)
        self.events = Events(self.bot)

    def run(self):
        """Connect the bot and enter the event loop."""
        self.bot.connect()
        self.events.run()
def __init__(self):
    """Initialise the Gig Finder application window and data sources."""
    # Last known GPS fix; unset until the location updater reports one.
    self.lat = None
    self.long = None
    # Search radius; kept as a string for the UI widget.
    self.distance = '10'
    self.date = date.today()
    self.banner = None
    self.location = LocationUpdater()
    self.events = Events()
    self.win = hildon.StackableWindow()
    self.app_title = "Gig Finder"
def __init__(self, debugflag, readingInterval, initialPice=0):
    """Initialise the predicter state.

    :param debugflag: enable debug output
    :param readingInterval: seconds between price readings
    :param initialPice: starting price (the parameter name keeps the
        original misspelling of "initialPrice" for interface stability)
    """
    self.debugflag = debugflag
    self.valuesSoFar = OrderedReadingsArray()
    self.initialPrice = initialPice
    # Consecutive readings that yielded no buy/sell decision.
    self.notDecisiveReadings = 0
    self.readingInterval = readingInterval
    # Separate event channels for sell and buy signals.
    self.sellEvents = Events()
    self.buyEvents = Events()
    self.increasses = 0
    self.decreases = 0
    self.tendency = ""
def __init__(self, name=None, title=None, msg=None, parent=None):
    """Base alarm/action object.

    :param name: identifier; defaults to the subclass name
    :param title: human-readable title
    :param msg: message associated with the alarm
    :param parent: owning object, if any
    """
    self.name = name or self.__class__.__name__
    self._logger = logging.getLogger("gsensors.%s" % self.name)
    self.parent = parent
    self.title = title
    self.msg = msg
    self.id = 0
    self.events = Events()
    self.last_change = None  # datetime on last change
    self.state = 0  # 0: no alarm
def __init__(self, name=None, unit=None, timeout=None):
    """Base data source.

    :param name: identifier; defaults to the subclass name
    :param unit: unit string for the published value
    :param timeout: staleness timeout; only stored when provided, so a
        class-level default (if any) is preserved otherwise
    """
    self.name = name or self.__class__.__name__
    self._logger = logging.getLogger("gsensors.%s" % self.name)
    self.events = Events()
    self.unit = unit
    self._value = 0
    self._error = None
    # Only override a class-level timeout when one is explicitly given.
    if timeout is not None:
        self.timeout = timeout
    self.last_update = None  # datetime on last update
def __init__(self, subroutine):
    """Register BMS command handlers for one subroutine.

    Registration order matters: 0x80 and 0x88 first fall inside the
    note-on/note-off ranges and are then re-bound to the pause handler.
    """
    self.subroutine = subroutine
    self.bms_events = Events()
    # Note-on and note-off ranges -- TODO confirm whether
    # _add_eventhandler_range treats the end value as exclusive.
    self._add_eventhandler_range(0x00, 0x80, self.event_handle_note_on)
    self._add_eventhandler_range(0x81, 0x88, self.event_handle_note_off)
    # Pauses share two opcodes; these override any range registration.
    self._add_eventhandler(0x80, self.event_handle_pause)
    self._add_eventhandler(0x88, self.event_handle_pause)
    self._add_eventhandler(0xFF, self.event_handle_endoftrack)
    # Any opcode still unbound is routed to the "unknown" handler.
    self._fill_undefined_events(0x00, 0xFF, self.event_handle_unknown)
def __init__(self, *args, **kwargs):
    """Initialise bot state and install SIGINT/SIGTERM shutdown handlers."""
    self._args = args
    self._kwargs = kwargs
    self.run_mainloop_forever = True
    self.events = Events()
    self.triggers = []
    # Output limits: max characters per line / max lines per burst.
    self.max_line_chars = 440
    self.max_spam_lines = 5
    self.logger = logging.getLogger(__name__)

    def shutdown_handler(signum, frame):
        # Closure over self: stop the main loop and quit cleanly.
        self.run_mainloop_forever = False
        self.logger.info("Caught shutdown signal, shutting down.")
        self.quit("Caught shutdown signal, shutting down.")

    # Attach management signals
    signal.signal(signal.SIGINT, shutdown_handler)
    signal.signal(signal.SIGTERM, shutdown_handler)
class ChannelMultiplexer(object):
    """Multiplexes many logical channels over one underlying Events socket.

    Thin delegation layer: every call forwards to the wrapped Events
    instance; ``channel()`` creates a logical sub-channel.

    Fix: ``create_event`` and ``emit`` used the mutable default argument
    ``xheader={}`` -- replaced with a ``None`` sentinel (same observable
    behaviour, no shared-default hazard).
    """

    def __init__(self, zmq_socket_type=zmq.ROUTER):
        self._events = Events(zmq_socket_type)
        self._channels = {}

    @property
    def socket(self):
        return self._events.socket

    @property
    def channels(self):
        return self._channels

    def bind(self, endpoint):
        self._events.bind(endpoint)

    def close(self):
        self._events.close()

    def poll(self, poll_timeout=1):
        return self._events.poll(poll_timeout)

    def recv(self):
        return self._events.recv()

    def create_event(self, name, args, xheader=None):
        # Fresh dict per call instead of a shared mutable default.
        if xheader is None:
            xheader = {}
        return self._events.create_event(name, args, xheader)

    def emit_event(self, event, identity=None):
        return self._events.emit_event(event, identity)

    def emit(self, name, args, xheader=None):
        if xheader is None:
            xheader = {}
        return self._events.emit(name, args, xheader)

    def channel(self, freq=5, from_event=None):
        return Channel(self, freq, from_event)
def __init__(self, race_room, race_info, condordb):
    """Set up one race in the given room.

    :param race_room: channel/room the race runs in
    :param race_info: race type descriptor -- see RaceInfo
    :param condordb: database handle
    """
    self.room = race_room
    self.events = Events()
    self.condordb = condordb
    self.race_info = (
        race_info
    )  # Information on the type of race (e.g. seeded, seed, character) -- see RaceInfo for details
    self.racers = dict()  # a dictionary of racers indexed by user id
    self._status = RaceStatus["uninitialized"]  # see RaceStatus
    self.no_entrants_time = (
        None
    )  # whenever there becomes zero entrance for the race, the time is stored here; used for cleanup code
    self._countdown = int(0)  # the current countdown (TODO: is this the right implementation? unclear what is best)
    self._start_time = float(0)  # system clock time for the beginning of the race (but is modified by pause())
    self._start_datetime = None  # UTC time for the beginning of the race
    self._pause_time = float(0)  # system clock time for last time we called pause()
    self._countdown_future = None  # The Future object for the race countdown
    self._finalize_future = None  # The Future object for the finalization countdown
class SubroutineEvents(object):
    """Dispatch table mapping BMS command IDs to handler callables."""

    def __init__(self):
        self.subroutine = None
        self.BMSevents = Events()
        self._offset = 0

    def handleNextCommand(self, midiSheduler, ignoreUnknownCMDs=False, strict=True):
        """Parse the next subroutine command and check for a handler.

        NOTE(review): when a handler IS registered this method only
        `pass`es -- the handler is never invoked. Looks like a bug or
        unfinished code; confirm against callers before changing.
        """
        cmdData = self.subroutine._parse_next_command(strict)
        cmdID, args = cmdData
        # NOTE(review): `self.subroutine.bm` looks like a truncated
        # attribute name -- confirm against the Subroutine class.
        self._offset = self.subroutine.bm
        if cmdID in self.BMSevents._events_:
            pass
        elif not ignoreUnknownCMDs:
            raise RuntimeError("Cannot handle Command ID {0} with args {1}"
                               "".format(cmdID, args))

    def addEventHandler(self, ID, func):
        # Register *func* for a single command ID.
        self.BMSevents.addEvent(ID, func)

    def addEventHandlerRange(self, start, end, func):
        # Register *func* for every ID in [start, end) -- xrange excludes end.
        for i in xrange(start, end):
            self.BMSevents.addEvent(i, func)

    def fillUndefinedEvents(self, start, end, func):
        # Register *func* only where no handler exists yet (same range rule).
        for i in xrange(start, end):
            if i not in self.BMSevents._events_:
                self.BMSevents.addEvent(i, func)

    def event_handleNote(self, midiSheduler, cmdID, args, strict):
        """Handle a note-on command (cmdID doubles as the note number).

        NOTE(review): references `self.strict`, `curr` and `tick`, none of
        which are defined in this class or scope -- this will raise
        NameError/AttributeError if reached; confirm with the full file.
        """
        note = cmdID
        polyID, volume = args
        if polyID > 0x7 and self.strict:
            raise RuntimeError("Invalid Polyphonic ID 0x{x:0} at offset 0x{x:1}"
                               "".format(polyID, curr))
        elif polyID > 0x7:
            # Well, we will skip this invalid note and hope that
            # everything will go well.
            return
        self.subroutine.set_polyphID(cmdID, polyID)
        midiSheduler.note_on(self.subroutine.current_trackID, tick, note, volume)

    def event_handleUnknown(self, midiSheduler, cmdID, args, strict):
        # We cannot do anything if we don't know what the piece of data does
        pass
def __init__(self, condor_module, condor_match, race_channel): self.channel = race_channel #The channel in which this race is taking place self.is_closed = False #True if room has been closed self.match = condor_match self.events = Events() self.race = None #The current race self.recorded_race = False #Whether the current race has been recorded self.entered_racers = [] #Racers that have typed .here in this channel self.before_races = True self.cancelling_racers = [] #Racers that have typed .cancel self._cm = condor_module self.command_types = [command.DefaultHelp(self), Here(self), Ready(self), Unready(self), Done(self), Undone(self), Cancel(self), #Forfeit(self), #Unforfeit(self), #Comment(self), #Igt(self), Time(self), Contest(self), ForceCancel(self), ForceChangeWinner(self), #ForceClose(self), ForceForfeit(self), #ForceForfeitAll(self), ForceRecordRace(self), ForceNewRace(self), ForceCancelRace(self), ForceRecordMatch(self), #Kick(self), ]
def __init__(self, wrapper=None, config='config.ini', path=''):
    """Initialise one bot instance under its wrapper.

    :param wrapper: owning wrapper instance (or None standalone)
    :param config: profile config filename
    :param path: profile directory path
    """
    config_class.__init__(self)
    self.bot_start = time.time()
    self.bot_connected = -1
    self.want_connected = False
    self.ui_loaded = False
    self.def_config = config
    self.cfg_file = ['global.ini', config]
    self.path = path
    self.status = {'connected': False}
    self.wrapper = wrapper
    self.events = Events()
    # Event wiring -- TODO confirm the positional meaning of the numeric
    # arguments against Events.add (defined elsewhere in the project).
    self.events.add('IO', 0, 0, 'addchat', self.print_text)
    self.events.add('IO', 1000, 0, 'send', self.display_send)
    self.events.add('bot', -1, 0, 'disc', self.disced,
                    'connect', self.conn_clock,
                    'connected', self.connected,
                    'configure', self.edit_config,
                    'load_config', self.load_spec_config)
    self.events.add('ui', 0, 0, 'start', self.add_menus)
    self.load_spec_config()
    # NOTE(review): `reload_connfig` looks misspelled -- it must match a
    # method of this class defined elsewhere; confirm before renaming.
    self.reload_connfig()
    self.plugins = extensions.plugins(self)
    self.plugins.load_classes(order=self.config['plugins'])
    self.events.call('bot', 'start')
def __init__(self):
    """Wrapper start-up (Python 2): global config, plugins, then bots.

    NOTE(review): this body duplicates Raptor.__init__ elsewhere in the
    file -- keep the two in sync (or remove one).
    """
    config_class.__init__(self)
    self.sockets = {}  #Sockets that will be polled by an inter-bot select statement.
                       #Functions to be called when self.sockets has received data
    self.bots = []  #List of bot instances
    self.events = Events()
    self.def_config = 'profiles' + sep + 'global' + sep + 'config.ini'
    self.cfg_file = [self.def_config]
    self.load_config(self.cfg_file)
    if ('plugins' in self.config) == False:
        self.config['plugins'] = {}
    self.plugins = extensions.plugins(self)
    self.plugins.load_classes('wrap', order=self.config['plugins'])
    self.load_bots()
    self.events.call('wrap', 'start')
    # NOTE(review): `start` is a module-level timestamp set elsewhere.
    print 'Bot(s) loaded in ' + str(time.clock() - start) + ' seconds'
    self.recv_all()
class Raptor(config_class):
    """Top-level wrapper (Python 2): loads global config, all bot
    profiles, then polls all bot sockets in one select loop."""

    def __init__(self):
        config_class.__init__(self)
        self.sockets = {}  #Sockets that will be polled by an inter-bot select statement.
                           #Functions to be called when self.sockets has received data
        self.bots = []  #List of bot instances
        self.events = Events()
        self.def_config = 'profiles' + sep + 'global' + sep + 'config.ini'
        self.cfg_file = [self.def_config]
        self.load_config(self.cfg_file)
        if ('plugins' in self.config) == False:
            self.config['plugins'] = {}
        self.plugins = extensions.plugins(self)
        self.plugins.load_classes('wrap', order=self.config['plugins'])
        self.load_bots()
        self.events.call('wrap', 'start')
        # NOTE(review): `start` is a module-level timestamp set elsewhere.
        print 'Bot(s) loaded in ' + str(time.clock() - start) + ' seconds'
        self.recv_all()

    def load_bots(self):
        """Build the bot list from argv and/or the loadlist file."""
        del sys.argv[0]  #sys.argv likes to include script name
        #If an argument is supplied that ends with .txt, use that as loadlist.txt instead
        if sys.argv != [] and sys.argv[0].endswith('.txt'):
            self.list_file = sys.argv.pop(0)
        else:
            #Default loadlist.txt file
            self.list_file = 'profiles' + sep + 'loadlist.txt'
        #Rest of arguments are bot profiles to be loaded
        self.load_files = sys.argv
        #Read loadlist.txt
        try:
            load_list = open(self.list_file, 'r')
        except IOError:
            pass
        else:
            new_files = load_list.readlines()
            for new in new_files:
                self.load_files.append(new.strip(sep+' \n\r'))
        #If there are no profiles, load a default config profile.
        if self.load_files == []:
            self.load_files.append('config.ini')
        for f in self.load_files:
            bot_path = 'profiles' + sep  #Presently all profiles are in ./profiles
            f = f % rep  #Replace cross-platform patterns with platform-dependent equivalents at run-time
            paths = f.rsplit(sep, 1)  #To to separate a file name from path
            if len(paths) > 1:
                bot_path += paths[0]
                cfg_name = paths[1]
            else:
                cfg_name = f
            # A bare profile name (no dot) means a directory holding the
            # default config.ini.
            if cfg_name.find('.') == -1:
                bot_path += cfg_name
                cfg_name = 'config.ini'
            cfg_name = bot_path + sep + cfg_name
            self.bots.append(Bot(self, cfg_name, bot_path+sep))

    def recv_all(self):
        """Poll every registered socket forever, dispatching its callback."""
        while True:
            if self.sockets == {}:
                # Nothing to poll yet; avoid a hot loop.
                time.sleep(0.01)
            else:
                avail = select(self.sockets.iterkeys(), [], [], 0.01)
                for recv in avail[0]:
                    # Socket may have been unregistered since select().
                    try:
                        f = self.sockets[recv]
                    except KeyError:
                        pass
                    else:
                        f()
def __init__(self, name, config=None, account=None, market=None,
             worker_market=None, fee_asset_symbol=None,
             bitshares_instance=None, bitshares_bundle=None,
             *args, **kwargs):
    """Initialise a worker strategy: storage, events, market and fees.

    :param name: worker name (storage key and config section)
    :param config: explicit worker config; loaded from file when omitted
    :param account, market, worker_market: pre-built objects for this worker
    :param fee_asset_symbol: asset used to pay fees; falls back to '1.3.0'
    :param bitshares_instance: explicit connection; shared instance otherwise
    :param bitshares_bundle: value assigned to bitshares.bundle
    """
    # BitShares instance
    self.bitshares = bitshares_instance or shared_bitshares_instance()

    # Dex instance used to get different fees for the market
    self.dex = Dex(self.bitshares)

    # Storage
    Storage.__init__(self, name)

    # Events
    Events.__init__(self)

    # Redirect this event to also call order placed and order matched
    self.onMarketUpdate += self._callbackPlaceFillOrders

    if config:
        self.config = config
    else:
        self.config = Config.get_worker_config_file(name)

    # Get Bitshares account and market for this worker
    self.account = account
    self.market = market
    self.worker_market = worker_market

    # Recheck flag - Tell the strategy to check for updated orders
    self.recheck_orders = False

    # Count of orders to be fetched from the API
    self.fetch_depth = 8

    # Set fee asset
    # NOTE(review): self-assignment below is a no-op -- probably leftover.
    fee_asset_symbol = fee_asset_symbol
    if fee_asset_symbol:
        try:
            self.fee_asset = Asset(fee_asset_symbol, bitshares_instance=self.bitshares)
        except bitshares.exceptions.AssetDoesNotExistsException:
            self.fee_asset = Asset('1.3.0', bitshares_instance=self.bitshares)
    else:
        # If there is no fee asset, use X4T
        self.fee_asset = Asset('1.3.0', bitshares_instance=self.bitshares)

    # CER cache
    self.core_exchange_rate = None

    # Ticker
    # NOTE(review): raises AttributeError when market is None -- confirm
    # callers always supply a market.
    self.ticker = self.market.ticker

    # Settings for bitshares instance
    self.bitshares.bundle = bitshares_bundle

    # Disabled flag - this flag can be flipped to True by a worker and will be reset to False after reset only
    self.disabled = False

    # Order expiration time in seconds
    self.expiration = 60 * 60 * 24 * 365 * 5

    # buy/sell actions will return order id by default
    self.returnOrderId = 'head'
# Load secrets into the environment before reading any Slack settings.
export(SecretId=os.getenv('SECRET_ID'))

# EventBridge routing.
EVENT_BUS_NAME = os.getenv('EVENT_BUS_NAME')
EVENT_SOURCE = os.getenv('EVENT_SOURCE')

# Slack OAuth / signing configuration (all read from the environment).
SLACK_CLIENT_ID = os.getenv('SLACK_CLIENT_ID')
SLACK_CLIENT_SECRET = os.getenv('SLACK_CLIENT_SECRET')
# NOTE(review): env vars are strings -- ANY non-empty value here (even
# "false" or "0") is truthy and disables verification below; confirm that
# is intended.
SLACK_DISABLE_VERIFICATION = os.getenv('SLACK_DISABLE_VERIFICATION')
SLACK_OAUTH_ERROR_URI = os.getenv('SLACK_OAUTH_ERROR_URI')
SLACK_OAUTH_INSTALL_URI = os.getenv('SLACK_OAUTH_INSTALL_URI')
SLACK_OAUTH_REDIRECT_URI = os.getenv('SLACK_OAUTH_REDIRECT_URI')
SLACK_OAUTH_SUCCESS_URI = os.getenv('SLACK_OAUTH_SUCCESS_URI')
SLACK_SIGNING_SECRET = os.getenv('SLACK_SIGNING_SECRET')
SLACK_SIGNING_VERSION = os.getenv('SLACK_SIGNING_VERSION')
SLACK_TOKEN = os.getenv('SLACK_TOKEN')

# Module-level service clients shared by the handlers.
events = Events(bus=EVENT_BUS_NAME, source=EVENT_SOURCE)
slack = Slack(
    client_id=SLACK_CLIENT_ID,
    client_secret=SLACK_CLIENT_SECRET,
    oauth_error_uri=SLACK_OAUTH_ERROR_URI,
    oauth_install_uri=SLACK_OAUTH_INSTALL_URI,
    oauth_redirect_uri=SLACK_OAUTH_REDIRECT_URI,
    oauth_success_uri=SLACK_OAUTH_SUCCESS_URI,
    signing_secret=SLACK_SIGNING_SECRET,
    signing_version=SLACK_SIGNING_VERSION,
    token=SLACK_TOKEN,
    verify=not SLACK_DISABLE_VERIFICATION,
)
states = States()
class Predicter:
    """Tracks a price series and signals trading operations at extrema
    and on small derivatives."""

    UP_TENDENCY = "UP"
    DOWN_TENDENCY = "DOWN"
    NO_TENDENCY = "LEVEL"

    def __init__(self, debugflag, readingInterval, initialPice=0):
        # ("initialPice" keeps the original misspelling for interface stability)
        self.debugflag = debugflag
        self.valuesSoFar = OrderedReadingsArray()
        self.initialPrice = initialPice
        # Consecutive readings with no buy/sell decision.
        self.notDecisiveReadings = 0
        self.readingInterval = readingInterval
        self.sellEvents = Events()
        # NOTE(review): buyEvents is never fired anywhere in this class --
        # checkForMaximum fires sellEvents too; looks like a copy/paste
        # bug, confirm intended channels before fixing.
        self.buyEvents = Events()
        self.increasses = 0
        self.decreases = 0
        self.tendency = ""

    def addData(self, newValue, timeInterval):
        """Record one reading and return a PredicterResult describing it."""
        result = PredicterResult(self.readingInterval)
        if self.valuesSoFar.size() == 0:
            # First reading: store it and launch the external graph.
            self.valuesSoFar.add(newValue, timeInterval)
            self.startGraphic()
            return result
        self.checkTendency(newValue)
        self.checkForMinimum(newValue, result)
        self.checkForMaximum(newValue, result)
        self.valuesSoFar.add(newValue, timeInterval)
        self.checkDerivative(result)
        self.__calculateGoodReadinngsDensity()
        result.readingInterval = self.readingInterval
        result.tendency = self.tendency
        self.outputData()
        return result

    def __calculateGoodReadinngsDensity(self):
        # Too many indecisive readings in a row: poll less often.
        if self.notDecisiveReadings > 10:
            self.notDecisiveReadings = 0
            self.readingInterval += 5
            print(" ++ Reading interval increased to %d seconds"%self.readingInterval)

    def checkTendency(self, newValue):
        # Majority vote of up-moves vs down-moves seen so far.
        if newValue > self.valuesSoFar.lastAddedValue:
            self.increasses += 1
        if newValue < self.valuesSoFar.lastAddedValue:
            self.decreases += 1
        if self.decreases > self.increasses:
            self.tendency = self.DOWN_TENDENCY
        elif self.decreases < self.increasses:
            self.tendency = self.UP_TENDENCY
        else:
            self.tendency = self.NO_TENDENCY

    def checkForMinimum(self, newValue, result):
        # New all-time low resets the indecision counter; matching the
        # current low fires the sell event.
        minValue = self.valuesSoFar.getMinValue()
        if newValue < minValue:
            self.notDecisiveReadings = 0
            result.comment = " ** Min Value found: %.4f"%newValue
        if newValue == minValue:
            self.sellEvents.on_change()
            result.comment = " * Reached bottom: %.4f"%newValue
        else:
            self.notDecisiveReadings += 1

    def checkForMaximum(self, newValue, result):
        maxValue = self.valuesSoFar.getMaxValue()
        if newValue > maxValue:
            self.notDecisiveReadings = 0
            result.comment = " ^^ Max Value found: %.4f"%newValue
        if newValue == maxValue:
            # NOTE(review): fires sellEvents, same as checkForMinimum;
            # probable copy/paste bug (buyEvents is never used) -- confirm.
            self.sellEvents.on_change()
            result.comment = " ^ Reached top: %.4f"%newValue
        else:
            self.notDecisiveReadings += 1

    def checkOperation(self, result):
        # NOTE(review): not called from addData -- dead within this view.
        result.comment2 = "I%d/D%d"%(self.increasses, self.decreases)
        if self.decreases - self.increasses > 5:
            result.operation = PredicterResult.OPERATION_SELL
        elif self.increasses - self.decreases > 5:
            result.operation = PredicterResult.OPERATION_BUY

    def checkDerivative(self, result):
        # Small negative slope -> buy; small positive slope -> sell.
        minimumDer = 0.2
        result.lastDerivative = self.valuesSoFar.lastDerivative
        if abs(result.lastDerivative) < minimumDer and result.lastDerivative < 0:
            result.operation = PredicterResult.OPERATION_BUY
        elif abs(result.lastDerivative) < minimumDer and result.lastDerivative > 0:
            result.operation = PredicterResult.OPERATION_SELL

    def outputData(self):
        # Dump all readings for the external grapher.
        # NOTE(review): the file handle is never closed and the path is
        # machine-specific -- consider a `with` block and configuration.
        folder = "/home/gabriel/Materiale/Studiu/Proiecte_personale/Python/project_currency/Trading/"
        file = "values" + ".num"
        path = folder + "graphic/" + file
        scriptFileObject = open(path, 'w')
        for value in self.valuesSoFar.readings:
            scriptFileObject.write("%.4f\n"%value)

    def startGraphic(self):
        # Launch the grapher in a fresh terminal window.
        scriptfile = self.__writeScriptFileForGraphic()
        command = "gnome-terminal -x sh -c 'sh %s'" %scriptfile
        subprocess.Popen(command, stdin=subprocess.PIPE, shell=True)

    def __writeScriptFileForGraphic(self):
        # Write the shell script the grapher terminal will execute.
        folder = "/home/gabriel/Materiale/Studiu/Proiecte_personale/Python/project_currency/Trading/"
        pythonFileFolder = "/home/gabriel/Materiale/Studiu/Proiecte_personale/Python/project_currency/Utils/"
        file = "graph" + ".sh"
        path = folder + "graphic/" + file
        scriptFileObject = open(path, 'w')
        scriptFileObject.write("cd %s\n" %pythonFileFolder)
        scriptFileObject.write("python3.3 GraphicBuilder.py\n")
        # NOTE(review): "clac" is probably a typo; runtime string kept as-is.
        scriptFileObject.write("clac")
        scriptFileObject.close()
        return path
class PrintObserver(CardObserver):
    """A simple card observer that is notified
    when cards are inserted/removed from the system and
    prints the list of cards
    """

    def __init__(self):
        self.events = Events()
        # Readers already muted (the setting persists until unplug).
        self.muted_readers_names = []
        # reader name -> card currently on that reader (or None).
        self.active_cards = {}
        self.log = logging.getLogger(__name__)

    def update(self, observable, actions):
        """CardObserver callback: track insertions/removals, fire on_change."""
        # self.mute_all_readers()
        (addedcards, removedcards) = actions
        for card in addedcards:
            #Temporaly ractivate beep sound waiting for better feedback implementation
            #self.mute_reader(card.reader)
            # The reader has a card on it so we can try to remove the beep
            # Methode 1
            int_id = self.get_id(card.reader)
            card.id = self.convert_to_hex_as_string(int_id)
            # print("METHODE 1 : +Inserted: {} in reader : {}".format(card.id, card.reader))
            # Methode 2
            readerObject = self.get_reader_by_name(card.reader)
            uid_str = self.get_id_with_reader(readerObject)
            # if uid_str != None:
            #     print('METHODE 2 : Card id : {} in reader : {}'.format(uid_str, readerObject))
            # else:
            #     print('METHODE 2 : ERROR')
            self.log.info('+Inserted: {} in reader : {}'.format(
                card.id, card.reader))
            self.active_cards[card.reader] = card  # active cards dict update
        for card in removedcards:
            # Recover the id captured at insertion time.
            card.id = self.active_cards[card.reader].id
            self.log.info('+Removed: {} in reader : {}'.format(
                card.id, card.reader))
            self.active_cards[card.reader] = None  # active cards dict update
        self.events.on_change(addedcards, removedcards,
                              self.active_cards)  # Launch the event

    '''
    Mute the readers | Remove the beep sound on card/tag connection
    Works only if a card is on the reader
    Throw an exception otherwise
    Two differents methods that works the same way but throw differents exceptions
    Once a reader is muted, the settings is live until the reader is unplugged.
    we store in a the muted_readers_names list of the readers already muted and
    launch the mute command only if useful
    '''

    def mute_reader(self, reader_name):
        reader = self.get_reader_by_name(reader_name)
        if reader != None:
            if reader.name not in self.muted_readers_names:
                # Methode 1
                # try:
                #     self.launch_command(reader.name, cmdMap['mute'])
                #     print('Reader {} muted!'.format(reader.name))
                #     self.muted_readers_names.append(reader.name)
                # except SystemError as err:
                #     print(err)
                #Methode 2
                try:
                    connection = reader.createConnection()
                    connection.connect()
                    connection.transmit(cmdMap['mute'])
                    self.muted_readers_names.append(reader.name)
                    self.log.info('Reader {} muted!'.format(reader.name))
                except NoCardException as err:
                    print('Error : {} on reader : {}'.format(err, reader.name))

    '''
    First connection method
    The reader name is enough but this method get too much data,
    we need to truncate the response to get the right uid
    Return : the response minus the two last elements of the byte list
    '''

    def get_id(self, reader_name):
        return self.launch_command(reader_name, cmdMap['getuid'])

    def launch_command(self, reader_name, command):
        # Low-level PC/SC transmit addressed by reader name; the returned
        # byte list is trimmed of its two trailing elements.
        try:
            hresult, hcontext = SCardEstablishContext(SCARD_SCOPE_USER)
            assert hresult == SCARD_S_SUCCESS
            hresult, hcard, dwActiveProtocol = SCardConnect(
                hcontext, reader_name, SCARD_SHARE_SHARED,
                SCARD_PROTOCOL_T0 | SCARD_PROTOCOL_T1)
            # hresult, response = SCardTransmit(hcard,dwActiveProtocol,[0xFF,0xCA,0x00,0x00,0x00])
            hresult, response = SCardTransmit(hcard, dwActiveProtocol, command)
            return response[:len(response) - 2]
        except SystemError as err:
            print("Error in launch command : {}".format(err))
            return None

    '''
    Second connection method
    Pros : easier to read, no need to truncate the response
    Cons : need a PCSCReader as parameter instead of a name
    '''

    def get_reader_by_name(self, reader_name):
        return next((x for x in readers() if x.name == reader_name), None)

    def get_id_with_reader(self, reader):
        return self.launch_command_with_reader(reader, cmdMap['getuid'])

    def launch_command_with_reader(self, reader, command):
        try:
            connection = reader.createConnection()
            connection.connect()
            data, sw1, sw2 = connection.transmit(command)
            return self.convert_to_hex_as_string(data)
        except NoCardException as err:
            print(err)
            return None

    '''
    Util function used by both methods
    '''

    def convert_to_hex_as_string(self, data):
        hexData = [
            format(i, 'X').zfill(2) for i in data
        ]  # we convert to hex with format and add a 0 digit if necessary
        return ''.join(hexData)
def __init__(self):
    """Initialise timer state and its named event slots."""
    self.alive = False  # set True while the timer runs (per this flag's name)
    self.elapsed_seconds = 0
    self.elapsed_minutes = 0
    self.elapsed_hours = 0
    # Named event slots subscribers can hook, one per time unit.
    self.events = Events(('on_second', 'on_minute', 'on_hour'))
class BatchFileController:
    """Controller for batch-muxing two lists of MKV source files.

    Fixes relative to the original:
    - ``generate_item`` compared the *builtin* ``list`` to 1/2 (always
      False) -- those branches were dead code, and enabling them would
      double-add files already assigned by the property setters, so the
      dead branches are removed.
    - ``batch_mux_files`` invoked the mux synchronously and passed its
      return value to ``threading.Thread`` as the first positional
      (``group``) argument; it also passed ``source_1_list`` twice.
    """

    def __init__(self):
        self._source_1_list = []
        self._source_2_list = []
        self.events = Events()
        self._tracks_1 = []
        self._tracks_2 = []
        self._user_settings_1 = []
        self._user_settings_2 = []
        self.output_directory = None
        # Regenerate UI items whenever either source list is replaced.
        self.events.on_batch_list_update += self.generate_item

    @property
    def source_1_list(self):
        return self._source_1_list

    @source_1_list.setter
    def source_1_list(self, file_list):
        self._source_1_list = file_list
        self.events.on_batch_list_update(file_list, 1)

    @property
    def source_2_list(self):
        return self._source_2_list

    @source_2_list.setter
    def source_2_list(self, file_list):
        self._source_2_list = file_list
        self.events.on_batch_list_update(file_list, 2)

    @property
    def user_setting_1(self):
        return self._user_settings_1

    @property
    def user_setting_2(self):
        return self._user_settings_2

    def generate_item(self, file_list, list_num: int):
        """Emit one QListWidgetItem per file for list *list_num*.

        The source lists are already assigned by the property setters
        before this handler fires, so no extending happens here (the
        original's ``if list == 1`` extend branches were dead code).
        """
        for file in file_list:
            item = QListWidgetItem(file)
            self.events.batch_file_item_generated(item, list_num)

    def generate_track_item(self, ref_num: int):
        """
        Generate the track list for the two input directories with the
        first file in the list
        :return:
        """
        if len(self._source_1_list) != 0 and ref_num == 1:
            self._tracks_1 = PymkvWrapper.process_file(self._source_1_list[0])
            # TODO turn item generate into GUI helper
            for track in self._tracks_1:
                track_name = "N/A"
                track_type = track._track_type
                if track.track_name is not None:
                    track_name = track.track_name
                track_item_1 = QListWidgetItem("Track Name: " + track_name +
                                               " | " + "Type: " + track_type)
                track_item_1.setFlags(
                    track_item_1.flags() |
                    PySide2.QtCore.Qt.ItemFlag.ItemIsUserCheckable)
                track_item_1.setFlags(
                    track_item_1.flags() |
                    PySide2.QtCore.Qt.ItemFlag.ItemIsEnabled)
                track_item_1.setCheckState(
                    PySide2.QtCore.Qt.CheckState.Unchecked)
                self.events.ref_tracks_generated(self._source_1_list[0],
                                                 track_item_1, 1)
        if len(self._source_2_list) != 0 and ref_num == 2:
            self._tracks_2 = PymkvWrapper.process_file(self._source_2_list[0])
            # TODO turn item generate into GUI helper
            for track in self._tracks_2:
                track_name = "N/A"
                track_type = track._track_type
                if track.track_name is not None:
                    track_name = track.track_name
                track_item_2 = QListWidgetItem("Track Name: " + track_name +
                                               " | " + "Type: " + track_type)
                track_item_2.setFlags(
                    track_item_2.flags() |
                    PySide2.QtCore.Qt.ItemFlag.ItemIsUserCheckable)
                track_item_2.setFlags(
                    track_item_2.flags() |
                    PySide2.QtCore.Qt.ItemFlag.ItemIsEnabled)
                track_item_2.setCheckState(
                    PySide2.QtCore.Qt.CheckState.Unchecked)
                self.events.ref_tracks_generated(self._source_2_list[0],
                                                 track_item_2, 2)

    def gather_all_selected_track(self, list_1: QListWidget,
                                  list_2: QListWidget):
        """Collect the checked track indices from both list widgets."""
        self._user_settings_1 = []
        self._user_settings_2 = []
        logger.debug("Gathering selected tracks")
        for index in range(list_1.count()):
            item = list_1.item(index)
            if item.checkState() == PySide2.QtCore.Qt.Checked:
                self._user_settings_1.append(index)
        logger.debug("Reference 1 Tracks : " +
                     str(self._user_settings_1).strip('[]'))
        for index in range(list_2.count()):
            item = list_2.item(index)
            if item.checkState() == PySide2.QtCore.Qt.Checked:
                self._user_settings_2.append(index)
        logger.debug("Reference 2 Tracks : " +
                     str(self._user_settings_2).strip('[]'))

    def batch_mux_files(self):
        """Run the batch mux on a worker thread.

        Fixed: the original ran the mux on the calling thread (the call
        executed during Thread construction) and passed source_1_list
        twice; the second argument is now source_2_list to match the
        per-list user settings.
        """
        batch_mux_thread = threading.Thread(
            target=PymkvWrapper.batch_mux_files,
            args=(self.source_1_list,
                  self.source_2_list,
                  self.user_setting_1,
                  self.user_setting_2,
                  self.output_directory))
        batch_mux_thread.start()

    def clear_all(self):
        """Reset every list and notify the UI."""
        self._source_1_list = []
        self._source_2_list = []
        self._tracks_1 = []
        self._tracks_2 = []
        self._user_settings_1 = []
        self._user_settings_2 = []
        self.events.clear_all()
def update(self):
    """Per-frame check: hide the high-score display once a key is pressed."""
    if not Events().key_pressed:
        return
    self.status.show_hs = False
class Xevents(Plugin):
    """Turtle Art plugin exposing X11 event blocks (mouse pointer,
    clicks, key simulation, program control) backed by an ``Events``
    helper object.

    NOTE(review): this class uses Python 2-only syntax
    (``except Exception, err`` and ``dict.has_key``) and will not run
    unchanged under Python 3.
    NOTE(review): ``setup`` registers primitives bound to
    ``self.minimize_window``, ``self.save_value``, ``self.get_value``,
    ``self.default_value`` and ``self.set_program_name``, none of which
    are defined in this class as seen here — presumably provided by
    ``Plugin`` or elsewhere; verify before relying on them.
    """

    def __init__(self, parent):
        Plugin.__init__(self)
        self.init_gconf()
        self._parent = parent
        self.running_sugar = self._parent.running_sugar
        self._status = True
        # Pause flag: initialised to 0 (falsy); setPause/unsetPause flip
        # it between True and False.
        self.pause = 0
        self._events = Events()
        self._buttons = {}  # previous values from buttons {key:[value, lastDebounceTime]}
        self._last_event = 0
        self._program_name = ''
        self._defaults = {}  # local default values for conf keys
        self._last_button_state = 0

    def setPause(self):
        """Set the pause flag (read back via getPause)."""
        self.pause = True

    def unsetPause(self):
        """Clear the pause flag."""
        self.pause = False

    def getPause(self):
        """Return the current pause flag."""
        return self.pause

    def setup(self):
        """Register the X11 constants, macros, and the two block
        palettes ('xlib-bots' and 'xlib-bots-extra') together with
        their primitives on the parent's logo code interpreter."""
        # set up X11 events specific blocks
        global CONSTANTS
        # X mouse button numbers / scroll pseudo-buttons.
        CONSTANTS['xe_left_click'] = 1
        CONSTANTS['xe_right_click'] = 3
        CONSTANTS['xe_scroll_up'] = 4
        CONSTANTS['xe_scroll_down'] = 5
        CONSTANTS['TRUE'] = True
        CONSTANTS['FALSE'] = False
        # Maximum number of buffered readings kept per button (debounce).
        CONSTANTS['xe_buffer_size'] = 30
        # Symbolic key names used by the key-simulation primitives.
        CONSTANTS['xe_ctrl'] = "xe_ctrl"
        CONSTANTS['xe_shift'] = "xe_shift"
        CONSTANTS['xe_alt'] = "xe_alt"
        CONSTANTS['xe_alt_gr'] = "xe_alt_gr"
        CONSTANTS['xe_left_arrow'] = "xe_left_arrow"
        CONSTANTS['xe_right_arrow'] = "xe_right_arrow"
        CONSTANTS['xe_up_arrow'] = "xe_up_arrow"
        CONSTANTS['xe_down_arrow'] = "xe_down_arrow"
        CONSTANTS['xe_f4'] = "xe_f4"
        CONSTANTS['xe_f5'] = "xe_f5"
        CONSTANTS['xe_spacebar'] = "xe_spacebar"
        CONSTANTS['xe_tab'] = "xe_tab"
        CONSTANTS['xe_return'] = "xe_return"
        CONSTANTS['xe_escape'] = "xe_escape"
        CONSTANTS['xe_enter'] = "xe_enter"
        global MACROS
        '''MACROS['setLineColorRGBmacro'] = [[0, 'setLineColorRGB', 0, 0, [None, 1, 2, 3, None]],
                                             [1, ['number', 0], 0, 0, [0, None]],
                                             [2, ['number', 0], 0, 0, [0, None]],
                                             [3, ['number', 0], 0, 0, [0, None]] ]
        '''
        MACROS['setLineWidthAndHeightmacro'] = [[
            0, 'setLineWidthAndHeight', 0, 0, [None, 1, 2, None]
        ], [1, ['number', 0], 0, 0, [0, None]],
            [2, ['number', 0], 0, 0, [0, None]]]
        palette = make_palette('xlib-bots',
                               colors=["#FF6060", "#A06060"],
                               help_string=_('Palette of X11 event blocks'))
        # Extra palette
        palette2 = make_palette(
            'xlib-bots-extra',
            colors=["#FF6060", "#A06060"],
            help_string=_('Palette of X11 extra event blocks'))

        # --- Mouse pointer blocks -----------------------------------
        palette.add_block(
            'setX11mouse',
            style='basic-style-2arg',
            label=_('setXY'),
            value_block=True,
            default=[0, 0],
            help_string=_('set the mouse pointer to x y coordinates'),
            prim_name='set_x11_mouse')
        self._parent.lc.def_prim(
            'set_x11_mouse', 2,
            Primitive(self.set_x11_mouse,
                      arg_descs=[ArgSlot(TYPE_NUMBER),
                                 ArgSlot(TYPE_NUMBER)]))
        palette.add_block('getX11mouseX',
                          style='box-style',
                          label=_('getMouseX'),
                          value_block=True,
                          help_string=_('get the mouse pointer x coordinate'),
                          prim_name='get_x11_mouse_x')
        self._parent.lc.def_prim('get_x11_mouse_x', 0,
                                 Primitive(self.get_x11_mouse_x, TYPE_INT))
        palette.add_block('getX11mouseY',
                          style='box-style',
                          label=_('getMouseY'),
                          value_block=True,
                          help_string=_('get the mouse pointer y coordinate'),
                          prim_name='get_x11_mouse_y')
        self._parent.lc.def_prim('get_x11_mouse_y', 0,
                                 Primitive(self.get_x11_mouse_y, TYPE_INT))
        palette.add_block('getScreenWidth',
                          style='box-style',
                          label=_('getScreenWidth'),
                          value_block=True,
                          help_string=_('get the screen width'),
                          prim_name='get_screen_width')
        self._parent.lc.def_prim('get_screen_width', 0,
                                 Primitive(self.get_screen_width, TYPE_INT))
        palette.add_block('getScreenHeight',
                          style='box-style',
                          label=_('getScreenHeight'),
                          value_block=True,
                          help_string=_('get the screen height'),
                          prim_name='get_screen_height')
        self._parent.lc.def_prim('get_screen_height', 0,
                                 Primitive(self.get_screen_height, TYPE_INT))

        # --- Click / button blocks ----------------------------------
        palette.add_block('click',
                          style='basic-style-1arg',
                          label=_('click'),
                          value_block=True,
                          default=[1],
                          help_string=_('simulate a mouse click'),
                          prim_name='click')
        self._parent.lc.def_prim(
            'click', 1,
            Primitive(self.click, arg_descs=[ArgSlot(TYPE_NUMBER)]))
        palette.add_block('doubleClick',
                          style='basic-style-1arg',
                          label=_('double click'),
                          value_block=True,
                          default=[1],
                          help_string=_('simulate a mouse double click'),
                          prim_name='double_click')
        self._parent.lc.def_prim(
            'double_click', 1,
            Primitive(self.double_click, arg_descs=[ArgSlot(TYPE_NUMBER)]))
        palette.add_block('pressButton',
                          style='basic-style-1arg',
                          label=_('pressButton'),
                          value_block=True,
                          default=[0],
                          help_string=_('keeps button pressed'),
                          prim_name='press_button')
        self._parent.lc.def_prim(
            'press_button', 1,
            Primitive(self.press_button, arg_descs=[ArgSlot(TYPE_NUMBER)]))
        palette.add_block('releaseButton',
                          style='basic-style-1arg',
                          label=_('releaseButton'),
                          value_block=True,
                          default=[0],
                          help_string=_('releases button'),
                          prim_name='release_button')
        self._parent.lc.def_prim(
            'release_button', 1,
            Primitive(self.release_button, arg_descs=[ArgSlot(TYPE_NUMBER)]))
        # Constant boxes that report the X button numbers defined above.
        palette.add_block('leftClick',
                          style='box-style',
                          label=_('leftClick'),
                          value_block=True,
                          help_string=_('click left click'),
                          prim_name='left_click')
        self._parent.lc.def_prim(
            'left_click', 0,
            Primitive(CONSTANTS.get, TYPE_INT,
                      [ConstantArg('xe_left_click')]))
        palette.add_block('rightClick',
                          style='box-style',
                          label=_('rightClick'),
                          value_block=True,
                          help_string=_('click right click'),
                          prim_name='right_click')
        self._parent.lc.def_prim(
            'right_click', 0,
            Primitive(CONSTANTS.get, TYPE_INT,
                      [ConstantArg('xe_right_click')]))
        palette.add_block('scrollUp',
                          style='box-style',
                          label=_('scrollUp'),
                          value_block=True,
                          help_string=_('simulate mouse scroll up event'),
                          prim_name='scroll_up')
        self._parent.lc.def_prim(
            'scroll_up', 0,
            Primitive(CONSTANTS.get, TYPE_INT,
                      [ConstantArg('xe_scroll_up')]))
        palette.add_block('scrollDown',
                          style='box-style',
                          label=_('scrollDown'),
                          value_block=True,
                          help_string=_('simulate mouse scroll down event'),
                          prim_name='scroll_down')
        self._parent.lc.def_prim(
            'scroll_down', 0,
            Primitive(CONSTANTS.get, TYPE_INT,
                      [ConstantArg('xe_scroll_down')]))

        # --- Pause / line-overlay blocks ----------------------------
        palette.add_block('freeze',
                          style='basic-style',
                          label=_('freezeBar'),
                          value_block=True,
                          help_string=_('freeze the bar'),
                          prim_name='freeze')
        self._parent.lc.def_prim('freeze', 0, Primitive(self.setPause))
        palette.add_block('unfreeze',
                          style='basic-style',
                          label=_('unfreezeBar'),
                          value_block=True,
                          help_string=_('unfreeze the bar'),
                          prim_name='unfreeze')
        self._parent.lc.def_prim('unfreeze', 0, Primitive(self.unsetPause))
        palette.add_block('showLine',
                          style='basic-style',
                          label=_('showLine'),
                          value_block=True,
                          help_string=_('show vertical line over mouse'),
                          prim_name='show_line')
        self._parent.lc.def_prim('show_line', 0, Primitive(self.show_line))
        palette.add_block('hideLine',
                          style='basic-style',
                          label=_('hideLine'),
                          value_block=True,
                          help_string=_('hide vertical line over mouse'),
                          prim_name='hide_line')
        self._parent.lc.def_prim('hide_line', 0, Primitive(self.hide_line))
        '''
        palette.add_block('setLineColorRGB',
                          hidden=True,
                          style='basic-style-3arg',
                          label=_('setLineColorRGB'),
                          value_block=True,
                          default=[0, 0, 0],
                          help_string=_('set line color from rgb value'),
                          prim_name='set_line_color_rgb')
        self._parent.lc.def_prim(
            'set_line_color_rgb', 3,
            Primitive(self.set_line_color_rgb,
                      arg_descs=[ArgSlot(TYPE_INT),
                                 ArgSlot(TYPE_INT),
                                 ArgSlot(TYPE_INT)]))
        palette.add_block('setLineColorRGBmacro',
                          style='basic-style-extended-vertical',
                          label=_('setLineColorRGB'),
                          help_string=_('set line color from rgb value'))
        '''
        palette.add_block('setLineColor',
                          style='basic-style-1arg',
                          label=_('setLineColor'),
                          value_block=True,
                          help_string=_('set line color'),
                          prim_name='set_line_color')
        self._parent.lc.def_prim(
            'set_line_color', 1,
            Primitive(self.set_line_color, arg_descs=[ArgSlot(TYPE_COLOR)]))
        palette.add_block('setLineOpacity',
                          style='basic-style-1arg',
                          label=_('setLineOpacity'),
                          value_block=True,
                          default=[1],
                          help_string=_('set line opacity'),
                          prim_name='set_line_opacity')
        self._parent.lc.def_prim(
            'set_line_opacity', 1,
            Primitive(self.set_line_opacity,
                      arg_descs=[ArgSlot(TYPE_NUMBER)]))
        palette.add_block(
            'setLineWidthAndHeight',
            hidden=True,
            style='basic-style-2arg',
            label=_('setLineWidthAndHeight'),
            value_block=True,
            default=[0, 0],
            help_string=_('set width and height of line over mouse'),
            prim_name='set_line_width_and_height')
        self._parent.lc.def_prim(
            'set_line_width_and_height', 2,
            Primitive(self.set_line_width_and_height,
                      arg_descs=[ArgSlot(TYPE_NUMBER),
                                 ArgSlot(TYPE_NUMBER)]))
        palette.add_block(
            'setLineWidthAndHeightmacro',
            style='basic-style-extended-vertical',
            label=_('setLineWidthAndHeight'),
            help_string=_('set width and height of line over mouse'))

        # --- Keyboard blocks (extra palette) ------------------------
        palette2.add_block('simulateKey',
                           style='basic-style-1arg',
                           label=_('simulateKey'),
                           help_string=_('simulates pressing a key'),
                           prim_name='simulate_key')
        self._parent.lc.def_prim(
            'simulate_key', 1,
            Primitive(self.simulate_key, arg_descs=[ArgSlot(TYPE_STRING)]))
        # Constant boxes reporting the symbolic key names defined above.
        palette2.add_block('spaceBar',
                           style='box-style',
                           label=_('spaceBar'),
                           value_block=True,
                           help_string=_('space bar'),
                           prim_name='spacebar')
        self._parent.lc.def_prim(
            'spacebar', 0,
            Primitive(CONSTANTS.get, TYPE_STRING,
                      [ConstantArg('xe_spacebar')]))
        palette2.add_block('leftArrow',
                           style='box-style',
                           label=_('leftArrow'),
                           value_block=True,
                           help_string=_('left arrow'),
                           prim_name='left_arrow')
        self._parent.lc.def_prim(
            'left_arrow', 0,
            Primitive(CONSTANTS.get, TYPE_STRING,
                      [ConstantArg('xe_left_arrow')]))
        palette2.add_block('rightArrow',
                           style='box-style',
                           label=_('rightArrow'),
                           value_block=True,
                           help_string=_('right arrow'),
                           prim_name='right_arrow')
        self._parent.lc.def_prim(
            'right_arrow', 0,
            Primitive(CONSTANTS.get, TYPE_STRING,
                      [ConstantArg('xe_right_arrow')]))
        palette2.add_block('upArrow',
                           style='box-style',
                           label=_('upArrow'),
                           value_block=True,
                           help_string=_('up arrow'),
                           prim_name='up_arrow')
        self._parent.lc.def_prim(
            'up_arrow', 0,
            Primitive(CONSTANTS.get, TYPE_STRING,
                      [ConstantArg('xe_up_arrow')]))
        palette2.add_block('downArrow',
                           style='box-style',
                           label=_('downArrow'),
                           value_block=True,
                           help_string=_('down arrow'),
                           prim_name='down_arrow')
        self._parent.lc.def_prim(
            'down_arrow', 0,
            Primitive(CONSTANTS.get, TYPE_STRING,
                      [ConstantArg('xe_down_arrow')]))
        palette2.add_block('CtrlKey',
                           style='box-style',
                           label=_('ctrlKey'),
                           value_block=True,
                           help_string=_('ctrl key'),
                           prim_name='ctrl_key')
        self._parent.lc.def_prim(
            'ctrl_key', 0,
            Primitive(CONSTANTS.get, TYPE_STRING, [ConstantArg('xe_ctrl')]))
        palette2.add_block('ShiftKey',
                           style='box-style',
                           label=_('shiftKey'),
                           value_block=True,
                           help_string=_('shift key'),
                           prim_name='shift_key')
        self._parent.lc.def_prim(
            'shift_key', 0,
            Primitive(CONSTANTS.get, TYPE_STRING, [ConstantArg('xe_shift')]))
        palette2.add_block('AltKey',
                           style='box-style',
                           label=_('altKey'),
                           value_block=True,
                           help_string=_('alt key'),
                           prim_name='alt_key')
        self._parent.lc.def_prim(
            'alt_key', 0,
            Primitive(CONSTANTS.get, TYPE_STRING, [ConstantArg('xe_alt')]))
        '''
        palette2.add_block('AltGrKey',
                           style='box-style',
                           label=_('altGrKey'),
                           value_block=True,
                           help_string=_('alt gr key'),
                           prim_name='altgr_key')
        self._parent.lc.def_prim(
            'altgr_key', 0,
            Primitive(CONSTANTS.get, TYPE_STRING,
                      [ConstantArg('xe_alt_gr')]))
        '''
        palette2.add_block('tabKey',
                           style='box-style',
                           label=_('tabKey'),
                           value_block=True,
                           help_string=_('tab key'),
                           prim_name='tab_key')
        self._parent.lc.def_prim(
            'tab_key', 0,
            Primitive(CONSTANTS.get, TYPE_STRING, [ConstantArg('xe_tab')]))
        palette2.add_block('returnKey',
                           style='box-style',
                           label=_('returnKey'),
                           value_block=True,
                           help_string=_('return key'),
                           prim_name='return_key')
        self._parent.lc.def_prim(
            'return_key', 0,
            Primitive(CONSTANTS.get, TYPE_STRING,
                      [ConstantArg('xe_return')]))
        palette2.add_block('escapeKey',
                           style='box-style',
                           label=_('escapeKey'),
                           value_block=True,
                           help_string=_('escape key'),
                           prim_name='escape_key')
        self._parent.lc.def_prim(
            'escape_key', 0,
            Primitive(CONSTANTS.get, TYPE_STRING,
                      [ConstantArg('xe_escape')]))
        palette2.add_block('enterKey',
                           style='box-style',
                           label=_('enterKey'),
                           value_block=True,
                           help_string=_('enter key'),
                           prim_name='enter_key')
        self._parent.lc.def_prim(
            'enter_key', 0,
            Primitive(CONSTANTS.get, TYPE_STRING, [ConstantArg('xe_enter')]))
        palette2.add_block('f4Key',
                           style='box-style',
                           label=_('f4Key'),
                           value_block=True,
                           help_string=_('f4 key'),
                           prim_name='f4_key')
        self._parent.lc.def_prim(
            'f4_key', 0,
            Primitive(CONSTANTS.get, TYPE_STRING, [ConstantArg('xe_f4')]))
        palette2.add_block('f5Key',
                           style='box-style',
                           label=_('f5Key'),
                           value_block=True,
                           help_string=_('f5 key'),
                           prim_name='f5_key')
        self._parent.lc.def_prim(
            'f5_key', 0,
            Primitive(CONSTANTS.get, TYPE_STRING, [ConstantArg('xe_f5')]))
        palette2.add_block('combineKeys',
                           style='number-style-block',
                           label=[_('combine'), _('key1'), _('key2')],
                           help_string=_('Combines two keys. e.g : ctrl + c'),
                           prim_name='combine_keys')
        self._parent.lc.def_prim(
            'combine_keys', 2,
            Primitive(self.combine_keys, TYPE_STRING,
                      arg_descs=[ArgSlot(TYPE_STRING),
                                 ArgSlot(TYPE_STRING)]))

        # --- Signal-conditioning blocks -----------------------------
        palette2.add_block(
            'debounce',
            style='number-style-block',
            label=[_('debounce'), _('name'), _('button')],
            default=["name"],
            help_string=_('Debouncing - The name must be unique'),
            prim_name='debounce')
        self._parent.lc.def_prim(
            'debounce', 2,
            Primitive(self.debounce,
                      arg_descs=[ArgSlot(TYPE_STRING),
                                 ArgSlot(TYPE_NUMBER)]))
        palette2.add_block(
            'edgeDetector',
            style='number-style-block',
            label=[_('edge detector'), _('name'), _('button')],
            default=["name"],
            help_string=_('Edge Detector - The name must be unique'),
            prim_name='edge_detector')
        self._parent.lc.def_prim(
            'edge_detector', 2,
            Primitive(self.edge_detector,
                      arg_descs=[ArgSlot(TYPE_STRING),
                                 ArgSlot(TYPE_NUMBER)]))

        # --- Program-control blocks ---------------------------------
        palette2.add_block('openBrowser',
                           style='basic-style-1arg',
                           label=_('openBrowser'),
                           default=[_("http://www.example.com")],
                           help_string=_('Simulates opening a web browser'),
                           prim_name='browser')
        self._parent.lc.def_prim(
            'browser', 1,
            Primitive(self.browser, arg_descs=[ArgSlot(TYPE_STRING)]))
        palette2.add_block('openProgram',
                           style='basic-style-1arg',
                           label=_("openProgram"),
                           default=[_("name")],
                           help_string=_('Opens a program'),
                           prim_name='open_program')
        self._parent.lc.def_prim(
            'open_program', 1,
            Primitive(self.open_program, arg_descs=[ArgSlot(TYPE_STRING)]))
        palette2.add_block('closeProgram',
                           style='basic-style-1arg',
                           label=_("closeProgram"),
                           default=[_("name")],
                           help_string=_('close a program'),
                           prim_name='close_program')
        self._parent.lc.def_prim(
            'close_program', 1,
            Primitive(self.close_program, arg_descs=[ArgSlot(TYPE_STRING)]))
        palette2.add_block('minimizeWindow',
                           style='basic-style',
                           label=_('minimizeWindow'),
                           value_block=True,
                           help_string=_('minimize the window'),
                           prim_name='minimize_window')
        self._parent.lc.def_prim('minimize_window', 0,
                                 Primitive(self.minimize_window))

        # --- Key/value storage blocks -------------------------------
        palette2.add_block(
            'saveValue',
            style='basic-style-2arg',
            label=[_('saveValue'), _('key'), _('value')],
            default=["key"],
            help_string=_('save value - The key must be unique'),
            prim_name='save_value')
        self._parent.lc.def_prim(
            'save_value', 2,
            Primitive(self.save_value,
                      arg_descs=[ArgSlot(TYPE_STRING),
                                 ArgSlot(TYPE_NUMBER)]))
        palette2.add_block(
            'getValue',
            style='number-style-1arg',
            label=_("getValue"),
            default=[_("key")],
            help_string=_('get a value saved with save value block'),
            prim_name='get_value')
        self._parent.lc.def_prim(
            'get_value', 1,
            Primitive(self.get_value, arg_descs=[ArgSlot(TYPE_STRING)]))
        palette2.add_block(
            'defaultValue',
            style='basic-style-2arg',
            label=[_('defaultValue'), _('key'), _('value')],
            default=["key"],
            help_string=_('default value - The key must be unique'),
            prim_name='default_value')
        self._parent.lc.def_prim(
            'default_value', 2,
            #save a color
            Primitive(
                self.default_value,
                arg_descs=or_([ArgSlot(TYPE_STRING), ArgSlot(TYPE_COLOR)],
                              [ArgSlot(TYPE_STRING), ArgSlot(TYPE_NUMBER)],
                              [ArgSlot(TYPE_STRING), ArgSlot(TYPE_STRING)])))
        # or_(Primitive(self.default_value,
        #               arg_descs=[ArgSlot(TYPE_STRING),
        #                          ArgSlot(TYPE_COLOR)]),
        #     # ... or save a number
        #     Primitive(self.default_value,
        #               arg_descs=[ArgSlot(TYPE_STRING),
        #                          ArgSlot(TYPE_NUMBER)]),
        #     # ... or save a string
        #     Primitive(self.default_value,
        #               arg_descs=[ArgSlot(TYPE_STRING),
        #                          ArgSlot(TYPE_STRING)]) ))
        palette2.add_block('setProgramName',
                           style='basic-style-1arg',
                           label=_("setProgramName"),
                           default=[_("my program")],
                           help_string=_('name this program'),
                           prim_name='set_program_name')
        self._parent.lc.def_prim(
            'set_program_name', 1,
            Primitive(self.set_program_name,
                      arg_descs=[ArgSlot(TYPE_STRING)]))

    ############################# Turtle calls ################################
    def start(self):
        """Plugin lifecycle hook: nothing to do on start."""
        pass

    def stop(self):
        """Plugin lifecycle hook: hide the mouse-line overlay."""
        self._events.show_line(False)

    def quit(self):
        """Plugin lifecycle hook: nothing to do on quit."""
        pass

    ################################# Primitives ##############################
    # Thin wrappers delegating each registered primitive to self._events.
    def set_x11_mouse(self, x, y):
        """Move the pointer to (x, y); honours the pause flag."""
        self._events.create_absolute_mouse_event(int(x), int(y),
                                                 self.getPause())

    def get_x11_mouse_x(self):
        return self._events.get_mouse_position()[0]

    def get_x11_mouse_y(self):
        return self._events.get_mouse_position()[1]

    def get_screen_width(self):
        return self._events.get_screen_resolution()[0]

    def get_screen_height(self):
        return self._events.get_screen_resolution()[1]

    def click(self, button):
        self._events.click_button(button)

    def double_click(self, button):
        self._events.double_click_button(button)

    def press_button(self, button):
        self._events.press_button(button)

    def release_button(self, button):
        self._events.release_button(button)

    def show_line(self):
        self._events.show_line(True)

    def hide_line(self):
        self._events.show_line(False)

    def set_line_color(self, color_name):
        self._events.set_line_color(color_name)

    def set_line_opacity(self, opacity):
        self._events.set_line_opacity(opacity)

    def set_line_color_rgb(self, red, green, blue):
        self._events.set_line_color_rgb(red, green, blue)

    def set_line_width_and_height(self, width, height):
        self._events.set_line_width_and_height(width, height)

    def simulate_key(self, key):
        self._events.simulate_key(key)

    def browser(self, url):
        self._events.browser(url)

    def combine_keys(self, key1, key2):
        """Join two symbolic key names with a space (e.g. ctrl + c)."""
        return key1 + " " + key2

    def _listMode(self, l):
        """Return the most frequent element of ``l``, or 0 if empty."""
        data = Counter(l)
        if len(data) > 0:
            # NOTE(review): result of this call is discarded; it appears
            # redundant with the call on the next line.
            data.most_common()  # Returns all unique items and their counts
            return data.most_common(1)[0][
                0]  # Returns the highest occurring item
        else:
            return 0

    def debounce(self, buttonName, buttonState):
        """Debounce a button: buffer recent readings per ``buttonName``
        and return 1 when the most common buffered reading is 1, else 0.
        """
        current_time = int(round(time.time() * 1000))
        self._last_event = current_time
        # debouncing - collect readings over a period of time and
        # evaluate the amount of 0's and 1's
        # self.buttons -> key:[]
        if not self._buttons.has_key(buttonName):  # Python 2 dict API
            self._buttons[buttonName] = []
        self._buttons[buttonName].append(buttonState)
        # Keep the buffer at xe_buffer_size
        if len(self._buttons[buttonName]) > CONSTANTS['xe_buffer_size']:
            self._buttons[buttonName].pop(0)
        if self._listMode(self._buttons[buttonName]) == 1:
            return 1
        else:
            return 0

    def edge_detector(self, buttonName, buttonState):
        """Return 1 once when ``buttonState`` rises (0 -> nonzero);
        falling edges are detected but not reported.

        NOTE(review): ``buttonName`` is unused by the live code path —
        per-name buffering exists only in the commented-out draft below,
        so all callers share the single ``_last_button_state``.
        """
        falling_edge = 0
        rising_edge = 0
        '''
        if not self._buttons.has_key(buttonName):
            self._buttons[buttonName] = []
            self._buttons[buttonName].append(0)
        if (buttonState != self._buttons[buttonName][-1]):
            #falling edge
            if (buttonState == 0):
                falling_edge = 1
            #rising edge
            else:
                rising_edge = 1
            self._buttons[buttonName].append(buttonState)
            #Keep the buffer at xe_buffer_size
            while len(self._buttons[buttonName]) > CONSTANTS['xe_buffer_size']:
                self._buttons[buttonName].pop(0)
        '''
        if (buttonState != self._last_button_state):
            #falling edge
            if (buttonState == 0):
                falling_edge = 1
            #rising edge
            else:
                rising_edge = 1
            self._last_button_state = buttonState
        #return falling_edge
        return rising_edge

    def open_program(self, program):
        self._events.open_program(program)

    def close_program(self, program):
        self._events.close_program(program)

    def init_gconf(self):
        """Obtain the default GConf client; on failure log the error and
        leave ``gconf_client`` as None."""
        try:
            self.gconf_client = gconf.client_get_default()
        # Python 2 'except ..., err' syntax
        except Exception, err:
            debug_output(_('ERROR: cannot init GCONF client: %s') % err)
            self.gconf_client = None
class BittrexSocket(WebSocket): def __init__(self): # Event handlers self.updateSummaryState = Events() self.updateSummaryState.on_change += self.on_ticker_update self.orderbook_callback = Events() self.orderbook_callback.on_change += self.on_orderbook self.orderbook_update = Events() self.orderbook_update.on_change += self.on_orderbook_update self.trades = Events() self.trades.on_change += self.on_trades # Queues self.control_queue = queue.Queue() self.order_queue = None # Other self.connections = {} self.order_books = {} self.threads = {} self.url = [ 'https://socket-stage.bittrex.com/signalr', 'https://socket.bittrex.com/signalr', 'https://socket-beta.bittrex.com/signalr' ] self.tickers = Ticker() self.max_tickers_per_conn = 20 self._start_main_thread() # =========================== # Main Thread Private Methods # =========================== def _start_main_thread(self): """ The websocket clients starts a separate thread upon initialization with further subthreads for each connection. """ thread = Thread(target=self._start_socket_control_queue) thread.daemon = True self.threads[thread.getName()] = thread thread.start() # --------------------- # Control Queue Methods # --------------------- def _start_socket_control_queue(self): """ Handles the communication with Bittrex, namely starting/closing a connection and un/subscribing to channels It stays idle until a command is send to it. 
""" while True: try: control_event = self.control_queue.get() except queue.Empty: pass else: if control_event is not None: if control_event.type == 'CONNECT': self._handle_connect(control_event) elif control_event.type == 'DISCONNECT': if self._handle_disconnect(control_event): self.on_close() break elif control_event.type == 'SUBSCRIBE': self._handle_subscribe(control_event) elif control_event.type == 'SUBSCRIBE_INTERNAL': self._handle_subscribe_internal(control_event) elif control_event.type == 'UNSUBSCRIBE': self._handle_unsubscribe(control_event) elif control_event.type == 'SNAPSHOT': self._handle_get_snapshot(control_event) self.control_queue.task_done() def _handle_connect(self, conn_event): """ Prepares and starts new thread for a new Bittrex connection. :param conn_event: Contains the connection object. :type conn_event: ConnectEvent """ thread = Thread(target=self._init_connection, args=(conn_event.conn_obj, )) self.threads[thread.getName()] = thread conn_event.conn_obj.assign_thread(thread.getName()) self.connections.update({conn_event.conn_obj.id: conn_event.conn_obj}) thread.start() def _init_connection(self, conn_obj): """ Initiates the Bittrex connection and assigns event handlers. :param conn_obj: The Bittrex connection object :type conn_obj: BittrexConnection """ conn, corehub = conn_obj.conn, conn_obj.corehub for url in self.url: try: logger.info( 'Trying to establish connection to Bittrex through {}.'. format(url)) conn.url = url conn.start() conn_obj.activate() logger.info( 'Connection to Bittrex established successfully through {}.' .format(url)) # Add handlers corehub.client.on('updateExchangeState', self._on_tick_update) corehub.client.on('updateSummaryState', self._on_ticker_update) conn.wait(120000) # When we purposely close the connection, the script will exit conn.wait() # so we need to inform the script that it should not try to reconnect. 
if conn_obj.close_me is True: return except HTTPError: logger.info( 'Failed to establish connection through {}'.format(url)) except MissingSchema: logger.info('Invalid URL: {}'.format(url)) else: logger.info( 'Failed to establish connection to Bittrex through all supplied URLS. Closing the socket' ) return def _handle_disconnect(self, disconn_event): """ Handles the event of disconnecting connections. :type disconn_event: The disconnect event. :type disconn_event: DisconnectEvent :return: True = all connections have to be closed; False = specific connection has to be closed. :rtype: bool """ # Find whether we are closing all connections or just one. if disconn_event.conn_object is None: conns = self.connections.values() msg = 'Sending a close signal to all connections.' flag = True else: conns = disconn_event.conn_object msg = 'Sending a close signal to connection {}'.format(conns[0].id) flag = False # Closing a connection from an external error results in an error. # We set a close_me flag so that next time when a message is received # from the thread that started the connection, a method will be called to close it. for conn in conns: logger.info(msg) conn.close() while conn.state: sleep(0.5) logger.info('Connection {} has been successfully closed.'.format( conn.id)) return flag def _handle_subscribe(self, sub_event): """ Handles the event of subscribing a specific ticker to a specific subscription type. :param sub_event: The ticker subscription event :type sub_event: SubscribeEvent :return: False = Subscribing failed because the connection is not active. :rtype: bool """ conn = sub_event.conn_object server_callback = sub_event.server_callback server_callback_no_payload = sub_event.server_callback_no_payload tickers = sub_event.tickers sub_type = sub_event.sub_type timeout = 0 while conn.state is False: sleep(0.2) timeout += 1 if timeout >= 100: logger.info( 'Failed to subscribe [{}][{}] from connection {} after 20 seconds. 
' 'The connection is probably down.'.format( sub_type, tickers, conn.id)) return else: self.tickers.enable(tickers, sub_type, conn.id) try: if server_callback is not None: conn.set_callback_state(CALLBACK_EXCHANGE_DELTAS, CALLBACK_STATE_ON) for cb in server_callback: for ticker in tickers: conn.corehub.server.invoke(cb, ticker) if self.tickers.get_sub_state( ticker, SUB_TYPE_ORDERBOOK) is True: self._get_snapshot([ticker]) conn.increment_ticker() if server_callback_no_payload is not None: if tickers == ALL_TICKERS: state = CALLBACK_STATE_ON_ALLTICKERS else: state = CALLBACK_STATE_ON conn.set_callback_state(CALLBACK_SUMMARY_DELTAS, state) for cb in server_callback_no_payload: conn.corehub.server.invoke(cb) except Exception as e: print(e) print('Failed to subscribe') def _handle_subscribe_internal(self, sub_event): """ If the ticker is already in the tickers list and it shares a subscription callback for the new subscription request, then we don't have to invoke a request to Bittrex but only enable the subscription state internally, because the messages are received but are filtered. :param sub_event: The internal subscribe event. :type sub_event: SubscribeInternalEvent """ tickers = sub_event.tickers conn = sub_event.conn_object sub_type = sub_event.sub_type self.tickers.enable(tickers, sub_type, conn.id) def _handle_unsubscribe(self, unsub_event): """ Handles the event of revoking a specific active subscription for a specific ticker. Also if no active subscriptions remain, the websocket client is closed. :param unsub_event: The ticker unsubscribe event. :type unsub_event: UnsubscribeEvent """ ticker, sub_type, conn_id = unsub_event.ticker, unsub_event.sub_type, unsub_event.conn_id self.tickers.disable(ticker, sub_type, conn_id) self._is_no_subs_active() def _handle_get_snapshot(self, snapshot_event): """ Requests an order book snapshot request from Bittrex. :param snapshot_event: The ticker snapshot event. 
:type snapshot_event: SnapshotEvent """ conn, ticker = snapshot_event.conn_object, snapshot_event.ticker method = 'queryExchangeState' # Wait for the connection to start successfully and record N nounces of data while conn.state is False or self.tickers.get_nounces(ticker) < 5: sleep(0.1) else: try: logger.info('[Subscription][{}][{}]: Order book snapshot ' 'requested.'.format(SUB_TYPE_ORDERBOOK, ticker)) conn.corehub.server.invoke(method, ticker) self.tickers.set_snapshot_state(ticker, SNAPSHOT_SENT) except Exception as e: print(e) print('Failed to invoke snapshot query') while self.tickers.get_snapshot_state(ticker) is not SNAPSHOT_ON: sleep(0.5) def _is_first_run(self, tickers, sub_type): # Check if the websocket has been initiated already or if it's the first run. if not self.tickers.list: self.on_open() self._subscribe_first_run(tickers, sub_type) else: return False def _subscribe_first_run(self, tickers, sub_type, objects=None): if objects is None: objects = self._create_btrx_connection(tickers) for obj in objects: self.control_queue.put(ConnectEvent(obj[1])) self.control_queue.put(SubscribeEvent(obj[0], obj[1], sub_type)) def _unsubscribe(self, tickers, sub_type): for ticker in tickers: event = UnsubscribeEvent(ticker, self.tickers, sub_type) self.control_queue.put(event) def _get_snapshot(self, tickers): for ticker_name in tickers: # ticker_object = self.tickers.list[ticker_name] conn_id = self.tickers.get_sub_type_conn_id( ticker_name, SUB_TYPE_ORDERBOOK) # Due to multithreading the connection might not be added to the connection list yet while True: try: conn = self.connections[conn_id] except KeyError: sleep(0.5) conn_id = self.tickers.get_sub_type_conn_id( ticker_name, SUB_TYPE_ORDERBOOK) else: break self.control_queue.put(SnapshotEvent(ticker_name, conn)) def _is_order_queue(self): if self.order_queue is None: self.order_queue = queue.Queue() thread = Thread(target=self._start_order_queue) thread.daemon = True self.threads[thread.getName()] = thread 
        # NOTE(review): tail of a method whose beginning is above this chunk.
        thread.start()

    def _start_order_queue(self):
        """Consume order events forever, gated on the snapshot state.

        Deltas that arrive before the order book snapshot is received are
        parked in a per-ticker internal queue; once the snapshot lands the
        backlog is replayed and live syncing begins.
        """
        while True:
            try:
                order_event = self.order_queue.get()
            except queue.Empty:
                # Blocking get() should not raise Empty; kept as a safeguard.
                pass
            else:
                if order_event is not None:
                    ticker = order_event['MarketName']
                    snapshot_state = self.tickers.get_snapshot_state(ticker)
                    if snapshot_state in [SNAPSHOT_OFF, SNAPSHOT_SENT]:
                        # Snapshot not here yet - buffer the delta.
                        self._init_backorder_queue(ticker, order_event)
                    elif snapshot_state == SNAPSHOT_RCVD:
                        # Snapshot just arrived - replay the buffered deltas.
                        if self._transfer_backorder_queue(ticker):
                            self.tickers.set_snapshot_state(
                                ticker, SNAPSHOT_ON)
                    if self.tickers.get_snapshot_state(ticker) == SNAPSHOT_ON:
                        # Book is live: apply this delta and notify listeners.
                        self._sync_order_book(ticker, order_event)
                        self.orderbook_callback.on_change(
                            self.order_books[ticker])
                    self.order_queue.task_done()

    def _is_running(self, tickers, sub_type):
        """Route subscription requests for *tickers* onto live connections."""
        # Check for existing connections
        if self.connections:
            # Check for already existing tickers and enable the subscription before opening a new connection.
            for ticker in tickers:
                if self.tickers.get_sub_state(ticker, sub_type) is SUB_STATE_ON:
                    logger.info(
                        '{} subscription is already enabled for {}. Ignoring...'
                        .format(sub_type, ticker))
                else:
                    # Assign most suitable connection
                    events = self._assign_conn(ticker, sub_type)
                    for event in events:
                        self.control_queue.put(event)

    def _assign_conn(self, ticker, sub_type):
        """Pick (or create) the connection that should carry this subscription.

        Returns a list of control events: either a single (internal)
        subscribe event for an existing connection, or a connect event
        followed by a subscribe event for a freshly created one.
        """
        # Wait until the control queue drains so the connection census below
        # reflects all pending changes.
        while self.control_queue.unfinished_tasks > 0:
            sleep(0.2)
        conns = self.tickers.sort_by_callbacks()
        d = {}
        if sub_type == SUB_TYPE_TICKERUPDATE:
            # Check for connection with enabled 'CALLBACK_SUMMARY_DELTAS'
            for conn in conns.keys():
                if CALLBACK_SUMMARY_DELTAS in conns[conn]:
                    d.update({
                        conns[conn]['{} count'.format(CALLBACK_EXCHANGE_DELTAS)]:
                        conn
                    })
            # and get the connection with the lowest number of tickers.
            if d:
                min_tickers = min(d.keys())
                conn_id = d[min_tickers]
                return [
                    SubscribeInternalEvent(ticker, self.connections[conn_id],
                                           sub_type)
                ]
            # No connection found with 'CALLBACK_SUMMARY_DELTAS'.
            # Get the connection with the lowest number of tickers.
            else:
                for conn in conns.keys():
                    d.update({
                        conns[conn]['{} count'.format(CALLBACK_EXCHANGE_DELTAS)]:
                        conn
                    })
                min_tickers = min(d.keys())
                conn_id = d[min_tickers]
                return [
                    SubscribeEvent(ticker, self.connections[conn_id], sub_type)
                ]
        else:
            # If 'EXCHANGE_DELTAS' is enabled for the ticker
            # and the specific connection, we just need to find the
            # connection and enable the subscription state in order
            # to stop filtering the messages.
            for conn in conns.keys():
                try:
                    if ticker in conns[conn][CALLBACK_EXCHANGE_DELTAS]:
                        return [
                            SubscribeInternalEvent(ticker,
                                                   self.connections[conn],
                                                   sub_type)
                        ]
                except KeyError:
                    break
            # If there is no active subscription for the ticker,
            # check if there is enough quota and add the subscription to
            # an existing connection.
            for conn in conns.keys():
                d.update({
                    conns[conn]['{} count'.format(CALLBACK_EXCHANGE_DELTAS)]:
                    conn
                })
            min_tickers = min(d.keys())
            if min_tickers < self.max_tickers_per_conn:
                conn_id = d[min_tickers]
                return [
                    SubscribeEvent(ticker, self.connections[conn_id], sub_type)
                ]
            # The existing connections are in full capacity, create a new connection and subscribe.
            else:
                obj = self._create_btrx_connection([ticker])[0]
                conn_event = ConnectEvent(obj[1])
                sub_event = SubscribeEvent(ticker, obj[1], sub_type)
                return [conn_event, sub_event]

    def _is_no_subs_active(self):
        """Shut down connections that no longer carry any subscription."""
        # Close the websocket if no active connections remain.
        active_conns = self.tickers.sort_by_callbacks()
        if not active_conns:
            self.disconnect()
        else:
            # Check for non-active connections and close ONLY them.
            non_active = set(self.connections) - set(active_conns)
            if non_active:
                for conn in non_active:
                    logger.info(
                        'Connection {} has no active subscriptions. Closing it...'
                        .format(conn))
                    conn_object = self.connections[conn]
                    disconnect_event = DisconnectEvent(conn_object)
                    self.control_queue.put(disconnect_event)

    # ==============
    # Public Methods
    # ==============

    # -----------------
    # Subscribe Methods
    # -----------------

    def subscribe_to_orderbook(self, tickers, book_depth=10):
        """
        Subscribe and maintain the live order book for a set of ticker(s).

        :param tickers: A list of tickers you are interested in.
        :type tickers: []
        :param book_depth: The desired depth of the order book to be maintained.
        :type book_depth: int
        """
        sub_type = SUB_TYPE_ORDERBOOK
        self._is_order_queue()
        if self._is_first_run(tickers, sub_type) is False:
            self._is_running(tickers, sub_type)
        self.tickers.set_book_depth(tickers, book_depth)

    def subscribe_to_orderbook_update(self, tickers):
        """
        Subscribe to order book updates for a set of ticker(s).

        :param tickers: A list of tickers you are interested in.
        :type tickers: []
        """
        sub_type = SUB_TYPE_ORDERBOOKUPDATE
        if self._is_first_run(tickers, sub_type) is False:
            self._is_running(tickers, sub_type)

    def subscribe_to_trades(self, tickers):
        """
        Subscribe and receive tick data(executed trades) for a set of ticker(s).

        :param tickers: A list of tickers you are interested in.
        :type tickers: []
        """
        sub_type = SUB_TYPE_TRADES
        if self._is_first_run(tickers, sub_type) is False:
            self._is_running(tickers, sub_type)

    def subscribe_to_ticker_update(self, tickers=None):
        """
        Subscribe and receive general data updates for a set of ticker(s).

        Example output:
            {
                'MarketName': 'BTC-ADA',
                'High': 7.65e-06,
                'Low': 4.78e-06,
                'Volume': 1352355429.5288217,
                'Last': 7.2e-06,
                'BaseVolume': 7937.59243908,
                'TimeStamp': '2017-11-28T15:02:17.7',
                'Bid': 7.2e-06,
                'Ask': 7.21e-06,
                'OpenBuyOrders': 4452,
                'OpenSellOrders': 3275,
                'PrevDay': 5.02e-06,
                'Created': '2017-09-29T07:01:58.873'
            }

        :param tickers: A list of tickers you are interested in.
        :type tickers: [] or None
        """
        if tickers is None:
            # No explicit list means "all markets".
            tickers = [ALL_TICKERS]
        sub_type = SUB_TYPE_TICKERUPDATE
        if self._is_first_run(tickers, sub_type) is False:
            self._is_running(tickers, sub_type)

    # -------------------
    # Unsubscribe Methods
    # -------------------

    def unsubscribe_to_orderbook(self, tickers):
        """
        Unsubscribe from real time order for specific set of ticker(s).

        :param tickers: A list of tickers you are interested in.
        :type tickers: []
        """
        sub_type = SUB_TYPE_ORDERBOOK
        self._unsubscribe(tickers, sub_type)

    def unsubscribe_to_orderbook_update(self, tickers):
        """
        Unsubscribe from order book updates for a set of ticker(s).

        :param tickers: A list of tickers you are interested in.
        :type tickers: []
        """
        sub_type = SUB_TYPE_ORDERBOOKUPDATE
        self._unsubscribe(tickers, sub_type)

    def unsubscribe_to_trades(self, tickers):
        """
        Unsubscribe from receiving tick data(executed trades) for a set of ticker(s)

        :param tickers: A list of tickers you are interested in.
        :type tickers: []
        """
        sub_type = SUB_TYPE_TRADES
        self._unsubscribe(tickers, sub_type)

    def unsubscribe_to_ticker_update(self, tickers=None):
        """
        Unsubscribe from receiving general data updates for a set of ticker(s).

        :param tickers: A list of tickers you are interested in.
        :type tickers: [] or None
        """
        if tickers is None:
            tickers = [ALL_TICKERS]
        sub_type = SUB_TYPE_TICKERUPDATE
        self._unsubscribe(tickers, sub_type)

    # -------------
    # Other Methods
    # -------------

    def get_order_book(self, ticker=None):
        """
        Returns the most recently updated order book for the specific ticker.
        If no ticker is specified, returns a dictionary with the order books
        of all subscribed tickers.

        :param ticker: The specific ticker you want the order book for.
        :type ticker: str
        """
        if ticker is None:
            return self.order_books
        else:
            return self.order_books[ticker]

    def get_order_book_sync_state(self, tickers=None):
        """
        Returns the sync state of the order book for the specific ticker(s).
        If no ticker is specified, returns the state for all tickers.
        The sync states are:

            Not initiated = 0
            Invoked, not synced = 1
            Received, not synced, not processing = 2
            Received, synced, processing = 3

        :param tickers: The specific ticker(s) and it's order book sync state you are interested in.
        :type tickers: []
        """
        if tickers is not None:
            t = find_ticker_type(tickers)
        else:
            t = self.tickers.list.keys()
        states = {}
        for ticker in t:
            # Only tickers with an active order book subscription have a state.
            if self.tickers.get_sub_state(ticker, SUB_TYPE_ORDERBOOK) is True:
                state = self.tickers.get_snapshot_state(ticker)
                states[ticker] = state
        return states

    def disconnect(self):
        """
        Disconnects the connections and stops the websocket instance.
        """
        self.control_queue.put(DisconnectEvent())

    @staticmethod
    def enable_log(file_name=None):
        """
        Enables logging.

        :param file_name: The name of the log file, located in the same directory as the executing script.
        :type file_name: str
        """
        add_stream_logger(file_name=file_name)

    @staticmethod
    def disable_log():
        """
        Disables logging.
        """
        remove_stream_logger()

    def _create_btrx_connection(self, tickers):
        """Create one connection object per chunk of tickers.

        Returns a list of [chunk_of_tickers, connection_object] pairs.
        """
        results = []

        def get_chunks(l, n):
            # Yield successive n-sized chunks from l.
            for i in range(0, len(l), n):
                yield l[i:i + n]

        # Initiate a generator that splits the ticker list into chunks
        ticker_gen = get_chunks(list(tickers), self.max_tickers_per_conn)
        while True:
            try:
                chunk_list = next(ticker_gen)
            except StopIteration:
                break
            if chunk_list is not None:
                conn_obj = self._create_signalr_connection()
                results.append([chunk_list, conn_obj])
        return results

    def _create_signalr_connection(self):
        """Build a SignalR connection (via cfscrape to pass Cloudflare)."""
        with cfscrape.create_scraper() as connection:
            conn = Connection(None, connection)
            conn.received += self._on_debug
            conn.error += self.on_error
            corehub = conn.register_hub('coreHub')
            return BittrexConnection(conn, corehub)

    def _is_orderbook_snapshot(self, msg):
        # Detect if the message contains order book snapshots and manipulate them.
        # The snapshot reply ('R') carries no market name, so it is matched to
        # the first ticker whose snapshot request is outstanding (SNAPSHOT_SENT).
        if 'R' in msg and type(msg['R']) is not bool:
            if 'MarketName' in msg['R'] and msg['R']['MarketName'] is None:
                for ticker in self.tickers.list.values():
                    if ticker['OrderBook']['SnapshotState'] == SNAPSHOT_SENT:
                        msg['R']['MarketName'] = ticker['Name']
                        # Fills are not part of the maintained book.
                        del msg['R']['Fills']
                        self.order_books[ticker['Name']] = msg['R']
                        self.tickers.set_snapshot_state(
                            ticker['Name'], SNAPSHOT_RCVD)
                        break
                logger.info(
                    '[Subscription][{}][{}]: Order book snapshot received.'.
                    format(SUB_TYPE_ORDERBOOK, msg['R']['MarketName']))

    def _init_backorder_queue(self, ticker, msg):
        """Buffer a delta that arrived before the snapshot, lazily creating the queue."""
        sub = self.tickers.list[ticker][SUB_TYPE_ORDERBOOK]
        if sub['InternalQueue'] is None:
            sub['InternalQueue'] = queue.Queue()
        sub['InternalQueue'].put(msg)
        self.tickers.increment_nounces(ticker)

    def _transfer_backorder_queue(self, ticker):
        """Replay the buffered deltas against the freshly received snapshot."""
        sub = self.tickers.list[ticker][SUB_TYPE_ORDERBOOK]
        q = sub['InternalQueue']
        while True:
            try:
                e = q.get(False)
            except queue.Empty:
                # Backlog drained; drop the queue and report success.
                sub['InternalQueue'] = None
                return True
            else:
                if self._sync_order_book(ticker, e):
                    self.tickers.set_snapshot_state(ticker, SNAPSHOT_ON)
                q.task_done()

    # ========================
    # Private Channels Methods
    # ========================

    def _on_debug(self, **kwargs):
        """
        Debug information, shows all data
        Don't edit unless you know what you are doing.
        Redirect full order book snapshots to on_message
        """
        if self._is_close_me():
            return
        self._is_orderbook_snapshot(kwargs)

    def _on_tick_update(self, msg):
        """Fan one exchange-delta message out to the book/update/trade channels."""
        if self._is_close_me():
            return
        ticker = msg['MarketName']
        subs = self.tickers.get_ticker_subs(ticker)
        if self.tickers.get_sub_state(ticker, SUB_TYPE_ORDERBOOK) is True:
            self.order_queue.put(msg)
        if subs[SUB_TYPE_ORDERBOOKUPDATE]['Active'] is True:
            d = dict(self._create_base_layout(msg), **{
                'bids': msg['Buys'],
                'asks': msg['Sells']
            })
            self.orderbook_update.on_change(d)
        if subs[SUB_TYPE_TRADES]['Active'] is True:
            if msg['Fills']:
                d = dict(self._create_base_layout(msg),
                         **{'trades': msg['Fills']})
                self.trades.on_change(d)

    def _on_ticker_update(self, msg):
        """
        Invoking summary state updates for specific filter
        doesn't work right now. So we will filter them manually.
        """
        if self._is_close_me():
            return
        if 'Deltas' in msg:
            for update in msg['Deltas']:
                if self.tickers.get_sub_state(
                        ALL_TICKERS, SUB_TYPE_TICKERUPDATE) is SUB_STATE_ON:
                    # All-markets subscription: forward the whole batch.
                    # NOTE(review): this fires once per delta in the batch,
                    # so the full list is delivered repeatedly - confirm intent.
                    self.updateSummaryState.on_change(msg['Deltas'])
                else:
                    try:
                        ticker = update['MarketName']
                        subs = self.tickers.get_ticker_subs(ticker)
                    except KeyError:
                        # not in the subscription list
                        continue
                    else:
                        if subs['TickerUpdate']['Active']:
                            self.updateSummaryState.on_change(update)

    # -------------------------------------
    # Private Channels Supplemental Methods
    # -------------------------------------

    @staticmethod
    def _create_base_layout(msg):
        """Common envelope for channel payloads: ticker, nounce, receive time."""
        d = {
            'ticker': msg['MarketName'],
            'nounce': msg['Nounce'],
            'timestamp': time()
        }
        return d

    def _sync_order_book(self, pair_name, order_data):
        # Syncs the order book for the pair, given the most recent data from the socket
        book_depth = self.tickers.list[pair_name]['OrderBook'][
            'OrderBookDepth']
        nounce_diff = order_data['Nounce'] - self.order_books[pair_name][
            'Nounce']
        if nounce_diff == 1:
            self.order_books[pair_name]['Nounce'] = order_data['Nounce']
            # Start syncing
            for side in [['Buys', True], ['Sells', False]]:
                made_change = False
                for item in order_data[side[0]]:
                    # TYPE 0: New order entries at matching price
                    # -> ADD to order book
                    if item['Type'] == 0:
                        self.order_books[pair_name][side[0]].append({
                            'Quantity': item['Quantity'],
                            'Rate': item['Rate']
                        })
                        made_change = True
                    # TYPE 1: Cancelled / filled order entries at matching price
                    # -> DELETE from the order book
                    elif item['Type'] == 1:
                        for i, existing_order in enumerate(
                                self.order_books[pair_name][side[0]]):
                            if existing_order['Rate'] == item['Rate']:
                                del self.order_books[pair_name][side[0]][i]
                                made_change = True
                                break
                    # TYPE 2: Changed order entries at matching price (partial fills, cancellations)
                    # -> EDIT the order book
                    elif item['Type'] == 2:
                        for existing_order in self.order_books[pair_name][
                                side[0]]:
                            if existing_order['Rate'] == item['Rate']:
                                existing_order['Quantity'] = item['Quantity']
                                made_change = True
                                break
                if made_change:
                    # Sort by price, with respect to BUY(desc) or SELL(asc)
                    self.order_books[pair_name][side[0]] = sorted(
                        self.order_books[pair_name][side[0]],
                        key=lambda k: k['Rate'],
                        reverse=side[1])
                    # Put depth to 10
                    self.order_books[pair_name][side[0]] = \
                        self.order_books[pair_name][side[0]][
                            0:book_depth]
                    # Add nounce unix timestamp
                    self.order_books[pair_name]['timestamp'] = time()
            return True
        # The next nounce will trigger a sync.
        elif nounce_diff == 0:
            return True
        # The order book snapshot nounce is ahead. Discard this nounce.
        elif nounce_diff < 0:
            return False
        else:
            raise NotImplementedError("Implement nounce resync!")

    def _is_close_me(self):
        """Return True (after closing) when this connection thread was told to die."""
        # NOTE(review): Thread.getName() is deprecated in modern Python;
        # the .name attribute is the replacement.
        thread_name = current_thread().getName()
        conn_object = self._return_conn_by_thread_name(thread_name)
        if conn_object.close_me:
            try:
                conn_object.conn.close()
            except WebSocketConnectionClosedException:
                pass
            conn_object.deactivate()
            return True

    def _return_conn_by_thread_name(self, thread_name):
        # Returns None when no connection matches - callers assume a match.
        for conn in self.connections:
            if self.connections[conn].thread_name == thread_name:
                return self.connections[conn]

    # ===============
    # Public Channels
    # ===============

    def on_open(self):
        # Called before initiating the first websocket connection
        # Use it when you want to add some opening logic.
        pass

    def on_close(self):
        # Called before closing the websocket instance.
        # Use it when you want to add any closing logic.
        # print('Bittrex websocket closed.')
        pass

    def on_error(self, error):
        # Error handler
        print(error)
        self.disconnect()

    def on_orderbook(self, msg):
        # The main channel of subscribe_to_orderbook().
        # print('[OrderBook]: {}'.format(msg['MarketName']))
        pass

    def on_orderbook_update(self, msg):
        # The main channel of subscribe_to_orderbook_update().
        # print('[OrderBookUpdate]: {}'.format(msg['ticker']))
        pass

    def on_trades(self, msg):
        # The main channel of subscribe_to_trades().
        # print('[Trades]: {}'.format(msg['ticker']))
        pass

    def on_ticker_update(self, msg):
        # The main channel of subscribe_to_ticker_update().
        # print('Just received ticker update for {}.'.format(msg['MarketName']))
        pass
# Project-local route handler modules (one class per route type).
from index import Index
from inventory import Inventory
from people import People
from presentations import Presentations
from requests_ import Requests_
from robohash import Robohash
from test import Test
from vms import VMs
from positions import Positions

app = Flask(__name__)

# The handler classes for each route type
about = About()
help = Help()  # NOTE(review): shadows the builtin help() in this module
events = Events()
index = Index()
inventory = Inventory()
people = People()
requests_ = Requests_()
robohash = Robohash()
test = Test()
vms = VMs()
presentations = Presentations()
positions = Positions()

# parsed config from config.ini
config = globals.config

# Two helper functions to save typing
class AddToDatabaseTask(Task):
    """Import scraped player stats and PitchFx data into the database.

    Works season by season from the app's audit report, only touching games
    flagged as successfully scraped, and fires progress events so callers
    can drive a UI.
    """

    def __init__(self, app):
        super().__init__(app)
        # Per-year cache for Team id lookups (filled lazily).
        self._team_id_map = {}
        self.events = Events((
            # NOTE(review): the two "find_all_games..." events are declared
            # but not fired anywhere in this class - confirm they are used.
            "find_all_games_eligible_for_import_start",
            "find_all_games_eligible_for_import_complete",
            "add_data_to_db_start",
            "add_data_to_db_progress",
            "add_data_to_db_complete",
        ))

    # Lazily computed lookup tables; each hits the DB once per task lifetime.
    @cached_property
    def season_id_map(self):
        return db.Season.get_regular_season_map(self.db_session)

    @cached_property
    def player_id_map(self):
        return db.PlayerId.get_player_id_map(self.db_session)

    @cached_property
    def game_id_map(self):
        return db.GameScrapeStatus.get_game_id_map(self.db_session)

    @cached_property
    def pitch_app_id_map(self):
        return db.PitchAppScrapeStatus.get_pitch_app_id_map(self.db_session)

    def get_team_id_map_for_year(self, year):
        """Return (and cache) the bbref-team-id -> db-id map for *year*."""
        team_id_map_for_year = self._team_id_map.get(year)
        if not team_id_map_for_year:
            team_id_map_for_year = db.Team.get_team_id_map_for_year(
                self.db_session, year)
            self._team_id_map[year] = team_id_map_for_year
        return team_id_map_for_year

    def execute(self, year=None):
        """Import one season when *year* is given, otherwise every audited season."""
        return self.add_data_for_year(year) if year else self.add_all_data()

    def add_all_data(self):
        # Only seasons with at least one successfully scraped game qualify.
        valid_years = [
            year for year, results in self.app.audit_report.items()
            if results["successful"]
        ]
        for year in valid_years:
            self.add_data_for_year(year)
        return Result.Ok()

    def add_data_for_year(self, year):
        report_for_season = self.app.audit_report.get(year)
        if not report_for_season:
            return Result.Fail(
                f"Audit report could not be generated for MLB Season {year}")
        game_ids = report_for_season.get("successful")
        if not game_ids:
            error = f"No games for MLB Season {year} qualify to have PitchFx data imported."
            return Result.Fail(error)
        self.events.add_data_to_db_start(year, game_ids)
        self.add_data_for_games(year, game_ids)
        self.events.add_data_to_db_complete(year)
        return Result.Ok()

    def add_data_for_games(self, year, game_ids):
        # Returns a failed Result early; returns None on full success
        # (add_data_for_year ignores the return value either way).
        for num, game_id in enumerate(game_ids, start=1):
            game_data = GameData(self.app, game_id)
            result = self.add_player_stats_to_database(game_id, game_data)
            if result.failure:
                return result
            result = self.add_pitchfx_to_database(game_data)
            if result.failure:
                return result
            self.events.add_data_to_db_progress(num, year, game_id)

    def add_player_stats_to_database(self, game_id, game_data):
        """Import bat and pitch stats for one game, skipping already-imported parts."""
        game_status = db.GameScrapeStatus.find_by_bbref_game_id(
            self.db_session, game_id)
        if not game_status:
            error = f"Import aborted! Game status '{game_id}' not found in database"
            return Result.Fail(error)
        if not game_status.imported_bat_stats:
            self.add_bat_stats_to_database(game_id, game_data, game_status)
        if not game_status.imported_pitch_stats:
            self.add_pitch_stats_to_database(game_id, game_data, game_status)
        return Result.Ok()

    def add_bat_stats_to_database(self, game_id, game_data, game_status):
        for team_boxscore in game_data.bat_boxscore.values():
            for player_boxscore in team_boxscore.values():
                bat_stats_dict = game_data.get_bat_stats(
                    player_boxscore["mlb_id"]).value
                if not bat_stats_dict:
                    # Player appeared in the boxscore but has no bat stats.
                    continue
                # Lineup/defense details live on the boxscore, not the stats.
                bat_stats_dict["is_starter"] = player_boxscore["is_starter"]
                bat_stats_dict["bat_order"] = player_boxscore["bat_order"]
                bat_stats_dict["def_position"] = player_boxscore[
                    "def_position"]
                bat_stats = db.BatStats.from_dict(game_id, bat_stats_dict)
                bat_stats = self.update_player_stats_relationships(bat_stats)
                self.db_session.add(bat_stats)
        game_status.imported_bat_stats = 1
        self.db_session.commit()

    def add_pitch_stats_to_database(self, game_id, game_data, game_status):
        for mlb_id in game_data.pitch_stats_player_ids:
            pitch_stats_dict = game_data.get_pitch_app_stats(mlb_id).value
            pitch_stats = db.PitchStats.from_dict(game_id, pitch_stats_dict)
            pitch_stats = self.update_player_stats_relationships(pitch_stats)
            self.db_session.add(pitch_stats)
        game_status.imported_pitch_stats = 1
        self.db_session.commit()

    def update_player_stats_relationships(self, stats):
        """Translate the scraped ids on *stats* into database foreign keys."""
        game_date = self.get_game_date_from_bbref_game_id(stats.bbref_game_id)
        stats.player_id = self.player_id_map[stats.player_id_mlb]
        stats.player_team_id = self.get_team_id_map_for_year(
            game_date.year)[stats.player_team_id_bbref]
        stats.opponent_team_id = self.get_team_id_map_for_year(
            game_date.year)[stats.opponent_team_id_bbref]
        stats.season_id = self.season_id_map[game_date.year]
        stats.date_id = self.get_date_status_id_from_game_date(game_date)
        stats.game_status_id = self.game_id_map[stats.bbref_game_id]
        return stats

    def add_pitchfx_to_database(self, game_data):
        for pitch_app_id, pfx_dict_list in game_data.all_pitchfx.items():
            pitch_app = db.PitchAppScrapeStatus.find_by_pitch_app_id(
                self.db_session, pitch_app_id)
            if not pitch_app:
                error = f"Import aborted! Pitch app '{pitch_app_id}' not found in database"
                return Result.Fail(error)
            if pitch_app.imported_pitchfx:
                # Idempotency guard: never import the same pitch app twice.
                continue
            for pfx_dict in pfx_dict_list:
                pfx = db.PitchFx.from_dict(pfx_dict)
                pfx = self.update_pitchfx_relationships(pfx)
                self.db_session.add(pfx)
            pitch_app.imported_pitchfx = 1
            self.db_session.commit()
        return Result.Ok()

    def update_pitchfx_relationships(self, pfx):
        """Translate the scraped ids on *pfx* into database foreign keys."""
        game_date = self.get_game_date_from_bbref_game_id(pfx.bbref_game_id)
        # PitchFx uses brooksbaseball team ids; convert to bbref ids first.
        pitcher_team_id_br = get_bbref_team_id(pfx.pitcher_team_id_bb)
        opponent_team_id_br = get_bbref_team_id(pfx.opponent_team_id_bb)
        pfx.pitcher_id = self.player_id_map[pfx.pitcher_id_mlb]
        pfx.batter_id = self.player_id_map[pfx.batter_id_mlb]
        pfx.team_pitching_id = self.get_team_id_map_for_year(
            game_date.year)[pitcher_team_id_br]
        pfx.team_batting_id = self.get_team_id_map_for_year(
            game_date.year)[opponent_team_id_br]
        pfx.season_id = self.season_id_map[game_date.year]
        pfx.date_id = self.get_date_status_id_from_game_date(game_date)
        pfx.game_status_id = self.game_id_map[pfx.bbref_game_id]
        pfx.pitch_app_db_id = self.pitch_app_id_map[pfx.pitch_app_id]
        return pfx

    def get_game_date_from_bbref_game_id(self, bbref_game_id):
        # Normalize to a datetime at midnight so .year/.month/.day are stable.
        game_date = validate_bbref_game_id(bbref_game_id).value["game_date"]
        return datetime(game_date.year, game_date.month, game_date.day)

    def get_date_status_id_from_game_date(self, game_date):
        return game_date.strftime(DATE_ONLY_TABLE_ID)
    def __init__(self, name, config=None, onAccount=None, onOrderMatched=None,
                 onOrderPlaced=None, onMarketUpdate=None,
                 onUpdateCallOrder=None, ontick=None, bitshares_instance=None,
                 *args, **kwargs):
        """Initialize a worker strategy.

        :param name: worker name, used as the Storage namespace and the
            key into config["workers"]
        :param config: worker configuration dict; loaded from the worker
            config file when omitted
        :param onAccount/onOrderMatched/onOrderPlaced/onMarketUpdate/
            onUpdateCallOrder/ontick: optional event callbacks appended to
            the corresponding Events channels
        :param bitshares_instance: optional shared BitShares instance
        """
        # BitShares instance
        self.bitshares = bitshares_instance or shared_bitshares_instance()

        # Storage
        Storage.__init__(self, name)

        # Events
        Events.__init__(self)
        if ontick:
            self.ontick += ontick
        if onMarketUpdate:
            self.onMarketUpdate += onMarketUpdate
        if onAccount:
            self.onAccount += onAccount
        if onOrderMatched:
            self.onOrderMatched += onOrderMatched
        if onOrderPlaced:
            self.onOrderPlaced += onOrderPlaced
        if onUpdateCallOrder:
            self.onUpdateCallOrder += onUpdateCallOrder

        # Redirect this event to also call order placed and order matched
        self.onMarketUpdate += self._callbackPlaceFillOrders

        if config:
            self.config = config
        else:
            self.config = config = Config.get_worker_config_file(name)

        # Get worker's parameters from the config
        self.worker = config["workers"][name]

        # Get Bitshares account and market for this worker
        self._account = Account(self.worker["account"], full=True,
                                bitshares_instance=self.bitshares)
        self._market = Market(config["workers"][name]["market"],
                              bitshares_instance=self.bitshares)

        # Recheck flag - Tell the strategy to check for updated orders
        self.recheck_orders = False

        # Count of orders to be fetched from the API
        self.fetch_depth = 8

        # Set fee asset
        fee_asset_symbol = self.worker.get('fee_asset')
        if fee_asset_symbol:
            try:
                self.fee_asset = Asset(fee_asset_symbol,
                                       bitshares_instance=self.bitshares)
            except bitshares.exceptions.AssetDoesNotExistsException:
                # Fall back to the core asset when the symbol is unknown.
                self.fee_asset = Asset('1.3.0',
                                       bitshares_instance=self.bitshares)
        else:
            # If there is no fee asset, use LLC
            self.fee_asset = Asset('1.3.0', bitshares_instance=self.bitshares)

        # CER cache
        self.core_exchange_rate = None

        # Ticker
        self.ticker = self.market.ticker

        # Settings for bitshares instance
        self.bitshares.bundle = bool(self.worker.get("bundle", False))

        # Disabled flag - this flag can be flipped to True by a worker
        # and will be reset to False after reset only
        self.disabled = False

        # Order expiration time in seconds
        self.expiration = 60 * 60 * 24 * 365 * 5

        # buy/sell actions will return order id by default
        self.returnOrderId = 'head'

        # A private logger that adds worker identify data to the LogRecord
        self.log = logging.LoggerAdapter(
            logging.getLogger('dexbot.per_worker'), {
                'worker_name': name,
                'account': self.worker['account'],
                'market': self.worker['market'],
                'is_disabled': lambda: self.disabled
            }
        )
        self.worker_market = self.worker["market"]
        self.orders_log = logging.LoggerAdapter(
            logging.getLogger('dexbot.orders_log'), {})
# Minimal manual check of the project-local Events class.
from events import Events

record = Events('név', 'típus', 'lol')
record.open_csv()
print(record.timewaster)
    def __init__(self, name, config=None, onAccount=None, onOrderMatched=None,
                 onOrderPlaced=None, onMarketUpdate=None,
                 onUpdateCallOrder=None, ontick=None, bitshares_instance=None,
                 *args, **kwargs):
        """Initialize a worker strategy with storage, state machine and events.

        :param name: worker name; Storage/StateMachine namespace and the key
            into config["workers"]
        :param config: worker configuration dict; loaded from the worker
            config file when omitted
        :param onAccount/onOrderMatched/onOrderPlaced/onMarketUpdate/
            onUpdateCallOrder/ontick: optional event callbacks appended to
            the corresponding Events channels
        :param bitshares_instance: optional shared BitShares instance
        """
        # BitShares instance
        self.bitshares = bitshares_instance or shared_bitshares_instance()

        # Storage
        Storage.__init__(self, name)

        # Statemachine
        StateMachine.__init__(self, name)

        # Events
        Events.__init__(self)
        if ontick:
            self.ontick += ontick
        if onMarketUpdate:
            self.onMarketUpdate += onMarketUpdate
        if onAccount:
            self.onAccount += onAccount
        if onOrderMatched:
            self.onOrderMatched += onOrderMatched
        if onOrderPlaced:
            self.onOrderPlaced += onOrderPlaced
        if onUpdateCallOrder:
            self.onUpdateCallOrder += onUpdateCallOrder

        # Redirect this event to also call order placed and order matched
        self.onMarketUpdate += self._callbackPlaceFillOrders

        if config:
            self.config = config
        else:
            self.config = config = Config.get_worker_config_file(name)

        # Worker parameters come from the per-worker config section.
        self.worker = config["workers"][name]
        self._account = Account(self.worker["account"], full=True,
                                bitshares_instance=self.bitshares)
        self._market = Market(config["workers"][name]["market"],
                              bitshares_instance=self.bitshares)

        # Recheck flag - Tell the strategy to check for updated orders
        self.recheck_orders = False

        # Settings for bitshares instance
        self.bitshares.bundle = bool(self.worker.get("bundle", False))

        # Disabled flag - this flag can be flipped to True by a worker and
        # will be reset to False after reset only
        self.disabled = False

        # A private logger that adds worker identify data to the LogRecord
        self.log = logging.LoggerAdapter(
            logging.getLogger('dexbot.per_worker'), {
                'worker_name': name,
                'account': self.worker['account'],
                'market': self.worker['market'],
                'is_disabled': lambda: self.disabled
            })
        self.orders_log = logging.LoggerAdapter(
            logging.getLogger('dexbot.orders_log'), {})
def __init__(self): self.events = Events() self.muted_readers_names = [] self.active_cards = {} self.log = logging.getLogger(__name__)
class Requestmeter:
    """
    This class works like a speedometer for requests.

    The members declared will calculate the request ratios made by unit of
    time (seconds, minutes, hours).

    Attributes
    ----------
    total_requests: int
        Counter of the total requests made.
    requests_by_second: list
        A two elements list that stores the requests made in each second.
        It functions like a log that registers the accumulated requests made
        in the previous second and the differential requests made in the
        last second.
    requests_by_minute: list
        A two elements list that stores the requests made in each minute.
        It functions like a log that registers the accumulated requests made
        in the previous minute and the differential requests made in the
        last minute.
    requests_by_hour: list
        A two elements list that stores the requests made in each hour.
        It functions like a log that registers the accumulated requests made
        in the previous hour and the differential requests made in the
        last hour.
    events: Events
        The event trigger. Contains the event names s_speed_limit_exceeded,
        m_speed_limit_exceeded, h_speed_limit_exceeded. Raises the
        corresponding event each time the speed limit has been exceeded.
    timer: Timer
        The timer that will count each second, minute and hour elapsed.

    Class Attributes
    ----------------
    speed_limits : list
        Maximum number of requests that must be sent per second, minute and
        hour, correspondingly.

    Notes
    -----
    The difference between the terms "by" and "per" used in the members of
    this class is clearly explained in
    https://english.stackexchange.com/a/22693
    """

    # maximum number of requests per second, minute and hour, correspondingly
    # NOTE(review): stored on the class, so every instance shares the limits
    # of the most recently constructed instance.
    speed_limits = ()

    def __init__(self, limits):
        Requestmeter.speed_limits = limits if limits is not None else (2, 9, 540)
        self.total_requests = 0
        self.requests_by_second = [0, 0]
        self.requests_by_minute = [0, 0]
        self.requests_by_hour = [0, 0]
        self.events = Events(('s_speed_limit_exceeded',
                              'm_speed_limit_exceeded',
                              'h_speed_limit_exceeded'))
        self.timer = Timer()
        # Event subscriptions
        # NOTE(review): plain assignment replaces any previously registered
        # handler instead of appending one - confirm that is intended.
        self.timer.events.on_second = self.requests_by_second_counter
        self.timer.events.on_minute = self.requests_by_minute_counter
        self.timer.events.on_hour = self.requests_by_hour_counter

    def start(self):
        """Start the second/minute/hour counting threads (daemonized)."""
        self.timer.alive = True
        for target in (self.timer.count_seconds,
                       self.timer.count_minutes,
                       self.timer.count_hours):
            # daemon=True replaces the deprecated Thread.setDaemon() call.
            Thread(target=target, daemon=True).start()

    def finish(self):
        """Stop the timer threads and return the elapsed (seconds, minutes, hours)."""
        # finishing all 3 threads
        self.timer.alive = False
        # returning elapsed time
        return (self.timer.elapsed_seconds, self.timer.elapsed_minutes,
                self.timer.elapsed_hours)

    def count(self):
        """Register one request."""
        self.total_requests += 1

    # region Speed calculators
    # NOTE(review): these divide by elapsed time and will raise
    # ZeroDivisionError if called before any time has elapsed.
    def requests_per_second(self):
        return sum(self.requests_by_second) / self.timer.elapsed_seconds

    def requests_per_minute(self):
        return sum(self.requests_by_minute) / self.timer.elapsed_minutes

    def requests_per_hour(self):
        return sum(self.requests_by_hour) / self.timer.elapsed_hours
    # endregion Speed calculators

    # region Timer event handlers
    def requests_by_second_counter(self):
        # Roll the running total into slot 0, then store the delta for the
        # second that just elapsed in slot -1.
        self.requests_by_second[0] = sum(self.requests_by_second)
        self.requests_by_second[-1] = self.total_requests - self.requests_by_second[0]
        print("s:", self.requests_by_second[-1])
        if self.requests_by_second[-1] > Requestmeter.speed_limits[0]:
            # Fire with the fractional amount by which the limit was exceeded.
            self.events.s_speed_limit_exceeded(
                (self.requests_by_second[-1] / Requestmeter.speed_limits[0]) - 1)

    def requests_by_minute_counter(self):
        self.requests_by_minute[0] = sum(self.requests_by_minute)
        self.requests_by_minute[-1] = self.total_requests - self.requests_by_minute[0]
        print("m:", self.requests_by_minute[-1])
        if self.requests_by_minute[-1] > Requestmeter.speed_limits[1]:
            self.events.m_speed_limit_exceeded(
                (self.requests_by_minute[-1] / Requestmeter.speed_limits[1]) - 1)

    def requests_by_hour_counter(self):
        self.requests_by_hour[0] = sum(self.requests_by_hour)
        self.requests_by_hour[-1] = self.total_requests - self.requests_by_hour[0]
        print("h:", self.requests_by_hour[-1])
        if self.requests_by_hour[-1] > Requestmeter.speed_limits[2]:
            self.events.h_speed_limit_exceeded(
                (self.requests_by_hour[-1] / Requestmeter.speed_limits[2]) - 1)
    # endregion Timer event handlers

    def summary(self):
        """Print totals, elapsed time and the per-unit request logs."""
        p_seconds, p_minutes, p_hours = time_units(self.timer.elapsed_seconds)
        print("Total requests: ", self.total_requests)
        print(f"Elapsed time: {self.timer.elapsed_hours} h | {self.timer.elapsed_minutes} m | "
              f"{self.timer.elapsed_seconds} s")
        print(f"Pretty time: {self.timer.elapsed_seconds} s = {p_hours}:{p_minutes}:{p_seconds}")
        print(f"rps: {self.total_requests/self.timer.elapsed_seconds} requests/second")
        print("Requests by second")
        print(self.requests_by_second)
        print("Requests by minute")
        print(self.requests_by_minute)
        print("Requests by hour")
        print(self.requests_by_hour)
class BuddyClient(object):
    """HTTP client for the Buddy service.

    Wraps a requests.Session with device registration, user auth, automatic
    last-location injection, and connection-loss retry. Exposes three event
    channels: service_exception, authentication_needed, connection_changed.
    """

    exception_name = u"exception"
    _result_name = u"result"
    _hardware_info_file_name = "/proc/cpuinfo"

    def __init__(self, app_id, app_key):
        self._app_id = app_id
        self._app_key = app_key
        self._settings = Settings(self._app_id)
        self._session = requests.Session()
        self._session.auth = Auth(self, self._settings)
        self._last_location = None
        # Event channels consumers can subscribe to.
        self._service_exception = Events()
        self._authentication_needed = Events()
        self._connection_changed = Events()
        self._connection_retry = Thread(target=self.__connection_retry_method)
        self._connection_level = Connection.on

    @property
    def app_id(self):
        return self._app_id

    @property
    def app_key(self):
        return self._app_key

    @property
    def last_location(self):
        return self._last_location

    @last_location.setter
    def last_location(self, value):
        self._last_location = value

    @property
    def current_user_id(self):
        return self._settings.user_id

    @property
    def service_exception(self):
        return self._service_exception

    @property
    def authentication_needed(self):
        return self._authentication_needed

    @property
    def connection_changed(self):
        return self._connection_changed

    def get_access_token_string(self):
        """Return the cached access token, registering the device first if needed."""
        if self._settings.access_token_string is None:
            self.__register_device()
        return self._settings.access_token_string

    def __register_device(self):
        response = self.__handle_dictionary_request(requests.post, "/devices", {
            "appId": self.app_id,
            "appKey": self.app_key,
            "platform": BuddyClient.__get_platform(),
            "model": BuddyClient.__get_model(),
            "osVersion": BuddyClient.__get_os_version(),
            "uniqueId": self.__get_unique_id(),
        })
        if response[BuddyClient.exception_name] is None:
            self._settings.set_device_token(response[BuddyClient._result_name])

    @staticmethod
    def __get_platform():
        return sys.platform

    @staticmethod
    def __get_model():
        # Raspberry Pi style /proc/cpuinfo fields.
        hardware = BuddyClient.__get_cpuinfo("Hardware")
        revision = BuddyClient.__get_cpuinfo("Revision")
        if hardware is None:
            return "Hardware info not available"
        # FIX: revision may be None even when hardware is present;
        # the original concatenation raised TypeError in that case.
        return hardware + "-" + (revision or "")

    @staticmethod
    def __get_os_version():
        return platform.release()

    def __get_unique_id(self):
        # Prefer the CPU serial; fall back to the MAC address.
        unique_id = BuddyClient.__get_cpuinfo("Serial")
        if unique_id is None:
            unique_id = uuid.getnode()
        return unique_id

    @staticmethod
    def __get_cpuinfo(key):
        """Best-effort lookup of *key* in /proc/cpuinfo; None when unavailable."""
        try:
            with open(BuddyClient._hardware_info_file_name, "r") as hardware_file:
                for line in hardware_file:
                    if line.startswith(key):
                        return line.splitlines()[0].split(": ")[1]
        # FIX: was a bare `except:`; keep the best-effort None result but
        # only swallow the failures this lookup can actually produce.
        except (OSError, IndexError):
            return None

    def get(self, path, parameters):
        return self.__handle_parameters_request(self._session.get, path, parameters)

    def delete(self, path, parameters):
        return self.__handle_parameters_request(self._session.delete, path, parameters)

    def patch(self, path, dictionary):
        return self.__handle_dictionary_request(self._session.patch, path, dictionary)

    def post(self, path, dictionary, file=None):
        return self.__handle_dictionary_request(self._session.post, path, dictionary, file)

    def put(self, path, dictionary):
        return self.__handle_dictionary_request(self._session.put, path, dictionary)

    def create_user(self, user_name, password, first_name=None, last_name=None,
                    email=None, gender=None, date_of_birth=None, tag=None):
        """Create a user account and persist its credentials on success."""
        response = self.__handle_dictionary_request(self._session.post, "/users", {
            "username": user_name,
            "password": password,
            "firstName": first_name,
            "lastName": last_name,
            "email": email,
            "gender": gender,
            "dateOfBirth": date_of_birth,
            "tag": tag
        })
        if response[BuddyClient.exception_name] is None:
            self._settings.set_user(response[BuddyClient._result_name])
        return response

    def login_user(self, user_name, password):
        """Log a user in and persist the session on success."""
        response = self.__handle_dictionary_request(self._session.post, "/users/login", {
            "username": user_name,
            "password": password,
        })
        if response[BuddyClient.exception_name] is None:
            self._settings.set_user(response[BuddyClient._result_name])
        return response

    def logout_user(self):
        self._settings.set_user(None)

    def __handle_parameters_request(self, verb, path, parameters=None):
        self.__handle_last_location(parameters)

        def closure():
            return verb(self._settings.service_root + path, params=parameters)

        return self.__handle_request(closure)

    def __handle_dictionary_request(self, verb, path, dictionary, file=None):
        self.__handle_last_location(dictionary)

        def closure():
            if file is None:
                return verb(self._settings.service_root + path, json=dictionary)
            else:
                return verb(self._settings.service_root + path, json=dictionary,
                            files={"data": ("data",) + file})

        return self.__handle_request(closure)

    def __handle_last_location(self, dictionary):
        # Inject the cached location into every outgoing payload.
        if self.last_location is not None and dictionary is not None:
            dictionary["location"] = "%s, %s" % self.last_location

    def __handle_request(self, closure):
        response = None
        try:
            response = closure()
        except requests.RequestException as exception:
            return self.__handle_connection_exception(exception)
        except Exception as exception:
            return self.__handle_exception(exception)
        else:
            return self.__handle_response(response)

    def __handle_connection_exception(self, exception):
        self.__set_connection_level(Connection.off)
        # FIX: isAlive() was removed in Python 3.9 -> use is_alive().
        if not self._connection_retry.is_alive():
            # FIX: a Thread may only be start()ed once; if a previous retry
            # already ran to completion (ident set), build a fresh thread
            # instead of crashing with RuntimeError on restart.
            if self._connection_retry.ident is not None:
                self._connection_retry = Thread(target=self.__connection_retry_method)
            self._connection_retry.start()
        return self.__handle_exception(exception)

    def __set_connection_level(self, connection_level):
        # Only fire the event on actual transitions.
        if self._connection_level is not connection_level:
            self._connection_level = connection_level
            self._connection_changed.on_change(self._connection_level)

    def __handle_exception(self, exception):
        self._service_exception.on_change(exception)
        return {BuddyClient.exception_name: exception}

    def __handle_response(self, response):
        if response.status_code == 401 or response.status_code == 403:
            self._authentication_needed.on_change()
        # NOTE(review): assumes every service response body is JSON;
        # a non-JSON error page would raise here - confirm against the API.
        json_response = response.json()
        json_response[BuddyClient.exception_name] = None
        return json_response

    def __connection_retry_method(self):
        """Ping the service until it answers, then flip the connection back on."""
        # NOTE(review): this loop retries without any delay between attempts,
        # hammering the service while it is down - consider a backoff.
        successful = False
        try:
            while not successful:
                try:
                    requests.get(self._settings.service_root + "/service/ping")
                except requests.RequestException:
                    successful = False
                else:
                    successful = True
        finally:
            self.__set_connection_level(Connection.on)
def events(data):
    """Add every event hit from an EventPageByLanguage payload to the global
    `items` collection, then print the collection."""
    # Local import so the class doesn't clash with this function's own name.
    from events import Events
    for hit in data['data']['EventPageByLanguage']:
        items.add(Events(hit).item)
    items.list()
class Bot (config_class): name = 'login' def __init__(self, wrapper=None, config='config.ini', path=''): config_class.__init__(self) self.bot_start = time.time() self.bot_connected = -1 self.want_connected = False self.ui_loaded = False self.def_config = config self.cfg_file = ['global.ini', config] self.path = path self.status = {'connected': False} self.wrapper = wrapper self.events = Events() self.events.add('IO', 0, 0, 'addchat', self.print_text) self.events.add('IO', 1000, 0, 'send', self.display_send) self.events.add('bot', -1, 0, 'disc', self.disced, 'connect', self.conn_clock, 'connected', self.connected, 'configure', self.edit_config, 'load_config', self.load_spec_config) self.events.add('ui', 0, 0, 'start', self.add_menus) self.load_spec_config() self.reload_connfig() self.plugins = extensions.plugins(self) self.plugins.load_classes(order=self.config['plugins']) self.events.call('bot', 'start') def add_menus(self): self.events.call('ui', 'menu', 'add', ['Bot Settings', 'Configure...\tCTRL+E', self.show_config, 'Reload settings\tCTRL+R', self.reload_config]) self.events.call('ui', 'menu', 'add', ['Connection', 'Connect\tCTRL+B', self.connect, 'Disconnect\tCTRL+D', self.disc]) def show_config(self, *rest): self.events.call('ui', 'config') def reload_config(self, *rest): self.events.call('bot', 'load_config') self.addchat('Settings reloaded.') def add_socket(self, socket, func): self.wrapper.sockets[socket] = func def del_socket(self, socket): func = self.wrapper.sockets[socket] del self.warpper.sockets[socket] return func def set_socket_func(self, socket, new_func): func = self.wrapper.sockets[socket] self.wrapper.sockets[socket] = new_func return func def disc(self, *rest): self.want_connected = False self.events.call('bot', 'disc') self.addchat('Bot disconnected.') self.status['connected'] = False def connect(self, *rest): self.want_connected = True self.events.call('bot', 'connect') def conn_clock(self): self.reload_connfig() self.conn_start = 
time.time() def connected(self, username): self.status = {'connected': True} self.addchat('Bot connected in '+ \ time_diff(self.conn_start, time.time())) self.conn_start = -1 self.bot_connected = time.time() def disced(self): self.reload_connfig() self.status = {} self.bot_connected = -1 def addchat(self, *args, **kwargs): if ('loudness' in kwargs) == False: kwargs['loudness'] = 1 if kwargs['loudness'] >= self.config['ui']['loudness']: self.events.call('IO', 'addchat', list(args)) def print_text(self, *args): tf = time.strftime('[%I:%M:%S %p] ', time.localtime()) if len(args) > 1: out = '' for x in range(0, len(args), 2): out=out+args[x+1] else: out = args[0] print tf+out def load_spec_config(self, *rest): self.load_config(self.cfg_file) default = {'bnlsserver': 'pyro.no-ip.biz', 'server_file': 'servers.txt', 'username': '', 'password': '', 'cdkey': '', 'expcdkey': '', 'product': 'D2DV', 'server': 'useast.battle.net', 'home': 'Clan BoT'} try: default.update(self.config['login']) except KeyError: pass finally: self.config['login'] = default default = {'loudness': 1} try: default.update(self.config['ui']) except KeyError: pass finally: self.config['ui'] = default self.config['login']['product'] = product_aliases[self.config['login']['product'].lower()] if ('plugins' in self.config) == False: self.config['plugins'] = {} def reload_connfig(self): self.connfig = {} self.connfig.update(self.config) def edit_config(self, settings): config = [{'key': 'username'}, {'key': 'password', 'type': ('text', 0x800)}, {'key': 'product', 'type': ('list', ['Starcraft', 'Starcraft - Broodwar', 'Diablo II', 'Diablo II - Lord of Destruction', 'Warcraft II Battle.net Edition', 'Warcraft III', 'Warcraft III - The Frozen Throne'])}, {'key': 'cdkey', 'caption': 'CD Key'}, {'key': 'expcdkey', 'caption': 'Exp. 
CD Key'}, {'key': 'server', 'type': ('list', servers)}, {'key': 'bnlsserver', 'caption': 'BNLS Server', 'type': ('list', ['pyro.no-ip.biz', 'jbls.org', 'bnls.valhallalegends.com'])}, {'key': 'home'}] settings.insert(0, {'caption': 'Login', 'file': self.cfg_file[1], 'title': 'login', 'dict': self.config['login'], 'settings': config}) def ver(self): return 'JavaBot, Release 4.1' def uptime(self): now = time.time() if self.bot_connected == -1: return 'Bot uptime: ' + time_diff(self.bot_start, now) else: return 'Bot uptime: ' + time_diff(self.bot_start, now) +\ '; connection uptime: ' + time_diff(self.bot_connected, now) def send (self, text='', **kwargs): if text == '': return ref = {'text': text} ref.update(kwargs) self.events.call('IO', 'send', [ref]) def confirm(self, *args): self.events.call('ui', 'confirm', list(args)) def respond(self, cmd, response, output='addchat'): cmd['text']['text'] = response if cmd['output'] == 'send' or output == 'send': self.send(**cmd['text']) elif cmd['output'] == 'whisper': cmd['text']['text'] = '/w ' + cmd['username'] + ' ' + cmd['text']['text'] self.send(**cmd['text']) elif cmd['output'] == 'addchat': self.addchat(response) def display_send(self, msg): if msg['text'][0] != '/': try: self.addchat('me', '<'+self.status['username']+'> ', 'chat', msg['text']) except KeyError: self.addchat('chat', msg['text'])
class Machine(Base): ''' The machine connects the nodes and manages the addition and removal of nodes to the network. ''' def __init__(self, name=None): self.log_color = 'red' self.name = name or random_string(8) self.events = Events() events.e = self.events self.nodes = Nodes() self.register = Register(machine=self, events=self.events) def set_listeners(self): ''' provide machine event listeners ''' self.events.listen('add_conditions') def activate_node(self, node): _n = node.integrate(self) self.nodes.append(_n) self.log('Integrating node', _n) ''' Loop an iterable to add Conditiond into the network for a node ''' # Write node into register. # n=node.get_name() # add weak reference in register. self.register.add_node(_n) _n.react(self) return _n def render_nodes(self, nodes): self.log('Add nodes to network') for node in nodes: n = node # Create the node is the element is not an instance if inspect.isclass(node): n = node() self.activate_node(n) def start(self, nodes=None): if nodes is not None: self.render_nodes(nodes) print '\n--- Run ---\n' self.started(nodes) def started(self, nodes): ''' Override for easy start reference. Passed are the initial nodes provides to the machine. ''' pass def __str__(self): return '%s "%s"' % (self.__class__.__name__, self.name, )
def __init__(self):
    """Give the instance its own event dispatcher."""
    self.events = Events()
#end of processi2c Class def makeWord(a, b): c = a c <<= 8 c += b return c #### Test Code if __name__ == '__main__': try: events = Events( ('System', 'CtrlPress', 'CtrlTurn', 'VolPress', 'ScreenSaving', 'VolTurn', 'VolPress', 'Pause', 'Start', 'Stop')) vb = VolumeBoard(events) print "Init complete : Accessing volume board" print "main> read BinBandwidth", vb.readBinBandwidth() print "main> read TxTime", vb.readTime() print "main> read readNyquist", vb.readNyquist() # print "main> read writeVolume (25 sent)", vb.writeVolume( 25) while 1: # print "main> read Vrms ", vb.readVrms()
class Widget(object):
    """Widget BaseClass.

    Terminal widget base: tracks position/size, an optional parent, a
    shared Store, and a per-widget Events dispatcher exposed through
    dynamically created ``on_*`` attributes.
    """

    def __init__(self, cordx=0, cordy=0, width=20, height=1, **kwargs):
        self._cordx = round(cordx)
        self._cordy = round(cordy)
        self._width = round(width)
        self._height = round(height)
        self._parent = kwargs.get('parent', None)
        self._term = Terminal()
        # BUG FIX: the original passed a plain string containing "{cordx}"
        # etc., so the placeholders were logged literally, never
        # interpolated.  Use an f-string with the stored (rounded) values.
        Debug.log("Create", str(self) +
                  f" (x: {self._cordx}, y: {self._cordy},"
                  f" w: {self._width}, h: {self._height})")
        # Child widgets share their parent's store; roots get their own.
        if self._parent:
            self._store = self._parent.store
        else:
            self._store = kwargs.get('store', Store())
        self.ref = kwargs.get('ref', None)
        self._events = Events(default=[self], wrapper=Widget._debug_events)
        self.on_change += self._on_change
        # Wire up any on_* callbacks supplied as keyword arguments.
        events_init = filter(lambda x: x[0].startswith('on_'), kwargs.items())
        for event_name, callback in events_init:
            evt = self.__getattr__(event_name)
            evt += callback

    def paint(self):
        raise NotImplementedError(
            "All child class of widget need implement _paint method")

    def destroy(self):
        """Blank out this widget's rectangle on screen."""
        line = (' ' * self.width) + '\n'
        lines = line * self.height
        echo(self.term.move(self.y, self.x) + lines)

    def _on_change(self):
        # Default change handler: clear and repaint.
        self.destroy()
        self.paint()

    @property
    def parent(self):
        return self._parent

    @parent.setter
    def parent(self, value):
        self._parent = value

    def __getattr__(self, name):
        # Lazily create and cache event attributes named on_*; anything
        # else that reaches here is a genuine missing attribute.
        if name in self.__dict__:
            return self.__dict__[name]
        elif name.startswith('on_'):
            evt = self._events.__getattr__(name)
            self.__dict__[name] = evt
            return evt
        else:
            raise AttributeError(f'{self} object has no attribute {name}')

    @staticmethod
    def _debug_events(func, *args, **kwargs):
        # Event wrapper: log every handler invocation, then run it.
        Debug.log("Trigger event", (func.__name__, args, kwargs))
        func(*args, **kwargs)

    @property
    def term(self):
        return self._term

    @property
    def store(self):
        return self._store

    @store.setter
    def store(self, value):
        self._store = value

    @property
    def x(self):
        return self._cordx

    @property
    def y(self):
        return self._cordy

    @property
    def height(self):
        """ Height of window. """
        return self._height

    @property
    def width(self):
        """ Width of window. """
        return self._width

    def __str__(self):
        return self.__class__.__name__
import webbrowser

# Python 2 script: reset the default encoding so accented event text can be
# concatenated without UnicodeDecodeError.
reload(sys)
sys.setdefaultencoding('utf-8')

if len(sys.argv) != 3:
    print 'Error passing arguments'
    print 'USAGE: pract.py "(' ')" "[\'peu\',\'transport\']"'
else:
    # argv[1]: event filter expression; argv[2]: transport preference list.
    w = sys.argv[1]
    #e = "['peu','transport','bicing']"
    e = sys.argv[2]
    d = Events()
    print "Filtering events..."
    k = d.getEvents(w)
    if len(k) != 0:
        print "Getting nearest transport..."
        t = Transports(e)
        print "Generating the html file..."
        aux = '<!DOCTYPE html>\n<html>\n<body>\n\n<table style="width:75%" border="1" align="center" cellpadding="10" >\n'
        # Build one table row per event: the event cell plus its nearest
        # transport cell.
        for i in k:
            aux = aux + '    <tr>\n'
            aux = aux + '        <td>' + i.tohtml() + '</td>\n'
            aux = aux + '        <td>' + (t.getTransports(i.lat, i.lon)).tohtml() + '</td>\n'
            # NOTE(review): the closing </table> tag is appended on every
            # iteration; it was probably intended once after the loop --
            # confirm before changing.  Script continues past this chunk.
            aux = aux + '    </tr>\n</table>'
def event_object():
    """Return a freshly constructed Events instance."""
    return Events()
import time

from kinect import Kinect, X, Y, Z
from midi_interface import MidiInterface
from utilities import sprout, scale_constrain
from events import Events

# Musical configuration: presumably MIDI note 60 (middle C) and a beat
# length in seconds -- TODO confirm against the player code.
key = 60
beat = 0.125

# Shared singletons fetched from their factories.
kinect = Kinect.retrieve()
midi = MidiInterface.retrieve()

# Minimum piano note-down / note-up durations (presumably seconds; verify
# against the note scheduler).
min_piano_dur = 0.15
min_piano_up = 0.05

events = Events.retrieve()
def setUp(self):
    """Provide each test with a brand-new Events instance."""
    self.events = Events()
class SyncScrapedDataTask(Task):
    # Task that synchronises scraped data files between the local
    # filesystem and an S3 bucket, in either direction.

    def __init__(self, app):
        super().__init__(app)
        self.sync_direction = None
        self.file_type = None
        self.data_set = None
        self.year = None
        self.spinner = Halo(spinner=get_random_dots_spinner(), color=get_random_cli_color())
        # Observable hooks so a UI layer can track progress.
        self.events = Events(
            (
                "error_occurred",
                "get_s3_objects_start",
                "get_s3_objects_complete",
                "find_out_of_sync_files_start",
                "find_out_of_sync_files_complete",
                "sync_files_start",
                "sync_files_progress",
                "sync_files_complete",
            )
        )

    @property
    def file_helper(self):
        return self.scraped_data.file_helper

    @cached_property
    def all_s3_objects(self):  # pragma: no cover
        # Cached: listing the whole bucket is expensive; done once per task.
        return self.file_helper.get_all_object_keys_in_s3_bucket()

    def execute(self, sync_direction, file_type, data_set, year):
        # Entry point: list S3, diff against local, then copy what differs.
        self.get_all_s3_objects()
        result = self.find_out_of_sync_files(sync_direction, file_type, data_set, year)
        if result.failure:
            self.events.error_occurred("Error occurred analyzing which files need to be synced.")
            return result
        (out_of_sync, missing_files, outdated_files) = result.value
        if not out_of_sync:
            return Result.Ok()
        self.sync_files(sync_direction, missing_files, outdated_files, file_type, data_set, year)
        return Result.Ok()

    def get_all_s3_objects(self):
        self.events.get_s3_objects_start()
        # Bare attribute access deliberately triggers the cached_property.
        self.all_s3_objects
        self.events.get_s3_objects_complete()

    def find_out_of_sync_files(self, sync_direction, file_type, data_set, year):
        """Diff source vs destination; returns (out_of_sync, missing, outdated)."""
        self.events.find_out_of_sync_files_start()
        (s3_objects, local_files) = self.get_all_files_in_src_and_dest(file_type, data_set, year)
        # Nothing on the source side means nothing can be out of sync.
        if (sync_direction == SyncDirection.UP_TO_S3 and not local_files) or (
            sync_direction == SyncDirection.DOWN_TO_LOCAL and not s3_objects
        ):
            sync_results = (False, [], [])
            self.events.find_out_of_sync_files_complete(sync_results)
            return Result.Ok(sync_results)
        src_files = local_files if sync_direction == SyncDirection.UP_TO_S3 else s3_objects
        dest_files = s3_objects if sync_direction == SyncDirection.UP_TO_S3 else local_files
        sync_results = get_files_to_sync(src_files, dest_files)
        self.events.find_out_of_sync_files_complete(sync_results)
        return Result.Ok(sync_results)

    def get_all_files_in_src_and_dest(self, file_type, data_set, year):
        local_files = self.get_local_files(file_type, data_set, year)
        s3_objects = self.get_s3_objects(file_type, data_set, year)
        return (s3_objects, local_files)

    def get_local_files(self, file_type, data_set, year):
        # Local files whose stem matches the data set's URL-id pattern,
        # sorted by name for a stable diff.
        folderpath = self.scraped_data.get_local_folderpath(file_type, data_set, year)
        id_regex = URL_ID_REGEX[file_type][data_set]
        local_files = filter(lambda x: id_regex.search(x.stem), Path(folderpath).glob("*.*"))
        return sorted(map(get_local_file_data, local_files), key=lambda x: x["name"])

    def get_s3_objects(self, file_type, data_set, year):
        # S3 objects in the matching folder, with the right id pattern and
        # extension (.html for scraped pages, .json otherwise).
        folderpath = self.scraped_data.get_s3_folderpath(file_type, data_set, year)
        id_regex = URL_ID_REGEX[file_type][data_set]
        file_suffix = ".html" if file_type == VigFile.SCRAPED_HTML else ".json"
        s3_objects = filter(
            lambda x: folderpath in x.key
            and id_regex.search(Path(x.key).stem)
            and Path(x.key).suffix == file_suffix,
            self.all_s3_objects,
        )
        return sorted(map(get_s3_object_data, s3_objects), key=lambda x: x["name"])

    def sync_files(self, sync_direction, missing_files, outdated_files, file_type, data_set, year):
        """Copy missing + outdated files, emitting progress events."""
        self.file_helper.create_all_folderpaths(year)
        # NOTE(review): bucket_name is set here but not used in this class
        # as shown -- presumably read elsewhere; confirm before removing.
        self.bucket_name = self.config.get_current_setting("S3_BUCKET", data_set)
        # Local name shadows this method's name for the rest of the body.
        sync_files = get_sync_files(missing_files, outdated_files)
        self.events.sync_files_start(sync_files[0]["name"], 0, len(sync_files))
        for num, file in enumerate(sync_files, start=1):
            self.events.sync_files_progress(file["name"], num - 1, len(sync_files))
            (filepath, s3_key) = self.get_local_path_and_s3_key(file, file_type, data_set, year)
            self.send_file(sync_direction, filepath, s3_key)
            self.events.sync_files_progress(file["name"], num, len(sync_files))
        self.events.sync_files_complete()

    def get_local_path_and_s3_key(self, file, file_type, data_set, year):
        local_folder = self.scraped_data.get_local_folderpath(file_type, data_set, year)
        s3_folder = self.scraped_data.get_s3_folderpath(file_type, data_set, year)
        local_path = str(Path(local_folder).joinpath(file["name"]))
        s3_key = f'{s3_folder}/{file["name"]}'
        return (local_path, s3_key)

    def send_file(self, sync_direction, local_path, s3_key):  # pragma: no cover
        # Actual transfer: direction decides upload vs download.
        if sync_direction == SyncDirection.UP_TO_S3:
            self.file_helper.get_s3_bucket().upload_file(local_path, s3_key)
        if sync_direction == SyncDirection.DOWN_TO_LOCAL:
            self.file_helper.get_s3_bucket().download_file(s3_key, local_path)
def __init__(self):
    """Constructor: declare the two events this object may fire."""
    self.events = Events(('on_event1', 'on_event2'))
from users import Users
from docs import Docs

# configuration
DATABASE = 'test.json'

app = flask.Flask(__name__)
app.secret_key = settings.secret_key

#URL rules: add as needed for the various dynamic pages
app.add_url_rule('/', view_func=Main.as_view('main'), methods=('get','post'))
app.add_url_rule('/<page>/', view_func=Main.as_view('main'), methods=('get','post'))
app.add_url_rule('/login/', view_func=Login.as_view('login'), methods=('get','post'))
app.add_url_rule('/remote/', view_func=Remote.as_view('remote'), methods=('get','post'))
app.add_url_rule('/music/', view_func=Music.as_view('music'), methods=('get', 'post'))
app.add_url_rule('/events/', view_func=Events.as_view('events'), methods=('get','post'))
app.add_url_rule('/users/', view_func=Users.as_view('users'), methods=('get','post'))
app.add_url_rule('/docs/', view_func=Docs.as_view('docs'), methods=('get','post'))

#error handling wrapper
@app.errorhandler(404)
def page_not_found(error):
    return flask.render_template('404.html'), 404

#database handling wrappers
@app.before_request
def before_request():
    """Make sure we are connected to the database each request."""
    try:
        db = open(DATABASE).read()
    # Handler body continues past this chunk (IOError branch below).
    except IOError:
def __init__(self, name):
    """Remember the given name and create this object's event dispatcher."""
    self.name = name
    self.anevent = Events()
def test_instance_default(self):
    """Events built with `default` should forward it to fired handlers."""
    evts = Events(default=["Default_1", "Default_2"])
    evts.on_event += self.callback4
    evts.on_event()
class Blockchain:
    """Abstract base class for blockchain backends: holds chain-wide
    constants, the singleton instance, the genesis block, and the GAS
    bonus-calculation logic.  Storage-specific methods are stubs (`pass`)
    to be overridden by a concrete implementation."""

    SECONDS_PER_BLOCK = 15

    DECREMENT_INTERVAL = 2000000

    # GAS generated per block for each successive DECREMENT_INTERVAL of
    # block heights; zero after the table is exhausted.
    GENERATION_AMOUNT = [
        8, 7, 6, 5, 4, 3, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
    ]

    __blockchain = None

    __validators = []

    __genesis_block = None

    __instance = None

    __blockrequests = set()

    _paused = False

    BlockSearchTries = 0

    CACHELIM = 4000
    CMISSLIM = 5
    LOOPTIME = .1

    # Class-level event hooks shared by all instances.
    PersistCompleted = Events()

    Notify = Events()

    @staticmethod
    def StandbyValidators():
        # Lazily decode and cache the standby validator public keys.
        if len(Blockchain.__validators) < 1:
            # NOTE(review): vlist is never used; the loop re-reads settings.
            vlist = settings.STANDBY_VALIDATORS
            for pkey in settings.STANDBY_VALIDATORS:
                Blockchain.__validators.append(ECDSA.decode_secp256r1(pkey).G)

        return Blockchain.__validators

    @staticmethod
    @lru_cache(maxsize=2)
    def SystemShare():
        """
        Register AntShare.

        Returns:
            RegisterTransaction:
        """
        amount = Fixed8.FromDecimal(
            sum(Blockchain.GENERATION_AMOUNT) * Blockchain.DECREMENT_INTERVAL)
        owner = ECDSA.secp256r1().Curve.Infinity
        admin = Crypto.ToScriptHash(PUSHT)
        return RegisterTransaction(
            [], [], AssetType.GoverningToken,
            "[{\"lang\":\"zh-CN\",\"name\":\"小蚁股\"},{\"lang\":\"en\",\"name\":\"AntShare\"}]",
            amount, 0, owner, admin)

    @staticmethod
    @lru_cache(maxsize=2)
    def SystemCoin():
        """
        Register AntCoin

        Returns:
            RegisterTransaction:
        """
        amount = Fixed8.FromDecimal(
            sum(Blockchain.GENERATION_AMOUNT) * Blockchain.DECREMENT_INTERVAL)

        owner = ECDSA.secp256r1().Curve.Infinity
        precision = 8
        admin = Crypto.ToScriptHash(PUSHF)

        return RegisterTransaction(
            [], [], AssetType.UtilityToken,
            "[{\"lang\":\"zh-CN\",\"name\":\"小蚁币\"},{\"lang\":\"en\",\"name\":\"AntCoin\"}]",
            amount, precision, owner, admin)

    @staticmethod
    def GenesisBlock():
        """
        Create the GenesisBlock.

        Returns:
            Block:
        """
        prev_hash = UInt256(data=bytearray(32))
        timestamp = int(
            datetime(2016, 7, 15, 15, 8, 21, tzinfo=pytz.utc).timestamp())
        index = 0
        consensus_data = 2083236893  # Pay tribute To Bitcoin
        next_consensus = Blockchain.GetConsensusAddress(
            Blockchain.StandbyValidators())
        script = Witness(bytearray(0), bytearray(PUSHT))

        mt = MinerTransaction()
        mt.Nonce = 2083236893

        output = TransactionOutput(
            Blockchain.SystemShare().Hash,
            Blockchain.SystemShare().Amount,
            Crypto.ToScriptHash(
                Contract.CreateMultiSigRedeemScript(
                    int(len(Blockchain.StandbyValidators()) / 2) + 1,
                    Blockchain.StandbyValidators())))

        it = IssueTransaction([], [output], [], [script])

        return Block(
            prev_hash, timestamp, index, consensus_data, next_consensus,
            script,
            [mt, Blockchain.SystemShare(), Blockchain.SystemCoin(), it],
            True)

    @staticmethod
    def Default():
        """
        Get the default registered blockchain instance.

        Returns:
            obj: Currently set to `neo.Implementations.Blockchains.LevelDB.LevelDBBlockchain`.
        """
        if Blockchain.__instance is None:
            Blockchain.__instance = Blockchain()
            Blockchain.GenesisBlock().RebuildMerkleRoot()

        return Blockchain.__instance

    # --- Abstract surface to be provided by a concrete backend ---

    @property
    def CurrentBlockHash(self):
        pass

    @property
    def CurrentHeaderHash(self):
        pass

    @property
    def HeaderHeight(self):
        pass

    @property
    def Height(self):
        pass

    @property
    def CurrentBlock(self):
        pass

    def AddBlock(self, block):
        pass

    def AddBlockDirectly(self, block):
        pass

    def AddHeaders(self, headers):
        pass

    @property
    def BlockRequests(self):
        """
        Outstanding block requests.

        Returns:
            set:
        """
        return self.__blockrequests

    def ResetBlockRequests(self):
        self.__blockrequests = set()

    @staticmethod
    def CalculateBonusIgnoreClaimed(inputs, ignore_claimed=True):
        # Collect claimable spent coins for the inputs; already-claimed
        # references are skipped (or raise, if ignore_claimed is False).
        unclaimed = []

        for hash, group in groupby(inputs, lambda x: x.PrevHash):
            claimable = Blockchain.Default().GetUnclaimed(hash)
            if claimable is None or len(claimable) < 1:
                if ignore_claimed:
                    continue
                else:
                    raise Exception(
                        "Error calculating bonus without ignoring claimed")

            for coinref in group:
                if coinref.PrevIndex in claimable:
                    claimed = claimable[coinref.PrevIndex]
                    unclaimed.append(claimed)
                else:
                    if ignore_claimed:
                        continue
                    else:
                        raise Exception(
                            "Error calculating bonus without ignoring claimed")

        return Blockchain.CalculateBonusInternal(unclaimed)

    @staticmethod
    def CalculateBonus(inputs, height_end):
        # Build SpentCoin records for governing-token inputs up to
        # height_end, then run the shared bonus calculation.
        unclaimed = []

        for hash, group in groupby(inputs, lambda x: x.PrevHash):
            tx, height_start = Blockchain.Default().GetTransaction(hash)

            if tx is None:
                raise Exception("Could Not calculate bonus")

            if height_start == height_end:
                continue

            for coinref in group:
                if coinref.PrevIndex >= len(tx.outputs) or tx.outputs[
                        coinref.PrevIndex].AssetId != Blockchain.SystemShare(
                ).Hash:
                    raise Exception("Invalid coin reference")
                spent_coin = SpentCoin(output=tx.outputs[coinref.PrevIndex],
                                       start_height=height_start,
                                       end_height=height_end)
                unclaimed.append(spent_coin)

        return Blockchain.CalculateBonusInternal(unclaimed)

    @staticmethod
    def CalculateBonusInternal(unclaimed):
        # Sum generated GAS plus the system fees accrued over each coin's
        # held height range, weighted by the coin's share amount.
        amount_claimed = Fixed8.Zero()

        decInterval = Blockchain.DECREMENT_INTERVAL
        genAmount = Blockchain.GENERATION_AMOUNT
        genLen = len(genAmount)

        for coinheight, group in groupby(unclaimed, lambda x: x.Heights):
            amount = 0
            ustart = int(coinheight.start / decInterval)

            if ustart < genLen:
                istart = coinheight.start % decInterval
                uend = int(coinheight.end / decInterval)
                iend = coinheight.end % decInterval

                # Heights past the generation table earn nothing.
                if uend >= genLen:
                    iend = 0

                if iend == 0:
                    uend -= 1
                    iend = decInterval

                # Full intervals between start and end.
                while ustart < uend:
                    amount += (decInterval - istart) * genAmount[ustart]
                    ustart += 1
                    istart = 0

                # Partial final interval.
                amount += (iend - istart) * genAmount[ustart]

            # Add the net system fees accumulated across the height range.
            endamount = Blockchain.Default().GetSysFeeAmountByHeight(
                coinheight.end - 1)
            startamount = 0 if coinheight.start == 0 else Blockchain.Default(
            ).GetSysFeeAmountByHeight(coinheight.start - 1)
            amount += endamount - startamount

            outputSum = 0

            for spentcoin in group:
                outputSum += spentcoin.Value.value

            outputSum = outputSum / 100000000
            outputSumFixed8 = Fixed8(int(outputSum * amount))
            amount_claimed += outputSumFixed8

        return amount_claimed

    def OnNotify(self, notification):
        self.Notify.on_change(notification)

    def ContainsBlock(self, hash):
        pass

    def ContainsTransaction(self, hash):
        pass

    def ContainsUnspent(self, hash, index):
        pass

    def Dispose(self):
        pass

    def GetStates(self, prefix, classref):
        pass

    def GetAccountStateByIndex(self, index):
        pass

    def GetAccountState(self, script_hash):
        pass

    def GetAssetState(self, assetId):
        # abstract
        pass

    def SearchAssetState(self, query):
        pass

    def GetHeaderHash(self, height):
        pass

    def GetBlockByHeight(self, height):
        pass

    def GetBlock(self, height_or_hash):
        pass

    def GetBlockByHash(self, hash):
        # abstract
        pass

    def GetBlockHash(self, height):
        # abstract
        pass

    def GetSpentCoins(self, tx_hash):
        pass

    def GetAllSpentCoins(self):
        pass

    def SearchContracts(self, query):
        pass

    def ShowAllContracts(self):
        pass

    def GetContract(self, hash):
        # abstract
        pass

    def GetEnrollments(self):
        # abstract
        pass

    def GetHeader(self, hash):
        # abstract
        pass

    def GetHeaderByHeight(self, height):
        pass

    @staticmethod
    def GetConsensusAddress(validators):
        """
        Get the script hash of the consensus node.

        Args:
            validators (list): of Ellipticcurve.ECPoint's

        Returns:
            UInt160:
        """
        vlen = len(validators)
        script = Contract.CreateMultiSigRedeemScript(
            vlen - int((vlen - 1) / 3), validators)
        return Crypto.ToScriptHash(script)

    def GetValidators(self, others):
        # Unfinished port of the C# validator-election logic (kept below
        # as reference); intentionally raises until implemented.
        votes = Counter([len(vs.PublicKeys)
                         for vs in self.GetVotes(others)]).items()

        # TODO: Sorting here may cost a lot of memory, considering whether to use other mechanisms
        #       votes = GetVotes(others).OrderBy(p => p.PublicKeys.Length).ToArray()
        #       int validators_count = (int)votes.WeightedFilter(0.25, 0.75, p => p.Count.GetData(), (p, w) => new
        #       {
        #           ValidatorsCount = p.PublicKeys.Length,
        #           Weight = w
        #       }).WeightedAverage(p => p.ValidatorsCount, p => p.Weight)
        #       validators_count = Math.Max(validators_count, StandbyValidators.Length)
        #       Dictionary<ECPoint, Fixed8> validators = GetEnrollments().ToDictionary(p => p.PublicKey, p => Fixed8.Zero)
        #       foreach (var vote in votes)
        #       {
        #           foreach (ECPoint pubkey in vote.PublicKeys.Take(validators_count))
        #           {
        #               if (validators.ContainsKey(pubkey))
        #                   validators[pubkey] += vote.Count
        #           }
        #       }
        #       return validators.OrderByDescending(p => p.Value).ThenBy(p => p.Key).Select(p => p.Key).Concat(StandbyValidators).Take(validators_count)
        #   }

        raise NotImplementedError()

    def GetNextBlockHash(self, hash):
        # abstract
        pass

    def GetScript(self, script_hash):
        return self.GetContract(script_hash)

    def GetStorageItem(self, storage_key):
        # abstract
        pass

    def GetSysFeeAmount(self, hash):
        # abstract
        pass

    def GetSysFeeAmountByHeight(self, height):
        """
        Get the system fee for the specified block.

        Args:
            height (int): block height.

        Returns:
            int:
        """
        hash = self.GetBlockHash(height)
        return self.GetSysFeeAmount(hash)

    def GetTransaction(self, hash):
        return None, 0

    def GetUnclaimed(self, hash):
        # abstract
        pass

    def GetUnspent(self, hash, index):
        # abstract
        pass

    def GetAllUnspent(self, hash):
        # abstract
        pass

    def GetVotes(self, transactions):
        # abstract
        pass

    def IsDoubleSpend(self, tx):
        # abstract
        pass

    def OnPersistCompleted(self, block):
        self.PersistCompleted.on_change(block)

    def BlockCacheCount(self):
        pass

    def Pause(self):
        self._paused = True

    def Resume(self):
        self._paused = False

    @staticmethod
    def RegisterBlockchain(blockchain):
        """
        Register the default block chain instance.

        Args:
            blockchain: a blockchain instance. E.g. neo.Implementations.Blockchains.LevelDB.LevelDBBlockchain
        """
        if Blockchain.__instance is None:
            Blockchain.__instance = blockchain

    @staticmethod
    def DeregisterBlockchain():
        """
        Remove the default blockchain instance.
        """
        Blockchain.__instance = None
import collections
import contextdecorator as cd
from events import Events
import logging
import six
import threading

# BUG FIX: the original passed ("send_event") -- parentheses without a
# trailing comma are not a tuple, so Events received a plain string.  It only
# worked by accident because `"send_event" in "send_event"` is a substring
# test.  Pass a real one-element tuple.
events = Events(("send_event",))

modules = {}


def _loggingHandler(name, data):
    """Default send_event listener: log every event on nstack.events."""
    logging.getLogger("nstack.events").info("%s: %r", name, data)


events.send_event += _loggingHandler


def send_event(name, data):
    """Fire the global send_event with the given name and payload."""
    events.send_event(name, data)


class LocalConfig(threading.local):
    """Per-thread stack of configuration dicts; the base frame is {}."""

    def __init__(self):
        self.stack = collections.deque([{}])

    def push(self, config):
        self.stack.appendleft(config)

    def pop(self):
        self.stack.popleft()
# NOTE(review): this chunk begins mid-method -- the enclosing `def` for this
# loop is above this span.  For each checked link, publish its result, then
# signal that the whole check finished.
for link, (name, size, unit) in check_links(max_links).items():
    shared.events.trigger_link_checked(service, link, name, size, unit, plugin_type)
shared.events.trigger_check_completed(service)

    def cancel_check(self):
        """Request cancellation of the in-flight link check."""
        self.cancel_flag = True


if __name__ == "__main__":
    import random
    import time

    import shared
    from events import Events

    logging.basicConfig(level=logging.DEBUG)
    shared.events = Events()

    def check_links(links):
        # Fake checker: sleep a random amount and report it as each
        # link's size in KB.
        result = {}
        m = random.randint(2, 15)
        time.sleep(m)
        for link in links:
            result[link] = (link, m, "KB")
        return result

    l = LinkDispatcher(LINKS, lambda x: (check_links, "PREMIUM", MAX_SINGLE_CHECK))
    time.sleep(10)
    shared.events.trigger_check_cancel()
class TwoRooms(JoinableGame):
    """Discord implementation of Two Rooms and a Boom: tracks players,
    rooms, roles and the round/hostage-exchange loop."""

    def __init__(self, guild, user):
        super().__init__(guild, user)
        self.events = Events()
        self.players = dict()  # dict of discord_user : tworooms.player.Player
        self.state = TwoRoomsState.SETUP
        self.role_tracker = RoleTracker()
        self.rooms = [Room(self), Room(self)]
        self.round = 0
        self.start_channel = None

    # TODO this is so sus just make the available_commands() for each game static right?
    # consider returning only commands valid in this state
    def available_commands(self):
        for command in super().available_commands():
            yield command
        yield GameCommand(
            'begin', TwoRooms.begin_game,
            'once all players and roles have been added begin the game and assign roles'
        )
        yield GameCommand('add-role', TwoRooms.add_role,
                          'add a role to the play set')
        yield GameCommand('remove-role', TwoRooms.remove_role,
                          'remove a role from the play set')
        yield GameCommand('list-roles', TwoRooms.list_roles,
                          'list the available roles')
        yield GameCommand('clear-roles', TwoRooms.clear_roles,
                          'reset the play set and start over')
        yield GameCommand('selected-roles', TwoRooms.selected_roles,
                          'display the roles in the current play set')
        yield GameCommand('room-roles', TwoRooms.set_room_roles,
                          'assigns a channel to a room')
        yield GameCommand(
            'send', TwoRooms.set_sent_hostage,
            'chooses a player to send as a hostage at the end of this round')
        yield GameCommand('test-dm', TwoRooms.send_test_dm,
                          'send a test dm to all players')
        yield GameCommand('abdicate', TwoRooms.abdicate_leader,
                          'as a room leader abdicate to another player')

    async def assign_roles(self):
        """Deal one role from the tracker to each joined user."""
        for user in self.joined_users.values():
            player = Player(user)
            await player.set_role(self.role_tracker.deal_role())
            self.players[user] = player

    async def assign_rooms(self):
        """Randomly split the players between the two rooms (room 0 gets
        the floor of half)."""
        unassigned_players = list()
        for player in self.players.values():
            unassigned_players.append(player)
        num_players = len(unassigned_players)
        room_one_slots = num_players // 2
        while room_one_slots > 0:
            random_player = choice(unassigned_players)
            unassigned_players.remove(random_player)
            await self.rooms[0].add_player(random_player)
            room_one_slots = room_one_slots - 1
        for player in unassigned_players:
            await self.rooms[1].add_player(player)

    async def assign_leaders_randomly(self):
        for room in self.rooms:
            await room.set_leader(choice(room.players))

    def get_room_for_player(self, player):
        for room in self.rooms:
            if player in room.players:
                return room
        return None

    def are_hostages_valid(self, room, mentions):
        """The mentions must be exactly the round's hostage count, all from
        this room, and must not include the room leader."""
        if len(mentions) != self.get_hostages_per_round():
            return False
        for user in mentions:
            player = self.players[user]
            if player is room.leader or player not in room.players:
                return False
        return True

    def get_hostages_per_round(self):
        """Hostages sent per round by player count:
               round      1    2    3
        6-10 players      1    1    1
        11-21 players     2    1    1
        22+ players       3    2    1
        """
        num_players = len(self.players)
        game_size = num_players // 11
        return max(game_size + 2 - self.round, 1)

    async def end_round(self):
        await self.exchange_hostages()
        self.round += 1
        await self.events.fire('on_round_start', self.round)

    async def exchange_hostages(self):
        """Swap each room's chosen hostages, then clear the selections."""
        send_to_two = self.rooms[0].next_sent_hostages
        send_to_one = self.rooms[1].next_sent_hostages
        for user in send_to_two:
            player = self.players[user]
            await self.rooms[0].remove_player(player)
            await self.rooms[1].add_player(player)
        for user in send_to_one:
            player = self.players[user]
            await self.rooms[1].remove_player(player)
            await self.rooms[0].add_player(player)
        self.rooms[0].next_sent_hostages.clear()
        self.rooms[1].next_sent_hostages.clear()

    # COMMAND HANDLERS

    def set_room_roles(self, params, message, client):
        if self.state != TwoRoomsState.SETUP:
            return 'not a valid state to setup channels'
        if message.author.id != self.leader.id:
            return 'only the leader can set the room roles'
        if len(message.role_mentions) != 2:
            # BUG FIX: the original evaluated this string without returning
            # it, then fell through and assigned rooms from missing mentions.
            return 'please mention exactly two roles to set roles'
        self.rooms[0].set_role(message.role_mentions[0])
        self.rooms[1].set_role(message.role_mentions[1])
        return 'roles set successfully. you should double check that these roles each' + \
               ' have at least one text channel that they can see and the other cannot'

    def add_role(self, params, message, client):
        if self.state != TwoRoomsState.SETUP:
            return 'not a valid state to add a role'
        if len(params) == 0:
            return 'provide a role name to add try using `!game list-roles`'
        for param in params:
            if param.lower() not in self.role_tracker.role_factory:
                return f'{param} is not a valid role. use `!game list-roles` to see a list of available roles.'
            self.role_tracker.add_role(param.lower())
        return f'added roles successfully. current play set size: {len(self.role_tracker.unassigned_roles)}'

    def remove_role(self, params, message, client):
        if self.state != TwoRoomsState.SETUP:
            return 'not a valid state to remove a role'
        if len(params) == 0:
            return 'provide a role name to remove try using `!game list-roles` or clear all roles with `!game ' \
                   'clear-roles` '
        for param in params:
            if param.lower() not in self.role_tracker.role_factory:
                return f'{param} is not a valid role. use `!game list-roles` to see a list of available roles.'
            self.role_tracker.remove_role(param.lower())
        return 'removed roles successfully'

    def list_roles(self, params, message, client):
        lines = list()
        for role_name in self.role_tracker.role_factory:
            role = self.role_tracker.role_factory[role_name]()
            lines.append(f'`{role_name}`\n{role.to_string()}')
        return '\n\n'.join(lines)

    def clear_roles(self, params, message, client):
        if self.state != TwoRoomsState.SETUP:
            return 'not a valid state to remove roles'
        self.role_tracker.clear_roles()
        return 'play set cleared'

    def selected_roles(self, params, message, client):
        roles = self.role_tracker.get_selected_role_names()
        return 'selected roles:\n' + roles

    async def begin_game(self, params, message, client):
        """Validate setup, deal roles, split rooms and start round 1."""
        if self.state != TwoRoomsState.SETUP:
            return 'not a valid state to begin a game'
        player_id = message.author.id
        num_players = len(self.joined_users)
        # NOTE(review): the rulebook minimum is 6 players; `< 1` looks like
        # a debug threshold -- confirm before tightening.
        if num_players < 1:
            return 'you need more people to play'
        if not self.role_tracker.roles_are_valid(num_players):
            return "the selected roles are not valid sorry," + \
                   " you'll have to figure out why on your own. selected roles:\n" + \
                   self.role_tracker.get_selected_role_names()
        if not (self.rooms[0].discord_role and self.rooms[1].discord_role):
            return 'you need to set a role for each room. use `!game room-roles @role1 @role2`'
        if player_id != self.leader.id:
            return 'only the leader can start the game'
        try:
            self.start_channel = message.channel
            await self.assign_roles()
            await self.assign_rooms()
            self.state = TwoRoomsState.PLAYING
            self.round = 1
            await self.assign_leaders_randomly()
            self.events.register('on_hostages_set', self.on_hostages_set_event)
            self.events.register('on_round_start', self.on_round_start_event)
            await self.events.fire('on_round_start', 1)
        except Forbidden as e:
            return f'the bot is missing the permissions it needs message: {e}'
        except Exception as e:
            return f'something went wrong starting the game: {e}'

    async def set_sent_hostage(self, params, message, client):
        if self.state != TwoRoomsState.PLAYING:
            return 'game needs to be started to designate a hostage'
        player = self.players[message.author]
        room = self.get_room_for_player(player)
        if room is None or room.leader != player:
            return 'you are not the room leader'
        if not self.are_hostages_valid(room, message.mentions):
            return f'invalid hostages. you need to send {self.get_hostages_per_round()} and the leader can\'t send ' \
                   f'themself '
        await room.set_next_hostages(message.mentions)
        return 'hostages set. they will be sent when the other room is ready. you can still change them in the ' \
               'meantime. '

    async def send_test_dm(self, params, message, client):
        msg = list()
        if not self.joined_users:
            return 'nobody has joined yet'
        for user in self.joined_users.values():
            try:
                if user.dm_channel:
                    dm = user.dm_channel
                else:
                    dm = await user.create_dm()
                await dm.send('this is a test dm')
            except Exception as e:
                msg.append(f'failed to dm {user.mention}')
        if not msg:
            return 'sent DMs'
        return '\n'.join(msg)

    async def abdicate_leader(self, params, message, client):
        if self.state != TwoRoomsState.PLAYING:
            return 'game needs to be started to use this command'
        player = self.players[message.author]
        room = self.get_room_for_player(player)
        if room.leader is not player:
            return 'you need to be the leader to abdicate'
        if len(message.mentions) != 1:
            return 'please mention exactly one player'
        if message.mentions[0] not in self.players:
            return 'could not find that user in the game'
        new_leader = self.players[message.mentions[0]]
        if room is not self.get_room_for_player(new_leader):
            return 'that player is not in your room'
        await room.set_leader(new_leader)

    # EVENT HANDLERS

    async def on_hostages_set_event(self, room):
        # Once both rooms have chosen hostages, perform the exchange.
        if self.rooms[0].next_sent_hostages and self.rooms[1].next_sent_hostages:
            await self.end_round()

    async def on_round_start_event(self, round_number):
        if round_number < 4:
            return
        msg = [
            'game has finished', 'room 1:', self.rooms[0].get_room_status(),
            'room 2:', self.rooms[1].get_room_status()
        ]
        # BUG FIX: Messageable.send is a coroutine; the original call was
        # not awaited, so the end-of-game summary was never sent.
        await self.start_channel.send('\n'.join(msg))
class Periphery:
    """Serial link to the hardware controller.

    Each poll reads one comma-separated line of the form
    ``button1,button2,button3,rotary,angleX,angleY,angleZ`` and fires the
    matching events on ``periphery_events``; also drives the RGB LED.
    """

    def __init__(self):
        # SERIAL_PORT env var overrides the default device path.
        port = os.getenv("SERIAL_PORT", "/dev/ttyUSB0")
        self.ser = serial.Serial(port, baudrate=9600, timeout=1)
        self.periphery_events = Events()
        if not self.ser.is_open:
            self.ser.open()
        # debounced button states: True while the button is held down
        self.button1Pressed = False
        self.button2Pressed = False
        self.button3Pressed = False
        # latest IMU angles, rounded to whole degrees
        self.angleX = 0
        self.angleY = 0
        self.angleZ = 0
        self.led_color: pygame.Color = pygame.Color(0, 0, 0)
        self.led_animation = None
        # rotary encoder bookkeeping: direction 1 = ccw, 2 = cw
        self.rotary_last_direction = 0
        self.rotary_step_count = 0

    def close(self):
        """Release the serial port."""
        self.ser.close()

    def set_led(self, color: pygame.Color):
        """Stop any running animation and set the LED to a solid color."""
        self.stop_led_animation()
        self.led_color = color
        self.ser.write(
            bytes([self.led_color.r, self.led_color.g, self.led_color.b]))

    def stop_led_animation(self):
        """Cancel the current LED animation, if any."""
        self.led_animation = None

    def set_led_animation(self, properties, duration):
        """Start animating the LED from its current color."""
        self.led_animation = ValueAnimation(self.led_color, properties,
                                            duration)

    def _update_rotary(self, direction, big_event, step_event):
        """Advance rotary state one step in `direction` (1 = ccw, 2 = cw).

        Fires `step_event` on every step, and `big_event` on every third
        consecutive step in the same direction.
        """
        if self.rotary_last_direction == direction:
            self.rotary_step_count += 1
            if self.rotary_step_count == 3:
                big_event()
                self.rotary_step_count = 0
        else:
            # direction changed: this step counts as the second of a new run
            self.rotary_step_count = 2
        self.rotary_last_direction = direction
        step_event()

    def run(self):
        """Poll one line from the controller and dispatch events."""
        # was ser.isOpen() — deprecated pyserial alias; is_open matches __init__
        if not self.ser.is_open:
            return
        line = self.ser.readline()
        try:
            data = line.decode().split(",")
            # buttons: fire events only on state transitions (edge detection)
            if data[0] == "1" and not self.button1Pressed:
                self.button1Pressed = True
                self.periphery_events.button1_pressed()
            if data[0] == "0" and self.button1Pressed:
                self.button1Pressed = False
                self.periphery_events.button1_released()
            if data[1] == "1" and not self.button2Pressed:
                self.button2Pressed = True
                self.periphery_events.button2_pressed()
            if data[1] == "0" and self.button2Pressed:
                self.button2Pressed = False
                self.periphery_events.button2_released()
            if data[2] == "1" and not self.button3Pressed:
                self.button3Pressed = True
                self.periphery_events.button3_pressed()
            if data[2] == "0" and self.button3Pressed:
                self.button3Pressed = False
                self.periphery_events.button3_released()
            if data[3] == "1":
                self._update_rotary(1, self.periphery_events.rotary_big_ccw,
                                    self.periphery_events.rotary_ccw)
            if data[3] == "2":
                self._update_rotary(2, self.periphery_events.rotary_big_cw,
                                    self.periphery_events.rotary_cw)
            self.angleX = round(float(data[4]))
            self.angleY = round(float(data[5]))
            self.angleZ = round(float(data[6]))
        except (UnicodeDecodeError, IndexError, ValueError):
            # garbled or partial serial line: skip this poll.
            # ValueError added so a corrupt angle field can't crash the loop.
            return
        if self.led_animation is not None:
            self.led_color = self.led_animation.run()
            # the animation may yield any indexable color value, so use
            # indexing here rather than the .r/.g/.b attributes
            self.ser.write(
                bytes([self.led_color[0], self.led_color[1],
                       self.led_color[2]]))