def get_order_book_worker(self, instmt):
    """
    Order book polling worker.

    Registers this worker in the shared connection count, then polls the
    REST socket forever, inserting a snapshot whenever the depth changes.

    :param instmt: Instrument
    """
    ExchGwQuoine.num_of_connections_lock.acquire()
    ExchGwQuoine.num_of_connections += 1
    Logger.info(self.__class__.__name__,
                "Current number of connections = %d" % ExchGwQuoine.num_of_connections)
    ExchGwQuoine.num_of_connections_lock.release()

    instmt.set_order_book_id(self.get_order_book_init(instmt))

    while True:
        try:
            l2_depth = self.api_socket.get_order_book(instmt)
            if l2_depth is not None and l2_depth.is_diff(instmt.get_l2_depth()):
                instmt.set_prev_l2_depth(instmt.get_l2_depth())
                instmt.set_l2_depth(l2_depth)
                instmt.incr_order_book_id()
                self.insert_order_book(instmt)
        except Exception as e:
            Logger.error(self.__class__.__name__, "Error in order book: %s" % e)

        # Fixed: read the shared counter under the lock but sleep OUTSIDE it.
        # The original slept while holding the lock, stalling every other
        # worker that throttles on the same connection count.
        ExchGwQuoine.num_of_connections_lock.acquire()
        wait_sec = ExchGwQuoine.num_of_connections + ExchGwQuoine.extra_waiting_sec
        ExchGwQuoine.num_of_connections_lock.release()
        time.sleep(wait_sec)
def on_open_handler(self, instmt, ws):
    """
    Socket on open handler: build the OkCoin channel ids for the
    instrument and send the order book / trade subscriptions once.

    :param instmt: Instrument
    :param ws: Web socket
    """
    Logger.info(self.__class__.__name__, "Instrument %s is subscribed in channel %s" % \
                (instmt.get_instmt_code(), instmt.get_exchange_name()))
    if instmt.get_subscribed():
        return

    code_parts = instmt.get_instmt_code().split('_')
    if len(code_parts) == 3:
        # Future instruments
        instmt.set_order_book_channel_id("ok_sub_%s_%s_depth_%s_20" % \
                                         (code_parts[0], code_parts[1], code_parts[2]))
        instmt.set_trades_channel_id("ok_sub_%s_%s_trade_%s" % \
                                     (code_parts[0], code_parts[1], code_parts[2]))
    else:
        # Spot instruments
        instmt.set_order_book_channel_id("ok_sub_%s_depth_20" % instmt.get_instmt_code())
        instmt.set_trades_channel_id("ok_sub_%s_trades" % instmt.get_instmt_code())

    ws.send(self.api_socket.get_order_book_subscription_string(instmt))
    ws.send(self.api_socket.get_trades_subscription_string(instmt))
    instmt.set_subscribed(True)
def __on_open(self, ws):
    """Internal open callback: mark the socket live and fan out to handlers."""
    Logger.info(self.__class__.__name__, "Socket <%s> is opened." % self.id)
    self._connected = True
    for open_handler in self.on_open_handlers:
        open_handler(ws)
def connect(self, url,
            on_message_handler=None,
            on_open_handler=None,
            on_close_handler=None,
            on_error_handler=None):
    """
    Register the optional callbacks and start the socket worker thread.

    :param url: Url link
    :param on_message_handler: Message handler which take the message as the first argument
    :param on_open_handler: Socket open handler which take the socket as the first argument
    :param on_close_handler: Socket close handler which take the socket as the first argument
    :param on_error_handler: Socket error handler which take the socket as the first argument
                             and the error as the second argument
    :return: The worker thread.
    """
    Logger.info(self.__class__.__name__, "Connecting to socket <%s>..." % self.id)
    # Append each supplied handler to its registry.
    registrations = ((on_message_handler, self.on_message_handlers),
                     (on_open_handler, self.on_open_handlers),
                     (on_close_handler, self.on_close_handlers),
                     (on_error_handler, self.on_error_handlers))
    for handler, registry in registrations:
        if handler is not None:
            registry.append(handler)
    if not self._connected:
        self.wst = threading.Thread(target=lambda: self.run(url, 80))
        self.wst.start()
    return self.wst
def create(self, table, columns, types, primary_key_index=(), is_ifnotexists=True):
    """
    Create table in the database (here: a CSV file with a header row).

    :param table: Table name; written as "<table>.csv" under self.file_directory.
    :param columns: Column array; any type suffix after a space is dropped.
    :param types: Type array (only its length is validated here).
    :param primary_key_index: Unused for CSV storage; kept for interface
                              compatibility with the other clients.
    :param is_ifnotexists: Unused; an existing file is never overwritten.
    :return: True on success, False if columns/types lengths differ.
    """
    file_path = os.path.join(self.file_directory, table + ".csv")
    columns = [e.split(' ')[0] for e in columns]
    if len(columns) != len(types):
        return False

    self.lock.acquire()
    try:
        if os.path.isfile(file_path):
            Logger.info(self.__class__.__name__,
                        "File (%s) has been created already." % file_path)
        else:
            # Write the quoted header row for a brand-new table file.
            with open(file_path, 'w+') as csvfile:
                csvfile.write(','.join(["\"" + e + "\"" for e in columns]) + '\n')
    finally:
        # Fixed: the original released the lock outside try/finally, so an
        # I/O error would leave it held forever and deadlock later calls.
        self.lock.release()
    return True
def start(dog_id, level):
    """
    Run the requested Phase 2 level and report the outcome.

    :param dog_id: Identifier of the dog under training.
    :param level: Level number, 1-4 inclusive.
    """
    # Dispatch table replaces the original four copy-pasted if/elif branches.
    runners = {1: level_one, 2: level_two, 3: level_three, 4: level_four}
    runner = runners.get(level)
    if runner is None:
        Logger.error("You have entered an invalid level for Phase 2.")
        return
    Logger.info("Phase Two: Starting")
    if runner(dog_id):
        Logger.info("Phase Two: Complete")
    else:
        Logger.warning("Phase Two: Failed")
def on_message_handler(self, instmt, message):
    """
    Incoming message handler for the Bitfinex websocket.

    Dict messages are control/handshake frames (version info, subscription
    acks); list messages are data frames whose first element is the
    channel id previously assigned at subscription time.

    :param instmt: Instrument
    :param message: Message (decoded JSON: dict or list)
    """
    if isinstance(message, dict):
        keys = message.keys()
        if 'event' in keys and message['event'] == 'info' and 'version' in keys:
            # Server greeting with protocol version.
            Logger.info(self.__class__.__name__, "Bitfinex version: %s" % message['version'])
        elif 'event' in keys and message['event'] == 'subscribed':
            # Subscription ack: remember which channel id maps to which feed.
            if instmt.get_instmt_code() == message['pair']:
                if message['channel'] == 'book':
                    instmt.set_order_book_channel_id(message['chanId'])
                elif message['channel'] == 'trades':
                    instmt.set_trades_channel_id(message['chanId'])
                else:
                    raise Exception("Unknown channel %s : <%s>" % (message['channel'], message))
                Logger.info(self.__class__.__name__, 'Subscription: %s, pair: %s, channel Id: %s' % \
                            (message['channel'], instmt.get_instmt_code(), message['chanId']))
    elif isinstance(message, list):
        if message[0] == instmt.get_order_book_channel_id():
            if isinstance(message[1], list):
                # Snapshot frame: full book in message[1].
                self.api_socket.parse_l2_depth(instmt, message[1])
            elif len(message) != 2:
                # Incremental update frame; 2-element frames (heartbeats)
                # fall through to the early return below.
                instmt.set_prev_l2_depth(instmt.get_l2_depth().copy())
                self.api_socket.parse_l2_depth(instmt, message)
            else:
                return
            # Insert only when the parsed book actually changed.
            if instmt.get_l2_depth().is_diff(instmt.get_prev_l2_depth()):
                instmt.incr_order_book_id()
                self.insert_order_book(instmt)
        elif message[0] == instmt.get_trades_channel_id():
            # No recovery trade
            # if isinstance(message[1], list):
            #     raw_trades = message[1]
            #     raw_trades.sort(key=lambda x:x[0])
            #     for raw in raw_trades:
            #         trade = self.api_socket.parse_trade(instmt, raw)
            #         try:
            #             if int(trade.trade_id) > int(instmt.get_exch_trade_id()):
            #                 instmt.incr_trade_id()
            #                 instmt.set_exch_trade_id(trade.trade_id)
            #                 self.insert_trade(instmt, trade)
            #         except Exception as e:
            #             Logger.info('test', "trade.trade_id(%s):%s" % (type(trade.trade_id), trade.trade_id))
            #             Logger.info('test', "instmt.get_exch_trade_id()(%s):%s" % (type(instmt.get_exch_trade_id()), instmt.get_exch_trade_id()))
            #             raise e
            # 'tu' frames are confirmed trade updates; only insert trades
            # with an exchange id newer than the last one seen.
            if message[1] == 'tu':
                trade = self.api_socket.parse_trade(instmt, message[3:])
                if int(trade.trade_id) > int(instmt.get_exch_trade_id()):
                    instmt.incr_trade_id()
                    instmt.set_exch_trade_id(trade.trade_id)
                    self.insert_trade(instmt, trade)
def __on_close(self, ws):
    """Internal close callback: clear connection flags and fan out to handlers."""
    Logger.info(self.__class__.__name__, "Socket <%s> is closed." % self.id)
    self._connecting = False
    self._connected = False
    for close_handler in self.on_close_handlers:
        close_handler(ws)
def create(self, table, columns, types, primary_key_index=(), is_ifnotexists=True):
    """
    Create table in the kdb+ database.
    Caveat - Assign the first few column as the keys!!!

    :param table: Table name
    :param columns: Column array
    :param types: Type array
    :param primary_key_index: Indices of key columns in `columns`
                              (default changed from a mutable [] to a tuple;
                              the argument is only iterated, never mutated).
    :param is_ifnotexists: Create table only if it does not already exist
    :return: True (also when the table already existed).
    """
    if len(columns) != len(types):
        raise Exception("Incorrect create statement. Number of columns and that of types are different.\n%s\n%s" % \
                        (columns, types))

    if is_ifnotexists:
        # "\\v" lists variables in the q session; a matching name means
        # the table has already been created.
        ret = self.conn("\\v")
        if ret is not None:
            for t in ret:
                if table == self.decode_qtypes(t):
                    Logger.info(self.__class__.__name__, "Table %s has been created." % table)
                    return True
        Logger.info(self.__class__.__name__, "Table %s is going to be created." % table)

    # Append the q type suffix to each column declaration.
    c = columns[:]
    for i in range(0, len(types)):
        t = self.convert_type(types[i])
        if t is str:
            if columns[i].find('date_time') > -1:
                c[i] += ":`timestamp$()"
            else:
                c[i] += ":`symbol$()"
        elif t is float:
            c[i] += ":`float$()"
        elif t is int:
            c[i] += ":`int$()"

    # Pull the key columns out of the value columns (delete from the back
    # so earlier indices stay valid).
    keys = []
    for i in primary_key_index:
        keys.append(c[i])
    for i in sorted(primary_key_index, reverse=True):
        del c[i]

    if len(keys) > 0:
        command = '%s:([%s] %s)' % (table, '; '.join(keys), '; '.join(c))
    else:
        command = '%s:(%s)' % (table, '; '.join(c))

    self.lock.acquire()
    try:
        self.conn.sync(command)
    except Exception as e:
        # Typo fixed in the log message ("creat" -> "create").
        Logger.error(self.__class__.__name__, "Error in create statement(%s).\n%s" % (command, e))
    finally:
        self.lock.release()
    return True
def start(dog_id, level):
    """Run Phase One; only level 1 exists."""
    if level != 1:
        Logger.error("You have entered an invalid level for Phase One.")
        return
    Logger.info("Phase One: Starting")
    passed = level_one(dog_id)
    if passed:
        Logger.info("Phase One: Complete")
    else:
        Logger.warning("Phase One: Failed")
def on_close_handler(self, instmt, ws):
    """
    Socket on close handler: log the disconnect and clear the
    subscription flag so the next open re-subscribes.

    :param instmt: Instrument
    :param ws: Web socket
    """
    code = instmt.get_instmt_code()
    exchange = instmt.get_exchange_name()
    Logger.info(self.__class__.__name__,
                "Instrument %s is unsubscribed in channel %s" % (code, exchange))
    instmt.set_subscribed(False)
def connect(self, **kwargs):
    """
    Connect the ZeroMQ socket.

    NOTE(review): despite the method name, this BINDS the socket to the
    address (server side) rather than connecting to it — confirm this is
    the intended role for this client.

    :param addr: ZeroMQ endpoint address, e.g. "tcp://127.0.0.1:5555".
                 (The original docstring wrongly described a sqlite path.)
    :return: True if the underlying connection object exists.
    """
    addr = kwargs['addr']
    Logger.info(self.__class__.__name__, 'Zmq client is connecting to %s' % addr)
    self.conn.bind(addr)
    return self.conn is not None
def on_open_handler(self, instmt, ws):
    """
    Socket on open handler: send the trade subscription exactly once.

    :param instmt: Instrument
    :param ws: Web socket
    """
    Logger.info(self.__class__.__name__,
                "Instrument %s is subscribed in channel %s" % \
                (instmt.get_instmt_code(), instmt.get_exchange_name()))
    if instmt.get_subscribed():
        return
    ws.send(self.api_socket.get_trades_subscription_string(instmt))
    instmt.set_subscribed(True)
def level_three(dog_id):
    """Phase Two, level 3: schedule treats, save progress, then run level 4."""
    phase, lvl = 2, 3
    Logger.info("Phase Two: Level 3 - Starting")
    Logger.data(dog_id, phase, lvl, "starting")
    scheduler.schedule(8, 64, dispense_treat)
    if not Config.RUN_FLAG:
        Logger.data(dog_id, phase, lvl, "cancelled")
        return False
    FileIOUtil.save(dog_id, phase, lvl)
    Logger.info("Phase Two: Level 3 - Complete")
    Logger.data(dog_id, phase, lvl, "complete")
    return level_four(dog_id)
def level_four(dog_id):
    """
    Phase Two, level 4: schedule treats on a 12/60 schedule and finish the phase.

    :param dog_id: Identifier of the dog under training.
    :return: True when the level completes, False when cancelled.
    """
    Logger.info("Phase Two: Level 4 - Starting")
    # Fixed: the original logged phase 3 for "starting"/"complete" but
    # phase 2 for "cancelled" and for FileIOUtil.save — this function is
    # Phase Two throughout (matching level_three and its own messages).
    Logger.data(dog_id, 2, 4, "starting")
    scheduler.schedule(12, 60, dispense_treat)
    if not Config.RUN_FLAG:
        Logger.data(dog_id, 2, 4, "cancelled")
        return False
    FileIOUtil.save(dog_id, 2, 4)
    Logger.info("Phase Two: Level 4 - Complete")
    Logger.data(dog_id, 2, 4, "complete")
    return True
def level_one(dog_id):
    """Phase One, level 1: schedule treats and save progress on success."""
    phase, lvl = 1, 1
    Logger.info("Phase One: Level 1 - Starting")
    Logger.data(dog_id, phase, lvl, "starting")
    scheduler.schedule(5, 150, dispense_treat)
    if not Config.RUN_FLAG:
        Logger.data(dog_id, phase, lvl, "cancelled")
        return False
    FileIOUtil.save(dog_id, phase, lvl)
    Logger.info("Phase One: Level 1 - Complete")
    Logger.data(dog_id, phase, lvl, "complete")
    return True
def phase_three(dog_id, level, quiet_length, fail_max, treat_frequency_min, treat_frequency_max=-1):
    """
    Run a Phase 3 "quiet challenge": the dog must stay quiet for
    quiet_length consecutive seconds before barking fail_max times.

    :param dog_id: Identifier of the dog under training.
    :param level: Level number, used only for data logging.
    :param quiet_length: Seconds of continuous quiet required to pass.
    :param fail_max: Number of barks tolerated before the challenge fails.
    :param treat_frequency_min: Minimum seconds between treats.
    :param treat_frequency_max: Maximum seconds between treats; -1 (default)
                                means a fixed interval of treat_frequency_min.
    :return: True if quiet_length was reached, False on failure or cancellation.
    """
    if treat_frequency_max == -1:
        treat_frequency_max = treat_frequency_min
    quiet_timer = 0
    treat_timer = 0
    treat_dispense_time = 0
    fail_count = 0
    # Reset quiet status
    NoiseUtil.reset_bark_status()
    while quiet_timer < quiet_length and fail_count < fail_max:
        # Wait 1 second
        time.sleep(1)
        if not Config.RUN_FLAG:
            Logger.data(dog_id, 3, level, "cancelled")
            return False
        if treat_timer == 0:
            # A treat was just dispensed (or we just started): pick the
            # next random dispense time within the configured window.
            treat_dispense_time = random.randint(treat_frequency_min, treat_frequency_max)
        if NoiseUtil.has_dog_barked:
            # NOTE(review): has_dog_barked is read as an attribute, not
            # called — confirm it is a flag/property rather than a method.
            Logger.info("Dog just barked, incrementing fail count.")
            Logger.data(dog_id, 3, level, "dog_bark")
            fail_count += 1
            quiet_timer = 0  # a bark resets the continuous-quiet clock
            NoiseUtil.reset_bark_status()
            continue
        else:
            quiet_timer += 1
            treat_timer += 1
        # Check if a treat should be dispensed
        if treat_timer == treat_dispense_time:
            PetTutorUtil.dispense_treat()
            treat_timer = 0
            fail_count = 0  # a completed treat interval forgives earlier barks
    if quiet_timer >= quiet_length:
        # Dog passed the Challenge.
        return True
    if fail_count >= fail_max:
        # Dog has failed the challenge
        return False
    Logger.error("[Quiet Challenge] CODE SHOULD NEVER GET HERE!")
def start(dog_id, level):
    """Phase Five entry point: standing levels ('1','2') or down levels ('#D')."""
    valid_levels = ('1', '2', '1D', '2D', '3D', '4D', '5D', '6D', '7D', '8D', '9D')
    lvl = str(level)
    if lvl not in valid_levels:
        Logger.error("You have entered an invalid level for Phase Five.")
        return
    Logger.info("Phase Five: Starting.")
    # Start mic recording
    NoiseUtil.record_bark_status()
    MoveUtil.record_down_status()
    if lvl == '1':
        dog_standing(dog_id, 1)
    elif lvl == '2':
        dog_standing(dog_id, 2)
    else:
        dog_down(dog_id, level)
def get_trades_worker(self, instmt):
    """
    Trade polling worker thread.

    Registers itself in the shared connection count, restores the last
    trade ids, then polls the REST socket forever and inserts new trades.

    :param instmt: Instrument name
    """
    ExchGwQuoine.num_of_connections_lock.acquire()
    ExchGwQuoine.num_of_connections += 1
    Logger.info(self.__class__.__name__,
                "Current number of connections = %d" % ExchGwQuoine.num_of_connections)
    ExchGwQuoine.num_of_connections_lock.release()

    trade_id, exch_trade_id = self.get_trades_init(instmt)
    instmt.set_trade_id(trade_id)
    instmt.set_exch_trade_id(exch_trade_id)

    while True:
        try:
            ret = self.api_socket.get_trades(instmt)
            if ret is None or len(ret) == 0:
                time.sleep(1)
                continue
        except Exception as e:
            Logger.error(self.__class__.__name__, "Error in trades: %s" % e)
            # Fixed: the original fell through to the processing loop with
            # `ret` undefined (NameError on the first poll) or stale
            # (re-processing old trades) after a failed poll.
            time.sleep(1)
            continue

        for trade in ret:
            assert isinstance(trade.trade_id, str), \
                "trade.trade_id(%s) = %s" % (type(trade.trade_id), trade.trade_id)
            assert isinstance(instmt.get_exch_trade_id(), str), \
                "instmt.get_exch_trade_id()(%s) = %s" % (type(instmt.get_exch_trade_id()),
                                                         instmt.get_exch_trade_id())
            if int(trade.trade_id) > int(instmt.get_exch_trade_id()):
                instmt.set_exch_trade_id(trade.trade_id)
                instmt.incr_trade_id()
                self.insert_trade(instmt, trade)

        # After the first time of getting the trade, indicate the instrument
        # is recovered
        if not instmt.get_recovered():
            instmt.set_recovered(True)

        # Fixed: read the shared count under the lock but sleep OUTSIDE it,
        # so other workers are not blocked while this one waits.
        ExchGwQuoine.num_of_connections_lock.acquire()
        wait_sec = ExchGwQuoine.num_of_connections + ExchGwQuoine.extra_waiting_sec
        ExchGwQuoine.num_of_connections_lock.release()
        time.sleep(wait_sec)
class Portfolio:
    """Mean-variance portfolio optimizer solved with cvxpy.

    Maximizes expected return minus risk_thres times portfolio variance,
    subject to long-only weights that sum to 1 and an L1 turnover limit of
    adjust_thres relative to the original weights org_w.

    NOTE(review): this uses the pre-1.0 cvxpy API — Parameter(sign=...),
    sum_entries, and strict '<' constraints were all removed in cvxpy 1.0.
    Confirm the pinned cvxpy version, or port to the 1.x API.
    """

    def __init__(self, org_w, asset_num, assets_return, assets_covariance,
                 adjust_thres=0.3, risk_thres=1):
        # Factor model portfolio optimization.
        self._asset_num = asset_num                            # number of assets N
        self._assets_return = np.array(assets_return)          # expected returns — assumed shape (N,); TODO confirm
        self._assets_covariance = np.array(assets_covariance)  # covariance — assumed shape (N, N); TODO confirm
        self._logger = Logger()
        # control_thres = (sum(assets_return) / np.ones((1, asset_num))
        #                  .dot(np.array(assets_covariance))
        #                  .dot(np.ones((asset_num, 1))) * asset_num)[0][0]
        # abs(control_thres).value * control_coef
        self._logger.info(
            module='portfolio',
            file='portfolio.py',
            content=
            "Doing portfolio optimization with params: adjust_thres is %f, risk_thres is %f"
            % (adjust_thres, risk_thres))
        w = Variable(self._asset_num)       # decision variable: the new weights
        w_old = np.array(org_w)             # current weights: anchor for the turnover limit
        gamma = Parameter(sign='positive')  # risk-aversion coefficient
        gamma.value = risk_thres
        ret = self._assets_return * w       # expected portfolio return
        risk = quad_form(w, self._assets_covariance)  # portfolio variance w' S w
        prob_factor = Problem(
            Maximize(ret - gamma * risk),
            [w >= 0, sum_entries(w) == 1, norm(w - w_old, 1) < adjust_thres])
        prob_factor.solve(verbose=True)
        # Flatten the (N, 1) solution matrix into a plain Python list.
        self._asset_weight = w.value.T.tolist()[0]

    @property
    def asset_weight(self):
        # Optimized weights as a list of floats.
        return self._asset_weight
def connect(self, url,
            on_message_handler=None,
            on_open_handler=None,
            on_close_handler=None,
            on_error_handler=None,
            reconnect_interval=10):
    """
    Register the optional callbacks and spin up the websocket client thread.

    :param url: Url link
    :param on_message_handler: Message handler which take the message as the first argument
    :param on_open_handler: Socket open handler which take the socket as the first argument
    :param on_close_handler: Socket close handler which take the socket as the first argument
    :param on_error_handler: Socket error handler which take the socket as the first argument
                             and the error as the second argument
    :param reconnect_interval: The time interval for reconnection
    :return: The worker thread.
    """
    Logger.info(self.__class__.__name__, "Connecting to socket <%s>..." % self.id)
    registrations = ((on_message_handler, self.on_message_handlers),
                     (on_open_handler, self.on_open_handlers),
                     (on_close_handler, self.on_close_handlers),
                     (on_error_handler, self.on_error_handlers))
    for handler, registry in registrations:
        if handler is not None:
            registry.append(handler)
    if not self._connecting and not self._connected:
        self._connecting = True
        self.ws = websocket.WebSocketApp(url,
                                         on_message=self.__on_message,
                                         on_close=self.__on_close,
                                         on_open=self.__on_open,
                                         on_error=self.__on_error)
        self.wst = threading.Thread(
            target=lambda: self.__start(reconnect_interval=reconnect_interval))
        self.wst.start()
    return self.wst
def __write_to_csv(entidades: List[CSVEntity], path: str, mode: str):
    """
    Save a list of :class:`CSVEntity` to a file in CSV format.

    :param entidades: Entities to be saved (each provides as_row()).
    :type entidades: List[CSVEntity]
    :param path: Absolute path of the '.csv' file where the entities are saved.
    :type path: str
    :param mode: File-open mode (e.g. 'w' or 'a').
    :type mode: str
    """
    Logger.info(f'Salvando entidades no arquivo: {path}')
    # Fixed: newline='' is required when handing a text file to csv.writer —
    # without it the writer's own '\r\n' terminators are translated again on
    # Windows, producing a blank row after every record.
    with open(path, mode, newline='') as file:
        writer = csv.writer(file)
        writer.writerows(entidade.as_row() for entidade in entidades)
def on_message_handler(self, instmt, message):
    """
    Incoming message handler for the BitMEX websocket.

    :param instmt: Instrument
    :param message: Message (decoded JSON payload, a dict)
    """
    keys = message.keys()
    if 'info' in keys:
        # Greeting frame sent when the connection is established.
        Logger.info(self.__class__.__name__, message['info'])
    elif 'subscribe' in keys:
        # Subscription ack/nack frame.
        Logger.info(self.__class__.__name__, 'Subscription of %s is %s' % \
                    (message['request']['args'], \
                     'successful' if message['success'] else 'failed'))
    elif 'table' in keys:
        if message['table'] == 'trade':
            for trade_raw in message['data']:
                if trade_raw["symbol"] == instmt.get_instmt_code():
                    # Filter out the initial subscriptions
                    trade = self.api_socket.parse_trade(instmt, trade_raw)
                    # Dedupe on the exchange trade id before inserting.
                    if trade.trade_id != instmt.get_exch_trade_id():
                        instmt.incr_trade_id()
                        instmt.set_exch_trade_id(trade.trade_id)
                        self.insert_trade(instmt, trade)
        elif message['table'] == 'orderBookL2':
            l2_depth = self.api_socket.parse_l2_depth(instmt, message)
            # Insert only when the parsed book differs from the stored one.
            if l2_depth is not None and l2_depth.is_diff(instmt.get_l2_depth()):
                instmt.set_prev_l2_depth(instmt.get_l2_depth())
                instmt.set_l2_depth(l2_depth)
                instmt.incr_order_book_id()
                self.insert_order_book(instmt)
        else:
            # Unhandled table type: dump it for inspection.
            Logger.info(self.__class__.__name__, json.dumps(message,indent=2))
    else:
        Logger.error(self.__class__.__name__, "Unrecognised message:\n" + json.dumps(message))
def insert(self, table, columns, types, values, primary_key_index=(),
           is_orreplace=False, is_commit=True):
    """
    Insert into the table.

    :param table: Table name
    :param columns: Column array
    :param types: Type array (unused here; kept for interface compatibility)
    :param values: Value array
    :param primary_key_index: An array of indices of primary keys in columns,
                              e.g. [0] means the first column is the primary key
                              (default changed from a mutable [] to a tuple)
    :param is_orreplace: Indicate if the query is "INSERT OR REPLACE"
    :param is_commit: Commit after executing the statement
    :return: True on success or logged failure, False if columns/values
             lengths differ.
    """
    if len(columns) != len(values):
        return False

    column_names = ','.join(columns)
    value_string = ','.join([SqlClient.convert_str(e) for e in values])
    if is_orreplace:
        sql = "%s %s (%s) values (%s)" % (self.replace_keyword(), table, column_names, value_string)
    else:
        sql = "insert into %s (%s) values (%s)" % (table, column_names, value_string)

    self.lock.acquire()
    try:
        self.execute(sql)
        if is_commit:
            self.commit()
    except Exception as e:
        Logger.info(self.__class__.__name__, "SQL error: %s\nSQL: %s" % (e, sql))
    finally:
        # Fixed: finally guarantees release even on non-Exception exits
        # (e.g. KeyboardInterrupt), where the original could deadlock.
        self.lock.release()
    return True
def start(self, instmt):
    """
    Start the exchange gateway: prepare the snapshot table and depth
    buffers, then open the websocket with per-instrument handlers.

    :param instmt: Instrument
    :return: List of threads
    """
    depth_levels = 20
    instmt.set_l2_depth(L2Depth(depth_levels))
    instmt.set_prev_l2_depth(L2Depth(depth_levels))
    snapshot_table = self.get_instmt_snapshot_table_name(instmt.get_exchange_name(),
                                                         instmt.get_instmt_name())
    instmt.set_instmt_snapshot_table_name(snapshot_table)
    self.init_instmt_snapshot_table(instmt)
    Logger.info(self.__class__.__name__,
                'instmt snapshot table: {}'.format(instmt.get_instmt_snapshot_table_name()))
    connection_thread = self.api_socket.connect(
        self.api_socket.get_link(),
        on_message_handler=partial(self.on_message_handler, instmt),
        on_open_handler=partial(self.on_open_handler, instmt),
        on_close_handler=partial(self.on_close_handler, instmt))
    return [connection_thread]
def level_two(dog_id):
    """Phase Three, level 2: 30s quiet challenge; retries itself on failure."""
    Logger.info("Phase Three: Level 2 - Starting")
    Logger.data(dog_id, 3, 2, "starting")
    passed = Challenge.phase_three(dog_id=dog_id,
                                   level=2,
                                   treat_frequency_min=3,
                                   quiet_length=30,
                                   fail_max=3)
    if passed:
        FileIOUtil.save(dog_id, 3, 2)
        Logger.info("Phase Three: Level 2 - Complete")
        Logger.data(dog_id, 3, 2, "complete")
        return level_three(dog_id)
    if not Config.RUN_FLAG:
        Logger.info("Phase Three: Level 2 - Cancelled")
        return False
    Logger.info("Phase Three: Level 2 - Failed, regressing to Level 2")
    Logger.data(dog_id, 3, 2, "failed")
    return level_two(dog_id)
def level_five(dog_id):
    """Phase Three, level 5: 60s quiet challenge; regresses to level 4 on failure."""
    Logger.info("Phase Three: Level 5 - Starting")
    Logger.data(dog_id, 3, 5, "starting")
    passed = Challenge.phase_three(dog_id=dog_id,
                                   level=5,
                                   treat_frequency_min=12,
                                   quiet_length=60,
                                   fail_max=3)
    if passed:
        FileIOUtil.save(dog_id, 3, 5)
        Logger.info("Phase Three: Level 5 - Complete")
        Logger.data(dog_id, 3, 5, "complete")
        return level_six(dog_id)
    if not Config.RUN_FLAG:
        Logger.info("Phase Three: Level 5 - Cancelled")
        return False
    Logger.info("Phase Three: Level 5 - Failed, regressing to Level 4")
    Logger.data(dog_id, 3, 5, "failed")
    return level_four(dog_id)
def on_message_handler(self, instmt, messages):
    """
    Incoming message handler for the OkCoin websocket.

    :param instmt: Instrument
    :param messages: List of channel messages (decoded JSON dicts)
    """
    for message in messages:
        keys = message.keys()
        if 'channel' in keys:
            if 'data' in keys:
                if message['channel'] == instmt.get_order_book_channel_id(
                ):
                    # Depth update: parse into the instrument's book.
                    data = message['data']
                    instmt.set_prev_l2_depth(instmt.get_l2_depth().copy())
                    self.api_socket.parse_l2_depth(instmt, data)
                    # Insert only if the first 5 levels are different
                    if instmt.get_l2_depth().is_diff(
                            instmt.get_prev_l2_depth()):
                        instmt.incr_order_book_id()
                        self.insert_order_book(instmt)
                elif message['channel'] == instmt.get_trades_channel_id():
                    for trade_raw in message['data']:
                        trade = self.api_socket.parse_trade(
                            instmt, trade_raw)
                        # Dedupe on the exchange trade id before inserting.
                        if trade.trade_id != instmt.get_exch_trade_id():
                            instmt.incr_trade_id()
                            instmt.set_exch_trade_id(trade.trade_id)
                            self.insert_trade(instmt, trade)
            elif 'success' in keys:
                # Subscription ack frame.
                Logger.info(self.__class__.__name__,
                            "Subscription to channel %s is %s" \
                            % (message['channel'], message['success']))
        else:
            # Unrecognised frame: dump it for inspection.
            Logger.info(self.__class__.__name__, ' - ' + json.dumps(message))
def dog_standing(dog_id, level=1):
    """
    Phase Five standing levels (1 and 2).

    The dog earns random-interval treats while standing; barking resets the
    stand timer, and lying down hands control to dog_down. Level 2 starts
    with the level-1 duration already credited.

    :param dog_id: Identifier of the dog under training.
    :param level: 1 or 2.
    :return: False on cancellation, otherwise the result of dog_down.
    """
    Logger.info("Phase Five: Level %s - Starting" % level)
    Logger.data(dog_id, 5, level, "starting")
    stand_time = 0
    treat_timer = 1
    treat_dispense_time = 0
    if level == 2:
        # Credit the full level-1 duration so the loop behaves as level 2.
        stand_time = levels['1']['duration'] + 1
    """ DOG IS STANDING """
    while True:
        # If the program is calling for cancellation, just cancel whatever we are doing
        if not Config.RUN_FLAG:
            return False
        # If the dog has laid down, move to #D levels.
        if MoveUtil.is_dog_down:
            Logger.data(dog_id, 5, 1 if stand_time <= levels['1']['duration'] else 2, "dog_down")
            return dog_down(dog_id, '1D')
        # If the dog has barked, reset to level 1
        if NoiseUtil.has_dog_barked:
            # NOTE(review): the bark flag is not reset here, unlike phase 3's
            # reset_bark_status() — confirm it clears itself, otherwise this
            # branch repeats every second.
            stand_time = 0
            treat_timer = 1
            continue
        # If the dog has completed Level 1, announce and save
        if stand_time == levels['1']['duration']:
            FileIOUtil.save(dog_id, 5, 1)
            Logger.info("Phase Five: Level 1 - Complete")
            Logger.data(dog_id, 5, 1, "complete")
            # NOTE(review): duplicate save of (5, 1) below — likely redundant.
            FileIOUtil.save(dog_id, 5, 1)
            Logger.info("Phase Five: Level 2 - Starting")
            Logger.data(dog_id, 5, 2, "starting")
        # If we just gave a treat, figure out the next time we should dispense one
        if treat_timer == 1:
            treat_dispense_time = random.randint(
                levels['1']['min'] if stand_time <= levels['1']['duration'] else levels['2']['min'],
                levels['1']['max'] if stand_time <= levels['1']['duration'] else levels['2']['max'])
        # Check if a treat should be dispensed
        if treat_timer == treat_dispense_time:
            dispense_treat()
            treat_timer = 0
        # End stuff
        time.sleep(1)
        treat_timer += 1
        stand_time += 1
def writeAllPaths(pathModel, outputFileName):
    """
    Pickle pathModel to outputFileName.

    :param pathModel: Any picklable object holding the paths.
    :param outputFileName: Destination file path.
    """
    Logger.info("-----Write File Started-----")
    Logger.info("Writing file :" + str(outputFileName))
    try:
        with open(outputFileName, 'wb') as fp:
            pickle.dump(pathModel, fp)
        Logger.info("-----Write File Complete-----")
    except OSError as err:
        # Fixed: exceptions have no .message attribute in Python 3; the
        # original would raise AttributeError inside the handler and mask
        # the real error. %-formatting an exception uses str() implicitly.
        Logger.error("OS error: %s" % err)
    except Exception as exp:
        Logger.error("exception: %s" % exp)
class Mail(object): """model for the Mail.""" id_is_valid = staticmethod(lambda num: 0 < int(num) <= 1L << 31) def __init__(self, env, id=None, db=None, messageid=None, row=None): self.env = env self.db = db self.log = Logger(env) if id is not None: self.resource = Resource('mailarchive', str(id), None) self._fetch_mail(id) elif messageid is not None: self._fetch_mail_by_messageid(messageid) self.resource = Resource('mailarchive', self.id, None) elif row is not None: self._fetch_mail_by_row(row) self.resource = Resource('mailarchive', self.id, None) else: self.messageid = '' self.subject = '' self.utcdate = 0 self.localdate = '' self.zoneoffset = 0 self.body = '' def __eq__(self, other): if isinstance(other, Mail): return self.messageid == other.messageid return super.__eq__(other) def _get_db(self): if self.db: return self.db else: return self.env.get_db_cnx() def _get_db_for_write(self): if self.db: return (self.db, False) else: return (self.env.get_db_cnx(), True) def get_sanitized_fromaddr(self): return self.fromaddr.replace('@', self.env.config.get('mailarchive', 'replaceat', '@')) def get_fromtext(self): return get_author(self.fromname, self.fromaddr) def get_category(self): yearmonth = time.strftime("%Y%m", time.gmtime(self.utcdate)) category = self.mlid + yearmonth return category.encode('utf-8') def get_plain_body(self): return self._sanitize(self.env, self.body) def get_html_body(self, req): # for HTML Mail if self.body.lstrip().startswith('<'): return Markup(self.body) contentlines = self.body.splitlines() htmllines = ['',] #customize! 
#http://d.hatena.ne.jp/ohgui/20090604/1244114483 wikimode = req.args.get('wikimode', 'on') for line in contentlines: if self.env.config.get('mailarchive', 'wikiview',' enabled') == 'enabled' and wikimode == 'on': htmllines.append(wiki_to_oneliner(line, self.env, self.db, False, False, req)) else: htmllines.append(Markup(Markup().escape(line).replace(' ',' '))) content = Markup('<br/>').join(htmllines) return content def _sanitize(self, env, text): return text.replace('@', env.config.get('mailarchive', 'replaceat','_at_') ) def _fetch_mail(self, id): row = None if self.id_is_valid(id): db = self._get_db() cursor = db.cursor() cursor.execute(SELECT_FROM_MAILARC + " WHERE id=%s", (id,)) row = cursor.fetchone() if not row: raise ResourceNotFound('Mail %s does not exist.' % id, 'Invalid Mail Number') self._fetch_mail_by_row(row) def _fetch_mail_by_messageid(self, messageid): row = None db = self._get_db() cursor = db.cursor() cursor.execute(SELECT_FROM_MAILARC + " WHERE messageid=%s", (messageid,)) row = cursor.fetchone() if not row: raise ResourceNotFound('Mail messageid %s does not exist.' 
% messageid, 'Invalid Mail messageid Number') self._fetch_mail_by_row(row) def _fetch_mail_by_row(self, row): self.id = row[0] self.messageid = row[1] self.utcdate = row[2] self.zoneoffset = row[3] self.subject = row[4] self.fromname = row[5] self.fromaddr = row[6] self.header =row[7] self.body = row[8] self.thread_root = row[9] self.thread_parent = row[10] self.zone = self._to_zone(self.zoneoffset) self.localdate = self._to_localdate(self.utcdate, self.zoneoffset) def _to_localdate(self, utcdate, zoneoffset): return time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(utcdate + zoneoffset)) def _to_zone(self, zoneoffset): #zone and date zone = '' if zoneoffset == '': zoneoffset = 0 if zoneoffset > 0: zone = ' +' + time.strftime('%H%M', time.gmtime(zoneoffset)) elif zoneoffset < 0: zone = ' -' + time.strftime('%H%M', time.gmtime(-1 * zoneoffset)) return zone def get_href(self, req): return req.href.mailarchive(self.id) def get_subject(self): if is_empty(self.subject): return '(no subject)' else: return self.subject def get_senddate(self): return self.localdate + self.zone def get_thread_root(self): if self.thread_root == '': return self try: root_mail = Mail(self.env, messageid=self.thread_root) except ResourceNotFound: return self #self.thread_rootはオリジナル版だと親のメールになってしまっている。 #互換性維持のため、ルートではない場合は自力で探しにいくロジックを走らす if root_mail.thread_root == '': return root_mail else: if self.thread_parent != '': root_id = MailFinder.find_root_id(self.env, self.messageid) return Mail(self.env, messageid=root_id) def get_thread_parent_id(self): if self.thread_parent != '': return self.thread_parent.split(' ')[0] return None def get_thread_parent(self): if self.thread_parent != '': return Mail(self.env, db=self.db, messageid=self.get_thread_parent_id()) return self def get_children(self, desc=False, cached_mails=None): if cached_mails: self.log.debug("[%s] mail's threads is cached." 
% self.id) return [x for x in cached_mails if x.get_thread_parent_id() == self.messageid] db = self._get_db() cursor = db.cursor() sql = SELECT_FROM_MAILARC + " WHERE threadparent LIKE %s ORDER BY utcdate" if desc: sql += " DESC" cursor.execute(sql, ('%s %%' % self.messageid,)) children = [] for row in cursor: child_mail = Mail(self.env, row=row, db=self.db) children.append(child_mail) return children def get_thread_mails(self, desc=False): root = self.get_thread_root() db = self._get_db() cursor = db.cursor() sql = SELECT_FROM_MAILARC + " WHERE threadroot = %s ORDER BY utcdate" if desc: sql += " DESC" cursor.execute(sql, (root.messageid,)) mails = [] for row in cursor: mails.append(Mail(self.env, row=row, db=self.db)) return mails def has_children(self, cached_mails=None): rtn = len(self.get_children(cached_mails=cached_mails)) > 0 return rtn def get_related_tickets(self, req): db = self._get_db() return get_related_tickets(self.env, req, db, self.id) def has_attachments(self, req): attachment = MailArchiveAttachment(self.env, self.id) return attachment.has_attachments(req) def populate(self, author, msg, mlid): """Populate the mail with 'suitable' values from a message""" if 'message-id' not in msg: raise 'Illegal Format Mail!' self.is_new_mail = False self.mlid = mlid self._parse_messageid(msg) self._parse_date(msg) self._parse_subject(msg) if msg.is_multipart(): self._parse_multipart(author, msg) else: self._parse_body(msg) ref_messageid = self._parse_reference(msg) self._make_thread(ref_messageid) def update_or_save(self): if self.messageid is None or self.messageid == '': raise "Can't save mail to database." 
db, has_tran = self._get_db_for_write() cursor = db.cursor() yearmonth = time.strftime("%Y%m", time.gmtime(self.utcdate)) category = self.mlid + yearmonth cursor.execute("SELECT category, mlid, yearmonth, count FROM mailarc_category WHERE category=%s", (category.encode('utf-8'),)) row = cursor.fetchone() count = 0 if row: count = row[3] pass else: cursor.execute("INSERT INTO mailarc_category (category, mlid, yearmonth, count) VALUES(%s, %s, %s, %s)", (category.encode('utf-8'), self.mlid.encode('utf-8'), yearmonth, 0)) if self.is_new_mail: count = count + 1 cursor.execute("UPDATE mailarc_category SET count=%s WHERE category=%s", (count, category.encode('utf-8'))) # insert or update mailarc #self.log.debug( # "VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)" %(str(id), # category.encode('utf-8'), # messageid, # utcdate, # zoneoffset, # subject.encode('utf-8'), fromname.encode('utf-8'), # fromaddr.encode('utf-8'),'','', # thread_root,thread_parent)) cursor.execute("DELETE FROM mailarc where messageid=%s", (self.messageid,)) cursor.execute("INSERT INTO mailarc (" "id, category, messageid, utcdate, zoneoffset, subject," "fromname, fromaddr, header, text, threadroot, threadparent) " "VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)", (str(self.id), category.encode('utf-8'), self.messageid, self.utcdate, self.zoneoffset, self.subject.encode('utf-8'), self.fromname.encode('utf-8'), self.fromaddr.encode('utf-8'), '', self.body.encode('utf-8'), self.thread_root, self.thread_parent)) if has_tran: db.commit() def _parse_messageid(self, msg): self.messageid = msg['message-id'].strip('<>') #check messageid is unique self.log.debug("Creating new mailarc '%s'" % 'mailarc') db = self._get_db() cursor = db.cursor() cursor.execute("SELECT id from mailarc WHERE messageid=%s", (self.messageid,)) row = cursor.fetchone() id = None if row: id = row[0] if id == None or id == "": # why? get_last_id return 0 at first. 
#id = db.get_last_id(cursor, 'mailarc') self.is_new_mail = True cursor.execute("SELECT Max(id)+1 as id from mailarc") row = cursor.fetchone() if row and row[0] != None: id = row[0] else: id = 1 self.id = int(id) # Because id might be 'n.0', int() is called. def _parse_date(self, msg): if 'date' in msg: datetuple_tz = email.Utils.parsedate_tz(msg['date']) localdate = calendar.timegm(datetuple_tz[:9]) #toDB zoneoffset = datetuple_tz[9] # toDB utcdate = localdate - zoneoffset # toDB #make zone ( +HHMM or -HHMM zone = '' if zoneoffset > 0: zone = '+' + time.strftime('%H%M', time.gmtime(zoneoffset)) elif zoneoffset < 0: zone = '-' + time.strftime('%H%M', time.gmtime(-1 * zoneoffset)) #self.log.debug( time.strftime("%y/%m/%d %H:%M:%S %z",datetuple_tz[:9])) self.log.debug(time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(utcdate))) self.log.debug(time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(localdate))) self.log.debug(zone) fromname, fromaddr = email.Utils.parseaddr(msg['from']) self.fromname = self._decode_to_unicode(fromname) self.fromaddr = self._decode_to_unicode(fromaddr) self.zone = zone self.utcdate = utcdate self.zoneoffset = zoneoffset self.localdate = self._to_localdate(utcdate, zoneoffset) self.log.info(' ' + self.localdate + ' ' + zone +' '+ fromaddr) def _parse_subject(self, msg): if 'subject' in msg: self.subject = self._decode_to_unicode(msg['subject']) def _parse_reference(self, msg): # make thread infomations ref_messageid = '' if 'in-reply-to' in msg: ref_messageid = ref_messageid + msg['In-Reply-To'] + ' ' self.log.debug('In-Reply-To:%s' % ref_messageid ) if 'references' in msg: ref_messageid = ref_messageid + msg['References'] + ' ' m = re.findall(r'<(.+?)>', ref_messageid) ref_messageid = '' for text in m: ref_messageid = ref_messageid + "'%s'," % text ref_messageid = ref_messageid.strip(',') self.log.debug('RefMessage-ID:%s' % ref_messageid) return ref_messageid def _parse_multipart(self, author, msg): body = '' # delete all attachement at message-id 
        # Re-importing a mail replaces its attachments wholesale.
        Attachment.delete_all(self.env, 'mailarchive', self.id, self.db)
        for part in msg.walk():
            content_type = part.get_content_type()
            self.log.debug('Content-Type:' + content_type)
            # NOTE(review): file_counter is reset to 1 for every part, so two
            # unnamed attachments in one mail would both be 'part-001.*' —
            # looks like this was meant to be initialized before the loop.
            file_counter = 1
            if content_type == 'multipart/mixed':
                pass
            elif content_type == 'text/html' and self._is_file(part) == False:
                if body != '':
                    body += "\n------------------------------\n\n"
                # NOTE(review): '=' (not '+=') discards any body text
                # accumulated so far, including the separator just appended —
                # inconsistent with the text/plain branch below.
                body = part.get_payload(decode=True)
                charset = part.get_content_charset()
                self.log.debug('charset:' + str(charset))
                # Todo:need try
                if charset != None:
                    body = self._to_unicode(body, charset)
            elif content_type == 'text/plain' and self._is_file(part) == False:
                #body = part.get_payload(decode=True)
                if body != '':
                    # Separator between multiple text parts.
                    body += "\n------------------------------\n\n"
                current_body = part.get_payload(decode=True)
                charset = part.get_content_charset()
                self.log.debug('charset:' + str(charset))
                # Todo:need try
                if charset != None:
                    #body = self._to_unicode(body, charset)
                    body += self._to_unicode(current_body, charset)
                else:
                    body += current_body
            elif part.get_payload(decode=True) == None:
                # Container parts (e.g. multipart/*) decode to None — skip.
                pass
            # file attachment
            else:
                self.log.debug(part.get_content_type())
                # get filename
                # Applications should really sanitize the given filename so that an
                # email message can't be used to overwrite important files
                filename = self._get_filename(part)
                if not filename:
                    # No filename supplied: synthesize one from the MIME type.
                    import mimetypes
                    ext = mimetypes.guess_extension(part.get_content_type())
                    if not ext:
                        # Use a generic bag-of-bits extension
                        ext = '.bin'
                    filename = 'part-%03d%s' % (file_counter, ext)
                    file_counter += 1
                self.log.debug("filename:" + filename.encode(OUTPUT_ENCODING))

                # make attachment: spool the decoded payload to a temp file
                # so Attachment.insert() can read it as a file object.
                tmp = os.tmpfile()
                tempsize = len(part.get_payload(decode=True))
                tmp.write(part.get_payload(decode=True))
                tmp.flush()
                tmp.seek(0,0)

                attachment = Attachment(self.env, 'mailarchive', self.id)

                attachment.description = ''  # req.args.get('description', '')
                attachment.author = author #req.args.get('author', '')
                attachment.ipnr = '127.0.0.1'

                try:
                    attachment.insert(filename, tmp, tempsize, None, self.db)
                except Exception, e:
                    # Insert failed (assumed here: filename too long) —
                    # retry once with a synthetic 'part-NNN.ext' name.
                    try:
                        ext = filename.split('.')[-1]
                        if ext == filename:
                            # No dot in the name: no real extension.
                            ext = '.bin'
                        else:
                            ext = '.' + ext
                        filename = 'part-%03d%s' % (file_counter, ext)
                        file_counter += 1
                        # NOTE(review): filename was just reassigned above, so
                        # this records the NEW name, not the original one the
                        # message actually carried.
                        attachment.description += ', Original FileName: %s' % filename
                        attachment.insert(filename, tmp, tempsize, None, self.db)
                        self.log.warn('As name is too long, the attached file is renamed : ' + filename)
                    except Exception, e:
                        self.log.error('Exception at attach file of Message-ID:' + self.messageid)
                        traceback.print_exc(e)

                tmp.close()