def create_db_path(cls, db_path: Optional[str] = None, db_name: Optional[str] = None) -> str:
    if db_path is not None:
        return db_path
    if db_name is not None:
        return join(data_path(), f"{db_name}.sqlite")
    else:
        return join(data_path(), "hummingbot_trades.sqlite")
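# Usage sketch for the path resolution above. The module path and the classmethod
# binding are assumptions from context, not shown in this snippet; precedence is an
# explicit db_path, then a db_name-derived file, then the default trade database.
from hummingbot.model.sql_connection_manager import SQLConnectionManager  # assumed import path

print(SQLConnectionManager.create_db_path(db_path="/tmp/custom.sqlite"))  # -> /tmp/custom.sqlite
print(SQLConnectionManager.create_db_path(db_name="paper_trade"))         # -> <data_path>/paper_trade.sqlite
print(SQLConnectionManager.create_db_path())                              # -> <data_path>/hummingbot_trades.sqlite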
def append_to_csv(self, trade: TradeFill):
    csv_filename = "trades_" + trade.config_file_path[:-4] + ".csv"
    csv_path = os.path.join(data_path(), csv_filename)
    field_names = tuple(trade.attribute_names_for_file_export())
    field_data = tuple(getattr(trade, attr) for attr in field_names)
    # adding extra field "age"
    # // indicates order is a paper order so 'n/a'. For real orders, calculate age.
    age = pd.Timestamp(int((trade.timestamp * 1e-3) - (trade.order.creation_timestamp * 1e-3)),
                       unit='s').strftime('%H:%M:%S') if (
        trade.order is not None and "//" not in trade.order_id) else "n/a"
    field_names += ("age",)
    field_data += (age,)
    if (os.path.exists(csv_path) and
            (not self._csv_matches_header(csv_path, field_names))):
        move(csv_path, csv_path[:-4] + '_old_' + pd.Timestamp.utcnow().strftime("%Y%m%d-%H%M%S") + ".csv")
    if not os.path.exists(csv_path):
        df_header = pd.DataFrame([field_names])
        df_header.to_csv(csv_path, mode='a', header=False, index=False)
    df = pd.DataFrame([field_data])
    df.to_csv(csv_path, mode='a', header=False, index=False)
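# Sketch of the "age" formatting trick used above: both timestamps are in milliseconds,
# and the elapsed seconds are fed to pd.Timestamp so strftime('%H:%M:%S') renders them
# as hours:minutes:seconds (readable only for fills less than 24 hours old). The
# timestamp values below are illustrative, not taken from the snippet.
import pandas as pd

fill_timestamp_ms = 1_640_000_065_000   # stands in for trade.timestamp
order_created_ms = 1_640_000_000_000    # stands in for trade.order.creation_timestamp

elapsed_s = int(fill_timestamp_ms * 1e-3 - order_created_ms * 1e-3)
print(pd.Timestamp(elapsed_s, unit='s').strftime('%H:%M:%S'))  # 00:01:05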
def append_to_csv(self, trade: TradeFill):
    csv_filename = "trades_" + trade.config_file_path[:-4] + ".csv"
    csv_path = os.path.join(data_path(), csv_filename)
    field_names = ("exchange_trade_id",)  # id field should be first
    field_names += tuple(attr for attr in dir(trade) if (
        not self._is_protected_method(attr) and
        self._is_primitive_type(getattr(trade, attr)) and
        (attr not in field_names)))
    field_data = tuple(getattr(trade, attr) for attr in field_names)
    # adding extra field "age"
    # // indicates order is a paper order so 'n/a'. For real orders, calculate age.
    age = pd.Timestamp(int(trade.timestamp / 1e3 - int(trade.order_id[-16:]) / 1e6),
                       unit='s').strftime('%H:%M:%S') if "//" not in trade.order_id else "n/a"
    field_names += ("age",)
    field_data += (age,)
    if (os.path.exists(csv_path) and
            (not self._csv_matches_header(csv_path, field_names))):
        move(csv_path, csv_path[:-4] + '_old_' + pd.Timestamp.utcnow().strftime("%Y%m%d-%H%M%S") + ".csv")
    if not os.path.exists(csv_path):
        df_header = pd.DataFrame([field_names])
        df_header.to_csv(csv_path, mode='a', header=False, index=False)
    df = pd.DataFrame([field_data])
    df.to_csv(csv_path, mode='a', header=False, index=False)
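# Sketch of the age derivation used above for real (non paper-trade) orders: the last
# 16 characters of the client order id are assumed to encode the order creation time
# in microseconds since the epoch, while trade.timestamp is in milliseconds. The
# order id and timestamp below are made up for illustration.
import pandas as pd

order_id = "buy-ETH-USDT-1640000000000000"   # hypothetical order id with a 16-digit microsecond suffix
fill_timestamp_ms = 1_640_000_125_000        # stands in for trade.timestamp

elapsed_s = int(fill_timestamp_ms / 1e3 - int(order_id[-16:]) / 1e6)
print(pd.Timestamp(elapsed_s, unit='s').strftime('%H:%M:%S'))  # 00:02:05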
def __init__(self, connection_type: SQLConnectionType, db_path: Optional[str] = None):
    if db_path is None:
        db_path = join(data_path(), "hummingbot_trades.sqlite")

    if connection_type is SQLConnectionType.TRADE_FILLS:
        self._engine: Engine = create_engine(f"sqlite:///{db_path}")
        self._metadata: MetaData = self.get_declarative_base().metadata
        self._metadata.create_all(self._engine)

    self._session_cls = sessionmaker(bind=self._engine)
    self._shared_session: Session = self._session_cls()

    if connection_type is SQLConnectionType.TRADE_FILLS:
        self.check_and_upgrade_trade_fills_db()
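# Construction sketch for the SQLite-only variant above; the import path and the
# get_shared_session() accessor are assumptions from context.
from hummingbot.model.sql_connection_manager import SQLConnectionManager, SQLConnectionType  # assumed

manager = SQLConnectionManager(SQLConnectionType.TRADE_FILLS, db_path="/tmp/hummingbot_trades.sqlite")
session = manager.get_shared_session()  # assumed accessor for the _shared_session created in __init__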
def append_to_csv(self, trade: TradeFill):
    csv_file = "trades_" + trade.config_file_path[:-4] + ".csv"
    csv_path = os.path.join(data_path(), csv_file)
    # // indicates order is a paper order so 'n/a'. For real orders, calculate age.
    age = "n/a"
    if "//" not in trade.order_id:
        age = pd.Timestamp(int(trade.timestamp / 1e3 - int(trade.order_id[-16:]) / 1e6),
                           unit='s').strftime('%H:%M:%S')
    if not os.path.exists(csv_path):
        df_header = pd.DataFrame([["Config File", "Strategy", "Exchange", "Timestamp", "Market", "Base", "Quote",
                                   "Trade", "Type", "Price", "Amount", "Fee", "Age", "Order ID",
                                   "Exchange Trade ID"]])
        df_header.to_csv(csv_path, mode='a', header=False, index=False)
    df = pd.DataFrame([[trade.config_file_path, trade.strategy, trade.market, trade.timestamp, trade.symbol,
                        trade.base_asset, trade.quote_asset, trade.trade_type, trade.order_type, trade.price,
                        trade.amount, trade.trade_fee, age, trade.order_id, trade.exchange_trade_id]])
    df.to_csv(csv_path, mode='a', header=False, index=False)
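# Sketch of reading the fixed-header trade file written above back into pandas; the
# file name is derived from the strategy config file name, e.g. "conf_pure_mm_1.yml"
# becomes "trades_conf_pure_mm_1.csv". The config name and the data_path import are
# assumptions for illustration.
import os

import pandas as pd

from hummingbot import data_path  # assumed helper, matching the usage above

csv_path = os.path.join(data_path(), "trades_conf_pure_mm_1.csv")
if os.path.exists(csv_path):
    trades_df = pd.read_csv(csv_path)  # the first appended row serves as the header
    print(trades_df[["Market", "Trade", "Price", "Amount", "Age"]].tail())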
def __init__(self, connection_type: SQLConnectionType, db_path: Optional[str] = None):
    if db_path is None:
        db_path = join(data_path(), "hummingbot_trades.sqlite")

    engine_options = {
        "db_engine": global_config_map.get("db_engine").value,
        "db_host": global_config_map.get("db_host").value,
        "db_port": global_config_map.get("db_port").value,
        "db_username": global_config_map.get("db_username").value,
        "db_password": global_config_map.get("db_password").value,
        "db_name": global_config_map.get("db_name").value,
        "db_path": db_path,
    }

    if connection_type is SQLConnectionType.TRADE_FILLS:
        self._engine: Engine = self.get_db_engine(engine_options.get("db_engine"), engine_options)
        self._metadata: MetaData = self.get_declarative_base().metadata
        self._metadata.create_all(self._engine)

        # SQLite does not enforce foreign key constraints, but for other engines we need to drop them.
        # See: `hummingbot/market/markets_recorder.py`, at line 213.
        with self._engine.begin() as conn:
            inspector = inspect(conn)
            for tname, fkcs in reversed(inspector.get_sorted_table_and_fkc_names()):
                if fkcs:
                    if not self._engine.dialect.supports_alter:
                        continue
                    for fkc in fkcs:
                        fk_constraint = ForeignKeyConstraint((), (), name=fkc)
                        Table(tname, MetaData(), fk_constraint)
                        conn.execute(DropConstraint(fk_constraint))

    self._session_cls = sessionmaker(bind=self._engine)
    self._shared_session: Session = self._session_cls()

    if connection_type is SQLConnectionType.TRADE_FILLS:
        self.check_and_upgrade_trade_fills_db()
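# Verification sketch for the foreign-key cleanup above: after constructing the manager
# against an engine whose dialect supports ALTER, the inspector should report no
# remaining foreign keys on the recorded tables. Uses only public SQLAlchemy APIs.
from sqlalchemy import inspect


def assert_no_foreign_keys(engine) -> None:
    inspector = inspect(engine)
    for table_name in inspector.get_table_names():
        fks = inspector.get_foreign_keys(table_name)
        assert not fks, f"unexpected foreign keys left on {table_name}: {fks}"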