def get_using_names(self) -> [str]:
    names = []
    for table in TABLE_LIST:
        data_table = DatabaseEntry().get_finance_table(table)
        keys = data_table.get_all_keys()
        names.extend(keys)
    return list(set(names))
def __save_cached_data(self) -> bool:
    table = DatabaseEntry().get_securities_table()
    for index, row in self.__cached_data.iterrows():
        code = row['code']
        exchange = row['exchange']
        identity = IDENTITY_SECURITIES_INFO.\
            replace('<stock_code>', code).\
            replace('<exchange>', exchange)
        table.upsert(identity, text_auto_time('2000-01-01'), row.to_dict())
    return True
def __load_cached_data(self) -> bool:
    table = DatabaseEntry().get_securities_table()
    record = table.query()
    if record is not None and len(record) > 0:
        self.__cached_data = pd.DataFrame(record)
        del self.__cached_data['DateTime']
        del self.__cached_data['_id']
    else:
        self.__cached_data = pd.DataFrame(columns=list(FIELD_INFO.keys()))
    return True
def __update_date_field(self, tag1: str, tag2: str, tag3: str,
                        date: datetime or str, field: int, compare):
    sql_update = ("UPDATE %s SET %s = '%s' "
                  "WHERE L1Tag='%s' AND L2Tag='%s' AND L3Tag='%s';" %
                  (UpdateTable.TABLE, UpdateTable.FIELD[field],
                   text_auto_time(date), tag1, tag2, tag3))
    sql_insert = ("INSERT INTO %s (L1Tag, L2Tag, L3Tag, %s) "
                  "VALUES ('%s', '%s', '%s', '%s');" %
                  (UpdateTable.TABLE, UpdateTable.FIELD[field],
                   tag1, tag2, tag3, text_auto_time(date)))
    record = self.get_update_record(tag1, tag2, tag3)
    if record is None or len(record) == 0:
        # No row for this tag triple yet: insert one.
        return DatabaseEntry().get_utility_db().QuickExecuteDML(sql_insert, True)
    elif record[0][field] is None or compare(text_auto_time(date), text_auto_time(record[0][field])):
        # Update only when the stored date is empty or the comparator accepts the new date.
        return DatabaseEntry().get_utility_db().QuickExecuteDML(sql_update, True)
    else:
        return True
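# A standalone sketch (assumed names, no database access) of the decision that
# __update_date_field() makes: insert when no record exists for the tag triple,
# update when the stored date is empty or the caller-supplied comparator accepts
# the new date, otherwise leave the row untouched. The helper and sample records
# below are illustrative only.
from datetime import datetime


def _demo_update_decision(record, field, new_date, accept) -> str:
    if record is None or len(record) == 0:
        return 'insert'      # no row for this tag triple yet
    if record[0][field] is None or accept(new_date, record[0][field]):
        return 'update'      # stored date empty, or the new date wins the comparison
    return 'keep'            # stored date stands


newer = lambda a, b: a > b
print(_demo_update_decision([], 3, datetime(2020, 1, 2), newer))                                        # insert
print(_demo_update_decision([('a', 'b', 'c', datetime(2020, 1, 1))], 3, datetime(2020, 1, 2), newer))   # update
print(_demo_update_decision([('a', 'b', 'c', datetime(2020, 1, 3))], 3, datetime(2020, 1, 2), newer))   # keep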
def __load_cached_data(self, tags: [str]) -> bool:
    report_type = tags[0]
    stock_identity = tags[1]
    data_table = DatabaseEntry().get_finance_table(report_type)
    record = data_table.query(stock_identity)
    if record is not None and len(record) > 0:
        df = pd.DataFrame(record)
        # del df['DateTime']
        del df['_id']
        self.__cached_data[report_type][stock_identity] = df
        return True
    else:
        logger.info('FinanceData.load_cached_data() - No record for ' + str(tags))
        return False
def __save_cached_data(self) -> bool:
    for report_type in self.__save_table.keys():
        save_list = self.__save_table.get(report_type)
        data_table = DatabaseEntry().get_finance_table(report_type)
        for stock_identity in save_list:
            df = self.__cached_data.get(report_type).get(stock_identity)
            self.__save_single_data(stock_identity, df, data_table)
    return True
def __build_data_center() -> UniversalDataCenter:
    plugin_path = root_path + '/Collector/'
    collector_plugin = PluginManager(plugin_path)
    collector_plugin.refresh()
    return UniversalDataCenter(DatabaseEntry(), collector_plugin)
def load_from_db(self, **kw) -> bool:
    self.__calendar = DatabaseEntry().get_utility_db().DataFrameFromDB(
        'StockCalendar', ["Date", "IsOpen"])
    if self.__calendar is not None:
        return True
    else:
        self.__calendar = pd.DataFrame({'Date': [], 'IsOpen': []})
        return False
def __load_cached_data(self) -> bool:
    df = DatabaseEntry().get_utility_db().DataFrameFromDB(
        'TradeCalender', FIELD_TRADE_CALENDER)
    if df is None:
        df = pd.DataFrame(columns=FIELD_TRADE_CALENDER)
    for exchange in TRADE_EXCHANGE:
        # Slice out this exchange's rows and reset the index so each cached frame starts from 0.
        self.__cached_data[exchange] = \
            df[df['exchange'] == exchange].reset_index(drop=True)
    return True
def execute_update_patch(self, patch: DataUtility.Patch) -> DataUtility.RESULT_CODE:
    logger.info('FinanceData.execute_update_patch(' + str(patch) + ')')

    if not self.is_data_support(patch.tags):
        logger.info('FinanceData.execute_update_patch() - Data is not supported.')
        return DataUtility.RESULT_NOT_SUPPORTED

    report_type = patch.tags[0]
    save_list = self.__save_table.get(report_type)
    report_dict = self.__cached_data.get(report_type)
    if report_dict is None or save_list is None:
        # Should not reach here
        logger.error('Cannot get report dict for ' + report_type)
        return DataUtility.RESULT_FAILED

    stock_identity = normalize_stock_identity(patch.tags[1])
    df = self.__do_fetch_finance_data(report_type, stock_identity,
                                      patch.since, patch.until)
    if df is None or len(df) == 0:
        return DataUtility.RESULT_FAILED

    DatabaseEntry().get_alias_table().tell_names(list(df.columns))
    DatabaseEntry().get_alias_table().check_save()

    # df.set_index('period')
    codes = df['identity'].unique()
    for code in codes:
        new_df = df[df['identity'] == code]
        if new_df is None or len(new_df) == 0:
            continue
        if code in report_dict.keys() and report_dict[code] is not None:
            # Merge the freshly fetched rows into the cached frame for this code.
            old_df = report_dict[code]
            concated_df = concat_dataframe_row_by_index([old_df, new_df])
            report_dict[code] = concated_df
        else:
            report_dict[code] = new_df
        if code not in save_list:
            save_list.append(code)
    return DataUtility.RESULT_SUCCESSFUL
def analysis_black_list(securities: str, data_hub: DataHubEntry,
                        database: DatabaseEntry, context: AnalysisContext) -> AnalysisResult:
    nop(data_hub)

    if context.cache.get('black_table', None) is None:
        context.cache['black_table'] = database.get_black_table().get_name_table()
    black_table = context.cache.get('black_table', None)

    df_slice = black_table[black_table['name'] == securities]
    exclude = len(df_slice) > 0
    if exclude:
        reason = get_dataframe_slice_item(df_slice, 'reason', 0, '')
    else:
        reason = 'Not in the black list'
    return AnalysisResult(securities, not exclude, reason)
def __save_cached_data(self) -> bool:
    first = True
    result = True
    for exchange in self.__cached_data.keys():
        df = self.__cached_data[exchange]
        if df is None or len(df) == 0:
            continue
        # Replace the whole table on the first write, then append the remaining exchanges.
        if_exists = 'replace' if first else 'append'
        first = False
        if DatabaseEntry().get_utility_db().DataFrameToDB('TradeCalender', df, if_exists):
            self._update_time_record(['TradeCalender', exchange], df, 'trade_date')
        else:
            result = False
    return result
def on_std_name_updated(self, old_name: str, new_name: str):
    for table in TABLE_LIST:
        data_table = DatabaseEntry().get_finance_table(table)
        data_table.replace_key(old_name, new_name)
def on_std_name_removed(self, name: str):
    for table in TABLE_LIST:
        data_table = DatabaseEntry().get_finance_table(table)
        data_table.remove_key(name)
class ColumnTable:
    COLUMN_TABLE_FIELD = ['column_name', 'column_index']

    def __init__(self, table_name: str):
        self.__table_name = table_name
        self.__column_name_index_table = {}

    def Init(self, auto: bool) -> bool:
        if auto:
            if not self.LoadFromDB():
                print('Error: Load Column Table [' + self.__table_name + '] Fail!')
                return False
        return True

    def Reset(self):
        self.__column_name_index_table = {}

    def GetTableName(self) -> str:
        return self.__table_name

    def AddColumn(self, column_name: str) -> int:
        if column_name not in self.__column_name_index_table.keys():
            index = self.__assign_new_index()
            self.__column_name_index_table[column_name] = index
        else:
            index = self.__column_name_index_table[column_name]
        self.DumpToDB()
        return index

    def DelColumn(self, column_name: str):
        if column_name in self.__column_name_index_table.keys():
            del self.__column_name_index_table[column_name]
            self.DumpToDB()

    def UpdateColumn(self, column_name_old: str, column_name_new: str) -> bool:
        if column_name_new in self.__column_name_index_table.keys():
            return False
        if column_name_old not in self.__column_name_index_table.keys():
            return False
        index = self.__column_name_index_table[column_name_old]
        del self.__column_name_index_table[column_name_old]
        self.__column_name_index_table[column_name_new] = index
        return True

    def GetColumnIndex(self, column_name: str) -> int:
        return self.__column_name_index_table[column_name] \
            if column_name in self.__column_name_index_table else -1

    def ColumnsToIndex(self, column_names: [str]) -> [int]:
        return [self.AddColumn(column_name) for column_name in column_names]

    def GetColumnNameIndexTable(self) -> dict:
        return self.__column_name_index_table

    # --------------------------------------------------- Load/Save ---------------------------------------------------

    def LoadFromDB(self) -> bool:
        self.Reset()
        self.__column_name_index_table = DatabaseEntry().get_utility_db().DictFromDB(
            self.__table_name, ColumnTable.COLUMN_TABLE_FIELD)
        return True

    def DumpToDB(self) -> bool:
        return DatabaseEntry().get_utility_db().DictToDB(
            self.__table_name, self.__column_name_index_table, ColumnTable.COLUMN_TABLE_FIELD)

    def __assign_new_index(self) -> int:
        index = 0
        while index in self.__column_name_index_table.values():
            index += 1
        return index
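# A minimal usage sketch (not part of the class above): it reproduces the
# lowest-free-integer strategy of __assign_new_index(), so an index released by
# DelColumn() is recycled before a new one is handed out. The helper name and
# the sample dict are illustrative assumptions, not project code.
def _demo_assign_new_index(name_index_table: dict) -> int:
    index = 0
    while index in name_index_table.values():
        index += 1
    return index


if __name__ == '__main__':
    sample = {'open': 0, 'close': 1, 'high': 3}     # index 2 was freed earlier
    print(_demo_assign_new_index(sample))           # -> 2: the gap is reused first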
def get_update_record(self, tag1: str, tag2: str, tag3: str) -> []:
    return DatabaseEntry().get_utility_db().ListFromDB(
        UpdateTable.TABLE, UpdateTable.FIELD,
        "L1Tag = '%s' AND L2Tag = '%s' AND L3Tag = '%s'" % (tag1, tag2, tag3))
def delete_update_record(self, tag1: str, tag2: str, tag3: str):
    sql_delete = ("DELETE FROM %s WHERE L1Tag='%s' AND L2Tag='%s' AND L3Tag='%s';" %
                  (UpdateTable.TABLE, tag1, tag2, tag3))
    return DatabaseEntry().get_utility_db().QuickExecuteDML(sql_delete, True)
def dump_to_db(self, **kw) -> bool:
    if self.__calendar is None:
        return False
    return DatabaseEntry().get_utility_db().DataFrameToDB(
        'StockCalendar', self.__calendar.reset_index())
def __singleton_init(self):
    self.__update_table = UpdateTable()
    self.__securities_table = ItkvTable(DatabaseEntry().get_mongo_db_client(),
                                        'StockAnalysisSystem', 'SecuritiesData')
def __init__(self, database_entry: DatabaseEntry, collector_plugin: PluginManager):
    self.__finance_data = FinanceData(collector_plugin, database_entry.get_update_table())
    self.__trade_calendar = TradeCalendar(collector_plugin, database_entry.get_update_table())
    self.__securities_info = SecuritiesInfo(collector_plugin, database_entry.get_update_table())
    database_entry.get_alias_table().register_participant(self.__finance_data)
def test_update():
    data_center = __build_data_center()
    data_center.register_data_table(
        UniversalDataTable('test.entry1', DatabaseEntry(), 'test_db', 'test_table'))
    data_center.update_local_data('test.entry1', 'identity_test1')