def main():
    """Daily tick update: pull recent ticks for every code into InfluxDB."""
    # Load config and set up the configured logger plus DB clients.
    config = getConfig()
    logger = log.getLogger(config)
    logger.info(str(config))

    dbclient = SqliteClient(base=Base, url=config.get('db_url'))

    influxInfo = getInfluxDB(config)
    logger.info(str(influxInfo))
    influx = influxClient.getClient(dbinfo=influxInfo)
    # InfluxDB create_database is a no-op if the database already exists.
    influx.create_database(influxInfo.get('db'))

    # Update ticks for a window ending now.
    # NOTE(review): the window starts only 60 seconds in the past — confirm
    # this is intentional and not meant to be minutes/days.
    logger.info("Daily Update Ticks")
    start = datetime.now() - timedelta(seconds=60)
    end = datetime.now()
    try:
        codes = service.getAllCodes(dbclient)
        tickService.getTickAsync(
            codes=codes,
            dbinfo=influxInfo,
            start=start,
            end=end,
            multiplier=config.get('thread_multi'))
    except Exception as e:
        # Best-effort: log and exit cleanly so the scheduler can retry later.
        logger.warning("Failed Update Ticks for {}".format(str(e)))
    return
def main():
    """One-off backfill of daily ('D') K-lines for all codes since 1990."""
    from share.client.SqliteClient import SqliteClient
    from share.model.dao import Base
    import share.service as service
    import share.service.klineService as klineService
    from datetime import datetime
    from share.util.config import getConfig
    from share.util import log

    # Load config and set up the configured logger plus DB client.
    config = getConfig()
    logger = log.getLogger(config)
    logger.info(str(config))
    dbclient = SqliteClient(base=Base, url=config.get('db_url'))

    # Fetch daily K-lines for every known code, starting from the earliest
    # plausible market date. Index K-lines are handled by a separate job.
    codes = service.getAllCodes(dbclient)
    start = datetime(year=1990, month=1, day=1)
    klineService.getKLinesAsync(dbclient, codes=codes, ktype='D', start=start)
    return
def main():
    """One-off full refresh of reference data from 1991 to now."""
    from datetime import datetime
    from share.client.SqliteClient import SqliteClient
    from share.model.dao import Base
    import share.service.referenceService as service
    from share.util import log
    from share.util.config import getConfig

    # Load config and set up the configured logger plus DB client.
    config = getConfig()
    logger = log.getLogger(config)
    logger.info(str(config))
    dbclient = SqliteClient(base=Base, url=config.get('db_url'))

    # Pull all reference data over the full history window.
    service.getAll(con=dbclient,
                   start=datetime(year=1991, month=1, day=1),
                   end=datetime.now())
    return
def main():
    """Hourly box-office refresh."""
    from share.client.SqliteClient import SqliteClient
    from share.model.dao import Base
    from share.service import boxOfficeService
    from share.util.config import getConfig
    from share.util import log

    # Load config and set up the configured logger plus DB client.
    config = getConfig()
    logger = log.getLogger(config)
    logger.info(str(config))
    dbclient = SqliteClient(base=Base, url=config.get('db_url'))

    # Best-effort refresh: never let a scrape failure kill the scheduled job.
    logger.info("Daily Update BoxOffice")
    try:
        boxOfficeService.hourly(con=dbclient)
    except Exception as e:
        logger.warning("Failed Update BoxOffice {}".format(str(e)))
    return
def main():
    """Build a feature matrix of price-move extrema for HS300 codes and pickle it."""
    from share.client.SqliteClient import SqliteClient
    from share.model.dao import Base
    from share.util.config import getConfig
    from share.util import log

    # Load config and set up the configured logger plus DB/Influx clients.
    config = getConfig()
    logger = log.getLogger(config)
    logger.info(str(config))
    dbclient = SqliteClient(base=Base, url=config.get('db_url'))

    influxInfo = getInfluxDB(config)
    logger.info(str(influxInfo))
    influx = influxClient.getDataFrameClient(influxInfo)
    influx.create_database(influxInfo.get('db'))

    # Fixed analysis window for this experiment.
    start = datetime(year=2018, month=6, day=1, hour=0, minute=0, second=0)
    end = datetime(year=2019, month=1, day=8, hour=2, minute=10, second=0)

    # Code universe: the HS300 constituents snapshot of 2019-01-30.
    codesSql = '''
    SELECT DISTINCT code FROM `share-fvt`.CLASSIFIED_HS300S WHERE `date`='2019-01-30';
    '''
    codeDF = pd.read_sql_query(codesSql, dbclient.engine)
    codes = [row.loc['code'] for _, row in codeDF.iterrows()]

    # Column-wise concat aligned on the first frame's index.
    # pd.concat's join_axes argument was removed in pandas 1.0; reindexing on
    # the accumulated frame's index is the supported equivalent.
    xSet = None
    for code in codes:
        res = getDataXMax(code, influx, start, end)
        if xSet is None:
            xSet = res
        else:
            xSet = pd.concat([xSet, res], axis=1).reindex(xSet.index)

    xSet.to_pickle('./xSet.pickle')
def main():
    """Box-office maintenance: current-day refresh plus historical backfill since 2010-03."""
    import logging
    from datetime import datetime, timedelta
    from share.client.SqliteClient import SqliteClient
    from share.model.dao import Base
    import share.service.boxOfficeService as service
    from share.util import log
    from share.util.config import getConfig

    # Load config and set up logging / DB client.
    config = getConfig()
    logger = log.getLogger(config)
    logging.info(str(config))
    dbclient = SqliteClient(base=Base, url=config.get('db_url'))

    # Current-day refresh.
    service.daily(con=dbclient)
    service.updateDayBoxoffice(con=dbclient, date=datetime.now())
    service.updateRealtimeBoxoffice(con=dbclient)

    # Historical backfill: walk forward one day at a time from 2010-03-01.
    # Each day is best-effort — failures are logged and the walk continues.
    date = datetime(year=2010, month=3, day=1)
    while date.timestamp() < datetime.now().timestamp():
        logging.info("Update box office data of {}".format(date.strftime("%Y-%m-%d")))
        try:
            service.updateDayCinema(con=dbclient, date=date)
        except Exception:
            logging.warning("Failed Update updateDayCinema data of {}".format(
                date.strftime("%Y-%m-%d")))
        # Refresh the month-level aggregate once per month, on the 15th.
        if date.day == 15:
            try:
                service.updateMonthBoxoffice(con=dbclient, year=date.year, month=date.month)
            except Exception:
                logging.warning("Failed Update updateMonthBoxoffice data of {}".format(
                    date.strftime("%Y-%m-%d")))
        date = date + timedelta(days=1)
    return
def main():
    """One-off backfill of tick data for a single code ('000001') into InfluxDB."""
    import logging
    from datetime import datetime, timedelta
    from share.client.SqliteClient import SqliteClient
    from share.model.dao import Base
    from share.util.config import getConfig
    from share.util import log

    # Load config and set up logging / DB client.
    config = getConfig()
    logger = log.getLogger(config)
    logging.info(str(config))
    dbclient = SqliteClient(base=Base, url=config.get('db_url'))

    # NOTE(review): host and credentials are hard-coded dev defaults — move
    # them into config before running anywhere but local development.
    # InfluxDBClient, ts (tushare) and tick are assumed to be imported at
    # module level — TODO confirm.
    influxClient = InfluxDBClient('localhost', 8086, 'root', 'root', 'example')
    influxClient.create_database('shareDev')

    code = '000001'
    start = datetime(year=2018, month=3, day=1)
    end = datetime.now()

    current = start
    while current < end:
        # Skip weekends: no trading, so no ticks to fetch.
        if current.weekday() >= 5:
            current = current + timedelta(days=1)
            continue
        currentStr = current.strftime("%Y-%m-%d")
        logger.info("code: {}, date: {}".format(code, currentStr))
        ticks = ts.get_tick_data(code=code, date=currentStr, src='tt')
        if ticks is None:
            # No data for this day (holiday or source gap) — move on.
            logger.info("Skip")
            current = current + timedelta(days=1)
            continue
        points = [tick.rowToORM(row=row, code=code, currentStr=currentStr)
                  for _, row in ticks.iterrows()]
        influxClient.write_points(points=points, database='shareDev')
        current = current + timedelta(days=1)
    pass
def main():
    """One-off fetch of financial reports from 1985 Q3 onward."""
    from share.client.SqliteClient import SqliteClient
    from share.model.dao import Base
    import share.service.reportService as service
    from share.util import log
    from share.util.config import getConfig
    from share.util.dateModel import YearQuarter

    # Load config and set up the configured logger plus DB client.
    config = getConfig()
    logger = log.getLogger(config)
    logger.info(str(config))
    dbclient = SqliteClient(base=Base, url=config.get('db_url'))

    # Fetch every report starting from the earliest covered quarter.
    service.getReports(con=dbclient,
                       fromYearQuarter=YearQuarter(year=1985, quarter=3))
    return
def main():
    """Refresh all macro-economic datasets (GDP, rates, money supply, CPI/PPI, RRR)."""
    from share.client.SqliteClient import SqliteClient
    from share.model.dao import Base
    import share.service.basicInfoService as service
    import tushare as ts
    from share.util import log
    from share.util.config import getConfig

    import share.model.dao.macro.GdpQuarter as GdpQuarter
    import share.model.dao.macro.GdpYear as GdpYear
    import share.model.dao.macro.GdpContrib as GdpContrib
    import share.model.dao.macro.GdpFor as GdpFor
    import share.model.dao.macro.GdpPull as GdpPull
    import share.model.dao.macro.LoanRate as LoanRate
    import share.model.dao.macro.MoneySupply as MoneySupply
    import share.model.dao.macro.MoneySupplyBal as MoneySupplyBal
    import share.model.dao.macro.Ppi as Ppi
    import share.model.dao.macro.RRR as RRR
    import share.model.dao.macro.Cpi as Cpi
    import share.model.dao.macro.DepositRate as DepositRate

    # Load config and set up the configured logger plus DB client.
    config = getConfig()
    logger = log.getLogger(config)
    logger.info(str(config))
    dbclient = SqliteClient(base=Base, url=config.get('db_url'))

    # Each macro table pairs a DAO package with its tushare fetch function;
    # all fetches run with clean=False (append, don't rebuild).
    tasks = [
        (GdpQuarter, ts.get_gdp_quarter),
        (GdpYear, ts.get_gdp_year),
        (GdpContrib, ts.get_gdp_contrib),
        (GdpFor, ts.get_gdp_for),
        (GdpPull, ts.get_gdp_pull),
        (LoanRate, ts.get_loan_rate),
        (MoneySupply, ts.get_money_supply),
        (MoneySupplyBal, ts.get_money_supply_bal),
        (Ppi, ts.get_ppi),
        (RRR, ts.get_rrr),
        (Cpi, ts.get_cpi),
        (DepositRate, ts.get_deposit_rate),
    ]
    for package, fun in tasks:
        service.getBasicInfo(con=dbclient, package=package, fun=fun, clean=False)
    return
def main():
    """Daily pipeline: reference, classified, box office, macro, reports, K-lines."""
    import logging
    from share.client.SqliteClient import SqliteClient
    from share.model.dao import Base
    import share.service as service
    import share.service.klineService as getKlines
    from datetime import timedelta
    from datetime import datetime
    from share.util.config import getConfig
    from share.util import log

    import share.service.referenceService as referenceService
    from share.service import classifiedService
    from share.service import boxOfficeService
    from share.service import macroService
    from share.service import reportService

    def _run(title, fail_fmt, task):
        # Log the stage title, run it, and log (never raise) on failure so
        # one broken stage cannot stop the rest of the pipeline.
        logging.info(title)
        try:
            task()
        except Exception as e:
            logging.warning(fail_fmt.format(str(e)))

    # Load config and set up logging / DB client.
    config = getConfig()
    logger = log.getLogger(config)
    logging.info(str(config))
    dbclient = SqliteClient(base=Base, url=config.get('db_url'))

    _run("Daily Update Reference", "Failed Daily Update Reference {}",
         lambda: referenceService.daily(con=dbclient))
    _run("Daily Update classified", "Failed Update classified {}",
         lambda: classifiedService.daily(con=dbclient))
    _run("Daily Update BoxOffice", "Failed Update BoxOffice {}",
         lambda: boxOfficeService.daily(con=dbclient))
    _run("Daily Update macro", "Failed Daily Update macro {}",
         lambda: macroService.daily(con=dbclient))
    _run("Daily Update reports", "Failed Update reports {}",
         lambda: reportService.daily(con=dbclient))

    # K-line refresh window: the last `start_days_r` days (positional arg to
    # timedelta is days).
    start = datetime.now() - timedelta(config.get('start_days_r'))

    def _index_klines():
        # Index K-lines use the index code universe.
        codes = service.getAllIndexCodes()
        getKlines.getKLinesAsync(
            dbClient=dbclient, codes=codes, ktype='D', start=start,
            index=True, multiplier=config.get('thread_multi'))

    def _stock_klines():
        # Stock K-lines use the full code universe from the DB.
        codes = service.getAllCodes(dbclient)
        getKlines.getKLinesAsync(
            codes=codes, dbClient=dbclient, ktype='D', start=start,
            multiplier=config.get('thread_multi'))

    _run("Daily Update Index Klines", "Failed Update index Klines {}",
         _index_klines)
    _run("Daily Update Klines", "Failed Update Klines for {}",
         _stock_klines)

    logging.info("All tasks finished, big brother is watching.")
    return
def main():
    """Refresh all stock classification tables (HS300, SZ50, ZZ500, ST, GEM, SME, area, concept, industry)."""
    from share.client.SqliteClient import SqliteClient
    from share.model.dao import Base
    import share.service.basicInfoService as service
    import tushare as ts
    from share.util import log
    from share.util.config import getConfig

    import share.model.dao.classified.ClassifiedHS300S as ClassifiedHS300S
    import share.model.dao.classified.ClassifiedSZ50s as ClassifiedSZ50s
    import share.model.dao.classified.ClassifiedZZ500S as ClassifiedZZ500S
    import share.model.dao.classified.ClassifiedSt as ClassifiedSt
    import share.model.dao.classified.ClassifiedGem as ClassifiedGem
    import share.model.dao.classified.ClassifiedSme as ClassifiedSme
    import share.model.dao.classified.ClassifiedArea as ClassifiedArea
    import share.model.dao.classified.ClassifiedConcept as ClassifiedConcept
    import share.model.dao.classified.ClassifiedIndustry as ClassifiedIndustry

    # Load config and set up the configured logger plus DB client.
    config = getConfig()
    logger = log.getLogger(config)
    logger.info(str(config))
    dbclient = SqliteClient(base=Base, url=config.get('db_url'))

    # Suspended/Terminated classification updates are intentionally disabled.
    # Each entry: (DAO package, tushare fetch function, extra kwargs).
    # Only HS300 passes clean=False explicitly; the others keep the
    # service's default behavior.
    tasks = [
        (ClassifiedHS300S, ts.get_hs300s, {'clean': False}),
        (ClassifiedSZ50s, ts.get_sz50s, {}),
        (ClassifiedZZ500S, ts.get_zz500s, {}),
        (ClassifiedSt, ts.get_st_classified, {}),
        (ClassifiedGem, ts.get_gem_classified, {}),
        (ClassifiedSme, ts.get_sme_classified, {}),
        (ClassifiedArea, ts.get_area_classified, {}),
        (ClassifiedConcept, ts.get_concept_classified, {}),
        (ClassifiedIndustry, ts.get_industry_classified, {}),
    ]
    for package, fun, extra in tasks:
        service.getBasicInfo(con=dbclient, package=package, fun=fun, **extra)
    return