def on_bar(self, bar: BarData):
    """"""
    self.bg.update_bar(bar)

    if self.get_engine_type() == EngineType.LIVE:
        self.current_time = datetime.now().time()
    else:
        self.current_time = time1(bar.datetime.hour, bar.datetime.minute)

    # Exit logic that starts flattening positions 5 minutes before the close.
    # It lacks fine-grained order control and needs further refinement.
    if self.current_time > time1(14, 56):
        # In the last 5 minutes before the close, first cancel all
        # still-active orders
        for buf_orderids in [
            self.buy_vt_orderids, self.sell_vt_orderids,
            self.short_vt_orderids, self.cover_vt_orderids
        ]:
            for vt_orderid in buf_orderids:
                self.cancel_order(vt_orderid)

        # Then close out the position
        if self.pos > 0:
            self.sell_vt_orderids = self.sell(
                bar.close_price - 5, abs(self.pos))
        elif self.pos < 0:
            self.cover_vt_orderids = self.cover(
                bar.close_price + 5, abs(self.pos))

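# A minimal sketch of one way to tighten the end-of-day exit above, assuming the
# vnpy CtaTemplate API (cancel_order/sell/cover returning lists of vt_orderids,
# get_pricetick as used elsewhere in these snippets). The helper name
# flatten_at_close and the aggressive_ticks parameter are hypothetical, not part
# of the original strategy: instead of a hard-coded 5-point offset, it reprices
# by a configurable number of priceticks and only re-sends once the old orders
# have been removed from the tracking buffers.
def flatten_at_close(self, bar: BarData, aggressive_ticks: int = 5):
    """Cancel working orders, then chase the close with limit orders."""
    for buf_orderids in [
        self.buy_vt_orderids, self.sell_vt_orderids,
        self.short_vt_orderids, self.cover_vt_orderids
    ]:
        for vt_orderid in buf_orderids:
            self.cancel_order(vt_orderid)

    offset = aggressive_ticks * self.get_pricetick()
    if self.pos > 0 and not self.sell_vt_orderids:
        self.sell_vt_orderids = self.sell(bar.close_price - offset, abs(self.pos))
    elif self.pos < 0 and not self.cover_vt_orderids:
        self.cover_vt_orderids = self.cover(bar.close_price + offset, abs(self.pos))
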
def __init__(self, cta_engine, strategy_name, vt_symbol, setting):
    """"""
    super().__init__(cta_engine, strategy_name, vt_symbol, setting)

    self.bg = XminBarGenerator(self.on_bar, self.interval, self.on_xmin_bar)
    self.am = ArrayManager()

    self.liq_price = 0
    self.on_bar_time = time1(0, 0)
    self.clearance_time = time1(14, 57)
    self.liq_time = time1(14, 59)

    trade_record_fields = [
        "vt_symbol", "orderid", "tradeid", "offset", "direction",
        "price", "volume", "datetime", "strategy", "strategy_name"
    ]
    self.trade_record_file = open(
        "D:/CTA/1-策略开发/1-开发中的策略/14-oscillator_drive/backtesting_trade_record.csv",
        "a", newline="")
    self.trade_record_file_writer = csv.DictWriter(
        self.trade_record_file, trade_record_fields)
    self.trade_record_file_writer.writeheader()

def __init__(self, cta_engine, strategy_name, vt_symbol, setting):
    """"""
    super().__init__(cta_engine, strategy_name, vt_symbol, setting)

    self.bg = XminBarGenerator(
        self.on_bar, 1, self.on_hour_bar, interval=Interval.HOUR)
    self.am = ArrayManager()

    self.liq_price = 0
    self.trading_size = 0
    self.on_bar_time = time1(0, 0)
    self.clearance_time = time1(14, 57)
    self.liq_time = time1(14, 59)
    self.day_clearance = False

    self.buy_svt_orderids = []
    self.sell_svt_orderids = []
    self.short_svt_orderids = []
    self.cover_svt_orderids = []
    self.sell_lvt_orderids = []
    self.cover_lvt_orderids = []

def __init__(self, cta_engine, strategy_name, vt_symbol, setting):
    """"""
    super().__init__(cta_engine, strategy_name, vt_symbol, setting)

    self.bg = XminBarGenerator(self.on_bar, self.interval, self.on_xmin_bar)
    self.am = ArrayManager()

    self.liq_price = 0
    self.trading_size = 0
    self.on_bar_time = time1(0, 0)
    self.clearance_time = time1(14, 57)  # time to start flattening positions
    self.liq_time = time1(14, 59)        # exchange settlement start time
    self.day_clearance = False

    self.buy_svt_orderids = []
    self.sell_svt_orderids = []
    self.short_svt_orderids = []
    self.cover_svt_orderids = []
    self.sell_lvt_orderids = []
    self.cover_lvt_orderids = []

    # 2021-05-10 saw an extreme market move with abnormally high profit that
    # is not generally representative, so that day is skipped in backtesting.
    extreme_date = "2021-05-10"
    extreme_date = time.strptime(extreme_date, "%Y-%m-%d")
    year, month, day = extreme_date[:3]
    self.extreme_date = datetime(year, month, day).date()

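# A minimal sketch of how self.extreme_date could be used in on_bar to skip the
# anomalous session during backtesting; this usage is an assumption, since the
# snippet above only constructs the date. The method name is hypothetical.
def on_bar_skipping_extreme_day(self, bar: BarData):
    """Hypothetical on_bar guard: ignore bars from the excluded session."""
    if bar.datetime.date() == self.extreme_date:
        return  # skip 2021-05-10 entirely
    self.bg.update_bar(bar)
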
def __init__(self, cta_engine, strategy_name, vt_symbol, setting):
    """"""
    super().__init__(cta_engine, strategy_name, vt_symbol, setting)

    self.bg = XminBarGenerator(self.on_bar, self.interval, self.on_xmin_bar)
    self.am = ArrayManager()

    self.liq_price = 0
    self.trading_size = 0
    self.on_bar_time = time1(0, 0)
    self.clearance_time = time1(14, 57)  # time to start flattening positions
    self.liq_time = time1(14, 59)        # exchange settlement start time
    self.day_clearance = False

    self.buy_svt_orderids = []
    self.sell_svt_orderids = []
    self.short_svt_orderids = []
    self.cover_svt_orderids = []
    self.buy_lvt_orderids = []
    self.sell_lvt_orderids = []
    self.short_lvt_orderids = []
    self.cover_lvt_orderids = []

    self.path = Path.cwd()
    self.trade_record_dict = {}
    self.tick_count = 0

    trade_record_fields = [
        "vt_symbol", "orderid", "tradeid", "offset", "direction",
        "price", "volume", "datetime", "strategy"
    ]
    self.trade_record_wb = openpyxl.load_workbook(
        self.path / "strategies" / "PaperAccount_reord_table.xlsx")
    self.trade_record_wb.iso_dates = True

    sheet_names = self.trade_record_wb.sheetnames
    if self.strategy_name not in sheet_names:
        self.trade_record_sheet = self.trade_record_wb.create_sheet(
            index=0, title=self.strategy_name)
    else:
        self.trade_record_sheet = self.trade_record_wb[self.strategy_name]

    # Write the header row once, then freeze it
    if not self.trade_record_sheet.cell(row=1, column=1).value:
        for i in range(1, len(trade_record_fields) + 1):
            column = get_column_letter(i)
            self.trade_record_sheet[column + "1"] = trade_record_fields[i - 1]
        self.trade_record_sheet.freeze_panes = "A2"
    self.trade_record_wb.save(
        self.path / "strategies" / "PaperAccount_reord_table.xlsx")

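# A minimal sketch of appending one trade to the worksheet initialized above,
# assuming vnpy's TradeData fields and openpyxl's Worksheet.append; the row
# ordering mirrors trade_record_fields, but the helper itself is hypothetical.
def record_trade_to_sheet(self, trade):
    """Hypothetical helper: append a TradeData row and save the workbook."""
    self.trade_record_sheet.append([
        trade.vt_symbol, trade.orderid, trade.tradeid,
        trade.offset.value, trade.direction.value,
        trade.price, trade.volume,
        trade.datetime.replace(tzinfo=None),  # openpyxl rejects tz-aware datetimes
        self.strategy_name
    ])
    self.trade_record_wb.save(
        self.path / "strategies" / "PaperAccount_reord_table.xlsx")
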
def update_bar_minute_window(self, bar: BarData) -> None:
    """"""
    # If not inited, create window bar object
    if not self.window_bar:
        dt = bar.datetime.replace(second=0, microsecond=0)
        self.window_bar = BarData(
            symbol=bar.symbol,
            exchange=bar.exchange,
            datetime=dt,
            gateway_name=bar.gateway_name,
            open_price=bar.open_price,
            high_price=bar.high_price,
            low_price=bar.low_price
        )
    # Otherwise, update high/low price into window bar
    else:
        self.window_bar.high_price = max(
            self.window_bar.high_price, bar.high_price)
        self.window_bar.low_price = min(
            self.window_bar.low_price, bar.low_price)

    # Update close price/volume into window bar
    self.window_bar.close_price = bar.close_price
    self.window_bar.volume += int(bar.volume)
    self.window_bar.open_interest = bar.open_interest

    # Check if window bar completed
    # if not (bar.datetime.minute + 1) % self.window:
    #     self.on_window_bar(self.window_bar)
    #     self.window_bar = None
    finished = False
    self.interval_count += 1

    if not self.interval_count % self.window:
        finished = True
        self.interval_count = 0
    elif bar.datetime.time() in [
        time1(10, 14), time1(11, 29), time1(14, 59), time1(22, 59)
    ]:
        # Force the window closed at the last minute of each trading session
        if bar.exchange in [Exchange.SHFE, Exchange.DCE, Exchange.CZCE]:
            finished = True
            self.interval_count = 0

    if finished:
        self.on_window_bar(self.window_bar)
        self.window_bar = None

    # Cache last bar object
    self.last_bar = bar

def on_tick(self, tick: TickData):
    """"""
    # Log the first 30 ticks received while the strategy is starting up
    self.count += 1
    if self.count <= 30:
        self.write_log(tick)

    # Filter out ticks received outside trading hours. Without this filter the
    # BarGenerator cannot aggregate bars (see its code for the reason) and the
    # strategy will never send orders.
    # if (
    #     (time1(9, 0) < tick.datetime.time() < time1(11, 31))
    #     or (time1(13, 30) < tick.datetime.time() < time1(15, 1))
    #     or (time1(21, 0) < tick.datetime.time() < time1(23, 1))
    # ):
    before_20 = datetime.now().time() < time1(20, 0)
    after_20 = datetime.now().time() >= time1(20, 0)

    morning_market = time1(9, 0) < tick.datetime.time() < time1(11, 31)
    afternoon_market = time1(13, 30) < tick.datetime.time() < time1(15, 1)
    night_market = time1(21, 0) < tick.datetime.time() < time1(23, 1)

    day_trade_time = before_20 and (morning_market or afternoon_market)
    night_trade_time = after_20 and night_market

    if day_trade_time or night_trade_time:
        self.bg.update_tick(tick)

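# The session filter above is easier to test in isolation. A minimal standalone
# sketch with the same boundaries as the code above; the helper name
# in_trading_hours is hypothetical.
from datetime import time as time1

def in_trading_hours(now, tick_time):
    """Return True if tick_time falls in a session consistent with wall-clock now."""
    before_20 = now < time1(20, 0)
    morning = time1(9, 0) < tick_time < time1(11, 31)
    afternoon = time1(13, 30) < tick_time < time1(15, 1)
    night = time1(21, 0) < tick_time < time1(23, 1)
    return (before_20 and (morning or afternoon)) or (not before_20 and night)

assert in_trading_hours(time1(10, 0), time1(10, 0))      # day session
assert in_trading_hours(time1(21, 30), time1(21, 30))    # night session
assert not in_trading_hours(time1(12, 0), time1(12, 0))  # lunch break
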
def on_order(self, order: OrderData):
    """"""
    if self.current_time > time1(14, 56):
        if order.status in (Status.ALLTRADED, Status.CANCELLED, Status.REJECTED):
            # Drop finished orders from whichever buffer tracks them. The
            # buffers hold vt_orderids, so check and remove the same id
            # (the original removed a nonexistent order.stop_orderid).
            for buf_orderids in [
                self.buy_vt_orderids, self.sell_vt_orderids,
                self.short_vt_orderids, self.cover_vt_orderids
            ]:
                if order.vt_orderid in buf_orderids:
                    buf_orderids.remove(order.vt_orderid)

def __init__(self, cta_engine, strategy_name, vt_symbol, setting):
    """"""
    super().__init__(cta_engine, strategy_name, vt_symbol, setting)

    self.bg = XminBarGenerator(self.on_bar, self.interval, self.on_xmin_bar)
    self.am = ArrayManager()

    self.liq_price = 0
    self.on_bar_time = time1(0, 0)
    self.clearance_time = time1(14, 57)  # time to start flattening positions
    self.liq_time = time1(14, 59)        # exchange settlement start time

    self.buy_svt_orderids = []
    self.sell_svt_orderids = []
    self.short_svt_orderids = []
    self.cover_svt_orderids = []
    self.sell_lvt_orderids = []
    self.cover_lvt_orderids = []

def __init__(self, cta_engine, strategy_name, vt_symbol, setting):
    """"""
    super().__init__(cta_engine, strategy_name, vt_symbol, setting)

    self.bg = XminBarGenerator(self.on_bar, self.interval, self.on_xmin_bar)
    self.am = ArrayManager()

    self.current_time = time1(0, 0)
    self.day_start = time1(8, 45)
    self.day_end = time1(14, 58)
    self.liq_time = time1(15, 0)
    self.night_start = time1(20, 45)
    self.night_end = time1(23, 0)

    trade_record_fields = [
        "vt_symbol", "orderid", "tradeid", "offset", "direction",
        "price", "volume", "datetime", "strategy", "strategy_name"
    ]
    self.trade_record_file = open(
        "D:/CTA/1-策略开发/1-开发中的策略/14-oscillator_drive/trade_record.csv",
        "a", newline="")
    self.trade_record_file_writer = csv.DictWriter(
        self.trade_record_file, trade_record_fields)
    self.trade_record_file_writer.writeheader()

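# A minimal sketch of how the DictWriter set up above could be fed from vnpy's
# on_trade callback; the field mapping is an assumption based on
# trade_record_fields, and the helper name is hypothetical.
def record_trade_to_csv(self, trade):
    """Hypothetical helper: write one TradeData as a CSV row and flush."""
    self.trade_record_file_writer.writerow({
        "vt_symbol": trade.vt_symbol,
        "orderid": trade.orderid,
        "tradeid": trade.tradeid,
        "offset": trade.offset.value,
        "direction": trade.direction.value,
        "price": trade.price,
        "volume": trade.volume,
        "datetime": trade.datetime,
        "strategy": self.__class__.__name__,
        "strategy_name": self.strategy_name,
    })
    self.trade_record_file.flush()  # keep the record even on abnormal exit
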
def __init__(self, strategy_engine: StrategyEngine, strategy_name: str,
             vt_symbols: List[str], setting: dict):
    """"""
    super().__init__(strategy_engine, strategy_name, vt_symbols, setting)

    self.singal_price = {}
    self.singal_direction = {}
    self.ams: Dict[str, ArrayManager] = {}
    self.bgs: Dict[str, XminBarGenerator] = {}

    self.liq_price = 0
    self.trading_size = 1
    self.intra_trade_low = {}
    self.intra_trade_high = {}
    self.day_clearance = False
    self.liq_time = time1(14, 59)        # exchange settlement start time
    self.on_bar_time = time1(0, 0)
    self.clearance_time = time1(14, 57)  # time to start flattening positions

    # Initialize the per-symbol data containers
    for vt_symbol in self.vt_symbols:

        def on_bar(bar: BarData):
            """"""
            pass

        self.singal_price[vt_symbol] = 0
        self.singal_direction[vt_symbol] = ""
        self.intra_trade_low[vt_symbol] = 0
        self.intra_trade_high[vt_symbol] = 0
        self.ams[vt_symbol] = ArrayManager()
        self.bgs[vt_symbol] = BarGenerator(on_bar, self.interval, self.on_xmin_bar)

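# In the loop above, every BarGenerator calls the same self.on_xmin_bar, which
# must then recover the symbol from bar.vt_symbol. A common alternative (an
# assumption, not the original code) binds the symbol into the callback with
# functools.partial; the method names here are hypothetical.
from functools import partial

def _make_generators(self):
    for vt_symbol in self.vt_symbols:
        self.bgs[vt_symbol] = BarGenerator(
            lambda bar: None,                         # 1-minute bars unused here
            self.interval,
            partial(self.on_xmin_bar_for, vt_symbol)  # binds the symbol by value
        )

def on_xmin_bar_for(self, vt_symbol: str, bar: BarData):
    """Hypothetical per-symbol window-bar handler."""
    self.ams[vt_symbol].update_bar(bar)
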
def __init__(self, cta_engine, strategy_name, vt_symbol, setting):
    """"""
    super().__init__(cta_engine, strategy_name, vt_symbol, setting)

    self.bg = XminBarGenerator(self.on_bar, self.interval, self.on_xmin_bar)
    self.am = ArrayManager()

    self.liq_price = 0
    self.trading_size = 0
    self.on_bar_time = time1(0, 0)
    self.clearance_time = time1(14, 57)  # time to start flattening positions
    self.liq_time = time1(14, 59)        # exchange settlement start time
    self.day_clearance = False

    self.buy_svt_orderids = []
    self.sell_svt_orderids = []
    self.short_svt_orderids = []
    self.cover_svt_orderids = []
    self.sell_lvt_orderids = []
    self.cover_lvt_orderids = []

    self.trade_record_dict = {}
    self.symbol, exchange_str = vt_symbol.split(".")
    self.exchange = Exchange(exchange_str)
    self.hndatabase = HNDataBase()
    self.hndatabase.init_trade_database(
        symbol=self.symbol,
        exchange=self.exchange,
        strategy_name="Oscillator1",
        enginetype=EngineType.BACKTESTING.value)

def __init__(self, cta_engine, strategy_name, vt_symbol, setting):
    """"""
    super().__init__(cta_engine, strategy_name, vt_symbol, setting)

    self.bg = XminBarGenerator(self.on_bar, self.interval, self.on_xmin_bar)
    self.am = ArrayManager()
    self.pricetick = self.get_pricetick()

    self.buy_vt_orderids = []
    self.sell_vt_orderids = []
    self.short_vt_orderids = []
    self.cover_vt_orderids = []
    self.current_time = time1(0, 0)

def on_tick(self, tick: TickData):
    """"""
    # Log the first 30 ticks received while the strategy is starting up
    if self.tick_count <= 30:
        self.tick_count += 1
        self.write_log(tick)

    before_20 = datetime.now().time() < time1(20, 0)
    after_20 = datetime.now().time() >= time1(20, 0)

    morning_market = time1(9, 0) < tick.datetime.time() < time1(11, 31)
    afternoon_market = time1(13, 30) < tick.datetime.time() < time1(15, 1)
    night_market = time1(21, 0) < tick.datetime.time() < time1(23, 1)

    day_trade_time = before_20 and (morning_market or afternoon_market)
    night_trade_time = after_20 and night_market

    if day_trade_time or night_trade_time:
        self.bg.update_tick(tick)

def on_tick(self, tick: TickData):
    """"""
    # Log the first 10 ticks received while the strategy is starting up
    self.count += 1
    if self.count <= 10:
        self.write_log(tick)

    # Filter out ticks received outside trading hours
    if ((time1(9, 0) <= tick.datetime.time() <= time1(11, 30))
            or (time1(13, 30) <= tick.datetime.time() <= time1(15, 0))
            or (time1(21, 0) <= tick.datetime.time() <= time1(23, 0))):
        self.bg.update_tick(tick)

def update_bar(self, bar: BarData) -> None:
    """
    Update 1 minute bar into generator
    """
    # If not inited, create window bar object
    if not self.window_bar:
        # Generate timestamp for bar data
        if self.interval == Interval.MINUTE:
            dt = bar.datetime.replace(second=0, microsecond=0)
        else:
            dt = bar.datetime.replace(minute=0, second=0, microsecond=0)

        self.window_bar = BarData(
            symbol=bar.symbol,
            exchange=bar.exchange,
            datetime=dt,
            gateway_name=bar.gateway_name,
            open_price=bar.open_price,
            high_price=bar.high_price,
            low_price=bar.low_price
        )
    # Otherwise, update high/low price into window bar
    else:
        self.window_bar.high_price = max(
            self.window_bar.high_price, bar.high_price)
        self.window_bar.low_price = min(
            self.window_bar.low_price, bar.low_price)

    # Update close price/volume into window bar
    self.window_bar.close_price = bar.close_price
    self.window_bar.volume += int(bar.volume)
    self.window_bar.open_interest = bar.open_interest

    # Check if window bar completed
    finished = False

    if self.interval == Interval.MINUTE:
        # x-minute bar
        # if not (bar.datetime.minute + 1) % self.window:
        #     finished = True
        self.interval_count += 1

        if not self.interval_count % self.window:
            finished = True
            self.interval_count = 0
        elif bar.datetime.time() in [
            time1(10, 14), time1(11, 29), time1(14, 59), time1(22, 59)
        ]:
            if bar.exchange in [Exchange.SHFE, Exchange.DCE, Exchange.CZCE]:
                finished = True
                self.interval_count = 0

    elif self.interval == Interval.HOUR:
        if self.last_bar:
            new_hour = bar.datetime.hour != self.last_bar.datetime.hour
            last_minute = bar.datetime.minute == 59
            not_first = self.window_bar.datetime != bar.datetime

            # To filter duplicate hour bar finished condition
            if (new_hour or last_minute) and not_first:
                # 1-hour bar
                if self.window == 1:
                    finished = True
                # x-hour bar
                else:
                    self.interval_count += 1

                    if not self.interval_count % self.window:
                        finished = True
                        self.interval_count = 0

    if finished:
        self.on_window_bar(self.window_bar)
        self.window_bar = None

    # Cache last bar object
    self.last_bar = bar

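# A minimal standalone sketch of the minute-window rule above: an x-minute
# window is forced closed at the last minute of a SHFE/DCE/CZCE session even if
# fewer than `window` one-minute bars have accumulated. Names here are
# hypothetical; only the boundary times and exchanges come from the code above.
from datetime import time as time1

SESSION_LAST_MINUTES = [time1(10, 14), time1(11, 29), time1(14, 59), time1(22, 59)]

def window_finished(interval_count, window, bar_time, exchange_is_cn_futures):
    """Return (finished, new_interval_count) for one incoming 1-minute bar."""
    interval_count += 1
    if interval_count % window == 0:
        return True, 0
    if bar_time in SESSION_LAST_MINUTES and exchange_is_cn_futures:
        return True, 0
    return False, interval_count

# A 15-minute window that has only accumulated 10 bars still closes at 10:14:
finished, _ = window_finished(9, 15, time1(10, 14), True)
assert finished
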
master_MTL_path = master_path[:-6] + 'MTL.txt'
slave_MTL_path = slave_path[:-6] + 'MTL.txt'

master_time = str.split(
    str.split(
        runCmd('fgrep "SCENE_CENTER_TIME" ' + master_MTL_path))[2][1:-2],
    ':')
slave_time = str.split(
    str.split(
        runCmd('fgrep "SCENE_CENTER_TIME" ' + slave_MTL_path))[2][1:-2],
    ':')

from datetime import time as time1
master_time = time1(int(master_time[0]), int(master_time[1]),
                    int(float(master_time[2])))
slave_time = time1(int(slave_time[0]), int(slave_time[1]),
                   int(float(slave_time[2])))

import netcdf_output as no

version = '1.0.7'
pair_type = 'optical'
detection_method = 'feature'
coordinates = 'map'

# out_nc_filename = 'Jakobshavn_opt.nc'
out_nc_filename = master_filename[0:-4] + '_' + slave_filename[0:-4] + '.nc'
out_nc_filename = './' + out_nc_filename

roi_valid_percentage = int(
    round(np.sum(CHIPSIZEX != 0) / np.sum(SEARCHLIMITX != 0) * 1000.0)) / 1000

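# A minimal standalone sketch of the SCENE_CENTER_TIME parsing above, without
# shelling out to fgrep: given a Landsat MTL line such as
#   SCENE_CENTER_TIME = "14:32:47.4571980Z"
# split on ':' and truncate the fractional seconds, as the code above does.
# The helper name and the sample line are illustrative.
from datetime import time as time1

def parse_scene_center_time(mtl_line):
    value = mtl_line.split()[2][1:-2]  # strip quotes and the trailing Z
    h, m, s = value.split(':')
    return time1(int(h), int(m), int(float(s)))

assert parse_scene_center_time(
    'SCENE_CENTER_TIME = "14:32:47.4571980Z"') == time1(14, 32, 47)
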
def on_xmin_bar(self, bar: BarData):
    """"""
    am = self.am
    am.update_bar(bar)
    if not am.inited:
        return

    DAY_START = time1(8, 45)
    LIQ_TIME = time1(14, 56)
    NIGHT_START = time1(20, 45)
    NIGHT_END = time1(23, 0)

    if ((self.current_time >= DAY_START and self.current_time <= LIQ_TIME)
            or (self.current_time >= NIGHT_START
                and self.current_time <= NIGHT_END)):
        self.boll_up, self.boll_down = am.boll(self.boll_window, self.boll_dev)
        self.ultosc = am.ultosc()
        self.buy_dis = 50 + self.dis_open
        self.sell_dis = 50 - self.dis_open
        self.atr_value = am.atr(self.atr_window)

        if self.pos == 0:
            self.trading_size = max(int(self.risk_level / self.atr_value), 1)
            if self.trading_size >= 2:
                self.trading_size = 2

            self.intra_trade_high = bar.high_price
            self.intra_trade_low = bar.low_price

            if self.ultosc > self.buy_dis:
                if not self.buy_vt_orderids:
                    self.buy_vt_orderids = self.buy(
                        self.boll_up, self.trading_size, True)
                else:
                    for vt_orderid in self.buy_vt_orderids:
                        self.cancel_order(vt_orderid)
            elif self.ultosc < self.sell_dis:
                if not self.short_vt_orderids:
                    self.short_vt_orderids = self.short(
                        self.boll_down, self.trading_size, True)
                else:
                    for vt_orderid in self.short_vt_orderids:
                        self.cancel_order(vt_orderid)

        elif self.pos > 0:
            self.intra_trade_high = max(self.intra_trade_high, bar.high_price)
            self.intra_trade_low = bar.low_price
            self.long_stop = (self.intra_trade_high
                              - self.atr_value * self.sl_multiplier)

            if not self.sell_vt_orderids:
                self.sell_vt_orderids = self.sell(
                    self.long_stop, abs(self.pos), True)
            else:
                for vt_orderid in self.sell_vt_orderids:
                    self.cancel_order(vt_orderid)

        else:
            self.intra_trade_high = bar.high_price
            self.intra_trade_low = min(self.intra_trade_low, bar.low_price)
            self.short_stop = (self.intra_trade_low
                               + self.atr_value * self.sl_multiplier)

            if not self.cover_vt_orderids:
                self.cover_vt_orderids = self.cover(
                    self.short_stop, abs(self.pos), True)
            else:
                for vt_orderid in self.cover_vt_orderids:
                    self.cancel_order(vt_orderid)

    self.put_event()

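# A minimal sketch of the sizing rule above: volume scales inversely with ATR,
# floored at 1 lot and capped at 2. The helper name and numbers are illustrative.
def atr_position_size(risk_level, atr_value, cap=2):
    return min(max(int(risk_level / atr_value), 1), cap)

assert atr_position_size(100, 30) == 2   # int(100/30) = 3, capped at 2
assert atr_position_size(100, 400) == 1  # int(100/400) = 0, floored at 1
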
def generateAutoriftProduct(indir_m, indir_s, grid_location, init_offset,
                            search_range, chip_size_min, chip_size_max,
                            offset2vx, offset2vy, stable_surface_mask,
                            optical_flag, nc_sensor, mpflag,
                            geogrid_run_info=None):

    import numpy as np
    import time

    # from components.contrib.geo_autoRIFT.autoRIFT import __version__ as version
    from autoRIFT import __version__ as version

    if optical_flag == 1:
        data_m, data_s = loadProductOptical(indir_m, indir_s)
        # test with lena/Venus image
        # import scipy.io as sio
        # conts = sio.loadmat(indir_m)
        # data_m = conts['I']
        # data_s = conts['I1']
    else:
        data_m = loadProduct(indir_m)
        data_s = loadProduct(indir_s)

    xGrid = None
    yGrid = None
    Dx0 = None
    Dy0 = None
    SRx0 = None
    SRy0 = None
    CSMINx0 = None
    CSMINy0 = None
    CSMAXx0 = None
    CSMAXy0 = None
    SSM = None
    noDataMask = None
    nodata = None

    if grid_location is not None:
        ds = gdal.Open(grid_location)
        tran = ds.GetGeoTransform()
        proj = ds.GetProjection()
        srs = ds.GetSpatialRef()
        band = ds.GetRasterBand(1)
        nodata = band.GetNoDataValue()
        xGrid = band.ReadAsArray()
        noDataMask = (xGrid == nodata)
        band = ds.GetRasterBand(2)
        yGrid = band.ReadAsArray()
        band = None
        ds = None

    if init_offset is not None:
        ds = gdal.Open(init_offset)
        band = ds.GetRasterBand(1)
        Dx0 = band.ReadAsArray()
        band = ds.GetRasterBand(2)
        Dy0 = band.ReadAsArray()
        band = None
        ds = None

    if search_range is not None:
        ds = gdal.Open(search_range)
        band = ds.GetRasterBand(1)
        SRx0 = band.ReadAsArray()
        band = ds.GetRasterBand(2)
        SRy0 = band.ReadAsArray()
        band = None
        ds = None

    if chip_size_min is not None:
        ds = gdal.Open(chip_size_min)
        band = ds.GetRasterBand(1)
        CSMINx0 = band.ReadAsArray()
        band = ds.GetRasterBand(2)
        CSMINy0 = band.ReadAsArray()
        band = None
        ds = None

    if chip_size_max is not None:
        ds = gdal.Open(chip_size_max)
        band = ds.GetRasterBand(1)
        CSMAXx0 = band.ReadAsArray()
        band = ds.GetRasterBand(2)
        CSMAXy0 = band.ReadAsArray()
        band = None
        ds = None

    if stable_surface_mask is not None:
        ds = gdal.Open(stable_surface_mask)
        band = ds.GetRasterBand(1)
        SSM = band.ReadAsArray()
        SSM = SSM.astype('bool')
        band = None
        ds = None

    (Dx, Dy, InterpMask, ChipSizeX, ScaleChipSizeY, SearchLimitX, SearchLimitY,
     origSize, noDataMask) = runAutorift(
        data_m, data_s, xGrid, yGrid, Dx0, Dy0, SRx0, SRy0,
        CSMINx0, CSMINy0, CSMAXx0, CSMAXy0, noDataMask,
        optical_flag, nodata, mpflag,
        geogrid_run_info=geogrid_run_info,
    )

    if optical_flag == 0:
        Dy = -Dy

    DX = np.zeros(origSize, dtype=np.float32) * np.nan
    DY = np.zeros(origSize, dtype=np.float32) * np.nan
    INTERPMASK = np.zeros(origSize, dtype=np.float32)
    CHIPSIZEX = np.zeros(origSize, dtype=np.float32)
    SEARCHLIMITX = np.zeros(origSize, dtype=np.float32)
    SEARCHLIMITY = np.zeros(origSize, dtype=np.float32)

    DX[0:Dx.shape[0], 0:Dx.shape[1]] = Dx
    DY[0:Dy.shape[0], 0:Dy.shape[1]] = Dy
    INTERPMASK[0:InterpMask.shape[0], 0:InterpMask.shape[1]] = InterpMask
    CHIPSIZEX[0:ChipSizeX.shape[0], 0:ChipSizeX.shape[1]] = ChipSizeX
    SEARCHLIMITX[0:SearchLimitX.shape[0], 0:SearchLimitX.shape[1]] = SearchLimitX
    SEARCHLIMITY[0:SearchLimitY.shape[0], 0:SearchLimitY.shape[1]] = SearchLimitY

    DX[noDataMask] = np.nan
    DY[noDataMask] = np.nan
    INTERPMASK[noDataMask] = 0
    CHIPSIZEX[noDataMask] = 0
    SEARCHLIMITX[noDataMask] = 0
    SEARCHLIMITY[noDataMask] = 0
    if SSM is not None:
        SSM[noDataMask] = False

    import scipy.io as sio
    sio.savemat('offset.mat', {
        'Dx': DX,
        'Dy': DY,
        'InterpMask': INTERPMASK,
        'ChipSizeX': CHIPSIZEX
    })

    # ##################### Uncomment for debug mode
    # sio.savemat('debug.mat', {'Dx': DX, 'Dy': DY, 'InterpMask': INTERPMASK,
    #                           'ChipSizeX': CHIPSIZEX,
    #                           'ScaleChipSizeY': ScaleChipSizeY,
    #                           'SearchLimitX': SEARCHLIMITX,
    #                           'SearchLimitY': SEARCHLIMITY})
    # conts = sio.loadmat('debug.mat')
    # DX = conts['Dx']
    # DY = conts['Dy']
    # INTERPMASK = conts['InterpMask']
    # CHIPSIZEX = conts['ChipSizeX']
    # ScaleChipSizeY = conts['ScaleChipSizeY']
    # SEARCHLIMITX = conts['SearchLimitX']
    # SEARCHLIMITY = conts['SearchLimitY']
    # #####################

    netcdf_file = None
    if grid_location is not None:

        t1 = time.time()
        print("Write Outputs Start!!!")

        # Create the GeoTiff
        driver = gdal.GetDriverByName('GTiff')

        outRaster = driver.Create("offset.tif", int(xGrid.shape[1]),
                                  int(xGrid.shape[0]), 4, gdal.GDT_Float32)
        outRaster.SetGeoTransform(tran)
        outRaster.SetProjection(proj)
        outband = outRaster.GetRasterBand(1)
        outband.WriteArray(DX)
        outband.FlushCache()
        outband = outRaster.GetRasterBand(2)
        outband.WriteArray(DY)
        outband.FlushCache()
        outband = outRaster.GetRasterBand(3)
        outband.WriteArray(INTERPMASK)
        outband.FlushCache()
        outband = outRaster.GetRasterBand(4)
        outband.WriteArray(CHIPSIZEX)
        outband.FlushCache()

        if offset2vx is not None:
            ds = gdal.Open(offset2vx)
            band = ds.GetRasterBand(1)
            offset2vx_1 = band.ReadAsArray()
            band = ds.GetRasterBand(2)
            offset2vx_2 = band.ReadAsArray()
            band = None
            ds = None

            ds = gdal.Open(offset2vy)
            band = ds.GetRasterBand(1)
            offset2vy_1 = band.ReadAsArray()
            band = ds.GetRasterBand(2)
            offset2vy_2 = band.ReadAsArray()
            band = None
            ds = None

            VX = offset2vx_1 * DX + offset2vx_2 * DY
            VY = offset2vy_1 * DX + offset2vy_2 * DY
            VX = VX.astype(np.float32)
            VY = VY.astype(np.float32)

            # ########### write velocity output in Geotiff format
            outRaster = driver.Create("velocity.tif", int(xGrid.shape[1]),
                                      int(xGrid.shape[0]), 2, gdal.GDT_Float32)
            outRaster.SetGeoTransform(tran)
            outRaster.SetProjection(proj)
            outband = outRaster.GetRasterBand(1)
            outband.WriteArray(VX)
            outband.FlushCache()
            outband = outRaster.GetRasterBand(2)
            outband.WriteArray(VY)
            outband.FlushCache()

            # ########### prepare for netCDF packaging
            if nc_sensor is not None:

                if geogrid_run_info is None:
                    vxrefname = str.split(
                        runCmd('fgrep "Velocities:" testGeogrid.txt'))[1]
                    vyrefname = str.split(
                        runCmd('fgrep "Velocities:" testGeogrid.txt'))[2]
                    sxname = str.split(
                        runCmd('fgrep "Slopes:" testGeogrid.txt'))[1][:-4] + "s.tif"
                    syname = str.split(
                        runCmd('fgrep "Slopes:" testGeogrid.txt'))[2][:-4] + "s.tif"
                    maskname = str.split(
                        runCmd('fgrep "Slopes:" testGeogrid.txt'))[2][:-8] + "sp.tif"
                    xoff = int(str.split(runCmd(
                        'fgrep "Origin index (in DEM) of geogrid:" testGeogrid.txt'))[6])
                    yoff = int(str.split(runCmd(
                        'fgrep "Origin index (in DEM) of geogrid:" testGeogrid.txt'))[7])
                    xcount = int(str.split(runCmd(
                        'fgrep "Dimensions of geogrid:" testGeogrid.txt'))[3])
                    ycount = int(str.split(runCmd(
                        'fgrep "Dimensions of geogrid:" testGeogrid.txt'))[5])
                else:
                    vxrefname = geogrid_run_info['vxname']
                    vyrefname = geogrid_run_info['vyname']
                    sxname = geogrid_run_info['sxname']
                    syname = geogrid_run_info['syname']
                    maskname = geogrid_run_info['maskname']
                    xoff = geogrid_run_info['xoff']
                    yoff = geogrid_run_info['yoff']
                    xcount = geogrid_run_info['xcount']
                    ycount = geogrid_run_info['ycount']

                ds = gdal.Open(vxrefname)
                band = ds.GetRasterBand(1)
                VXref = band.ReadAsArray(xoff, yoff, xcount, ycount)
                ds = None
                band = None

                ds = gdal.Open(vyrefname)
                band = ds.GetRasterBand(1)
                VYref = band.ReadAsArray(xoff, yoff, xcount, ycount)
                ds = None
                band = None

                ds = gdal.Open(sxname)
                band = ds.GetRasterBand(1)
                SX = band.ReadAsArray(xoff, yoff, xcount, ycount)
                ds = None
                band = None

                ds = gdal.Open(syname)
                band = ds.GetRasterBand(1)
                SY = band.ReadAsArray(xoff, yoff, xcount, ycount)
                ds = None
                band = None

                ds = gdal.Open(maskname)
                band = ds.GetRasterBand(1)
                MM = band.ReadAsArray(xoff, yoff, xcount, ycount)
                ds = None
                band = None

                DXref = offset2vy_2 / (offset2vx_1 * offset2vy_2
                                       - offset2vx_2 * offset2vy_1) * VXref \
                    - offset2vx_2 / (offset2vx_1 * offset2vy_2
                                     - offset2vx_2 * offset2vy_1) * VYref
                DYref = offset2vx_1 / (offset2vx_1 * offset2vy_2
                                       - offset2vx_2 * offset2vy_1) * VYref \
                    - offset2vy_1 / (offset2vx_1 * offset2vy_2
                                     - offset2vx_2 * offset2vy_1) * VXref

                stable_count = np.sum(SSM & np.logical_not(np.isnan(DX))
                                      & (DX - DXref > -5) & (DX - DXref < 5)
                                      & (DY - DYref > -5) & (DY - DYref < 5))

                if stable_count == 0:
                    stable_shift_applied = 0
                else:
                    stable_shift_applied = 1

                if stable_shift_applied == 1:
                    temp = DX.copy() - DXref.copy()
                    temp[np.logical_not(SSM)] = np.nan
                    dx_mean_shift = np.median(temp[(temp > -5) & (temp < 5)])
                    DX = DX - dx_mean_shift

                    temp = DY.copy() - DYref.copy()
                    temp[np.logical_not(SSM)] = np.nan
                    dy_mean_shift = np.median(temp[(temp > -5) & (temp < 5)])
                    DY = DY - dy_mean_shift
                else:
                    dx_mean_shift = 0.0
                    dy_mean_shift = 0.0

                VX = offset2vx_1 * DX + offset2vx_2 * DY
                VY = offset2vy_1 * DX + offset2vy_2 * DY
                VX = VX.astype(np.float32)
                VY = VY.astype(np.float32)

            # ##################################################################
            # ########### netCDF packaging for Sentinel and Landsat dataset;
            # ########### can add other sensor format as well
            if nc_sensor == "S":
                if geogrid_run_info is None:
                    chipsizex0 = float(str.split(runCmd(
                        'fgrep "Smallest Allowable Chip Size in m:" testGeogrid.txt'))[-1])
                    rangePixelSize = float(str.split(runCmd(
                        'fgrep "Ground range pixel size:" testGeogrid.txt'))[4])
                    azimuthPixelSize = float(str.split(runCmd(
                        'fgrep "Azimuth pixel size:" testGeogrid.txt'))[3])
                    dt = float(str.split(runCmd(
                        'fgrep "Repeat Time:" testGeogrid.txt'))[2])
                    epsg = float(str.split(runCmd(
                        'fgrep "EPSG:" testGeogrid.txt'))[1])
                    # print(str(rangePixelSize) + " " + str(azimuthPixelSize))
                else:
                    chipsizex0 = geogrid_run_info['chipsizex0']
                    rangePixelSize = geogrid_run_info['XPixelSize']
                    azimuthPixelSize = geogrid_run_info['YPixelSize']
                    dt = geogrid_run_info['dt']
                    epsg = geogrid_run_info['epsg']

                runCmd('topsinsar_filename.py')
                # import scipy.io as sio
                conts = sio.loadmat('topsinsar_filename.mat')
                master_filename = conts['master_filename'][0]
                slave_filename = conts['slave_filename'][0]
                master_dt = conts['master_dt'][0]
                slave_dt = conts['slave_dt'][0]
                master_split = str.split(master_filename, '_')
                slave_split = str.split(slave_filename, '_')

                import netcdf_output as no
                pair_type = 'radar'
                detection_method = 'feature'
                coordinates = 'radar'
                roi_valid_percentage = int(round(
                    np.sum(CHIPSIZEX != 0) / np.sum(SEARCHLIMITX != 0) * 1000.0)) / 1000
                # out_nc_filename = 'Jakobshavn.nc'
                PPP = roi_valid_percentage * 100
                out_nc_filename = f"./{master_filename[0:-4]}_X_{slave_filename[0:-4]}" \
                                  f"_G{chipsizex0:04.0f}V02_P{np.floor(PPP):03.0f}.nc"
                CHIPSIZEY = np.round(CHIPSIZEX * ScaleChipSizeY / 2) * 2

                from datetime import date
                d0 = date(int(master_split[5][0:4]), int(master_split[5][4:6]),
                          int(master_split[5][6:8]))
                d1 = date(int(slave_split[5][0:4]), int(slave_split[5][4:6]),
                          int(slave_split[5][6:8]))
                date_dt_base = d1 - d0
                date_dt = np.float64(np.abs(date_dt_base.days))
                if date_dt_base.days < 0:
                    date_ct = d1 + (d0 - d1) / 2
                    date_center = date_ct.strftime("%Y%m%d")
                else:
                    date_ct = d0 + (d1 - d0) / 2
                    date_center = date_ct.strftime("%Y%m%d")
                IMG_INFO_DICT = {
                    'mission_img1': master_split[0][0],
                    'sensor_img1': 'C',
                    'satellite_img1': master_split[0][1:3],
                    'acquisition_img1': master_dt,
                    'absolute_orbit_number_img1': master_split[7],
                    'mission_data_take_ID_img1': master_split[8],
                    'product_unique_ID_img1': master_split[9][0:4],
                    'mission_img2': slave_split[0][0],
                    'sensor_img2': 'C',
                    'satellite_img2': slave_split[0][1:3],
                    'acquisition_img2': slave_dt,
                    'absolute_orbit_number_img2': slave_split[7],
                    'mission_data_take_ID_img2': slave_split[8],
                    'product_unique_ID_img2': slave_split[9][0:4],
                    'date_dt': date_dt,
                    'date_center': date_center,
                    'roi_valid_percentage': roi_valid_percentage,
                    'autoRIFT_software_version': version
                }

                error_vector = np.array(
                    [[0.0356, 0.0501, 0.0266, 0.0622, 0.0357, 0.0501],
                     [0.5194, 1.1638, 0.3319, 1.3701, 0.5191, 1.1628]])

                netcdf_file = no.netCDF_packaging(
                    VX, VY, DX, DY, INTERPMASK, CHIPSIZEX, CHIPSIZEY, SSM,
                    SX, SY, offset2vx_1, offset2vx_2, offset2vy_1, offset2vy_2,
                    MM, VXref, VYref, rangePixelSize, azimuthPixelSize, dt,
                    epsg, srs, tran, out_nc_filename, pair_type,
                    detection_method, coordinates, IMG_INFO_DICT, stable_count,
                    stable_shift_applied, dx_mean_shift, dy_mean_shift,
                    error_vector)

            elif nc_sensor == "L":
                if geogrid_run_info is None:
                    chipsizex0 = float(str.split(runCmd(
                        'fgrep "Smallest Allowable Chip Size in m:" testGeogrid.txt'))[-1])
                    XPixelSize = float(str.split(runCmd(
                        'fgrep "X-direction pixel size:" testGeogrid.txt'))[3])
                    YPixelSize = float(str.split(runCmd(
                        'fgrep "Y-direction pixel size:" testGeogrid.txt'))[3])
                    epsg = float(str.split(runCmd(
                        'fgrep "EPSG:" testGeogrid.txt'))[1])
                else:
                    chipsizex0 = geogrid_run_info['chipsizex0']
                    XPixelSize = geogrid_run_info['XPixelSize']
                    YPixelSize = geogrid_run_info['YPixelSize']
                    epsg = geogrid_run_info['epsg']

                master_path = indir_m
                slave_path = indir_s

                import os
                master_filename = os.path.basename(master_path)
                slave_filename = os.path.basename(slave_path)
                master_split = str.split(master_filename, '_')
                slave_split = str.split(slave_filename, '_')

                # master_MTL_path = master_path[:-6] + 'MTL.txt'
                # slave_MTL_path = slave_path[:-6] + 'MTL.txt'
                #
                # master_time = str.split(str.split(runCmd(
                #     'fgrep "SCENE_CENTER_TIME" ' + master_MTL_path))[2][1:-2], ':')
                # slave_time = str.split(str.split(runCmd(
                #     'fgrep "SCENE_CENTER_TIME" ' + slave_MTL_path))[2][1:-2], ':')
                master_time = ['00', '00', '00']
                slave_time = ['00', '00', '00']

                from datetime import time as time1
                master_time = time1(int(master_time[0]), int(master_time[1]),
                                    int(float(master_time[2])))
                slave_time = time1(int(slave_time[0]), int(slave_time[1]),
                                   int(float(slave_time[2])))

                import netcdf_output as no
                pair_type = 'optical'
                detection_method = 'feature'
                coordinates = 'map'
                roi_valid_percentage = int(round(
                    np.sum(CHIPSIZEX != 0) / np.sum(SEARCHLIMITX != 0) * 1000.0)) / 1000
                # out_nc_filename = 'Jakobshavn_opt.nc'
                PPP = roi_valid_percentage * 100
                out_nc_filename = f"./{master_filename[0:-7]}_X_{slave_filename[0:-7]}" \
                                  f"_G{chipsizex0:04.0f}V02_P{np.floor(PPP):03.0f}.nc"
                CHIPSIZEY = np.round(CHIPSIZEX * ScaleChipSizeY / 2) * 2

                from datetime import date
                d0 = date(int(master_split[3][0:4]), int(master_split[3][4:6]),
                          int(master_split[3][6:8]))
                d1 = date(int(slave_split[3][0:4]), int(slave_split[3][4:6]),
                          int(slave_split[3][6:8]))
                date_dt_base = d1 - d0
                date_dt = np.float64(np.abs(date_dt_base.days))
                if date_dt_base.days < 0:
                    date_ct = d1 + (d0 - d1) / 2
                    date_center = date_ct.strftime("%Y%m%d")
                else:
                    date_ct = d0 + (d1 - d0) / 2
                    date_center = date_ct.strftime("%Y%m%d")

                master_dt = master_split[3][0:8] + master_time.strftime("T%H:%M:%S")
                slave_dt = slave_split[3][0:8] + slave_time.strftime("T%H:%M:%S")

                IMG_INFO_DICT = {
                    'mission_img1': master_split[0][0],
                    'sensor_img1': master_split[0][1],
                    'satellite_img1': np.float64(master_split[0][2:4]),
                    'correction_level_img1': master_split[1],
                    'path_img1': np.float64(master_split[2][0:3]),
                    'row_img1': np.float64(master_split[2][3:6]),
                    'acquisition_date_img1': master_dt,
                    'processing_date_img1': master_split[4][0:8],
                    'collection_number_img1': np.float64(master_split[5]),
                    'collection_category_img1': master_split[6],
                    'mission_img2': slave_split[0][0],
                    'sensor_img2': slave_split[0][1],
                    'satellite_img2': np.float64(slave_split[0][2:4]),
                    'correction_level_img2': slave_split[1],
                    'path_img2': np.float64(slave_split[2][0:3]),
                    'row_img2': np.float64(slave_split[2][3:6]),
                    'acquisition_date_img2': slave_dt,
                    'processing_date_img2': slave_split[4][0:8],
                    'collection_number_img2': np.float64(slave_split[5]),
                    'collection_category_img2': slave_split[6],
                    'date_dt': date_dt,
                    'date_center': date_center,
                    'roi_valid_percentage': roi_valid_percentage,
                    'autoRIFT_software_version': version
                }

                error_vector = np.array([57., 57.])

                netcdf_file = no.netCDF_packaging(
                    VX, VY, DX, DY, INTERPMASK, CHIPSIZEX, CHIPSIZEY, SSM,
                    SX, SY, offset2vx_1, offset2vx_2, offset2vy_1, offset2vy_2,
                    MM, VXref, VYref, XPixelSize, YPixelSize, None, epsg, srs,
                    tran, out_nc_filename, pair_type, detection_method,
                    coordinates, IMG_INFO_DICT, stable_count,
                    stable_shift_applied, dx_mean_shift, dy_mean_shift,
                    error_vector)

            elif nc_sensor == "S2":
                if geogrid_run_info is None:
                    chipsizex0 = float(str.split(runCmd(
                        'fgrep "Smallest Allowable Chip Size in m:" testGeogrid.txt'))[-1])
                    XPixelSize = float(str.split(runCmd(
                        'fgrep "X-direction pixel size:" testGeogrid.txt'))[3])
                    YPixelSize = float(str.split(runCmd(
                        'fgrep "Y-direction pixel size:" testGeogrid.txt'))[3])
                    epsg = float(str.split(runCmd(
                        'fgrep "EPSG:" testGeogrid.txt'))[1])
                else:
                    chipsizex0 = geogrid_run_info['chipsizex0']
                    XPixelSize = geogrid_run_info['XPixelSize']
                    YPixelSize = geogrid_run_info['YPixelSize']
                    epsg = geogrid_run_info['epsg']

                master_path = indir_m
                slave_path = indir_s
                master_split = master_path.split('_')
                slave_split = slave_path.split('_')

                import os
                master_filename = master_split[0][-3:] + '_' + master_split[2] \
                    + '_' + master_split[4][:3] + '_' + os.path.basename(master_path)
                slave_filename = slave_split[0][-3:] + '_' + slave_split[2] \
                    + '_' + slave_split[4][:3] + '_' + os.path.basename(slave_path)

                master_time = ['00', '00', '00']
                slave_time = ['00', '00', '00']

                from datetime import time as time1
                master_time = time1(int(master_time[0]), int(master_time[1]),
                                    int(float(master_time[2])))
                slave_time = time1(int(slave_time[0]), int(slave_time[1]),
                                   int(float(slave_time[2])))

                import netcdf_output as no
                pair_type = 'optical'
                detection_method = 'feature'
                coordinates = 'map'
                roi_valid_percentage = int(round(
                    np.sum(CHIPSIZEX != 0) / np.sum(SEARCHLIMITX != 0) * 1000.0)) / 1000
                PPP = roi_valid_percentage * 100
                out_nc_filename = f"./{master_filename[0:-8]}_X_{slave_filename[0:-8]}" \
                                  f"_G{chipsizex0:04.0f}V02_P{np.floor(PPP):03.0f}.nc"
                CHIPSIZEY = np.round(CHIPSIZEX * ScaleChipSizeY / 2) * 2

                from datetime import date
                d0 = date(int(master_split[2][0:4]), int(master_split[2][4:6]),
                          int(master_split[2][6:8]))
                d1 = date(int(slave_split[2][0:4]), int(slave_split[2][4:6]),
                          int(slave_split[2][6:8]))
                date_dt_base = d1 - d0
                date_dt = np.float64(np.abs(date_dt_base.days))
                if date_dt_base.days < 0:
                    date_ct = d1 + (d0 - d1) / 2
                    date_center = date_ct.strftime("%Y%m%d")
                else:
                    date_ct = d0 + (d1 - d0) / 2
                    date_center = date_ct.strftime("%Y%m%d")

                master_dt = master_split[2] + master_time.strftime("T%H:%M:%S")
                slave_dt = slave_split[2] + slave_time.strftime("T%H:%M:%S")

                IMG_INFO_DICT = {
                    'mission_img1': master_split[0][-3],
                    'satellite_img1': master_split[0][-2:],
                    'correction_level_img1': master_split[4][:3],
                    'acquisition_date_img1': master_dt,
                    'mission_img2': slave_split[0][-3],
                    'satellite_img2': slave_split[0][-2:],
                    'correction_level_img2': slave_split[4][:3],
                    'acquisition_date_img2': slave_dt,
                    'date_dt': date_dt,
                    'date_center': date_center,
                    'roi_valid_percentage': roi_valid_percentage,
                    'autoRIFT_software_version': version
                }

                error_vector = np.array([57., 57.])

                netcdf_file = no.netCDF_packaging(
                    VX, VY, DX, DY, INTERPMASK, CHIPSIZEX, CHIPSIZEY, SSM,
                    SX, SY, offset2vx_1, offset2vx_2, offset2vy_1, offset2vy_2,
                    MM, VXref, VYref, XPixelSize, YPixelSize, None, epsg, srs,
                    tran, out_nc_filename, pair_type, detection_method,
                    coordinates, IMG_INFO_DICT, stable_count,
                    stable_shift_applied, dx_mean_shift, dy_mean_shift,
                    error_vector)

            elif nc_sensor is None:
                print('netCDF packaging not performed')

            else:
                raise Exception(
                    'netCDF packaging not supported for the type "{0}"'.format(nc_sensor))

        print("Write Outputs Done!!!")
        print(time.time() - t1)

    return netcdf_file

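# A minimal numpy sketch of the stable-surface calibration inside
# generateAutoriftProduct above: over stable-surface pixels, residuals against
# the reference that fall within +/-5 pixels are summarized by their median and
# subtracted from the displacement field. Arrays here are toy values.
import numpy as np

DX_demo = np.array([[1.4, 1.6], [1.5, 30.0]])   # 30.0 plays the outlier
DXref_demo = np.zeros_like(DX_demo)
SSM_demo = np.ones_like(DX_demo, dtype=bool)

temp = DX_demo - DXref_demo
temp[np.logical_not(SSM_demo)] = np.nan
dx_mean_shift_demo = np.median(temp[(temp > -5) & (temp < 5)])
assert abs(dx_mean_shift_demo - 1.5) < 1e-9     # the +/-5 gate drops the outlier

# A hedged usage sketch of generateAutoriftProduct for an optical Landsat pair,
# kept commented out because it depends on files on disk. The input names are
# placeholders, and the grid/conversion rasters are assumptions about what a
# prior geogrid run produced, not guaranteed by this code.
# netcdf_file = generateAutoriftProduct(
#     'master_B8.TIF', 'slave_B8.TIF',
#     grid_location='window_location.tif',
#     init_offset=None, search_range=None,
#     chip_size_min=None, chip_size_max=None,
#     offset2vx='window_rdr_off2vel_x_vec.tif',
#     offset2vy='window_rdr_off2vel_y_vec.tif',
#     stable_surface_mask=None,
#     optical_flag=1, nc_sensor='L', mpflag=0)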