def rasterize_lidar():
    """Rasterize the LiDAR tiles into a mean-elevation grid.

    Reads every LiDAR tile returned by ``get_lidar_tiles()``, snaps each
    point to a 1-unit grid covering the tiles' combined extent, averages
    the z-values per grid cell, and writes the flat raster (plus a header
    describing the grid geometry) to ``$REBOUND_WRITE/rasters/BK_raster.bin``.
    """
    # -- get the lidar tile names and range
    coords = get_lidar_tiles()
    flist = coords.filename.values
    nfiles = len(flist)
    mm = [[coords.xmin.min(), coords.ymin.min()],
          [coords.xmax.max(), coords.ymax.max()]]

    # -- get the range (1 map unit per pixel)
    nrow = int(round(mm[1][1] - mm[0][1] + 0.5)) + 1
    ncol = int(round(mm[1][0] - mm[0][0] + 0.5)) + 1
    npix = nrow * ncol

    # -- initialize the raster (z sums) and per-pixel point counts
    rast = np.zeros(npix, dtype=float)
    cnts = np.zeros(npix, dtype=float)

    # -- read the tiles
    for ii, fname in enumerate(flist):
        print("\rworking on tile {0:3} of {1}".format(ii + 1, nfiles)),
        sys.stdout.flush()

        las = lp.file.File(fname, mode="r")
        tile = np.vstack((las.x, las.y, las.z)).T

        # -- snap to grid (flattened pixel index = col + row * ncol)
        rind = (tile[:, 1] - mm[0][1]).round().astype(int)
        cind = (tile[:, 0] - mm[0][0]).round().astype(int)
        pind = cind + rind * ncol

        # -- get the counts in each bin
        tcnt = np.bincount(pind, minlength=npix)

        # -- update the raster.  BUG FIX: the original used
        #    "rast[pind] += tile[:, 2]", but fancy-index "+=" applies only
        #    one update per unique index, silently dropping every extra
        #    point that lands in the same pixel.  np.bincount with weights
        #    accumulates all of them.
        rast += np.bincount(pind, weights=tile[:, 2], minlength=npix)
        cnts += tcnt

        # -- close las file
        las.close()

    # -- convert sum to mean (empty cells divide by 1 and stay 0)
    rast /= cnts + (cnts == 0)

    # -- write to output
    params = {"nrow": nrow, "ncol": ncol,
              "rmin": mm[0][1], "rmax": mm[1][1],
              "cmin": mm[0][0], "cmax": mm[1][0]}
    oname = os.path.join(os.environ["REBOUND_WRITE"], "rasters",
                         "BK_raster.bin")
    rast.tofile(oname)
    write_header(oname, params)

    return
def main(args):
    """Run the bond-length calculation and write its output to an .xvg file.

    The output path is ``args.optf`` when given, otherwise derived from
    the .gro file name; any previous output file is backed up first.
    """
    if args.debug:
        main_load()

    # Resolve the output file name, then do backup of any old copy.
    outputfile = args.optf if args.optf is not None else '{0:s}.output.xvg'.format(args.grof)
    backup_old_output(outputfile)

    # Do some logging at the beginning.
    outputf = open(outputfile, 'w')
    beginning_time = write_header(outputf)

    # Stream each calculated record straight into the output file.
    for record in calc_bond_length(args.grof, args.xtcf, args.btime,
                                   args.etime, args.debug):
        outputf.write(record)

    # Do some logging at the end.
    write_footer(outputf, beginning_time)
    outputf.close()
def main(args):
    """Run the dihedral calculation and write the results to an .xvg file.

    Uses ``args.optf`` as the output path when provided, falling back to
    a name derived from the .gro file; existing output is backed up.
    """
    utils.main_load()

    # Default the output name from the .gro file when none was given.
    if args.optf is None:
        outputfile = '{0:s}.output.xvg'.format(args.grof)
    else:
        outputfile = args.optf
    utils.backup(outputfile)

    # Log the run header and remember the start time for the footer.
    outputf = open(outputfile, 'w')
    beginning_time = utils.write_header(outputf)

    # Compute and write each result record as it is produced.
    for record in calc_dihedral(args.grof, args.xtcf, args.btime, args.etime):
        outputf.write(record)

    utils.write_footer(outputf, beginning_time)
    outputf.close()
def main(ARGS):
    """Count interactions over the trajectory and write them to an .xvg file.

    Output goes to ``ARGS.optf`` when given, otherwise to
    ``<grof>.output.xvg``; any existing file is backed up first.
    """
    if ARGS.debug:
        main_load()

    # Check the validity of the output file name and back up old output.
    outputfile = ARGS.optf
    if outputfile is None:
        outputfile = '{0:s}.output.xvg'.format(ARGS.grof)
    backup_old_output(outputfile)

    # Do some logging at the beginning.
    outputf = open(outputfile, 'w')
    beginning_time = write_header(outputf)

    # The cutoff arrives in nm; the calculation expects angstrom.
    result = count_interactions(ARGS.grof, ARGS.xtcf, ARGS.btime,
                                ARGS.cutoff * 10, ARGS.debug)

    # Column header, then one record per result.
    outputf.write('# {0:>10s}{1:>8s}\n'.format('time', 'num'))
    for rec in result:
        outputf.write(rec)

    # Do some logging at the end.
    write_footer(outputf, beginning_time)
    outputf.close()
def main(args):
    """Compute bond lengths and write the records to an .xvg output file.

    The output file defaults to ``<grof>.output.xvg`` unless ``args.optf``
    is set; a pre-existing file is backed up before writing.
    """
    if args.debug:
        main_load()

    # Pick the output name and back up whatever is already there.
    chosen = args.optf
    if chosen is None:
        chosen = '{0:s}.output.xvg'.format(args.grof)
    backup_old_output(chosen)

    outputf = open(chosen, 'w')
    # Header logging also yields the start time used by the footer.
    beginning_time = write_header(outputf)

    records = calc_bond_length(args.grof, args.xtcf, args.btime,
                               args.etime, args.debug)
    for rec in records:
        outputf.write(rec)

    write_footer(outputf, beginning_time)
    outputf.close()
def main(args):
    """Compute sequence-spacing statistics and write them as an .xvg table.

    For each sequence separation |i-j|, writes the mean, standard
    deviation, and sample count of the collected distances.
    """
    utils.main_load()

    if args.optf:
        outputfile = args.optf
    else:
        outputfile = '{0:s}.sespacing.xvg'.format(args.grof)
    utils.backup(outputfile)

    outputf = open(outputfile, 'w')
    beginning_time = utils.write_header(outputf)

    # The per-separation distance lists are only complete after every
    # frame has been processed, so results cannot be streamed frame by
    # frame.
    ijdist_dict = sequence_spacing(args.grof, args.xtcf, args.btime,
                                   args.etime, args.peptide_length,
                                   args.atom_sel)

    # Table header followed by one row per sequence separation.
    outputf.write('# {0:8s}{1:20s}{2:20s}{3:10s}\n'.format(
        'i-j', 'average', 'std', 'num_of_data_points'))
    for sep in sorted(ijdist_dict):
        dists = np.array(ijdist_dict[sep])
        outputf.write('{0:8d}{1:20.8f}{2:20.8f}{3:10d}\n'.format(
            sep, dists.mean(), dists.std(), len(dists)))

    # Do some logging at the end.
    utils.write_footer(outputf, beginning_time)
    outputf.close()
def main(args):
    """Write per-|i-j| spacing statistics (mean/std/count) to an .xvg file."""
    utils.main_load()

    outputfile = args.optf or '{0:s}.sespacing.xvg'.format(args.grof)
    utils.backup(outputfile)

    outputf = open(outputfile, 'w')
    beginning_time = utils.write_header(outputf)

    # sequence_spacing must see the whole trajectory before any statistic
    # can be computed, hence no frame-by-frame streaming of results.
    ijdist_dict = sequence_spacing(args.grof, args.xtcf, args.btime,
                                   args.etime, args.peptide_length,
                                   args.atom_sel)

    # Emit the column header, then one formatted row per separation key.
    header = '# {0:8s}{1:20s}{2:20s}{3:10s}\n'.format(
        'i-j', 'average', 'std', 'num_of_data_points')
    outputf.write(header)

    row_fmt = '{0:8d}{1:20.8f}{2:20.8f}{3:10d}\n'
    for key, values in sorted(ijdist_dict.items()):
        arr = np.array(values)
        outputf.write(row_fmt.format(key, arr.mean(), arr.std(), len(arr)))

    utils.write_footer(outputf, beginning_time)
    outputf.close()
def mn_rasterize():
    """Rasterize the MN point tiles into a mean-elevation grid.

    Loads every point tile returned by ``get_tile_list()`` (``np.load``-able
    arrays of x, y, z columns), snaps the points to a 1-unit grid over a
    hard-coded min/max extent, averages the z-values per cell, and writes
    the raster (plus a geometry header) to
    ``$REBOUND_WRITE/rasters/MN_raster.bin``.
    """
    # -- set the minmax (hard-coded extent of the MN tiles)
    mm = [[978979.241501, 194479.07369], [1003555.2415, 220149.07369]]

    # -- get the range
    nrow = int(round(mm[1][1] - mm[0][1] + 0.5)) + 1
    ncol = int(round(mm[1][0] - mm[0][0] + 0.5)) + 1
    npix = nrow * ncol

    # -- initialize the raster (z sums) and per-pixel point counts
    rast = np.zeros((nrow, ncol), dtype=float)
    cnts = np.zeros((nrow, ncol), dtype=float)

    # -- set the tile names
    fnames = get_tile_list()
    nfiles = len(fnames)

    # -- read the tiles
    for ii, fname in enumerate(fnames):
        print("\rworking on tile {0:3} of {1}".format(ii + 1, nfiles)),
        sys.stdout.flush()
        tile = np.load(fname)

        # -- snap to grid (flattened pixel index = col + row * ncol)
        rind = (tile[:, 1] - mm[0][1]).round().astype(int)
        cind = (tile[:, 0] - mm[0][0]).round().astype(int)
        pind = cind + rind * ncol

        # -- accumulate sums and counts per pixel.  BUG FIX: the original
        #    "rast[rind, cind] += tile[:, 2]" drops all but one point per
        #    pixel (fancy-index "+=" is unbuffered for duplicates), and
        #    "cnts[rind, cind] += tcnt[tcnt > 0]" pairs a per-point index
        #    (one entry per point) with a per-nonzero-bin array (one entry
        #    per occupied pixel) — a shape/ordering mismatch whenever any
        #    pixel holds more than one point.
        rast += np.bincount(pind, weights=tile[:, 2],
                            minlength=npix).reshape(nrow, ncol)
        cnts += np.bincount(pind, minlength=npix).reshape(nrow, ncol)

    # -- convert sum to mean (empty cells divide by 1 and stay 0)
    rast /= cnts + (cnts == 0)

    # -- write to output
    params = {"nrow": nrow, "ncol": ncol,
              "rmin": mm[0][1], "rmax": mm[1][1],
              "cmin": mm[0][0], "cmax": mm[1][0]}
    oname = os.path.join(os.environ["REBOUND_WRITE"], "rasters",
                         "MN_raster.bin")
    rast.tofile(oname)
    write_header(oname, params)
def runserver(self):
    """Fetch every result page for the current query and export to xlsx.

    Walks the paginated search results 25 records per page, parsing each
    page and advancing ``self.payload['from']``, then converts the
    collected CSV output to an xlsx file.
    """
    write_header(self.payload.get('query'), fieldnames=self.fieldnames)
    res = self.request_target(self.url, self.payload)
    total_count = res['searchResults']['totalCount']
    # Page count = ceil(total_count / 25).  BUG FIX: the original used
    # true division ("/") on the exact-multiple branch, which yields a
    # float under Python 3 and makes range(total_page) raise TypeError.
    total_page = total_count // 25 if total_count % 25 == 0 else total_count // 25 + 1
    print(f'*****共{total_page}页*****')
    print('-' * 20)
    for page_num in range(total_page):
        # Random 2-5 s delay between requests to avoid hammering the server.
        time_list = [2, 3, 4, 5]
        time.sleep(random.choice(time_list))
        self.parse_data(self.request_target(self.url, self.payload))
        self.payload['from'] += 25
        # NOTE(review): HPK appears to encode "<epoch ms>-<offset>" —
        # confirm against the target site's expected header format.
        self.headers['HPK'] = str(round(time.time() * 1000)) + '-' + str(
            self.payload['from'])
        print(f'当前第{page_num + 1}页')
        print(self.payload['from'])
        print('*' * 20)
    csv2xlsx(self.payload.get('query'))
def main(args):
    """Count interactions per frame and write them to an .xvg file.

    Output path is ``args.optf`` when set, otherwise ``<grof>.unun.xvg``;
    any existing file is backed up first.
    """
    utils.main_load()

    outputfile = args.optf if args.optf else '{0:s}.unun.xvg'.format(args.grof)
    utils.backup(outputfile)

    outputf = open(outputfile, 'w')
    beginning_time = utils.write_header(outputf)

    results = count_interactions(args.grof, args.xtcf, args.btime,
                                 args.etime, args.cutoff)

    # Column header, then one record per result.
    outputf.write('# {0:>10s}{1:>8s}\n'.format('time', 'num'))
    for line in results:
        outputf.write(line)

    # Do some logging at the end.
    utils.write_footer(outputf, beginning_time)
    outputf.close()
def main(args):
    """Compute phi/psi (Ramachandran) angles and write them to an .xvg file.

    Output path is ``args.optf`` when set, otherwise ``<grof>.rama.xvg``;
    any existing file is backed up first.
    """
    U.main_load()

    outputfile = args.optf if args.optf else '{0:s}.rama.xvg'.format(args.grof)
    U.backup(outputfile)

    outputf = open(outputfile, 'w')
    beginning_time = U.write_header(outputf)

    result = calc_rama(args.grof, args.xtcf, args.btime, args.etime)

    # Write headers.  BUG FIX: the original format string had only two
    # replacement fields for three arguments, so the 'resname-resid'
    # column label was silently dropped from the output.
    outputf.write('# {0:>10s}{1:>8s}{2:>15s}\n'.format(
        'phi', 'psi', 'resname-resid'))

    # Write results to the outputfile.
    for r in result:
        outputf.write(r)

    # Do some logging at the end.
    U.write_footer(outputf, beginning_time)
    outputf.close()
def main(cmd_args):
    """Count interactions and store the resulting map in an HDF5 file.

    Parses ``cmd_args``, runs ``count_interactions``, and writes the
    result array to the ``unun_map`` node of the h5 file (replacing any
    existing node).  The .log output file carries only header/footer
    logging since results go straight into the h5 file.
    """
    args = get_args(cmd_args)
    utils.main_load()

    # It's a log since the results are written to the h5 file directly.
    output = args.optf
    if output is None:
        outputfile = '{0:s}.output.log'.format(args.grof)
    else:
        outputfile = output
    utils.backup(outputfile)

    outputf = open(outputfile, 'w')
    beginning_time = utils.write_header(outputf)

    A = args
    if not os.path.exists(A.h5):
        raise IOError('{0} does not exist'.format(A.h5))

    # *10: convert to angstrom from nm
    result = count_interactions(A)

    # Node path inside the h5 file mirrors the xtc file's directory.
    path = os.path.join('/', os.path.dirname(A.xtcf))
    tb_name = os.path.join(path, 'unun_map')
    h5 = tables.openFile(A.h5, mode='a')
    if h5.__contains__(tb_name):
        # BUG FIX: the message used {0} twice, printing tb_name in both
        # slots; the second field should be the h5 file name.
        logger.info(
            'found {0} already in {1}, replacing with new calculated values'.
            format(tb_name, A.h5))
        _ = h5.getNode(tb_name)
        _.remove()
    h5.createArray(where=path, name='unun_map', object=result)
    h5.close()

    utils.write_footer(outputf, beginning_time)
    outputf.close()
def main(cmd_args):
    """Count interactions and store the resulting map in an HDF5 file.

    Parses ``cmd_args``, runs ``count_interactions``, and writes the
    result array to the ``unun_map`` node of the h5 file (replacing any
    existing node).  The .log output file carries only header/footer
    logging since results go straight into the h5 file.
    """
    args = get_args(cmd_args)
    utils.main_load()

    # It's a log since the results are written to the h5 file directly.
    output = args.optf
    if output is None:
        outputfile = '{0:s}.output.log'.format(args.grof)
    else:
        outputfile = output
    utils.backup(outputfile)

    outputf = open(outputfile, 'w')
    beginning_time = utils.write_header(outputf)

    A = args
    if not os.path.exists(A.h5):
        raise IOError('{0} does not exist'.format(A.h5))

    # *10: convert to angstrom from nm
    result = count_interactions(A)

    # Node path inside the h5 file mirrors the xtc file's directory.
    path = os.path.join('/', os.path.dirname(A.xtcf))
    tb_name = os.path.join(path, 'unun_map')
    h5 = tables.openFile(A.h5, mode='a')
    if h5.__contains__(tb_name):
        # BUG FIX: the original format string repeated {0}, so the h5 file
        # name never appeared in the log message.
        logger.info('found {0} already in {1}, replacing with new calculated values'.format(tb_name, A.h5))
        _ = h5.getNode(tb_name)
        _.remove()
    h5.createArray(where=path, name='unun_map', object=result)
    h5.close()

    utils.write_footer(outputf, beginning_time)
    outputf.close()
# -- go through the tiles and make full raster sub = np.zeros((2048, 2048), dtype=float) for ii, fname in enumerate(sorted(flist)): print("\rtile {0:4} of {1:4}...".format(ii + 1, nfiles)), sys.stdout.flush() tile = gdal.Open(fname, GA_ReadOnly) geo = tile.GetGeoTransform() tile_origin = geo[0], geo[3] raster = tile.ReadAsArray() rind = result.shape[0] - 2048 - int(tile_origin[1] - ylo) cind = int(tile_origin[0] - xlo) sub[...] = result[rind:rind + 2048, cind:cind + 2048] result[rind:rind + 2048, cind:cind + 2048] = np.maximum(raster, sub) # -- write output to binary file params = { "nrow": nrow, "ncol": ncol, "rmin": ylo, "rmax": yhi, "cmin": xlo, "cmax": xhi } oname = os.path.join(os.environ["REBOUND_WRITE"], "rasters", "BK_raster.bin") result.tofile(oname) write_header(oname, params)
last_speed = None i = 0 running = True while running: i += 1 for event in pygame.event.get(): if event.type == pygame.QUIT: running = False try: pressed = pygame.key.get_pressed() image_data = rtcom["duckie"]["camera"] if image_data is not None: jpg_data = np.asarray(image_data) img = cv2.imdecode(jpg_data, cv2.IMREAD_UNCHANGED) write_header(img, "Video Feed") data = rtcom["duckie"]["data"] for i, name in enumerate(data): write_line( img, i, f"{name} : {data[name][0]:0.1f} {data[name][1]}") #cv2.imshow("preview", img) img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) img = np.moveaxis(img, [0, 1], [1, 0]) surf = pygame.surfarray.make_surface(img) screen.blit(surf, (0, 0)) pygame.display.update() #key = cv2.waitKey(20) speed = {} speed["left"] = 0
def main():
    """Drive the CBO algorithm for equities (Python 2 script entry point).

    Reads config and the F&O scrip list, fetches yesterday's OHLC data,
    logs into Kite, builds a quote-subscription list, writes a buy/sell
    order seed file, reads it back into ``order_dict``, and finally
    starts a KiteTicker websocket loop with the module-level callbacks.
    """
    print "Executing CBO Algo for Equities"
    print "-------------------------------"
    # Results are shared with the websocket callbacks via module globals.
    global kite
    global fno_dict, base_dict, config_dict, orders
    global scrip_map, sub_list
    global order_dict
    inst_token = []
    #TODO: Add argparser for validating input
    if len(sys.argv) < NO_OF_PARAMS:
        print "Invalid number of params"
        # NOTE(review): the early return is commented out, so execution
        # continues even with too few arguments — confirm this is intended.
        #return
    # read config file
    config_dict = utils.read_config_file()
    # get list of fno
    fno_dict = utils.get_fno_dict()
    # get yesterdays high low
    base_dict = get_yesterdays_ohlc(sys.argv[1])
    #get kite object
    api_key, access_token, kite = kite_utils.login_kite(None)
    # get instrument list, create quote subscription list and
    # mapping between instrument token and tradingsymbol
    quote_list = []
    data = kite.instruments("NSE")
    for each in fno_dict:
        for instrument in data:
            if each == instrument['tradingsymbol']:
                entry = "NSE:" + str(instrument['tradingsymbol'])
                quote_list.append(entry)
                # sub list for subscribing to the quotes
                sub_list.append(int(instrument['instrument_token']))
                #mapping dictionary for token and trading symbol
                scrip_map[int(instrument['instrument_token'])] = str(instrument['tradingsymbol'])
    print scrip_map
    # open file to write buy/sell orders
    fp = open(config_dict['cbo_seed_file'], "w")
    # write header
    utils.write_header(fp, "CBO")
    # Generate order file: one buy/sell pair per scrip whose opening price
    # falls inside the configured [start_price, end_price] band.
    count = int(0)
    quotes = kite.quote(quote_list)
    for each in quotes:
        scrip = each.split(":")[1].strip("\n")
        if scrip not in base_dict:
            continue
        if float(quotes[each]["ohlc"]["open"]) < float(config_dict['start_price']):
            continue
        if float(quotes[each]["ohlc"]["open"]) > float(config_dict['end_price']):
            continue
        count = int(count) + int(1);
        buy, sell = generate_orders(scrip, base_dict[scrip], quotes[each]['ohlc']['open'])
        if (buy != None):
            fp.write(buy)
        if (sell != None):
            fp.write(sell)
    fp.close()
    # create dictionary for active orders
    curr_order = kite.orders()
    print "------------------------------------------------"
    print curr_order
    print "------------------------------------------------"
    # push all the orders: re-read the seed file just written and build
    # order_dict[scrip][action] entries from its space-separated columns.
    order_list = []
    order_dict = {}
    fp = open(config_dict['cbo_seed_file'])
    for each in fp:
        #ignore line starting with #
        if each.startswith("#"):
            continue
        each = each.rstrip()
        line = each.split(" ")
        scrip = line[SCRIP_ID]
        action = line[ACTION_ID]
        price = line[PRICE_ID]
        t_price = line[TRIGGER_ID]
        target = line[TARGET_ID]
        stoploss = line[STOPLOSS_ID]
        live_price = line[LIVE_PRICE_ID]
        # Create the per-scrip dict on first sight, then the per-action one.
        if line[SCRIP_ID] not in order_dict:
            order_dict[scrip] = {}
            order_dict[scrip][action] = {}
        else:
            order_dict[scrip][action] = {}
        order_dict[scrip][action]['price'] = price
        order_dict[scrip][action]['trigger_price'] = t_price
        order_dict[scrip][action]['target'] = target
        order_dict[scrip][action]['stoploss'] = stoploss
        order_dict[scrip][action]['flag'] = 0
        order_dict[scrip][action]['live_price'] = live_price
    fp.close()
    print "----------------------------------------------------------------"
    print order_dict
    print "----------------- End of order list ----------------------------"
    # Wire up the websocket callbacks and start the (blocking) ticker loop.
    kws = KiteTicker(api_key, access_token, debug=False)
    kws.on_ticks = on_ticks
    kws.on_connect = on_connect
    kws.on_close = on_close
    kws.on_error = on_error
    kws.on_noreconnect = on_noreconnect
    kws.on_reconnect = on_reconnect
    kws.on_order_update = on_order_update
    kws.connect()