def __str__(self):
    """Render the name/value pair list as indented, human-readable text.

    Nested NVLIST values are shifted one level; NVLIST_ARRAY elements get
    an additional "[i]:" header per entry.
    """
    pieces = []
    for pair in self.pairs:
        label = pair.name.decode('ascii')
        if pair.type == NVPair.DATA_TYPE_NVLIST:
            pieces.append(label + ":\n")
            pieces.append(util.shift(str(pair.value), 1) + "\n")
        elif pair.type == NVPair.DATA_TYPE_NVLIST_ARRAY:
            pieces.append(label + ":\n")
            for idx, item in enumerate(pair.value):
                pieces.append(util.shift("[%d]:" % (idx), 1) + "\n")
                pieces.append(util.shift(str(item), 2) + "\n")
        else:
            pieces.append("%s (%s): %s\n" % (label, pair.type, pair.value))
    # Drop the trailing newline (harmless no-op when there are no pairs).
    return "".join(pieces)[:-1]
def calc_rsv(self, cur_price):
    """Push ``cur_price`` into the rolling low/high windows and return the RSV.

    Returns the raw stochastic value (0-100 scale) once the windows hold
    ``self.n`` samples, or -1 when the window is not yet full or the
    high-low range is zero.
    """
    shift(self.n_low, cur_price, self.n)
    shift(self.n_high, cur_price, self.n)
    if len(self.n_low) == self.n:
        # Hoist the extrema: the original recomputed min(self.n_low) three times.
        low = min(self.n_low)
        high = max(self.n_high)
        logger.debug("max n_high is %d", high)
        logger.debug("min n_low is %d", low)
        denominator = high - low
        # TODO: 0 caused by price staying same after 3pm, fix it
        # (removed a leftover debug print(denominator) here)
        if denominator == 0:
            logger.error("0 == denominator")
            return -1
        return (cur_price - low) / denominator * 100
    return -1
def _dump_tree(addr, base):
    """Recursively walk an indirect-block tree, dumping each dnode block found.

    ``addr`` is either a raw device offset (int) of an lz4-compressed block
    whose first 4 bytes hold the big-endian payload size, or a BlkPtr read
    through the pool.  ``base`` accumulates the block index down the tree
    (1024 pointers per level).
    """
    if isinstance(addr, int):
        # Raw offset: read the size header, then the whole compressed block.
        block = raiddev.read(addr, 4096)
        input_size, = struct.unpack_from(">I", block)
        block = raiddev.read(addr, 4 + input_size)
        block = lz4_decompress(block)
    else:
        if addr.endian == 0:
            return
        try:
            block = pool.read_raw(addr)
        except Exception:  # was a bare except; keep best-effort semantics
            print("Read this ptr failed:")
            print(util.shift(str(addr), 1))
            traceback.print_exc(file=sys.stdout)
            return
    for j in range(len(block) // BlkPtr.SIZE):
        try:
            ptr = BlkPtr.frombytes(block[j * BlkPtr.SIZE:(j + 1) * BlkPtr.SIZE])
            if ptr.embedded and ptr.etype == BlkPtr.ETYPE_DATA:
                print(" [%d]: EMBEDDED" % (j, ))
            elif ptr.birth == 0:
                continue
            elif ptr.lvl == 0:
                # Leaf level: read the pointed-to block and dump its dnodes.
                addr = ptr.dva[0].offset
                nblock = pool.read_raw(ptr)
                if nblock is None:
                    continue
                _dump_dnode_block(addr, nblock, base * 1024 + j)
            else:
                _dump_tree(ptr, base * 1024 + j)
        except Exception:
            # Best-effort scan: one corrupt pointer must not stop the walk.
            pass
def __str__(self):
    """Pretty-print this dnode: header fields, block pointers, then bonus data."""
    s = """PYTHON TYPE: %s
TYPE: %s
INDBLKSHIFT: %d
NLEVELS: %d
NBLKPTR: %d
BONUSTYPE: %d
CHECKSUM: %d
COMPRESS: %d
FLAGS: %x
DATABLKSZSEC: %x
BONUSLEN: %d
MAXBLKID: %x
SECPHYS: %x
""" % (self.__class__.__name__, dmu_constant.TYPES[self.type],
       self.indblkshift, self.nlevels, self.nblkptr, self.bonustype,
       self.checksum, self.compress, self.flags, self.datablkszsec * 512,
       self.bonuslen, self.maxblkid, self.secphys)
    for i in range(self.nblkptr):
        s += "PTR[%d]: \n%s\n" % (i, util.shift(str(self.blkptr[i]), 1))
    if self.bonus is not None:  # was `!= None`
        if not isinstance(self.bonus, bytes):  # was `type(...) != bytes`
            s += str(self.bonus) + "\n"
        else:
            # Raw bonus bytes are not decoded here.
            s += "BONUS: ?\n"
    return s[:-1]
def scan():
    """Brute-force scan the raid device for lz4-compressed metadata blocks.

    Walks device offsets in ashift-sized steps, attempts to lz4-decompress
    each candidate, and prints anything that parses as dnodes or as block
    pointers.  Progress and hit rates (v1/v2/v3 are module-level counters
    -- presumably read caches; verify against the rest of the file) are
    reported every GB.
    """
    ashift = raiddev.ashift
    i = 0
    # Scan up to 9 TiB worth of device offsets, one allocation unit at a time.
    while i < 9 * 1024 * 1024 * 1024 * 1024 >> ashift:
        if (i << ashift) % (1024 * 1024 * 1024) == 0:
            print("Scanned %d GB" % ((i << ashift) // (1024 * 1024 * 1024)))
            print("Hit rate 1: %f%%" % v1.get_hit_rate())
            print("Hit rate 2: %f%%" % v2.get_hit_rate())
            print("Hit rate 3: %f%%" % v3.get_hit_rate())
            v1.clear_hit_rate()
            v2.clear_hit_rate()
            v3.clear_hit_rate()
        # First 4 bytes of a candidate: big-endian compressed payload size.
        block = raiddev.read(i << ashift, 1 << ashift)
        input_size, = struct.unpack_from(">I", block)
        if input_size > 128 * 1024:
            # Larger than any plausible compressed block -- skip this unit.
            i += 1
            continue
        block = raiddev.read(i << ashift, 4 + input_size)
        try:
            block = lz4_decompress(block)
        except:
            # Not valid lz4 data; advance one unit and keep scanning.
            i += 1
            continue
        try:
            print("Found at 0x%x" % (i << ashift))
            # Pass 1: interpret the decompressed block as an array of dnodes.
            for j in range(len(block) // Dnode.SIZE):
                dnode = Dnode.frombytes(
                    block[j * Dnode.SIZE:(j + 1) * Dnode.SIZE], pool)
                if dnode.type != 0 and dnode.type < len(
                        dmu_constant.TYPES) and dmu_constant.TYPES[
                            dnode.type] != None:
                    print(" [%d]: %s" % (j, dmu_constant.TYPES[dnode.type]))
                    if dnode.type == 20:
                        # NOTE(review): type 20 appears to be a listable
                        # (directory-like) object -- confirm against dmu_constant.
                        print(dnode.list())
                    elif dnode.type == 19:
                        # Flag bit 1 selects whether secphys is bytes or sectors.
                        print(" filelen: %d" % (dnode.secphys if
                                                (dnode.flags & 1 != 0) else
                                                (dnode.secphys * 512)))
            # Pass 2: reinterpret the same block as an array of block pointers.
            for j in range(len(block) // BlkPtr.SIZE):
                ptr = BlkPtr.frombytes(block[j * BlkPtr.SIZE:(j + 1) *
                                             BlkPtr.SIZE])
                if ptr.embedded and ptr.etype == BlkPtr.ETYPE_DATA:
                    # NOTE(review): prints the type of the *last* dnode from
                    # pass 1, not anything derived from this ptr -- looks
                    # like leftover copy/paste; confirm intent.
                    print(" [%d]: %s" % (j, dmu_constant.TYPES[dnode.type]))
                elif not ptr.embedded and ptr.dva[0].vdev == 0 and ptr.dva[
                        0].offset & 0x1ff == 0 and ptr.dva[
                            0].asize & 0xfff == 0 and (
                                ptr.comp == 15 or ptr.comp == 2) and ptr.type == 20:
                    print(" [%d]:" % (j, ))
                    print(util.shift(str(ptr), 2))
        except Exception as e:
            # Parsing failed part-way through; report and continue scanning.
            pass
            print("Bad at 0x%x" % (i << ashift))
            traceback.print_exc(file=sys.stdout)
        # Step by the allocated size of the block just examined.
        i += raiddev.get_asize(4 + input_size) >> ashift
def print_waterfall(self, compact=1, l=60, inverted=True):
    """Print the sonar waterfall as rows of gray-scale ASCII dots.

    Inverted waterfall means the most recent events show at the bottom,
    not at the top.  ``compact`` is how many consecutive sonar readings are
    averaged into one printed row; ``l`` is how many waterfall events to show.

    NOTE(review): uses ``xrange`` and true-division of WATERFALL_STEPS --
    this is Python 2 code; confirm before running under Python 3.
    """
    wf = self.sonar.waterfall[-l:] # filters the last "l" events
    len_wf = len(wf)
    wf_c = [] # compact waterfall
    #print(wf)
    if len_wf == 0:
        print ("no sonar data")
        return
    idx = 0
    while idx < len_wf:
        # idx_compact is the number of sonar readings to be "compacted" into
        # the next printed line (may be fewer than `compact` at the tail).
        idx_compact = min(compact, len_wf - idx)
        total = [0.0] * 120
        for _ in xrange(idx_compact): # compacts the display, calculating the average
            wf_idx = wf[idx]
            for c in xrange(120):
                total[c] += wf_idx[c]
            idx += 1
        #print(total)
        #line = [self.asciiScaler(d/idx_compact) for d in total]
        line = [ascii_gray(".", int(round(self.scaler(d / idx_compact)))) for d in total]
        wf_c.append("[{0}{1}]".format("".join(shift(line, self.sonar.WATERFALL_STEPS / 2)), ascii_reset()))
    if not inverted:
        wf_c.reverse()
    # Header row: one compass glyph per waterfall step, rotated to match rows.
    step = 360 / self.sonar.WATERFALL_STEPS
    header = [angles_to_unicode(i * step) for i in xrange(self.sonar.WATERFALL_STEPS)]
    print(" " + "".join(shift(header, self.sonar.WATERFALL_STEPS / 2)))
    for l in wf_c:
        print(l)
def dump_0():
    """Interactively dump the first block pointer of root blocks.

    Repeatedly prompts for a hex root-block address; an empty line ends
    the loop.  Each block is read, lz4-decompressed (the first 4 bytes
    are the big-endian payload size), and its first BlkPtr is printed.
    """
    while True:
        line = input("Root block addr: ")
        if line == '':
            break
        addr = int(line.strip(), 16)
        print("Root 0x%x" % (addr, ))
        header = raiddev.read(addr, 4096)
        input_size, = struct.unpack_from(">I", header)
        compressed = raiddev.read(addr, 4 + input_size)
        data = lz4_decompress(compressed)
        ptr = BlkPtr.frombytes(data[0:BlkPtr.SIZE])
        print(util.shift(str(ptr), 1))
def dump_block():
    """Read one hex block address from stdin and dump the block's contents.

    The block is expected to be lz4-compressed with a 4-byte big-endian
    size header.  The user then chooses whether to decode it as an array
    of block pointers or of dnodes; the largest birth value seen is
    printed at the end.
    """
    line = sys.stdin.readline()
    line = line.strip()
    addr = int(line, 16)
    block = raiddev.read(addr, 4096)
    input_size, = struct.unpack_from(">I", block)
    if input_size > 128 * 1024:
        # Size header is implausible -- not a compressed block; give up.
        return
    print("Guessed psize=0x%x" % (4 + input_size, ))
    block = raiddev.read(addr, 4 + input_size)
    block = lz4_decompress(block)
    is_ptr = input("Is ptr block? (y/n)")
    if is_ptr == 'n':
        # Decode as an array of dnodes, tracking the newest birth value.
        birth = 0
        for j in range(len(block) // Dnode.SIZE):
            dnode = Dnode.frombytes(block[j * Dnode.SIZE:(j + 1) * Dnode.SIZE],
                                    pool)
            if dnode.get_birth() > birth:
                birth = dnode.get_birth()
            if dnode.type != 0 and dnode.type < len(
                    dmu_constant.TYPES) and dmu_constant.TYPES[
                        dnode.type] != None:
                print(" [%d]: %s (@%d)" %
                      (j, dmu_constant.TYPES[dnode.type], dnode.get_birth()))
                if dnode.type == 20:
                    # NOTE(review): type 20 appears to be a listable
                    # (directory-like) object -- confirm.
                    print(dnode.list())
                elif dnode.type == 19:
                    # Flag bit 1 selects whether secphys is bytes or sectors.
                    print(" filelen: %d" %
                          (dnode.secphys if (dnode.flags & 1 != 0) else
                           (dnode.secphys * 512)))
        print("Birth: %d" % (birth, ))
    else:
        # Decode as an array of block pointers.
        birth = 0
        for j in range(len(block) // BlkPtr.SIZE):
            ptr = BlkPtr.frombytes(block[j * BlkPtr.SIZE:(j + 1) * BlkPtr.SIZE])
            if ptr.birth > birth:
                birth = ptr.birth
            if ptr.embedded and ptr.etype == BlkPtr.ETYPE_DATA:
                print(" [%d]: EMBEDDED" % (j, ))
            else:
                print(" [%d]:" % (j, ))
                print(util.shift(str(ptr), 2))
        print("Birth: %d" % (birth, ))
def shift_images(img_lst, bg_map, out_dir):
    """Composite each foreground image onto its mapped background via syn_util.shift.

    Results are written into ``out_dir`` (created if missing) under the
    foreground's basename.  Returns ``(output_paths, bg_info)`` where
    ``bg_info`` maps each output path to the background path used.
    """
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)
    outputs = []
    backgrounds = {}
    for fg_path in img_lst:
        bg_path = bg_map[fg_path]
        print(fg_path, bg_path)
        # Keep RGB only: drop any alpha channel.
        fg_img = misc.imread(fg_path)[..., :3]
        bg_img = misc.imread(bg_path)[..., :3]
        print(fg_img.shape, bg_img.shape)
        base = os.path.basename(fg_path)
        dst = os.path.join(out_dir, base)
        print(base, dst)
        misc.imsave(dst, syn_util.shift(bg_img, fg_img))
        outputs.append(dst)
        backgrounds[dst] = bg_path
    return outputs, backgrounds
def print_sonar(self):
    """Return one bracketed ASCII line for the current 120-cell sonar reading."""
    cells = []
    for value in self.sonar.sonar_array(120):
        level = int(round(self.scaler(value)))
        cells.append(ascii_gray(".", level))
    rotated = shift(cells, self.sonar.WATERFALL_STEPS / 2)
    return "[{0}{1}]".format("".join(rotated), ascii_reset())
def kdj_calc(self, algo,time):
    """Compute and persist a fresh KDJ tuple for ``algo``'s stock.

    Tornado-style coroutine body: raises ``gen.Return(True)`` on success,
    ``gen.Return(False)`` when there is not enough data or the RSV is
    invalid.  Side effect: inserts the new K/D/J values into
    ``self.kdjcollection``.
    """
    cursor = self.ccccc(algo,time)
    # Feed the rolling low/high windows from the most recent 100 records.
    for stock_dict in (yield cursor.to_list(100)):
        if 1 == algo.period:
            # Intraday mode: current price drives both windows.
            self.cur_price = Decimal(stock_dict["d"][constants.CUR_PRICE])
            shift(self.n_low, self.cur_price, self.n) # n_low.append(cur_price)
            shift(self.n_high,self.cur_price, self.n)
        else:
            # Daily mode: use yesterday's close/low/high.
            self.cur_price = Decimal(stock_dict["e"][constants.CLOSE_Y])
            low = Decimal(stock_dict["e"][constants.LO_Y])
            high = Decimal(stock_dict["e"][constants.HI_Y])
            shift(self.n_low, low, self.n) # n_low.append(cur_price)
            shift(self.n_high,high, self.n)
    print("self.n_low:")
    print(self.n_low)
    print(algo.stock_id)
    if len(self.n_low) != self.n:
        # Window not full: backfill KDJ history from the stocks collection.
        # NOTE(review): `find_query`, `sort_query` and bare `CUR_PRICE` are
        # not defined in this method -- presumably module globals; verify,
        # otherwise this branch raises NameError.
        cursor = self.db.stocks.find(find_query, sort=sort_query)
        for stock_dict in (yield cursor.to_list(1000)):
            price = Decimal(stock_dict["d"][CUR_PRICE])
            rsv = self.calc_rsv(price)
            if rsv != -1:
                k = self.calc_k(rsv)
                d = self.calc_d(k)
                j = 3 * k - 2 * d
                from config import datetime_repr
                ts = datetime.strptime(stock_dict["_id"]["d"], datetime_repr())
                self.kdj.append([k, d, j, ts])
        if len(self.kdj) < 2:
            logger.error("not enough kdj data")
            raise gen.Return(False)
    rsv = self.calc_rsv(self.cur_price)
    if rsv != -1:
        k = self.calc_k(rsv)
        d = self.calc_d(k)
        j = 3 * k - 2 * d
        #from config import datetime_repr
        #ts = datetime.strptime(stock_dict["_id"]["d"], datetime_repr())
        # NOTE(review): `stock_dict` here is whatever the last loop left
        # bound -- fragile; confirm it is the newest record.
        ts = stock_dict["_id"]["d"]
        # self.kdj switches meaning here: from a list of rows to one [k,d,j,ts].
        self.kdj =[]
        self.kdj.append(k)
        self.kdj.append(d)
        self.kdj.append(j)
        self.kdj.append(ts)
        print("new kdj:")
        print(self.kdj)
        self.kdjcollection.insert({"_id":{"c":algo.stock_id,"d":ts},"d":{"k":self.kdj[0],"d":self.kdj[1],"j":self.kdj[2]}})
        raise gen.Return(True)
    else:
        raise gen.Return(False)

@gen.coroutine
def match_condition_secondary(self, algo,time):
    """Scan recent KDJ history for a K/D crossover within the time window."""
    if (yield self.kdj_init(algo)):
        min_time = algo.time - timedelta(seconds=self.window)
        prev_k, prev_d, _, _ = self.kdj.popleft()
        for curr_k, curr_d, _, ts in self.kdj:
            if ts >= min_time:
                self.matched = self.is_match(prev_k, prev_d, curr_k, curr_d, algo.trade_method)
                if self.matched:
                    # NOTE(review): the original log string was split across a
                    # line break in the source; reconstructed as one message.
                    logger.debug('match %s ok '%algo.algo_id)
                    return

@gen.coroutine
def match_condition_primary(self, algo,time):
    """Check the freshly computed KDJ against stored history for a crossover."""
    global stock_dict
    if (yield self.kdj_calc(algo,time)):
        kdjlist = stock_dict[algo.stock_id]
        klist = kdjlist[0]
        dlist = kdjlist[1]
        jlist = kdjlist[2]
        index =1
        for index in range(len(klist)):
            # NOTE(review): `self.klist` / `self.dlist` are not assigned in
            # this method (the locals above are `klist`/`dlist`), and the
            # indices are constant -- this loop looks broken; verify.
            curr_k = self.klist[0]
            curr_d = self.dlist[1]
            prev_k = kdjlist[0]["d"]["k"]
            prev_d = kdjlist[0]["d"]["d"]
            self.matched = self.is_match(prev_k, prev_d, curr_k, curr_d, algo.trade_method)
            if(self.matched):
                if self.match_list == None:
                    # NOTE(review): opened without a mode (read-only) but
                    # written to below -- this raises on write; needs "a"/"w".
                    self.match_list = open("../match-list.txt")
                self.match_list.write("match ")
                logger.debug('match %s ok '%algo.algo_id)
                return
    else:
        logger.error('%s not matched '%algo.algo_id)

def is_match(self, prev_k, prev_d, curr_k, curr_d, trade_method):
    """Return True when the K/D lines crossed in the direction of the trade."""
    if trade_method == "sell":
        # D should pass K
        if prev_d <= prev_k and curr_d > curr_k:
            return True
    if trade_method == "buy":
        # K should pass D
        if prev_k <= prev_d and curr_k > curr_d:
            return True
    return False
def calc_d(self, k):
    """Push K into the m1-length rolling window and return its average as D."""
    window = self.m1_k
    shift(window, k, self.m1)
    return avg(window)
def calc_k(self, rsv):
    """Push RSV into the m-length rolling window and return its average as K."""
    window = self.m_rsv
    shift(window, rsv, self.m)
    return avg(window)
def acc_fnc(predicted, target):
    """Fraction of positions where the predicted sign matches the series' move.

    A positive logit predicts that the next (shifted) value exceeds the
    current one; returns the mean agreement as a numpy scalar.
    """
    went_up = target < util.shift(target)
    predicted_up = predicted > 0
    return (predicted_up == went_up).cpu().numpy().mean()
def loss_fcn(predicted, target):
    """BCE-with-logits loss against the did-the-series-rise indicator.

    The label is 1.0 where the shifted series exceeds the current value.
    """
    went_up = (target < util.shift(target)).float()
    return nn.functional.binary_cross_entropy_with_logits(predicted, went_up)
def forward(self, x):
    """Conv features -> flatten -> fc1 -> concat with shifted copy -> fc2 logits."""
    features = self.fcn(x)
    flat = features.reshape((features.shape[0], -1))
    hidden = self.fc1(flat)
    # Pair each row with its shifted counterpart before the final layer.
    paired = torch.cat((hidden, util.shift(hidden)), dim=1)
    return self.fc2(paired).squeeze(-1)