def test(cl):
    """Evaluate a digit classifier on 1000 test images and print the results.

    Builds a 10x10 confusion matrix (rows = predicted category, columns =
    true label) plus a per-row percentage matrix, and pretty-prints both.

    :param cl: classifier exposing classify(image, default=...) -> category
    """
    print("Start Testing...")
    test_img_list = readImages(TEST_IMAGE_FILE, 1000)
    test_label_list = readLabels(TEST_LABEL_FILE, 1000)
    conf_matrix = generate_matrix()
    percent_matrix = generate_matrix()
    for i in range(len(test_img_list)):
        cat = int(cl.classify(test_img_list[i], default='unknown'))  # category classified
        label = int(test_label_list[i])  # get label
        conf_matrix[cat][label] += 1
        if i % 100 == 0 and i != 0:
            print("Finished " + str(int(i / 100)) + "00 Images")
    for i in range(10):
        row_sum = rowsum(conf_matrix, i)
        for j in range(10):
            # BUG FIX: a category that is never predicted has row_sum == 0,
            # which previously raised ZeroDivisionError.
            if row_sum:
                percent_matrix[i][j] = "{0:.0f}%".format(float(conf_matrix[i][j]) / row_sum * 100)
            else:
                percent_matrix[i][j] = "0%"
    print("Performance results")
    pprint(conf_matrix)
    print("\n")
    pprint(percent_matrix)
def execute(self, namespace):
    """Resolve a material and pretty-print its scenario tree.

    Builds a nested dict of the material's managers and each manager's
    models (name, hashed id, params) and dumps it with pprint.

    :param namespace: argparse-style namespace providing material_id and
        configpath attributes.
    """
    mat = wfinspector.get_material(namespace.material_id, namespace.configpath)
    material_id = wfutils.get_material_id(namespace.material_id)
    # scenarios
    result = {
        'material_id': material_id,
        'material_status': mat.material_status,
        'scenario': {
            'name': mat.name,
            'managers': [
                {
                    'name': mgrname,
                    'param': mgr.param,
                    'models': [
                        {
                            'name': mdlname,
                            'hashed_id': mdl.get_hashed_id(),
                            'param': mdl.param
                        }
                        for mdlname, mdl in mgr.models.items()
                    ]
                }
                for mgrname, mgr in mat.managers.items()
            ]
        }
    }
    from pprint import pprint
    pprint(result)
def get_old():
    """Process one queued notification request from MongoDB (Python 2).

    Takes the first document from `collection`, sends the user an SMS and an
    e-mail about their movie, then removes the processed document.
    The commented block below is the retired MySQL implementation, kept for
    reference.
    """
    ##db = MySQLdb.connect(host='localhost', user='******', passwd=dbpass, db='movie_users', cursorclass='MySQLdb.cursors.DictCursor')
    #db = MySQLdb.connect(host='localhost', user='******', passwd=dbpass, db='movie_users')
    ##import pdb; pdb.set_trace()
    #cursor = db.cursor()
    #cursor.execute("SELECT * FROM users")
    #rows = cursor.fetchall()
    #for row in rows:
    #    data = {}
    #    data['name'] = row[1][1:][:-1]
    #    data['title'] = row[2][1:][:-1]
    #    data['email'] = row[4][1:][:-1]
    #    data['phone'] = row[5][1:]
    #    send_text(data['name'], data['title'], data['phone'])
    #    send_email(data['name'], data['title'], data['email'])
    #cursor.execute("TRUNCATE TABLE users")
    post = collection.find_one()
    pprint(post)
    print post['user']
    print post['email']
    print post['phone']
    # NOTE(review): 'email' is printed twice — 'movie' may have been intended.
    print post['email']
    send_text(post['user'], post['movie'], post['phone'])
    send_email(post['user'], post['movie'], post['email'])
    result = collection.remove(spec_or_id={'_id': post['_id']}, safe=True)
    print result
def ParseText(stText: str, debug = False):
    """Parse a config text line by line, tracking each line's hierarchy.

    "Hierarchy" means nesting depth; e.g. for

        address-family ipv4
         exit-address-family

    "exit-address-family" sits one level below "address-family ipv4".

    :param stText: the raw configuration text
    :param debug: when True, pretty-print each parsed entry at the end
    :return: list of parsed config entries
    """
    liPrev = []
    liCur = []  # the current hierarchy level
    liConfig = []
    inDepthChar = 0
    # Read every line of the text
    for line in stText.split("\n"):
        # rstrip the right side to normalize \r\n vs \n endings
        line = line.rstrip()
        # Skip comment-only lines (e.g. bare "!") and blank lines
        if isComment(line) or line == "":
            continue
        # Parse this line, passing in the current hierarchy
        liCur = ParseConf(line, liPrev)
        # Append the result to the config list
        liConfig.append(liCur)
        #if debug:
        #    print(liCur)
        liPrev = liCur
    #pprint(liConfig)
    liConfig = deleteLengthFromliConfig(liConfig)
    if debug:
        for x in liConfig:
            pprint(x)
    return liConfig
def ID_a_ciudad(numero):
    """Pretty-print the city fixture entry whose primary key equals numero."""
    with open('../fixtures/ciudades.json') as json_data:
        data = json.load(json_data)
        json_data.close()
    matches = []
    for entry in data:
        if entry['pk'] == numero:
            matches.append(entry)
    pprint(matches)
def main():
    """Command-line entry point (Python 2).

    For every username given on the command line, log into the Destiny API,
    iterate over the user's characters and pretty-print each character's
    activity advisors.

    :return: process exit code (0 on success)
    """
    parser = argparse.ArgumentParser(description='Get the progression information for a character')
    parser.add_argument('usernames', metavar='USERNAME', type=str, nargs='+', help='usernames to check')
    args = parser.parse_args()
    #API_KEY = os.environ['DESTINY_API_KEY'].strip()
    DESTINY.login()
    for username in args.usernames:
        person = Person(username=username)
        ## Get next faction to level up
        for character in person.getCharacters():
            for advisor_info in character.getAdvisors()\
                    ['activityAdvisors'].values():
                activity_advisor = ActivityAdvisor(advisor_info)
                pprint(activity_advisor)
                #pprint(activity_advisor.manifest)
            # blank separator line between characters (Python 2 print)
            print
        print
    return 0
def fillCommitteeInfo():
    """Scrape nysenate.gov committee pages and record memberships (Python 2).

    Loads the pickled senator data, then for each committee scrapes its page
    to find the chair and members, cross-links senators to committees, and
    pickles (committeeInfo, senatorInfo) back to disk.
    """
    with open(openlegdir + "/senators.dat") as f:
        senatorInfo = cPickle.load(f)
    committeeInfo = dict()
    for committee in getCommittees():
        print "Currently filling Comittee: " + committee
        # Locate our soup by removing all ',' and joining the locased words with '-'
        cleaned = committee.replace(",", "").lower()
        url = "http://www.nysenate.gov/committee/" + "-".join(cleaned.split())
        soup = BeautifulSoup(urllib2.urlopen(url).read())
        # Get the chair, its held separately from the members
        chair = normalizeName(soup.find("div", "committee-chair").findAll("a")[1].text)
        senatorInfo[chair]["chairships"].append(committee)
        committeeInfo[committee] = [chair]
        # Get all the members from the embedded list
        members = [normalizeName(x.a.text) for x in soup.find("div", "committee-members").findAll("li")]
        # Side-effect comprehension: record this committee for every member
        [senatorInfo[member]["committees"].append(committee) for member in members]
        committeeInfo[committee].extend(members)
    pprint(senatorInfo)
    with open(openlegdir + "/senatorsfilled.dat", "w") as f:
        cPickle.dump((committeeInfo, senatorInfo), f)
def test_eif(self):
    """each XML file in the JATS dir with a matching *complete* output in the EIF directory are equal"""
    def xml_fname_to_eif(xml_fname, xml_path):
        # Map a JATS XML filename to its expected EIF JSON path
        return join(self.source_eif_dir, os.path.splitext(xml_fname)[0] + ".json")
    ddiffs = {}
    for xml_file, xml_path in self.xml_path_list.items():
        eif_file = xml_fname_to_eif(xml_file, xml_path)
        # Only compare files that actually have a reference EIF output
        if not os.path.exists(eif_file):
            LOG.info('skipping %s, path `%s` not found', xml_file, eif_file)
            continue
        generated_eif = json.loads(feeds.scrape(xml_path, lambda x: x[0]['article'][0]))
        expected_eif = json.load(open(eif_file))
        LOG.info("testing %s", xml_path)
        # byteify normalizes unicode/bytes before the deep comparison
        ddiff = DeepDiff(self.byteify(expected_eif), self.byteify(generated_eif))
        if ddiff:
            ddiffs[eif_file] = ddiff
    # Fail once at the end so every mismatch is reported, not just the first
    if len(ddiffs):
        for attr, value in ddiffs.items():
            print attr
            pprint(value)
            print "\n"
        self.assertTrue(False)
def main():
    """Load Granger-causality results, print them, and write labeled output.

    Also defines getPercentChanges inside the embedded R session (via rpy2):
    it fetches a symbol's Yahoo price history and returns daily returns
    since 2014.
    """
    getPercentChanges = ro.r('''
    getPercentChanges <- function(symbol) {
        allData = getSymbols.yahoo(symbol, auto.assign=FALSE)
        relevantColumn = names(allData)[length(names(allData))]
        values = allData[,relevantColumn]
        values = values['2014::']
        percentChanges = dailyReturn(values)
        return(percentChanges)
    }''')
    fileName = 'allGrangerCauses.txt'
    listOfConditionalGrangerCauses = doStuff(fileName, 'SAVE')
    pprint(listOfConditionalGrangerCauses)
    labelResults(labeledFileName = 'allGrangerCausesLabeled.txt',
                 resultsFileName = 'conditionalGrangerCauses.txt')
def sendKey(self):
    """POST this workspace's public key to the configured endpoint and
    pretty-print the JSON response."""
    payload = {
        'title': 'cloud-workspace',
        'key': self.pubKey
    }
    postReq = requests.post(self.endpoint,
                            data=json.dumps(payload),
                            headers=self.headers)
    pprint(postReq.json())
def login(self, username):
    """Obtain an OAuth access token via Reddit's password grant (exits on
    failure) and return a User object for username.

    :param username: reddit account name; the password is read from keyring
    :return: User(username) with self.access_token populated
    """
    password = keyring.get_password("redditbot", username)
    if not password:
        # NOTE(review): this line is corrupted/redacted ("******") — it
        # presumably prompted for the password and then called
        # keyring.set_password("redditbot", username, password).
        # Confirm against version control; as written it will not parse.
        password = raw_input("Please type your password: "******"redditbot", username, password)
    # <platform>:<app ID>:<version string> (by /u/<reddit username>)
    headers = {'User-Agent': 'Python: timsbot: v0.1 (by /u/timsbot)'}
    auth = requests.auth.HTTPBasicAuth(self.client_id, self.client_secret)
    data = {'grant_type': 'password', 'username': username, 'password': password}
    # Make an authentication post to get the access token
    response = requests.post(token_url, data=data, auth=auth, headers=headers)
    # If the response did not go through
    if response.status_code != 200:
        # NOTE(review): response.json (no parentheses) prints the bound
        # method, not the body — response.json() was likely intended.
        pprint(response.json)
        sys.exit()
    json_data = response.json()
    self.access_token = json_data['access_token']
    return User(username)
def _validate_setups(self):
    """Migrate a legacy flat user config into the per-screen-size layout.

    Old configs were keyed directly by executable name ('*.exe'); new ones
    nest the old dict under {virtual_screen_size: {'setups': ...}}. Detects
    the old shape, queries the current virtual screen size via an AHK
    helper, and rewrites the stored value accordingly.
    """
    if not self.user_cfg:
        return
    first_key_name = list(self.user_cfg.keys())[0].lower()
    # An '.exe' key at the top level marks the legacy (un-nested) layout.
    if '.exe' in first_key_name:
        print('updating the dictionary ...')
        this_path = self.mod.path
        cmd = os.path.join(this_path, 'sessionrestore_get_virtual_screen_size.ahk')
        virtual_screen_size = a2ahk.call_cmd(cmd, cwd=this_path)
        print(' virtual_screen_size: %s' % virtual_screen_size)
        # Drop any stale entry for this size before nesting the legacy dict.
        if virtual_screen_size in self.user_cfg:
            del self.user_cfg[virtual_screen_size]
        self.user_cfg = {virtual_screen_size: {'setups': deepcopy(self.user_cfg)}}
        self.set_user_value(self.user_cfg)
        print(' current element cfg:')
        pprint(self.user_cfg)
    # Second pass: wrap any per-size entry still missing a 'setups' level.
    change = False
    for virtual_screen_size in list(self.user_cfg.keys()):
        setups = self.user_cfg[virtual_screen_size]
        if 'setups' not in setups:
            del self.user_cfg[virtual_screen_size]
            self.user_cfg = {virtual_screen_size: {'setups': deepcopy(setups)}}
            change = True
    if change:
        self.set_user_value(self.user_cfg)
def getVideosByCate(self):
    """Page through every category's monthly videos and store them.

    For each category, repeatedly queries 100 videos per page until an API
    error (other than code 1017) or a short page signals the end, inserting
    the collected videos into the session store.
    """
    videoIdSet = set(self.allVideos.keys())
    userIdSet = self.allUsers
    for cate in self.categories:
        # All query parameters are set here
        page = 1
        count = 100
        period = 'month'
        # End of parameter settings
        temp = []
        while True:
            data = self.req.query_videos_by_catetory(count = count,
                                                     page = page,
                                                     category = cate,
                                                     period = period)
            # Error code 1017 is tolerated; any other API error stops paging.
            if 'error' in data and data['error']['code'] != 1017:
                pprint(data)
                break
            videos = data['videos']
            for video in videos:
                temp.append(video)
            self.sess.insert_videos(videos = temp,
                                    videoIdSet = videoIdSet,
                                    userIdSet = userIdSet)
            # A page with fewer than 99 entries is presumably the last one.
            if len(videos) < 99:
                break
            page += 1
def test_run_arast(self):
    """Run the AssemblyRAST 'kiki' recipe on a paired-end test library and
    pretty-print the service result."""
    # figure out where the test data lives
    pe_lib_info = self.getPairedEndLibInfo()
    pprint(pe_lib_info)
    # return

    # Object Info Contents
    # 0 - obj_id objid
    # 1 - obj_name name
    # 2 - type_string type
    # 3 - timestamp save_date
    # 4 - int version
    # 5 - username saved_by
    # 6 - ws_id wsid
    # 7 - ws_name workspace
    # 8 - string chsum
    # 9 - int size
    # 10 - usermeta meta
    params = {
        'workspace_name': pe_lib_info[7],
        'read_library_names': [pe_lib_info[1]],
        'output_contigset_name': 'output.contigset',
        'min_contig_length': 350,
        'recipe': 'kiki',
        # 'pipeline': 'ray',
        # 'assembler': '',
        'assembler': None,
        'pipeline' : None
    }
    result = self.getImpl().run_arast(self.getContext(), params)
    print('RESULT:')
    pprint(result)
def checkMap(self):
    """Parse the TMX map file and print per-state tile (gid) distributions.

    Reads the "map" layer's CSV tile data, then for every tile looks up its
    county and owning state, and accumulates a {stateId: {gid: count}}
    histogram in self._distributeDatas, which is pretty-printed at the end.
    """
    self._tree = et.parse(map_path)
    maptree = self._tree.getroot()
    self._width = int(maptree.get("width"))
    self._height = int(maptree.get("height"))
    self.initCountyDatas()
    layers = maptree.findall("layer")
    for layer in layers:
        if layer.get("name") == "map":
            data = layer.find("data")
            text = data.text.replace('\n', '')
            self._map_data = text.split(',')
    for x in range(self._width):
        for y in range(self._height):
            index = self.getTileIndex(x, y)
            gid = self._map_data[index]
            countyId = self._countyDatas[index]
            stateId = self._countyDbDatas[countyId].get("stateId")
            distributeData = self._distributeDatas.get(stateId)
            # PEP 8 fix: identity comparison for None (was `== None`)
            if distributeData is None:
                distributeData = {}
                self._distributeDatas[stateId] = distributeData
            landData = distributeData.get(gid)
            if landData is None:
                landData = {"gid": -1, "count": 0}
                distributeData[gid] = landData
            landData["gid"] = gid
            landData["count"] = landData["count"] + 1
    pprint(self._distributeDatas)
def _GetWeather(self):
    """
    Fetch the 3-day forecast JSON for self.location from worldweatheronline
    and cache the parsed result on self.weather_json.

    Records the fetch time in self.timestamp; returns None when the HTTP
    request or JSON parse fails.
    """
    self.timestamp = datetime.datetime.now()
    weatherURL = "http://api.worldweatheronline.com/free/v1/weather.ashx?key=%s&q=%s&num_of_days=3&format=json" % (
        self.key,
        self.location,
    )
    try:
        weather_page = urllib.request.urlopen(weatherURL)
        #
        # http://stackoverflow.com/questions/6862770/python-3-let-json-object-accept-bytes-or-let-urlopen-output-strings
        #
        str_response = weather_page.readall().decode("utf-8")
        self.weather_json = json.loads(str_response)
    # BUG FIX: bare `except:` also swallowed SystemExit/KeyboardInterrupt.
    except Exception:
        print("Failed to get page")
        return None
    if self.DEBUG:
        pprint(self.weather_json)
    return
def load(filename):
    """Load sensor settings from a JSON config file.

    :param filename: path to the JSON configuration file
    :return: an Options instance on success, or the sentinel list
        [0, "", "", "", "", ""] when the file does not exist.
        Exits the process with code 2 when a required key is missing.
    """
    if not os.path.isfile(filename):
        print("File doesn't exists \"" + filename + "\"")
        return [0, "", "", "", "", ""]
    # `with` guarantees the handle is closed even if json.load raises
    with open(filename) as jsonData:
        data = json.load(jsonData)
    print("====================================")
    print("Settings:")
    print("====================================")
    pprint(data)
    print("====================================")
    options = Options()
    try:
        options.frequency = data["frequency"]
        options.monitorIP = data["server_ip"]
        options.port = data["server_port"]
        options.hostname = data["hostname"]
        options.sensorname = data["sensorname"]
        options.username = data["username"]
        options.password = data["secret"]
        options.config_file = filename
    except KeyError as err:
        # BUG FIX: missing space produced e.g. "No key 'x'in config file"
        print("No key " + str(err) + " in config file")
        sys.exit(2)
    return options
def test():
    """Sanity-check process_map output against known first/last elements
    of example.osm."""
    # NOTE: if you are running this code on your computer, with a larger dataset,
    # call the process_map procedure with pretty=False. The pretty=True option adds
    # additional spaces to the output, making it significantly larger.
    data = process_map('example.osm', True)
    #pprint.pprint(data)

    correct_first_elem = {
        "id": "261114295",
        "visible": "true",
        "type": "node",
        "pos": [41.9730791, -87.6866303],
        "created": {
            "changeset": "11129782",
            "user": "******",
            "version": "7",
            "uid": "451048",
            "timestamp": "2012-03-28T18:31:23Z"
        }
    }
    pprint(data[-1])
    assert data[0] == correct_first_elem
    assert data[-1]["address"] == {
        "street": "West Lexington St.",
        "housenumber": "1412"
    }
    assert data[-1]["node_refs"] == [
        "2199822281", "2199822390", "2199822392", "2199822369",
        "2199822370", "2199822284", "2199822281"]
def test_run_velvet(self):
    """Run the Velvet assembler on a paired-end test library and
    pretty-print the service result."""
    # figure out where the test data lives
    pe_lib_info = self.getPairedEndLibInfo()
    pprint(pe_lib_info)

    # Object Info Contents
    # 0 - obj_id objid
    # 1 - obj_name name
    # 2 - type_string type
    # 3 - timestamp save_date
    # 4 - int version
    # 5 - username saved_by
    # 6 - ws_id wsid
    # 7 - ws_name workspace
    # 8 - string chsum
    # 9 - int size
    # 10 - usermeta meta
    params = {
        'workspace_name': pe_lib_info[7],
        'read_library_name': pe_lib_info[1],
        'output_contigset_name': 'output.contigset',
        'min_contig_length': 350,
        #'extra_params': '-k 23'
    }
    result = self.getImpl().run_velvet(self.getContext(), params)
    print('RESULT:')
    pprint(result)
def get_nes_ids(named_entities): print "Get the Freebase Ids for the following Named Entities:" pprint(named_entities) nes_ids = {} start_time = time() for kb_filename in os.listdir(kb_dir): if kb_filename.startswith(kb_filenames_prefix): with gzip.open(os.path.join(kb_dir, kb_filename)) as kb_file: start_time_file = time() print "reading", kb_filename for kb_line in kb_file: columns = kb_line.split('\t') fb_url = columns[0] relation = columns[1] # TODO: discuss! match case or not ne = columns[2].replace('"', '').replace('_', ' ').lower() if lang == "chi" and ne.starstwith("$"): ne = ne.replace('$', '\\u') if ne in named_entities: # see if this entity matches one of the named_entities print "found ne '%s' => id: '%s'" % (ne, fb_url) if ne not in nes_ids: nes_ids[ne] = [] nes_ids[ne].append((fb_url, relation, ne)) print "%s took %d seconds.\n" % (kb_filename, time() - start_time_file) #break print "TOTAL processing time: %d seconds" % (time() - start_time) return nes_ids
def HEAD(self, url, headers=None):
    """Get the object headers."""
    # Url is: http://controller:port/account/container/object
    response = requests.head(url, allow_redirects=True)
    from pprint import pprint
    pprint(response.headers)
    print('cliente_head', url, headers)
    return response.status_code
def user_test(netid):
    """Search campus Active Directory for the account whose CN matches netid
    and pretty-print the result."""
    base_dn = "OU=Campus Accounts,DC=ad,DC=uiuc,DC=edu"
    ldap_filter = "(CN=" + netid + ")"
    requested_attrs = None
    matches = LDAPRunner().run_search(base_dn, ldap_filter, requested_attrs)
    from pprint import pprint
    pprint(matches)
def ciudad_a_ID(ciud, prov):
    """Pretty-print the fixture entries whose search_names contain the
    lower-cased city+province string."""
    with open('../fixtures/ciudades.json') as json_data:
        data = json.load(json_data)
        json_data.close()
    needle = ciud.lower() + prov.lower()
    matches = []
    for entry in data:
        fields = entry['fields']
        if 'search_names' in fields.keys() and needle in fields['search_names']:
            matches.append(entry)
    pprint(matches)
def post_expense(self, uri):
    """POST one expense URI; print '.' on success, dump errors to stderr."""
    resp = self.api_call(uri, 'POST')
    if not resp["errors"]:
        sys.stdout.write(".")
        sys.stdout.flush()
    else:
        sys.stderr.write("URI:")
        sys.stderr.write(uri)
        pprint(resp, stream=sys.stderr)
def debug(func, *args, **kwargs):
    """Call func(*args, **kwargs); if it raises, pretty-print the local
    variables of the innermost traceback frame before re-raising."""
    try:
        return func(*args, **kwargs)
    except BaseException:
        import inspect
        innermost_locals = inspect.trace()[-1][0].f_locals
        pprint(innermost_locals)
        raise
def ScheduleScan(url1):
    """Queue url1 for scanning on the VirusTotal v2 API and print the raw
    JSON reply (Python 2).

    :param url1: the URL to submit for scanning
    """
    url = "https://www.virustotal.com/vtapi/v2/url/scan"
    # BUG FIX: the url1 argument was ignored — a hard-coded
    # "http://www.virustotal.com" was always submitted instead.
    parameters = {"url": url1, "apikey": api_key}
    data = urllib.urlencode(parameters)
    req = urllib2.Request(url, data)
    response = urllib2.urlopen(req)
    json_response = response.read()
    pprint(json_response)
def pandasarize_all(top_dir, min_logs, n_files, n_entries):
    """Convert every qualifying log directory under top_dir to HDF form,
    printing the directory ids and per-directory log counts first."""
    ids_dirs, dirs_logs = get_ids_dirs_logs(top_dir, min_logs)
    pprint(ids_dirs)
    log_counts = {}
    for directory, logs in dirs_logs.items():
        log_counts[directory] = len(logs)
    pprint(log_counts)
    for hdf_path, directory in get_jobs(ids_dirs):
        process_dir((hdf_path, directory, n_files, n_entries))
def add_to_parser(myparser, key='*'):
    """Register the argparse arguments of each matching funclist entry
    ('*' matches everything) on myparser."""
    for entry_key, entry in funclist.items():
        if entry_key != key and entry_key != '*':
            continue
        print(entry)
        for arg_names, arg_kwargs in entry["args"]:
            myparser.add_argument(*arg_names, **arg_kwargs)
    if OtcConfig.DEBUG:
        pprint(funclist)
def get_parser(host):
    """Return the parser registered for the last four characters of host,
    or None (after printing a notice) when no parser is known."""
    suffix = host[-4:]
    if suffix in parser_db:
        return parser_db[suffix]
    pprint("unrecognized host " + host)
    return None
def prefix(model, str):
    """Walk the trie `model` along `str` as far as it matches, then print
    the matched prefix and pretty-print the subtree where the walk stopped
    (Python 2).

    NOTE(review): the parameter name `str` shadows the builtin str type.
    """
    node = model
    checked = ''
    for char in str:
        if char in node:
            checked += char
            node = node[char]
        else:
            break
    print "PREFIX = {}".format(checked)
    pprint(node)
def getNDaysAgo(N):
    """Return the ISO date string (YYYY-MM-DD) for N days before now."""
    date_N_days_ago = datetime.now() - timedelta(days=N)
    return str(date_N_days_ago.date())

# Script (Python 2): walk the company-list CSV and print basic Yahoo
# Finance stats for each ticker symbol.
fname = 'C:\dump\companylist.csv'
with open(fname) as f:
    content = csv.reader(f, delimiter=',')
    twentDay = getNDaysAgo(1)
    today = str(datetime.now().date())
    for line in content:
        # NOTE(review): any(x not in ...) is True whenever at least one of
        # '^'/'$' is absent — all(...) was probably intended here; confirm.
        if any(x not in line[0] for x in ['^', '$']):
            stock = Share(line[0])
            print line[0]
            print stock.get_name()
            print stock.get_50day_moving_avg()
            # print stock.get_200day_moving_avg()
            print stock.get_avg_daily_volume()
            pprint(stock.get_historical(twentDay, today))
        else:
            print line[0] + ' contains a special character******'
# yahoo = Share('WWW')
# print yahoo.get_name()
# print yahoo.get_open();
# # print yahoo.get_50day_moving_avg();
# # print yahoo.get_avg_daily_volume()
def update_console_out(self, context):
    """Dump the attribute names of the pip-console dialog operator, as a
    debugging aid."""
    operator_attrs = dir(bpy.ops.blender_pip.console_dia)
    pprint(operator_attrs)
"INSERT INTO ways_tags(id, key, value,type) VALUES (?, ?, ?, ?);", to_db) # commit the changes conn.commit() # Finding number of Railway Stations. QUERY = ('''SELECT DISTINCT COUNT(id) FROM nodes_tags WHERE value = 'station' ''') cur.execute(QUERY) all_rows = cur.fetchall() import pandas as pd df = pd.DataFrame(all_rows) print("Number of stations are") pprint(df) # Finding number of tourist places QUERY = ('''SELECT DISTINCT COUNT(id) FROM nodes_tags WHERE key = 'tourism' AND value = 'attraction' ''') cur.execute(QUERY) all_rows = cur.fetchall() import pandas as pd df = pd.DataFrame(all_rows) print("Number of Tourist places are") pprint(df) # Finding number of religion places QUERY = ('''SELECT DISTINCT COUNT(id)
+ ", and your secret message is " + message + "\n=====================================") passwordsToSeeds[userPass] = trueSeed passwordsToSeeds[userPass + str(trueSeed - 1)] = trueSeed + 1 passwordsToSeeds[userPass + str(trueSeed - 2) + "1"] = trueSeed + 2 passwordsToSeeds[userPass.lower()] = trueSeed + 3 passwordsToSeeds[userPass.lower() + str(trueSeed + 1) + "3"] = trueSeed + 4 passwordsToSeeds[userPass.upper()] = trueSeed + 5 passwordsToSeeds[userPass.upper() + str(trueSeed + 2) + "5"] = trueSeed + 6 # ENCRYPTION: c = sk XOR sm cipher = int(passwordsToSeeds[userPass]) ^ trueSeed passwords = list(passwordsToSeeds.keys()) random.shuffle(passwords) # Shuffle the passwords pprint(passwords) # Display results try: query = input("Enter a password to crack: ") keySeed = passwordsToSeeds[query] # DECRYPTION: m = sk XOR c m = keySeed ^ cipher # ^ == XOR if m != trueSeed: # Honey checker print("Intruder! SOUNDING ALARM!") # If seeds don’t match, this is an intruder pprint(seedsToMessages[m]) except KeyError: print("Password not found. ") retry = input("Would you like to enter another inquiry (Y/N): ")
# Term Document Frequency if not corpus: corpus = [id2word.doc2bow(text) for text in texts] save_pickle('corpus', corpus, gensim_files_dir) # View print(corpus[:1]) force_create = False # TODO: True for creating new model / False otherwise # Check if model exists if os.path.exists('lda_model_trained.model') and force_create is False: pp.pprint('Loading Model') lda_model = gensim.models.ldamodel.LdaModel.load('lda_model_trained.model') elif force_create is True or not os.path.exists('lda_model_trained.model'): pprint('Building Model') # Build LDA model lda_model = gensim.models.ldamodel.LdaModel(corpus=corpus, id2word=id2word, num_topics=4, random_state=100, update_every=1, chunksize=100, passes=10, alpha='auto', per_word_topics=True) lda_model.save('lda_model_trained.model') # In[17]: # Print the Keyword in the 10 topics
def print_config(self):
    """Pretty-print this object's configuration mapping."""
    current_config = self.config
    pprint(current_config)
"count": { '$sum': 1 } } }, # The second stage in this pipe is to sort the data { "$sort": { "_id": 1 } } # Close the array with the ] tag ]) # Print the result for group in stargroup: print(group) ASingleReview = db.reviews.find_one({}) print('A sample document:') pprint(ASingleReview) result = db.reviews.update_one({'_id': ASingleReview.get('_id')}, {'$inc': { 'likes': 1 }}) print('Number of documents modified : ' + str(result.modified_count)) UpdatedDocument = db.reviews.find_one({'_id': ASingleReview.get('_id')}) print('The updated document:') pprint(UpdatedDocument)
def process_single_fname(full_fname):
    """
    Trying to extract bead types from atomic coordinates using closest neighbors types:
        T si with 4 neighbors: [ob x4]
        Tns si with 4 neighbors: [ob x3, obos]
        O al with 6 neighbors: [ob x4, oh x2]
        Ons al with 6 neighbors: [ob x2, oh x2, obos x2]
        Onso al with 6 neighbors: [ob x2, ohs x2, obos x2]
        Os mgo
        Na na
    ClayFF charges:
        -0.95 oh
        -1.05 ob
        -1.0808 ohs
        -1.1818 obos
        [57, 64, 111, 116]

    Reads a LAMMPS data file, classifies every ao/st/mgo/na atom by the
    ClayFF charges of its nearest oxygen neighbors (with periodic minimum-
    image distances), and pretty-prints the resulting histogram.
    """
    lmp_reader = DataReader()
    lmp_reader.read_data(full_fname)
    # Periodic box lengths, used for minimum-image distance wrapping below.
    lx = lmp_reader.xhi - lmp_reader.xlo
    ly = lmp_reader.yhi - lmp_reader.ylo
    lz = lmp_reader.zhi - lmp_reader.zlo
    print(lx, ly, lz)
    # Index atoms by id-1: [id, charge, x, y, z, type]
    atoms = [[] for _ in range(lmp_reader.atoms_number)]
    for atom in lmp_reader.atoms:
        atoms[atom['id'] - 1] = [
            atom['id'], atom['charge'],
            atom['x'], atom['y'], atom['z'],
            atom['type']
        ]
    chosen_atoms = []
    for atom in atoms:
        if atom[5] not in [1, 2, 6, 9]:  # ao, st, mgo, na
            continue
        chosen_atoms.append(atom)
    hysto = {'T': 0, 'Tns': 0, 'O': 0, 'Ons': 0, 'Onso': 0, 'Os': 0, 'Na': 0}
    for atom in chosen_atoms:
        neighbors = {}  # 6 closest neighbors: { distance: atom }
        for atom_1 in atoms:
            if atom[0] == atom_1[0]:
                continue
            if (not ae(atom_1[1], -0.95) and  # ignore neighboring not-oxygens
                    not ae(atom_1[1], -1.05) and
                    not ae(atom_1[1], -1.0808) and
                    not ae(atom_1[1], -1.1818)):
                continue
            dx = abs(atom_1[2] - atom[2])
            dy = abs(atom_1[3] - atom[3])
            dz = abs(atom_1[4] - atom[4])
            # Minimum-image convention for the periodic box.
            dx = min(dx, lx - dx)
            dy = min(dy, ly - dy)
            dz = min(dz, lz - dz)
            #if dx > 5 or dy > 5 or dz > 5:
            #    continue
            d = (dx**2 + dy**2 + dz**2)**0.5
            #if len(neighbors.keys()) < 6 or d < max(neighbors.keys()):
            neighbors[d] = atom_1
        #print('neighbors')
        #for k in sorted(neighbors.keys()):
        #    print(k, neighbors[k])
        # Classify by the central atom's charge, then by neighbor charges.
        if ae(atom[1], 1):
            #print('Na', atom)
            hysto['Na'] += 1
        elif ae(atom[1], 1.36):
            #print('Os', atom)
            hysto['Os'] += 1
        elif ae(atom[1], 2.1):  # si
            # Four closest oxygen neighbors decide T vs Tns.
            ds = sorted(neighbors.keys())
            neighbors_charges = [
                neighbors[ds[0]][1], neighbors[ds[1]][1],
                neighbors[ds[2]][1], neighbors[ds[3]][1]
            ]
            if aecount(neighbors_charges, -1.05) == 4:
                #print('T', atom)
                hysto['T'] += 1
            elif (aecount(neighbors_charges, -1.05) == 3 and  # ob
                    aecount(neighbors_charges, -1.1818) == 1):  # obos
                hysto['Tns'] += 1
                #print('Tns', atom)
            else:
                print('unknown si')
                print('si', atom)
                for k in sorted(neighbors.keys())[:6]:
                    print(k, neighbors[k])
                sys.exit()
        elif ae(atom[1], 1.575):  # al
            # Six closest oxygen neighbors decide O vs Ons vs Onso.
            ds = sorted(neighbors.keys())
            neighbors_charges = [
                neighbors[ds[0]][1], neighbors[ds[1]][1],
                neighbors[ds[2]][1], neighbors[ds[3]][1],
                neighbors[ds[4]][1], neighbors[ds[5]][1]
            ]
            if (aecount(neighbors_charges, -1.05) == 4 and  # ob
                    aecount(neighbors_charges, -0.95) == 2):  # oh
                hysto['O'] += 1
                #print('O', atom)
            elif (aecount(neighbors_charges, -1.05) == 2 and  # ob
                    aecount(neighbors_charges, -0.95) == 2 and  # oh
                    aecount(neighbors_charges, -1.1818)):  # obos
                hysto['Ons'] += 1
                #print('Ons', atom)
            elif (aecount(neighbors_charges, -1.05) == 2 and  # ob
                    aecount(neighbors_charges, -1.0808) == 2 and  # ohs
                    aecount(neighbors_charges, -1.1818)):  # obos
                hysto['Onso'] += 1
                #print('Onso', atom)
            else:
                print('unknown al')
                print('al', atom)
                for k in sorted(neighbors.keys())[:6]:
                    print(k, neighbors[k])
                #sys.exit()
        else:
            print('Completely unknown atom', atom)
            for k in sorted(neighbors.keys())[:6]:
                print(k, neighbors[k])
            sys.exit()
    print('***')
    pprint(hysto)
def initialize_arb():
    """Start the Binance triangular-arbitrage demo loop.

    Prints a welcome banner, loads the paper portfolio from Portfolio.txt,
    then loops forever: scores each candidate symbol triangle, picks the
    most profitable one, runs it for ten 15-second cycles (optionally
    placing orders) and visualizes the collected data.
    """
    welcome_message = "\n\n---------------------------------------------------------\n\n"
    welcome_message += "Hello and Welcome to the Binance Arbitrage Crypto Trader Bot Python Script\nCreated 2018 by Joaquin Roibal (@BlockchainEng)"
    welcome_message += "A quick 'run-through' will be performed to introduce you to the functionality of this bot\n"
    welcome_message += "To learn more visit medium.com/@BlockchainEng or watch introductory Youtube Videos"
    welcome_message += "\nCopyright 2018 by Joaquin Roibal\n"
    bot_start_time = str(datetime.now())
    welcome_message += "\nBot Start Time: {}\n\n\n".format(bot_start_time)
    client.synced('get_account')  #Example of using Sync'd
    print(welcome_message)
    #info = client.synced.get_account()
    #pprint(info)
    balance = client.synced('get_asset_balance', asset='BTC')
    pprint(balance)
    data_log_to_file(balance)  #output to file - create function
    data_log_to_file(welcome_message)
    time.sleep(5)
    try:
        # NOTE(review): capital-C Client here (vs the `client` instance used
        # above) and the literal 'get_system_status()' string with
        # parentheses both look wrong — confirm the intended call form.
        status = Client.synced('get_system_status()')
        #print("\nExchange Status: ", status)
        #Account Withdrawal History Info
        withdraws = Client.synced('get_withdraw_history()')
        #print("\nClient Withdraw History: ", withdraws)
        #for symbol in list_of_symbols:
        #    market_depth(symbol)
        #Collect all Symbols for Exchange
        #Find Arbitrage Opportunities
        coin_list = ['BTC', 'ETH', 'USDT', 'BNB']
        list_of_symbols = ['ETHBTC', 'BNBETH', 'BNBBTC']
        list_of_symbols2 = ['ETHUSDT', 'BNBETH', 'BNBUSDT']
        list_of_symbols3 = ['BTCUSDT', 'BNBBTC', 'BNBUSDT']
        list_of_arb_sym = [list_of_symbols, list_of_symbols2, list_of_symbols3]
        #for sym in list_of_symbols:
        #    info = client.get_symbol_info(sym)
        #    print(info)
        #prices = client.get_all_tickers()
        tickers = client.get_orderbook_tickers()
        #print(prices)
        #print(tickers)
        #portfolio = [10, 100, 10000, 500, str(datetime.now())]
        #Number of: [Bitcoin, Ethereum, USDT, Binance Coin]
        #Load Binance Portfolio
        binance_portfolio(coin_list)
        #Load Portfolio File
        portfolio = []
        with open('Portfolio.txt') as f1:
            read_data = f1.readlines()
            for line in read_data:
                load_portfolio = line  #Load Previous Portfolio (last line wins)
        load_portfolio = list(load_portfolio[1:-1].split(','))
        #print(load_portfolio)
        #time.sleep(5)
        #for i in range(0,3):
        #    portfolio[i] = float(portfolio[i])  #Set Type for first 4 values of Portfolio
        # First four values are coin amounts (floats); the fifth is replaced
        # by a fresh timestamp.
        i = 0
        for val in load_portfolio:
            #print(val.strip())
            if i == 4:
                portfolio.append(str(datetime.now()))
                break
            portfolio.append(float(val))
            i += 1
        portf_msg = "Starting Portfolio (Paper): " + str(portfolio)
        #Load Balances for each coin in exchange
        #Split BTC into 4 equal amounts, buy all 3 other coins with that amount
        print(portf_msg)
        portf_file_save(portfolio)
        data_log_to_file(portf_msg)
        while 1:
            #Run Arbitrage Profit Functionality - To Determine Highest Profit Percentage - Cont Loop
            calc_profit_list = []
            for arb_market in list_of_arb_sym:
                calc_profit_list.append(arbitrage_bin(arb_market, tickers, portfolio, 1, 1))
            for profit1 in calc_profit_list:
                data_log_to_file(str(profit1))
            print(calc_profit_list)
            exp_profit = 0  #Expected Profit, Set to 0 initially
            m = n = 0  #Market Position Market
            # Pick the market (index m) with the highest expected profit.
            for exch_market in calc_profit_list:
                if exch_market[4] > exp_profit:
                    exp_profit = exch_market[4]
                    m = n
                n += 1
            profit_message = "\nMost Profitable Market: {} \nExpected Profit: {}%".format(list_of_arb_sym[m], exp_profit)
            print(profit_message)
            data_log_to_file(profit_message)
            time.sleep(5)
            #Run Arbitrage Function on Highest Profit Percentage Coin for 10 minutes
            arb_list_data = []
            arb_start_time = str(datetime.now())
            for i in range(0, 10):
                #Collect Arbitrage Data Into List format for 5 cycles, 30 second cycles (replaces functionality)
                try:
                    arb_list_data.append(arbitrage_bin(list_of_arb_sym[m], tickers, portfolio, 1, 1, 'Yes', 'Yes'))  #'Yes' to place orders
                    binance_portfolio(coin_list)
                except:
                    raise
                    pass  # NOTE(review): unreachable after raise
                #print(arb_list_data)
                time.sleep(15)
            arb_end_time = str(datetime.now())
            #Visualize Collected Arb List Data with MatPlotLib
            viz_arb_data(arb_list_data, list_of_arb_sym[m], arb_start_time, arb_end_time)
    except:
        print("\nFAILURE INITIALIZE\n")
        raise
def printNetwork(self):
    """Emit a debug dump of every node in the network at the current time."""
    debug("NETWORK@", self.time, ":")
    # NOTE(review): self.d() is *called* here — if d is a dict attribute this
    # should probably be self.d.items(); confirm against the class definition.
    for k, v in self.d():
        debug()  # blank separator line
        debug("NODE:", k)
        pprint(v)
def PrintItem(item):
    """Pretty-print an item mapping, then print each field on its own line
    (Python 2).

    :param item: mapping of field name -> value
    """
    pprint(item)
    for (field, val) in item.items():
        print "%s: %s" % (field, val)
def k8s_circe_scheduler(dag_info, temp_info):
    """Deploy CIRCE onto the Kubernetes cluster.

    Creates one k8s Service per DAG task (plus the home node), then one
    Deployment per task wired together via environment-style child host /
    service-IP lists, and finally the home deployment once every task pod
    reports ready.

    :param dag_info: [first_task, dag mapping, ...] describing the task graph
    :param temp_info: tuple whose third element maps task -> host info
    """
    jupiter_config.set_globals()
    sys.path.append(jupiter_config.CIRCE_PATH)

    """
        This loads the kubernetes instance configuration.
        In our case this is stored in admin.conf.
        You should set the config file path in the jupiter_config.py file.
    """
    config.load_kube_config(config_file = jupiter_config.KUBECONFIG_PATH)

    """
        We have defined the namespace for deployments in jupiter_config
    """
    namespace = jupiter_config.DEPLOYMENT_NAMESPACE

    """
        Get proper handles or pointers to the k8-python tool to call different functions.
    """
    api = client.CoreV1Api()
    k8s_beta = client.ExtensionsV1beta1Api()

    #get DAG and home machine info
    first_task = dag_info[0]
    dag = dag_info[1]
    hosts = temp_info[2]
    print("hosts:")
    pprint(hosts)
    print(len(dag_info))
    pprint(dag_info[0])
    pprint(dag_info[1])
    pprint(dag_info[2])

    service_ips = {};  #list of all service IPs

    """
        First create the home node's service.
    """
    home_body = write_circe_service_specs(name = 'home')
    ser_resp = api.create_namespaced_service(namespace, home_body)
    print("Home service created. status = '%s'" % str(ser_resp.status))
    try:
        resp = api.read_namespaced_service('home', namespace)
    except ApiException as e:
        print("Exception Occurred")
    service_ips['home'] = resp.spec.cluster_ip

    """
        Iterate through the list of tasks and run the related k8 deployment,
        replicaset, pod, and service on the respective node.
        You can always check if a service/pod/deployment is running after
        running this script via kubectl command.
        E.g.,
            kubectl get svc -n "namespace name"
            kubectl get deployement -n "namespace name"
            kubectl get replicaset -n "namespace name"
            kubectl get pod -n "namespace name"
    """
    for key, value in dag.items():
        task = key
        nexthosts = ''

        """
            Generate the yaml description of the required service for each task
        """
        body = write_circe_service_specs(name = task)
        # Call the Kubernetes API to create the service
        ser_resp = api.create_namespaced_service(namespace, body)
        print("Service created. status = '%s'" % str(ser_resp.status))
        try:
            resp = api.read_namespaced_service(task, namespace)
        except ApiException as e:
            print("Exception Occurred")
        # print resp.spec.cluster_ip
        service_ips[task] = resp.spec.cluster_ip

    # ':'-joined lists are passed to every pod so it knows the full topology.
    all_node_ips = ':'.join(service_ips.values())
    all_node = ':'.join(service_ips.keys())
    print(all_node)

    """
        All services have started for CIRCE and deployment is yet to begin
        In the meantime, start dft_coded_detector services and their deployments
    """
    # branch_number = 3  # how many aggregation points do you have?
    # dft_coded_service_ips = []
    # for idx in range(branch_number):
    #     path = "nodes_dft_coded" + str(idx) + ".txt"
    #     dft_coded_service_ips.append(launch_dft_coding_services(path=path))
    #     launch_dft_coding_deployments(dft_coded_service_ips[idx], path=path, masterIP=service_ips['dftdetector'+str(idx)])
    #     all_node_ips = all_node_ips + ":" + dft_coded_service_ips[idx]
    #     all_node = all_node + (":dftslave%d0:dftslave%d1:dftslave%d2"%(idx,idx,idx))

    """
        Let's start the TeraSort coded detectors now!!!
    """
    # tera_master_ips = []
    # for idx in range(branch_number):
    #     path = "nodes_tera_coded" + str(idx) + ".txt"
    #     tera_coded_service_ips, master_ip = launch_tera_coding_services(path=path)
    #     launch_tera_coding_deployments(tera_coded_service_ips, path=path)
    #     tera_master_ips.append(master_ip)

    """
        Start circe
    """
    for key, value in dag.items():
        task = key
        nexthosts = ''
        next_svc = ''

        """
            We inject the host info for the child task via an environment
            variable valled CHILD_NODES to each pod/deployment.
            We perform it by concatenating the child-hosts via delimeter ':'
            For example if the child nodes are k8node1 and k8node2, we will set
            CHILD_NODES=k8node1:k8node2
            Note that the k8node1 and k8node2 in the example are the unique
            node ids of the kubernets cluster nodes.
        """
        inputnum = str(value[0])
        flag = str(value[1])
        # value[2:] are the child task names; join their host names / IPs.
        for i in range(2, len(value)):
            if i != 2:
                nexthosts = nexthosts + ':'
            nexthosts = nexthosts + str(hosts.get(value[i])[0])
        for i in range(2, len(value)):
            if i != 2:
                next_svc = next_svc + ':'
            next_svc = next_svc + str(service_ips.get(value[i]))
        print("NEXT HOSTS")
        print(nexthosts)
        print("NEXT SVC")
        print(next_svc)

        #Generate the yaml description of the required deployment for each task
        dep = write_circe_deployment_specs(flag = str(flag),
                                           inputnum = str(inputnum),
                                           name = task,
                                           node_name = hosts.get(task)[1],
                                           image = jupiter_config.WORKER_IMAGE,
                                           child = nexthosts,
                                           child_ips = next_svc,
                                           host = hosts.get(task)[1],
                                           dir = '{}',
                                           home_node_ip = service_ips.get("home"),
                                           own_ip = service_ips[key],
                                           all_node = all_node,
                                           all_node_ips = all_node_ips)
        pprint(dep)
        # # Call the Kubernetes API to create the deployment
        resp = k8s_beta.create_namespaced_deployment(body = dep, namespace = namespace)
        print("Deployment created. status = '%s'" % str(resp.status))

    # Block until every CIRCE task pod reports ready.
    while 1:
        if check_status_circe(dag):
            break
        time.sleep(30)

    home_dep = write_circe_home_specs(image = jupiter_config.HOME_IMAGE,
                                      host = jupiter_config.HOME_NODE,
                                      child = jupiter_config.HOME_CHILD,
                                      child_ips = service_ips.get(jupiter_config.HOME_CHILD),
                                      dir = '{}')
    print(home_dep)
    resp = k8s_beta.create_namespaced_deployment(body = home_dep, namespace = namespace)
    print("Home deployment created. status = '%s'" % str(resp.status))

    pprint(service_ips)
def tspassc(file=None, code="", output="tmp.tspass", use_shell=False, debug: bool = False, synth: bool = False, reparse: bool = False, timeout=20):
    """
    Parse tspass input, translate it with fotl-translate, and run the TSPASS prover.

    :param file: The tspass input file
    :param code: The tspass code (if a file is given the code will be ignored)
    :param output: The output parsing file
    :param use_shell: Run an interactive shell after parsing
        (NOTE(review): not referenced in this body -- presumably handled by a caller; verify)
    :param debug: boolean enable/disable debug messages
    :param synth: Synthesize monitors specifications from a global FOTL formula
    :param reparse: Re-parse the input file through the ANTLR TSPASS grammar
        instead of reading it verbatim
    :param timeout: tspass prover timeout
        (NOTE(review): the timeout-enforcement loop below is commented out, so
        this parameter currently has no effect)
    :return: dict with "res" (the prover verdict string) and "print" (accumulated output)
    """
    # print("-------- tspassc " + " starting at : " + str(datetime.datetime.now()) + " File : " + str(file) + " --------\n")

    # Map the running platform onto the per-OS tool directory; anything other
    # than Linux/macOS aborts the process.
    p = sys.platform
    if p.startswith("linux"):
        os_name = "linux"
    elif p.startswith("darwin"):
        os_name = "mac"
    elif p.startswith("win"):
        # os_name = "win"
        print(Color("{autored}Windows is not supported yet {/red}"))
        sys.exit(-1)
    else:
        print(Color("{autored}Unknown platform " + p + "{/red}"))
        sys.exit(-1)

    res = ""
    if file is not None:
        # Handle code from file
        if reparse:
            # Run the input through the ANTLR-generated TSPASS lexer/parser.
            input_file = FileStream(file)
            lexer = TSPASSLexer(input_file)
            stream = CommonTokenStream(lexer)
            parser = TSPASSParser(stream)
            parser.buildParseTrees = True
            if synth:
                # Adding synthesizer
                parser.addParseListener(FOTLCompilerListener())
                tr = parser.program()
            else:
                tr = parser.formula()
            # Re-render the parse tree as tspass text.
            bt = Trees2.tspassTree(tr, recog=parser)
            # print(bt)
        else:
            # No reparse: take the file contents verbatim.
            with open(file, mode='r') as f:
                bt = f.read()
        generated_tspass = file.replace(".tspass", "_gen.tspass")
    else:
        # Handle code from string
        generated_tspass = output.replace(".tspass", "_gen.tspass")
        bt = code

    # Derived artifact paths: translated FOTL input and prover result dump.
    fotl_file = generated_tspass.replace(".tspass", ".fotl")
    result_file = generated_tspass.replace(".tspass", ".result")

    # TSPASS parsing
    with open(generated_tspass, mode='w') as f:
        f.write(bt)
    if debug:
        pprint(bt)

    # FOTL Translate
    p = Popen(['tools/' + os_name + '/fotl-translate', generated_tspass], stdout=PIPE, stderr=PIPE, stdin=PIPE)
    fotl = p.stdout.read().decode("utf-8")
    if fotl == "":
        # Empty stdout is treated as failure; surface stderr instead.
        fotl = p.stderr.read().decode("utf-8")
    res += fotl + "\n"
    if debug:
        print(fotl)
        # NOTE(review): if stderr was already consumed above this prints an
        # empty string -- pipes can only be read once.
        print(p.stderr.read().decode("utf-8"))
    with open(fotl_file, mode='w') as f:
        f.write(fotl)

    # TSPASS
    p = Popen(['tools/' + os_name + '/tspass', fotl_file], stdout=PIPE, stderr=PIPE, stdin=PIPE)
    # Handling timeout
    # start = datetime.datetime.now()
    # while p.poll() is None:
    #     time.sleep(0.1)
    #     now = datetime.datetime.now()
    #     if (now - start).seconds > timeout:
    #         os.kill(p.pid, signal.SIGKILL)
    #         os.waitpid(-1, os.WNOHANG)
    #         print(Color("{autored}=== TSPASS prover Time out after " + str(timeout) + "sc ! === {/red}"))
    tspass = p.stdout.read().decode("utf-8")
    if tspass == "":
        # Same stdout-empty-means-failure convention as above.
        tspass = p.stderr.read().decode("utf-8")
    res += tspass + "\n"
    if debug:
        print(tspass)
        print(p.stderr.read().decode("utf-8"))
    with open(result_file, mode='w') as f:
        # Writing the result
        f.write(tspass)

    # Extract the prover verdict: the first line containing "SPASS beiseite:"
    # carries the satisfiability result; strip punctuation/spaces for "res".
    lookup = "SPASS beiseite:"
    sat = ""
    for line in tspass.split("\n"):
        if lookup in line:
            res += "[TSPASS] " + line.replace("SPASS beiseite:", "")
            sat = line.replace("SPASS beiseite:", "").replace(".", "").replace(" ", "")
            break
    return {"res": sat, "print": res}
def mongo_pprint():
    """Pretty-print the 'time' results collection and report its size.

    Returns:
        int: 0 on completion (shell-style success code).
    """
    print('mongo_pprint')
    results_dict = mongo_get_dict('time')
    pprint(results_dict)
    # Fix: the original formatted the whole dict here, while the message
    # clearly intends the number of entries.
    print('\nnumber of entries: {0}'.format(len(results_dict)))
    return 0
def log(object):
    """Emit *object* to stdout: plain print for strings, pprint otherwise."""
    emit = print if isinstance(object, str) else pprint
    emit(object)
def k8s_heft_scheduler(profiler_ips, ex_profiler_ips, node_names,app_name):
    """Deploy the HEFT mapper (home service + home deployment) into the cluster.

    Args:
        profiler_ips: mapping node name -> profiler service IP.
        ex_profiler_ips: execution-profiler IPs; must contain key 'home'.
        node_names: worker node names passed through to the HEFT container.
        app_name: application name; the home service is named '<app_name>-home'.
    """
    jupiter_config.set_globals()

    """
        This loads the node list
    """
    nexthost_ips = ''
    nexthost_names = ''
    path2 = jupiter_config.HERE + 'nodes.txt'
    nodes, homes = utilities.k8s_get_nodes_worker(path2)

    """
        This loads the kubernetes instance configuration.
        In our case this is stored in admin.conf.
        You should set the config file path in the jupiter_config.py file.
    """
    config.load_kube_config(config_file = jupiter_config.KUBECONFIG_PATH)

    """
        We have defined the namespace for deployments in jupiter_config
    """
    namespace = jupiter_config.MAPPER_NAMESPACE

    """
        Get proper handles or pointers to the k8-python tool to call different functions.
    """
    api = client.CoreV1Api()
    k8s_beta = client.ExtensionsV1beta1Api()

    service_ips = {};

    """
        Loop through the list of nodes and run all WAVE related k8 deployment, replicaset, pods, and service.
        You can always check if a service/pod/deployment is running after running this script via kubectl command.
        E.g.,
            kubectl get svc -n "namespace name"
            kubectl get deployement -n "namespace name"
            kubectl get replicaset -n "namespace name"
            kubectl get pod -n "namespace name"
    """
    # Create the ClusterIP service fronting the HEFT home pod.
    home_name = app_name+'-home'
    home_body = write_heft_service_specs(name = home_name, label = home_name)
    ser_resp = api.create_namespaced_service(namespace, home_body)
    print("Home service created. status = '%s'" % str(ser_resp.status))

    # Read the service back to learn the cluster IP assigned to it.
    try:
        resp = api.read_namespaced_service(home_name, namespace)
    except ApiException as e:
        print("Exception Occurred")
    # NOTE(review): if the read above raised, `resp` is unbound and the next
    # line fails with NameError -- consider aborting or re-raising instead.
    service_ips[home_name] = resp.spec.cluster_ip
    home_ip = service_ips[home_name]

    # Partition profiler IPs: entries for home nodes are moved out of the
    # worker-node map into home_profiler_ips.
    node_profiler_ips = profiler_ips.copy()
    home_profiler_ips = {}
    for key in homes:
        print(key)
        home_profiler_ips[key] = profiler_ips[key]
        del node_profiler_ips[key]
    # Serialize both maps as sorted space-separated "name:ip" pairs for env vars.
    profiler_ips_str = ' '.join('{0}:{1}'.format(key, val) for key, val in sorted(node_profiler_ips.items()))
    home_profiler_str = ' '.join('{0}:{1}'.format(key, val) for key, val in sorted(home_profiler_ips.items()))

    # Create the HEFT home deployment itself.
    home_dep = write_heft_specs(name = home_name, label = home_name,
                                image = jupiter_config.HEFT_IMAGE,
                                host = jupiter_config.HOME_NODE,
                                node_names = node_names,
                                home_ip = home_ip,
                                profiler_ips = profiler_ips_str,
                                execution_home_ip = ex_profiler_ips['home'],
                                home_profiler_ip = home_profiler_str)
    resp = k8s_beta.create_namespaced_deployment(body = home_dep, namespace = namespace)
    print("Home deployment created. status = '%s'" % str(resp.status))
    pprint(service_ips)
# NOTE(review): notebook-style fragment -- `topic`, `ldamodel`, `corpus`,
# `tokenized_doc` and `dictionary` are defined in earlier cells, and the
# `!pip` lines at the bottom are IPython shell magics, not plain Python.
print(topic)

# Internal evaluation metric; lower perplexity indicates better performance.
print('Perplexity: ', ldamodel.log_perplexity(corpus))

# CoherenceModel: higher coherence means the topics are easier to interpret.
from gensim.models.coherencemodel import CoherenceModel
coherence_model_lda = CoherenceModel(model=ldamodel, texts=tokenized_doc, dictionary=dictionary, coherence='c_v')
coherence_lda = coherence_model_lda.get_coherence()
# Observed value: 0.5803686335572611 -- neither particularly good nor bad.
print('Coherence Score : ', coherence_lda)

"""https://coredottoday.github.io/2018/09/17/%EB%AA%A8%EB%8D%B8-%ED%8C%8C%EB%9D%BC%EB%AF%B8%ED%84%B0-%ED%8A%9C%EB%8B%9D/"""

# Same topic dump three ways: plain print, pprint, and a PrettyPrinter
# configured with a 2-space indent.
print(ldamodel.show_topics(formatted=False))
pprint(ldamodel.show_topics(formatted=False))

import pprint
pp = pprint.PrettyPrinter(indent=2)
pp.pprint(ldamodel.show_topics(formatted=False))

"""# 실제 뉴스 데이터로 뉴스 기사 주제를 분류하는 텍스트 분류기 만들기

## download
"""

!pip install beautifulsoup4
!pip install newspaper3k
!pip install konlpy
# ignore blink, underline and anything we don't understand continue n += len(chunk) if chunk: res.append((u"0x%x"%attr, chunk)) return res #trtable={0:"black",1:"red",2:"green",3:"yellow",4:"blue",5:"magenta",6:"cyan",7:"white"} if __name__==u"__main__x": import pprint pprint=pprint.pprint s=u"\033[0;31mred\033[0;32mgreen\033[0;33myellow\033[0;34mblue\033[0;35mmagenta\033[0;36mcyan\033[0;37mwhite\033[0m" pprint (write_color(s)) pprint (write_color_old(s)) s=u"\033[1;31mred\033[1;32mgreen\033[1;33myellow\033[1;34mblue\033[1;35mmagenta\033[1;36mcyan\033[1;37mwhite\033[0m" pprint (write_color(s)) pprint (write_color_old(s)) s=u"\033[0;7;31mred\033[0;7;32mgreen\033[0;7;33myellow\033[0;7;34mblue\033[0;7;35mmagenta\033[0;7;36mcyan\033[0;7;37mwhite\033[0m" pprint (write_color(s)) pprint (write_color_old(s)) s=u"\033[1;7;31mred\033[1;7;32mgreen\033[1;7;33myellow\033[1;7;34mblue\033[1;7;35mmagenta\033[1;7;36mcyan\033[1;7;37mwhite\033[0m" pprint (write_color(s)) pprint (write_color_old(s)) if __name__==u"__main__": import console
# Create the Q minor matrix Q_min = [ [float(i==j) - 2.0 * v[i] * v[j] for i in xrange(n-k)] for j in xrange(n-k) ] # "Pad out" the Q minor matrix with elements from the identity Q_t = [[ Q_i(Q_min,i,j,k) for i in xrange(n)] for j in xrange(n)] # If this is the first run through, right multiply by A, # else right multiply by Q if k == 0: Q = Q_t R = mult_matrix(Q_t,A) else: Q = mult_matrix(Q_t,Q) R = mult_matrix(Q_t,R) # Since Q is defined as the product of transposes of Q_t, # we need to take the transpose upon returning it return trans_matrix(Q), R A = [[12, -51, 4], [6, 167, -68], [-4, 24, -41]] Q, R = householder(A) print "A:" pprint(A) print "Q:" pprint(Q) print "R:" pprint(R)
chosenMemory = random.choice(memories) print(f" : Checking memory '{chosenMemory['title']}'") if chosenMemory['title'] in stateDb: print( f" : WARNING: Memory {chosenMemory['title']} already posted; choosing new one..." ) continue else: break print(" : Memory Chosen") print("==================================================================") pprint(chosenMemory) print("==================================================================") # Download the memory image response = requests.get(chosenMemory['photo_url'], stream=True) with open('img.jpg', 'wb') as out_file: shutil.copyfileobj(response.raw, out_file) # Assemble the tweet text tweet = f"{chosenMemory['caption']} " print(" : Preview of tweet to be posted") print("==================================================================")
'PartitionKey': 'my_friends1', 'RowKey': str(k), 'first_name': user[0]['first_name'], 'last_name': user[0]['last_name'], 'user_id': user[0]['uid'] } batch.insert_entity(friends_info) for f in friends: user = api.users.get(user_ids=f) k = k + 1 friends_info = { 'PartitionKey': 'my_friends1', 'RowKey': str(k), 'first_name': user[0]['first_name'], 'last_name': user[0]['last_name'], 'user_id': user[0]['uid'] } batch.insert_entity(friends_info) if k % 10 == 0: # try: table_service.commit_batch('MyVkApp', batch) batch = TableBatch() print('Коммит прошёл') # except: # print('произошла ошибка.') print(k) pprint(user) time.sleep(1) table_service.commit_batch('MyVkApp', batch)
# Demo: exercise pprint on a dict mixing nested mappings, shared references
# (`somelist` appears twice), tuples, OrderedDict, Counter, defaultdict,
# frozenset, unicode keys, a self-referential structure (`recursive`) and
# deeply nested long values.
# NOTE(review): `somelist` and `recursive` must be defined earlier in the
# file -- confirm against the full script.
pprint({
    "a": {"a": "b"},
    "b": [somelist, somelist],
    "c": [
        (1, ),
        (1,2,3),
    ],
    "ordereddict": OrderedDict([
        (1, 1), (10, 10), (2, 2), (11, 11)
    ]),
    "counter": [
        Counter(),
        Counter("asdfasdfasdf"),
    ],
    "dd": [
        defaultdict(int, {}),
        defaultdict(int, {"foo": 42}),
    ],
    "frozenset": frozenset("abc"),
    "np": [
        "hello",
        #np.array([[1,2],[3,4]]),
        "world",
    ],
    u"u": ["a", u"\u1234", "b"],
    "recursive": recursive,
    "z": {
        "very very very long key stuff 1234": {
            "much value": "very nest! " * 10,
            u"unicode": u"4U!'\"",
        },
        "aldksfj alskfj askfjas fkjasdlkf jasdlkf ajslfjas": ["asdf"] * 10,
    },
})
def describe(self):
    """Pretty-print a one-line summary of this scrub: beta, correlation, n."""
    summary = (
        f"{self.scrub_type}-> Beta: {self.beta1:.2f}, "
        f"Corr.: {self.corr:.2f}, n = {self.degrees_of_freedom:.0f}"
    )
    pprint(summary)
# Python 2 script: evaluates word-embedding similarity against two human
# similarity benchmarks, then prints nearest neighbours for probe words.
# NOTE(review): `model` (gensim-style: supports `in`, .similarity,
# .most_similar), `csv` and `sci` (scipy) come from earlier in the file.
benchmarks = ['benchmarks/wordsim353.csv','benchmarks/Mtruk.csv']
for bench in benchmarks:
    # Each CSV row: word1, word2, human similarity score.
    test_words = []
    with open(bench, 'rb') as csvfile:
        reader = csv.reader(csvfile, delimiter=',', quotechar='"')
        for row in reader:
            test_words.append([row[0],row[1],float(row[2])])
    # Collect paired scores only when both words are in the model's vocabulary.
    list_score1 = []
    list_score2 = []
    for test in test_words:
        word1 = test[0].lower()
        word2 = test[1].lower()
        if word1 in model and word2 in model:
            list_score1.append(test[2])
            #score2 = 1-distance(vec1,vec2)
            score2 = model.similarity(word1, word2)
            list_score2.append(score2)
            print "Word1:"+word1+" Word2:"+word2+" Human Score:"+str(test[2])+" Hash2Vec:"+str(score2)
    # Spearman rank correlation between human and model similarity scores.
    rho = sci.stats.spearmanr(list_score1, list_score2)
    print bench+" Rho:"+str(rho[0])

# Qualitative check: top-10 nearest neighbours for a set of probe words.
words_to_find = ['computer','king','queen','physics','north','italy','wounded','car','church','wednesday','two','man','woman']
for w in words_to_find:
    print w
    pprint(model.most_similar(w,topn=10))
def update_instPack(self, context):
    """Dump this add-on's 'instPack' preference to stdout (debug callback)."""
    addon = context.user_preferences.addons[__name__]
    pprint(addon.preferences['instPack'])
"""Retrieves the VOEvent object associated with a given event and returns it as either a Python dictionary or an XML string.""" # Build URL params = { "cmd": "export-voevent", "cosec": 1, "ivorn": self['kb_archivid'] } url = base_url + urllib.parse.urlencode(params) # Query and read response response = urllib.request.urlopen(url).read() # Return a string or dict if as_dict: return xml_to_dict(response) else: return response if __name__ == '__main__': import pprint from sunpy.net.hek import attrs as a c = HEKClient() b = c.search( a.Time((2010, 1, 1), (2010, 1, 2)) | a.Time((2010, 1, 3), (2010, 1, 4)), a.AR, a.FL) pprint(b[0].vso_all)
if __name__ == "__main__": tryTable = """head1 important2! !OMP_NUM_THREADS! abcd 1 1.1 1.2 1.3 'a' 'b' 'c' 'd' ### comment # empty line 1 = = g """ file = '/tmp/try-tbl.txt' f = open(file, 'w') f.write(tryTable) f.close() from pprint import * pprint(TableParamReader(file).paramDict()) def runningInBatch(): 'Tell whether we are running inside the batch or separately.' import os return 'YADE_BATCH' in os.environ def waitIfBatch(): 'Block the simulation if running inside a batch. Typically used at the end of script so that it does not finish prematurely in batch mode (the execution would be ended in such a case).' if runningInBatch(): O.wait() def readParamsFromTable(tableFileLine=None, noTableOk=True,
def log(object):
    """Print `object`: bare print for text, pprint for everything else."""
    # Python 2 code: `unicode` exists only on Python 2 and `print` is a
    # statement; both byte strings and unicode strings go through plain print.
    if isinstance(object, (str, unicode)):
        print object
    else:
        pprint(object)
def gentable(pface):
    """Generate dealer final-outcome probability tables and write them to
    soft.txt, hard.txt and ace.txt.

    Each table entry is a 6-element probability vector indexed by final
    dealer total 17, 18, 19, 20, 21, and 22 (bust), computed by backward
    recursion over hard and soft hand totals.

    :param pface: probability of drawing a 10-valued card; the remaining
        probability mass (1 - pface) is split evenly over the other 9 ranks.

    NOTE(review): relies on a helper `add` defined elsewhere in the file --
    presumably element-wise list addition; verify before modifying.
    """
    pnonface = 1 - pface
    # hard[j] / soft[j]: outcome distribution when the dealer holds a
    # hard/soft total of j. Index 0..5 maps to final totals 17..21 and bust.
    hard = {j: [0 for i in range(6)] for j in range(1, 23)}
    soft = {j: [0 for i in range(6)] for j in range(12, 22)}
    # Terminal states: totals 17..21 (and hard 22 = bust) are absorbing,
    # so their distribution is a unit vector at the corresponding slot.
    for k, v in soft.items():
        if k >= 17 and k <= 21:
            v[k - 17] = 1
    for k, v in hard.items():
        if k >= 17 and k <= 22:
            v[k - 17] = 1

    # Backward recursion over hard totals 16 down to 11: draw a card of
    # value k; value 10 carries weight pface, each other rank pnonface/9.
    # Totals of 22+ are folded into the bust state hard[22].
    i = 16
    while i >= 11:
        for k in range(1, 11):
            if k == 10:
                a = i + k
                if a >= 22:
                    hard[i] = add(hard[i], [pface * x for x in hard[22]])
                else:
                    hard[i] = add(hard[i], [pface * x for x in hard[a]])
            else:
                a = i + k
                if a >= 22:
                    hard[i] = add(hard[i], [(pnonface / 9) * x for x in hard[22]])
                else:
                    hard[i] = add(hard[i], [(pnonface / 9) * x for x in hard[a]])
        i -= 1

    # Soft totals 16 down to 12: exceeding 21 converts the ace from 11 to 1,
    # i.e. the hand becomes the hard total (a - 10).
    j = 16
    while j >= 12:
        for k in range(1, 11):
            if k == 10:
                a = j + k
                if a > 21:
                    soft[j] = add(soft[j], [pface * x for x in hard[a - 10]])
                elif a <= 21:
                    soft[j] = add(soft[j], [pface * x for x in soft[a]])
            else:
                a = j + k
                if a > 21:
                    soft[j] = add(soft[j], [(pnonface / 9) * x for x in hard[a - 10]])
                elif a <= 21:
                    soft[j] = add(soft[j], [(pnonface / 9) * x for x in soft[a]])
        j -= 1

    # Hard totals 10 down to 1: drawing an ace (k == 1) moves to the soft
    # total t + 11; other draws stay in the hard table.
    t = 10
    while t >= 1:
        for k in range(1, 11):
            if k == 10:
                a = t + k
                if a > 21:
                    hard[t] = add(hard[t], [pface * x for x in hard[22]])
                else:
                    hard[t] = add(hard[t], [pface * x for x in hard[a]])
            elif k == 1:
                hard[t] = add(hard[t], [(pnonface / 9) * x for x in soft[t + 11]])
            elif k >= 2 and k <= 9:
                a = t + k
                if a > 21:
                    hard[t] = add(hard[t], [(pnonface / 9) * x for x in hard[22]])
                else:
                    hard[t] = add(hard[t], [(pnonface / 9) * x for x in hard[a]])
        t -= 1

    # Dump the tables: one space-separated vector per total, ascending.
    # Console output prints each total with its row sum (should be ~1.0).
    fout1 = open("soft.txt", "w")
    fout2 = open("hard.txt", "w")
    fout3 = open("ace.txt", "w")
    l1 = sorted(hard.keys())
    for k in l1:
        print(" ".join(map(str, hard[k])), file=fout2)
        print(k, sum(hard[k]))
    print("soft")
    l2 = sorted(soft.keys())
    for k in l2:
        print(" ".join(map(str, soft[k])), file=fout1)
        print(k, sum(soft[k]))

    # Ace up-card distribution: mix over the first drawn card on top of the
    # ace (soft 12..21), weighted by the same draw probabilities.
    a = [0 for i in range(6)]
    pprint(soft)
    for i in range(1, 11):
        if i == 10:
            a = add(a, [pface * x for x in soft[11 + i]])
        else:
            a = add(a, [(pnonface / 9) * x for x in soft[11 + i]])
    print(" ".join(map(str, a)), file=fout3)
    fout1.close()
    fout2.close()
    fout3.close()
    pprint(hard)
    pprint(soft)
    print(sum(a))
def run_test(self): min_relay_tx_fee = self.nodes[0].getnetworkinfo()['relayfee'] # This test is not meant to test fee estimation and we'd like # to be sure all txs are sent at a consistent desired feerate for node in self.nodes: node.settxfee(min_relay_tx_fee) # if the fee's positive delta is higher than this value tests will fail, # neg. delta always fail the tests. # The size of the signature of every input may be at most 2 bytes larger # than a minimum sized signature. # = 2 bytes * minRelayTxFeePerByte feeTolerance = 2 * min_relay_tx_fee / 1000 self.nodes[2].generate(1) self.sync_all() self.nodes[0].generate(121) self.sync_all() # ensure that setting changePosition in fundraw with an exact match is handled properly rawmatch = self.nodes[2].createrawtransaction( [], {self.nodes[2].getnewaddress(): 5000}) rawmatch = self.nodes[2].fundrawtransaction( rawmatch, { "changePosition": 1, "subtractFeeFromOutputs": [0] }) assert_equal(rawmatch["changepos"], -1) watchonly_address = self.nodes[0].getnewaddress() watchonly_pubkey = self.nodes[0].validateaddress( watchonly_address)["pubkey"] watchonly_amount = Decimal(20000) self.nodes[3].importpubkey(watchonly_pubkey, "", True) watchonly_txid = self.nodes[0].sendtoaddress(watchonly_address, watchonly_amount) self.nodes[0].sendtoaddress(self.nodes[3].getnewaddress(), watchonly_amount / 10) self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.5) self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1.0) self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 5.0) self.nodes[0].generate(1) self.sync_all() ############### # simple test # ############### inputs = [] outputs = {self.nodes[0].getnewaddress(): 1.0} rawtx = self.nodes[2].createrawtransaction(inputs, outputs) dec_tx = self.nodes[2].decoderawtransaction(rawtx) rawtxfund = self.nodes[2].fundrawtransaction(rawtx) fee = rawtxfund['fee'] dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) assert (len(dec_tx['vin']) > 0) #test that we have enough 
inputs ############################## # simple test with two coins # ############################## inputs = [] outputs = {self.nodes[0].getnewaddress(): 2.2} rawtx = self.nodes[2].createrawtransaction(inputs, outputs) dec_tx = self.nodes[2].decoderawtransaction(rawtx) rawtxfund = self.nodes[2].fundrawtransaction(rawtx) fee = rawtxfund['fee'] dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) assert (len(dec_tx['vin']) > 0) #test if we have enough inputs ############################## # simple test with two coins # ############################## inputs = [] outputs = {self.nodes[0].getnewaddress(): 2.6} rawtx = self.nodes[2].createrawtransaction(inputs, outputs) dec_tx = self.nodes[2].decoderawtransaction(rawtx) rawtxfund = self.nodes[2].fundrawtransaction(rawtx) fee = rawtxfund['fee'] dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) assert (len(dec_tx['vin']) > 0) assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '') ################################ # simple test with two outputs # ################################ inputs = [] outputs = { self.nodes[0].getnewaddress(): 2.6, self.nodes[1].getnewaddress(): 2.5 } rawtx = self.nodes[2].createrawtransaction(inputs, outputs) dec_tx = self.nodes[2].decoderawtransaction(rawtx) rawtxfund = self.nodes[2].fundrawtransaction(rawtx) fee = rawtxfund['fee'] dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) totalOut = 0 for out in dec_tx['vout']: totalOut += out['value'] assert (len(dec_tx['vin']) > 0) assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '') ######################################################################### # test a fundrawtransaction with a VIN greater than the required amount # ######################################################################### utx = get_unspent(self.nodes[2].listunspent(), 5) inputs = [{'txid': utx['txid'], 'vout': utx['vout']}] outputs = {self.nodes[0].getnewaddress(): 1.0} rawtx = self.nodes[2].createrawtransaction(inputs, outputs) dec_tx = 
self.nodes[2].decoderawtransaction(rawtx) assert_equal(utx['txid'], dec_tx['vin'][0]['txid']) rawtxfund = self.nodes[2].fundrawtransaction(rawtx) fee = rawtxfund['fee'] dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) totalOut = 0 for out in dec_tx['vout']: totalOut += out['value'] assert_equal(fee + totalOut, utx['amount']) #compare vin total and totalout+fee ##################################################################### # test a fundrawtransaction with which will not get a change output # ##################################################################### utx = get_unspent(self.nodes[2].listunspent(), 5) inputs = [{'txid': utx['txid'], 'vout': utx['vout']}] outputs = { self.nodes[0].getnewaddress(): Decimal(5.0) - fee - feeTolerance } rawtx = self.nodes[2].createrawtransaction(inputs, outputs) dec_tx = self.nodes[2].decoderawtransaction(rawtx) assert_equal(utx['txid'], dec_tx['vin'][0]['txid']) rawtxfund = self.nodes[2].fundrawtransaction(rawtx) fee = rawtxfund['fee'] dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) totalOut = 0 for out in dec_tx['vout']: totalOut += out['value'] assert_equal(rawtxfund['changepos'], -1) assert_equal(fee + totalOut, utx['amount']) #compare vin total and totalout+fee #################################################### # test a fundrawtransaction with an invalid option # #################################################### utx = get_unspent(self.nodes[2].listunspent(), 5) inputs = [{'txid': utx['txid'], 'vout': utx['vout']}] outputs = {self.nodes[0].getnewaddress(): Decimal(4.0)} rawtx = self.nodes[2].createrawtransaction(inputs, outputs) dec_tx = self.nodes[2].decoderawtransaction(rawtx) assert_equal(utx['txid'], dec_tx['vin'][0]['txid']) assert_raises_rpc_error(-3, "Unexpected key foo", self.nodes[2].fundrawtransaction, rawtx, {'foo': 'bar'}) ############################################################ # test a fundrawtransaction with an invalid change address # 
############################################################ utx = get_unspent(self.nodes[2].listunspent(), 5) inputs = [{'txid': utx['txid'], 'vout': utx['vout']}] outputs = {self.nodes[0].getnewaddress(): Decimal(4.0)} rawtx = self.nodes[2].createrawtransaction(inputs, outputs) dec_tx = self.nodes[2].decoderawtransaction(rawtx) assert_equal(utx['txid'], dec_tx['vin'][0]['txid']) assert_raises_rpc_error( -5, "changeAddress must be a valid colombo address", self.nodes[2].fundrawtransaction, rawtx, {'changeAddress': 'foobar'}) ############################################################ # test a fundrawtransaction with a provided change address # ############################################################ utx = get_unspent(self.nodes[2].listunspent(), 5) inputs = [{'txid': utx['txid'], 'vout': utx['vout']}] outputs = {self.nodes[0].getnewaddress(): Decimal(4.0)} rawtx = self.nodes[2].createrawtransaction(inputs, outputs) dec_tx = self.nodes[2].decoderawtransaction(rawtx) assert_equal(utx['txid'], dec_tx['vin'][0]['txid']) change = self.nodes[2].getnewaddress() assert_raises_rpc_error(-8, "changePosition out of bounds", self.nodes[2].fundrawtransaction, rawtx, { 'changeAddress': change, 'changePosition': 2 }) rawtxfund = self.nodes[2].fundrawtransaction(rawtx, { 'changeAddress': change, 'changePosition': 0 }) dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) out = dec_tx['vout'][0] assert_equal(change, out['scriptPubKey']['addresses'][0]) ######################################################################### # test a fundrawtransaction with a VIN smaller than the required amount # ######################################################################### utx = get_unspent(self.nodes[2].listunspent(), 1) inputs = [{'txid': utx['txid'], 'vout': utx['vout']}] outputs = {self.nodes[0].getnewaddress(): 1.0} rawtx = self.nodes[2].createrawtransaction(inputs, outputs) # 4-byte version + 1-byte vin count + 36-byte prevout then script_len rawtx = rawtx[:82] + 
"0100" + rawtx[84:] dec_tx = self.nodes[2].decoderawtransaction(rawtx) assert_equal(utx['txid'], dec_tx['vin'][0]['txid']) assert_equal("00", dec_tx['vin'][0]['scriptSig']['hex']) rawtxfund = self.nodes[2].fundrawtransaction(rawtx) fee = rawtxfund['fee'] dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) totalOut = 0 matchingOuts = 0 for i, out in enumerate(dec_tx['vout']): totalOut += out['value'] if out['scriptPubKey']['addresses'][0] in outputs: matchingOuts += 1 else: assert_equal(i, rawtxfund['changepos']) assert_equal(utx['txid'], dec_tx['vin'][0]['txid']) assert_equal("00", dec_tx['vin'][0]['scriptSig']['hex']) assert_equal(matchingOuts, 1) assert_equal(len(dec_tx['vout']), 2) ########################################### # test a fundrawtransaction with two VINs # ########################################### utx = get_unspent(self.nodes[2].listunspent(), 1) utx2 = get_unspent(self.nodes[2].listunspent(), 5) inputs = [{ 'txid': utx['txid'], 'vout': utx['vout'] }, { 'txid': utx2['txid'], 'vout': utx2['vout'] }] outputs = {self.nodes[0].getnewaddress(): 6.0} rawtx = self.nodes[2].createrawtransaction(inputs, outputs) dec_tx = self.nodes[2].decoderawtransaction(rawtx) assert_equal(utx['txid'], dec_tx['vin'][0]['txid']) rawtxfund = self.nodes[2].fundrawtransaction(rawtx) fee = rawtxfund['fee'] dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) totalOut = 0 matchingOuts = 0 for out in dec_tx['vout']: totalOut += out['value'] if out['scriptPubKey']['addresses'][0] in outputs: matchingOuts += 1 assert_equal(matchingOuts, 1) assert_equal(len(dec_tx['vout']), 2) matchingIns = 0 for vinOut in dec_tx['vin']: for vinIn in inputs: if vinIn['txid'] == vinOut['txid']: matchingIns += 1 assert_equal( matchingIns, 2) #we now must see two vins identical to vins given as params ######################################################### # test a fundrawtransaction with two VINs and two vOUTs # ######################################################### utx = 
get_unspent(self.nodes[2].listunspent(), 1) utx2 = get_unspent(self.nodes[2].listunspent(), 5) inputs = [{ 'txid': utx['txid'], 'vout': utx['vout'] }, { 'txid': utx2['txid'], 'vout': utx2['vout'] }] outputs = { self.nodes[0].getnewaddress(): 6.0, self.nodes[0].getnewaddress(): 1.0 } rawtx = self.nodes[2].createrawtransaction(inputs, outputs) dec_tx = self.nodes[2].decoderawtransaction(rawtx) assert_equal(utx['txid'], dec_tx['vin'][0]['txid']) rawtxfund = self.nodes[2].fundrawtransaction(rawtx) fee = rawtxfund['fee'] dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) totalOut = 0 matchingOuts = 0 for out in dec_tx['vout']: totalOut += out['value'] if out['scriptPubKey']['addresses'][0] in outputs: matchingOuts += 1 assert_equal(matchingOuts, 2) assert_equal(len(dec_tx['vout']), 3) ############################################################ #compare fee of a standard pubkeyhash transaction inputs = [] outputs = {self.nodes[1].getnewaddress(): 1.1} rawtx = self.nodes[0].createrawtransaction(inputs, outputs) fundedTx = self.nodes[0].fundrawtransaction(rawtx) #create same transaction over sendtoaddress txId = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1.1) signedFee = self.nodes[0].getrawmempool(True)[txId]['fee'] #compare fee feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee) assert (feeDelta >= 0 and feeDelta <= feeTolerance) ############################################################ ############################################################ #compare fee of a standard pubkeyhash transaction with multiple outputs inputs = [] outputs = { self.nodes[1].getnewaddress(): 1.1, self.nodes[1].getnewaddress(): 1.2, self.nodes[1].getnewaddress(): 0.1, self.nodes[1].getnewaddress(): 1.3, self.nodes[1].getnewaddress(): 0.2, self.nodes[1].getnewaddress(): 0.3 } rawtx = self.nodes[0].createrawtransaction(inputs, outputs) fundedTx = self.nodes[0].fundrawtransaction(rawtx) #create same transaction over sendtoaddress txId = 
self.nodes[0].sendmany("", outputs) signedFee = self.nodes[0].getrawmempool(True)[txId]['fee'] #compare fee feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee) assert (feeDelta >= 0 and feeDelta <= feeTolerance) ############################################################ ############################################################ #compare fee of a 2of2 multisig p2sh transaction # create 2of2 addr addr1 = self.nodes[1].getnewaddress() addr2 = self.nodes[1].getnewaddress() addr1Obj = self.nodes[1].validateaddress(addr1) addr2Obj = self.nodes[1].validateaddress(addr2) mSigObj = self.nodes[1].addmultisigaddress( 2, [addr1Obj['pubkey'], addr2Obj['pubkey']]) inputs = [] outputs = {mSigObj: 1.1} rawtx = self.nodes[0].createrawtransaction(inputs, outputs) fundedTx = self.nodes[0].fundrawtransaction(rawtx) #create same transaction over sendtoaddress txId = self.nodes[0].sendtoaddress(mSigObj, 1.1) signedFee = self.nodes[0].getrawmempool(True)[txId]['fee'] #compare fee feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee) assert (feeDelta >= 0 and feeDelta <= feeTolerance) ############################################################ ############################################################ #compare fee of a standard pubkeyhash transaction # create 4of5 addr addr1 = self.nodes[1].getnewaddress() addr2 = self.nodes[1].getnewaddress() addr3 = self.nodes[1].getnewaddress() addr4 = self.nodes[1].getnewaddress() addr5 = self.nodes[1].getnewaddress() addr1Obj = self.nodes[1].validateaddress(addr1) addr2Obj = self.nodes[1].validateaddress(addr2) addr3Obj = self.nodes[1].validateaddress(addr3) addr4Obj = self.nodes[1].validateaddress(addr4) addr5Obj = self.nodes[1].validateaddress(addr5) mSigObj = self.nodes[1].addmultisigaddress(4, [ addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey'], addr4Obj['pubkey'], addr5Obj['pubkey'] ]) inputs = [] outputs = {mSigObj: 1.1} rawtx = self.nodes[0].createrawtransaction(inputs, outputs) fundedTx = 
self.nodes[0].fundrawtransaction(rawtx) #create same transaction over sendtoaddress txId = self.nodes[0].sendtoaddress(mSigObj, 1.1) signedFee = self.nodes[0].getrawmempool(True)[txId]['fee'] #compare fee feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee) assert (feeDelta >= 0 and feeDelta <= feeTolerance) ############################################################ ############################################################ # spend a 2of2 multisig transaction over fundraw # create 2of2 addr addr1 = self.nodes[2].getnewaddress() addr2 = self.nodes[2].getnewaddress() addr1Obj = self.nodes[2].validateaddress(addr1) addr2Obj = self.nodes[2].validateaddress(addr2) mSigObj = self.nodes[2].addmultisigaddress( 2, [addr1Obj['pubkey'], addr2Obj['pubkey']]) # send 1.2 CLM to msig addr txId = self.nodes[0].sendtoaddress(mSigObj, 1.2) self.sync_all() self.nodes[1].generate(1) self.sync_all() oldBalance = self.nodes[1].getbalance() inputs = [] outputs = {self.nodes[1].getnewaddress(): 1.1} rawtx = self.nodes[2].createrawtransaction(inputs, outputs) fundedTx = self.nodes[2].fundrawtransaction(rawtx) signedTx = self.nodes[2].signrawtransaction(fundedTx['hex']) txId = self.nodes[2].sendrawtransaction(signedTx['hex']) self.sync_all() self.nodes[1].generate(1) self.sync_all() # make sure funds are received at node1 assert_equal(oldBalance + Decimal('1.10000000'), self.nodes[1].getbalance()) ############################################################ # locked wallet test self.stop_node(0) self.nodes[1].node_encrypt_wallet("test") self.stop_node(2) self.stop_node(3) self.start_nodes() # This test is not meant to test fee estimation and we'd like # to be sure all txs are sent at a consistent desired feerate for node in self.nodes: node.settxfee(min_relay_tx_fee) connect_nodes_bi(self.nodes, 0, 1) connect_nodes_bi(self.nodes, 1, 2) connect_nodes_bi(self.nodes, 0, 2) connect_nodes_bi(self.nodes, 0, 3) self.sync_all() # drain the keypool self.nodes[1].getnewaddress() 
self.nodes[1].getrawchangeaddress() inputs = [] outputs = {self.nodes[0].getnewaddress(): 1.1} rawtx = self.nodes[1].createrawtransaction(inputs, outputs) # fund a transaction that requires a new key for the change output # creating the key must be impossible because the wallet is locked assert_raises_rpc_error( -4, "Keypool ran out, please call keypoolrefill first", self.nodes[1].fundrawtransaction, rawtx) #refill the keypool self.nodes[1].walletpassphrase("test", 100) self.nodes[1].keypoolrefill( 8) #need to refill the keypool to get an internal change address self.nodes[1].walletlock() assert_raises_rpc_error(-13, "walletpassphrase", self.nodes[1].sendtoaddress, self.nodes[0].getnewaddress(), 1.2) oldBalance = self.nodes[0].getbalance() inputs = [] outputs = {self.nodes[0].getnewaddress(): 1.1} rawtx = self.nodes[1].createrawtransaction(inputs, outputs) fundedTx = self.nodes[1].fundrawtransaction(rawtx) #now we need to unlock self.nodes[1].walletpassphrase("test", 600) signedTx = self.nodes[1].signrawtransaction(fundedTx['hex']) txId = self.nodes[1].sendrawtransaction(signedTx['hex']) self.nodes[1].generate(1) self.sync_all() # make sure funds are received at node1 assert_equal(oldBalance + Decimal('5001.10000000'), self.nodes[0].getbalance()) ############################################### # multiple (~19) inputs tx test | Compare fee # ############################################### #empty node1, send some small coins from node0 to node1 self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True) self.sync_all() self.nodes[0].generate(1) self.sync_all() for i in range(0, 20): self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.01) self.nodes[0].generate(1) self.sync_all() #fund a tx with ~20 small inputs inputs = [] outputs = { self.nodes[0].getnewaddress(): 0.15, self.nodes[0].getnewaddress(): 0.04 } rawtx = self.nodes[1].createrawtransaction(inputs, outputs) fundedTx = self.nodes[1].fundrawtransaction(rawtx) 
#create same transaction over sendtoaddress txId = self.nodes[1].sendmany("", outputs) signedFee = self.nodes[1].getrawmempool(True)[txId]['fee'] #compare fee feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee) assert (feeDelta >= 0 and feeDelta <= feeTolerance * 19) #~19 inputs ############################################# # multiple (~19) inputs tx test | sign/send # ############################################# #again, empty node1, send some small coins from node0 to node1 self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True) self.sync_all() self.nodes[0].generate(1) self.sync_all() for i in range(0, 20): self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.01) self.nodes[0].generate(1) self.sync_all() #fund a tx with ~20 small inputs oldBalance = self.nodes[0].getbalance() inputs = [] outputs = { self.nodes[0].getnewaddress(): 0.15, self.nodes[0].getnewaddress(): 0.04 } rawtx = self.nodes[1].createrawtransaction(inputs, outputs) fundedTx = self.nodes[1].fundrawtransaction(rawtx) fundedAndSignedTx = self.nodes[1].signrawtransaction(fundedTx['hex']) txId = self.nodes[1].sendrawtransaction(fundedAndSignedTx['hex']) self.sync_all() self.nodes[0].generate(1) self.sync_all() assert_equal(oldBalance + Decimal('5000.19000000'), self.nodes[0].getbalance()) #0.19+block reward ##################################################### # test fundrawtransaction with OP_RETURN and no vin # ##################################################### rawtx = "0100000000010000000000000000066a047465737400000000" dec_tx = self.nodes[2].decoderawtransaction(rawtx) assert_equal(len(dec_tx['vin']), 0) assert_equal(len(dec_tx['vout']), 1) rawtxfund = self.nodes[2].fundrawtransaction(rawtx) dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) assert_greater_than(len(dec_tx['vin']), 0) # at least one vin assert_equal(len(dec_tx['vout']), 2) # one change output added ################################################## # test a 
fundrawtransaction using only watchonly # ################################################## inputs = [] outputs = {self.nodes[2].getnewaddress(): watchonly_amount / 2} rawtx = self.nodes[3].createrawtransaction(inputs, outputs) result = self.nodes[3].fundrawtransaction(rawtx, {'includeWatching': True}) res_dec = self.nodes[0].decoderawtransaction(result["hex"]) assert_equal(len(res_dec["vin"]), 1) assert_equal(res_dec["vin"][0]["txid"], watchonly_txid) assert ("fee" in result.keys()) assert_greater_than(result["changepos"], -1) ############################################################### # test fundrawtransaction using the entirety of watched funds # ############################################################### inputs = [] outputs = {self.nodes[2].getnewaddress(): watchonly_amount} rawtx = self.nodes[3].createrawtransaction(inputs, outputs) # Backward compatibility test (2nd param is includeWatching) result = self.nodes[3].fundrawtransaction(rawtx, True) res_dec = self.nodes[0].decoderawtransaction(result["hex"]) assert_equal(len(res_dec["vin"]), 2) assert (res_dec["vin"][0]["txid"] == watchonly_txid or res_dec["vin"][1]["txid"] == watchonly_txid) assert_greater_than(result["fee"], 0) assert_greater_than(result["changepos"], -1) assert_equal( result["fee"] + res_dec["vout"][result["changepos"]]["value"], watchonly_amount / 10) signedtx = self.nodes[3].signrawtransaction(result["hex"]) assert (not signedtx["complete"]) signedtx = self.nodes[0].signrawtransaction(signedtx["hex"]) assert (signedtx["complete"]) self.nodes[0].sendrawtransaction(signedtx["hex"]) self.nodes[0].generate(1) self.sync_all() ####################### # Test feeRate option # ####################### # Make sure there is exactly one input so coin selection can't skew the result assert_equal(len(self.nodes[3].listunspent(1)), 1) inputs = [] outputs = {self.nodes[3].getnewaddress(): 1} pprint(self.nodes[3].getbalance()) rawtx = self.nodes[3].createrawtransaction(inputs, outputs) result = 
self.nodes[3].fundrawtransaction( rawtx) # uses DEFAULT_TRANSACTION_MINFEE result2 = self.nodes[3].fundrawtransaction(rawtx, {"feeRate": 2 * 0.0005}) result3 = self.nodes[3].fundrawtransaction(rawtx, {"feeRate": 10 * 0.0005}) result_fee_rate = result['fee'] * 1000 / count_bytes(result['hex']) assert_fee_amount(result['fee'], count_bytes(result['hex']), result_fee_rate) assert_fee_amount(result2['fee'], count_bytes(result2['hex']), 2 * result_fee_rate) assert_fee_amount(result3['fee'], count_bytes(result3['hex']), 10 * result_fee_rate) ################################ # Test no address reuse occurs # ################################ result3 = self.nodes[3].fundrawtransaction(rawtx) res_dec = self.nodes[0].decoderawtransaction(result3["hex"]) changeaddress = "" for out in res_dec['vout']: if out['value'] > 1.0: changeaddress += out['scriptPubKey']['addresses'][0] assert (changeaddress != "") nextaddr = self.nodes[3].getnewaddress() # Now the change address key should be removed from the keypool assert (changeaddress != nextaddr) ###################################### # Test subtractFeeFromOutputs option # ###################################### # Make sure there is exactly one input so coin selection can't skew the result assert_equal(len(self.nodes[3].listunspent(1)), 1) inputs = [] outputs = {self.nodes[2].getnewaddress(): 1} rawtx = self.nodes[3].createrawtransaction(inputs, outputs) result = [ self.nodes[3].fundrawtransaction( rawtx), # uses min_relay_tx_fee (set by settxfee) self.nodes[3].fundrawtransaction( rawtx, {"subtractFeeFromOutputs": []}), # empty subtraction list self.nodes[3].fundrawtransaction( rawtx, {"subtractFeeFromOutputs": [0] }), # uses min_relay_tx_fee (set by settxfee) self.nodes[3].fundrawtransaction( rawtx, {"feeRate": 2 * min_relay_tx_fee}), self.nodes[3].fundrawtransaction(rawtx, { "feeRate": 2 * min_relay_tx_fee, "subtractFeeFromOutputs": [0] }) ] dec_tx = [ self.nodes[3].decoderawtransaction(tx['hex']) for tx in result ] output = [ 
d['vout'][1 - r['changepos']]['value'] for d, r in zip(dec_tx, result) ] change = [ d['vout'][r['changepos']]['value'] for d, r in zip(dec_tx, result) ] assert_equal(result[0]['fee'], result[1]['fee'], result[2]['fee']) assert_equal(result[3]['fee'], result[4]['fee']) assert_equal(change[0], change[1]) assert_equal(output[0], output[1]) assert_equal(output[0], output[2] + result[2]['fee']) assert_equal(change[0] + result[0]['fee'], change[2]) assert_equal(output[3], output[4] + result[4]['fee']) assert_equal(change[3] + result[3]['fee'], change[4]) inputs = [] outputs = { self.nodes[2].getnewaddress(): value for value in (1.0, 1.1, 1.2, 1.3) } rawtx = self.nodes[3].createrawtransaction(inputs, outputs) result = [ self.nodes[3].fundrawtransaction(rawtx), # split the fee between outputs 0, 2, and 3, but not output 1 self.nodes[3].fundrawtransaction( rawtx, {"subtractFeeFromOutputs": [0, 2, 3]}) ] dec_tx = [ self.nodes[3].decoderawtransaction(result[0]['hex']), self.nodes[3].decoderawtransaction(result[1]['hex']) ] # Nested list of non-change output amounts for each transaction output = [[ out['value'] for i, out in enumerate(d['vout']) if i != r['changepos'] ] for d, r in zip(dec_tx, result)] # List of differences in output amounts between normal and subtractFee transactions share = [o0 - o1 for o0, o1 in zip(output[0], output[1])] # output 1 is the same in both transactions assert_equal(share[1], 0) # the other 3 outputs are smaller as a result of subtractFeeFromOutputs assert_greater_than(share[0], 0) assert_greater_than(share[2], 0) assert_greater_than(share[3], 0) # outputs 2 and 3 take the same share of the fee assert_equal(share[2], share[3]) # output 0 takes at least as much share of the fee, and no more than 2 satoshis more, than outputs 2 and 3 assert_greater_than_or_equal(share[0], share[2]) assert_greater_than_or_equal(share[2] + Decimal(2e-8), share[0]) # the fee is the same in both transactions assert_equal(result[0]['fee'], result[1]['fee']) # the total 
subtracted from the outputs is equal to the fee assert_equal(share[0] + share[2] + share[3], result[0]['fee'])