def create_wallet(address):
    """Register a wallet address with the BEC API.

    The request is shaped to look like the official iOS client and is
    routed through Tor; both measures are there to evade the service's
    anti-abuse checks.
    """
    # The API validates checksummed addresses, so normalize first.
    checksummed = eth_utils.to_checksum_address(address)
    url = 'https://api.bec.com/wallet/create.json'
    payload = {
        'address': checksummed,
        'app_version': '1.2.1',
        'chain_addr': checksummed,
        'chain_type': 2,
        'gid': 447635452,
        'gid_status': 1,
        'idfa': utils.gen_uuid(),
        # Example device-id value: 0042B207-FE18-4FE9-A66A-A4BF318B78A5
        'idfv': utils.gen_uuid(),
        'language': 'zh-Hans',
        'model': 'iPhone8,1',
        'os_type': 'iOS',
        'os_version': '11.1.1',
        'type': 2,
    }
    ua = 'BECWallet/1.2.1 (bec.meitu.wallet; build:1.2.17; iOS 11.1.1) Alamofire/4.6.0'
    headers = {'User-Agent': ua}
    # Send via the Tor proxy (anti-abuse evasion).
    return utils.post(requests, url, payload, headers=headers,
                      proxies=settings.TOR)
def update_smart():
    """Create or update a "smart" album from the submitted form.

    The form carries parallel keys named fieldN / compN / valueN; each
    triple becomes one rule of the shape {field_name: {comparator: value}}.
    When no pkid is supplied a new album row is inserted (marked smart),
    otherwise the existing row's name and rules are updated.  After the
    write, the album's image membership is recomputed.

    Returns a redirect to the album list view.
    """
    rf = request.form
    # Fix: the original also built a `comps` list (and a `kk` copy of the
    # keys) that were never used; both removed.
    fields = [k for k in rf.keys() if k.startswith("field")]
    name = rf.get("name")
    pkid = rf.get("pkid")
    out = []
    for field in fields:
        fld_name = rf.get(field)
        # The key is 'fieldN', where N is the sequence number shared with
        # the matching compN / valueN keys.
        seq = field.split("field")[-1]
        comp = rf.get(f"comp{seq}")
        val = rf.get(f"value{seq}")
        out.append({fld_name: {comp: val}})
    rules = json.dumps(out)
    if not pkid:
        # New Album
        pkid = utils.gen_uuid()
        sql = """insert into album (pkid, name, smart, rules) values (%s, %s, %s, %s); """
        vals = (pkid, name, True, rules)
    else:
        sql = """update album set name = %s, rules = %s where pkid = %s; """
        vals = (name, rules, pkid)
    with utils.DbCursor() as crs:
        crs.execute(sql, vals)
    # Recompute which images belong to the (smart) album.
    album_obj = entities.Album.get(pkid)
    album_obj.update_images(None)
    return redirect(url_for("list_albums"))
def transaction(receiver, amount, times=1):
    # Build, sign, and enqueue `times` identical payments to `receiver`.
    #
    # receiver: public key of the payee.
    # amount:   coins per transaction.
    # times:    how many copies of the transaction to enqueue.
    #
    # Aborts with a message when the current balance is below `amount`.
    # NOTE(review): the balance is checked against a single `amount`, not
    # `amount * times` — confirm whether queuing multiples is intentional.
    # (Python 2 module: uses print statements.)
    global public
    global private
    global blockchain
    if get_balance(public) < amount:
        print "You don't have enough HackCoins."
        return
    txns = []
    # Load any transactions already waiting in the on-disk queue so we
    # append to it rather than clobber it.
    if os.path.exists(TXN_FILE):
        with open(TXN_FILE, 'r') as f:
            txns_json = f.read()
            txns = jsonpickle.decode(txns_json)
    for _ in range(times):
        # Build a new transaction.
        t = Transaction(id=gen_uuid(),
                        owner=public,
                        receiver=receiver,
                        coins=amount,
                        signature=None)
        # Sign it.
        t.signature = sign(t.comp(), private)
        # Place it in the miner queue to be mined.
        txns.append(t)
    # Persist the extended queue back to disk.
    with open(TXN_FILE, 'w') as f:
        f.write(jsonpickle.encode(txns))
def set_album():
    """Attach an image to an album, creating the album if necessary.

    Expects 'album_name' and 'image_name' in the posted form; aborts with
    400 when either is missing and 404 when the image does not exist.  A
    newly created album inherits the image's orientation.
    """
    album_name = request.form.get("album_name")
    if not album_name:
        abort(400, "No value for 'album_name' received")
    image_name = request.form.get("image_name")
    if not image_name:
        abort(400, "No value for 'image_name' received")
    with utils.DbCursor() as crs:
        # Look up the image by name.
        crs.execute("select pkid, orientation from image where name = %s",
                    (image_name, ))
        image_row = crs.fetchone()
        if not image_row:
            abort(404, "Image %s not found" % image_name)
        image_id = image_row["pkid"]
        orientation = image_row["orientation"]
        # Look up the album; create it when it doesn't exist yet.
        crs.execute("select pkid from album where name = %s", (album_name, ))
        album_row = crs.fetchone()
        if album_row:
            album_id = album_row["pkid"]
        else:
            album_id = utils.gen_uuid()
            crs.execute(
                """insert into album (pkid, name, orientation) values (%s, %s, %s); """,
                (album_id, album_name, orientation))
        # Finally, link the image to the album.
        crs.execute(
            "insert into album_image (album_id, image_id) values (%s, %s) ;",
            (album_id, image_id))
    return "Success!"
def paySomeone(public, private, target, amount):
    """Build and sign a payment of `amount` coins from `public` to `target`.

    Returns the signed Transaction (it is not submitted here).
    """
    payment = Transaction(id=gen_uuid(),
                          owner=public,
                          receiver=target,
                          coins=amount,
                          signature=None)
    payment.signature = sign(payment.comp(), private)
    return payment
def _save_new(self):
    """Insert this object as a new database row.

    Uses the caller-supplied pkid when one is already set (new frames may
    specify their own), otherwise generates a fresh UUID.
    """
    if not self.pkid:
        self.pkid = utils.gen_uuid()
    cols = self.db_field_names
    field_names = ", ".join(cols)
    # Every value is stringified; missing attributes become "None".
    values = tuple(str(getattr(self, col, None)) for col in cols)
    value_placeholders = ", ".join(["%s"] * len(cols))
    sql = "insert into {} ({}) values ({})".format(
        self.table_name, field_names, value_placeholders)
    with utils.DbCursor() as crs:
        crs.execute(sql, values)
def upload_image(self):
    """Handle an image upload: stage to a temp file, convert, and store.

    The upload is written under a generated UUID name in TMP_DIR, handed
    to the image service for conversion/storage, and the temp file is
    always removed afterwards.

    Returns a dict with the stored image id.
    """
    # Fix: renamed `file` (shadowed the builtin) and replaced the
    # duplicated os.unlink in the success and except paths with a single
    # try/finally cleanup.
    upload = request.files.get('file')
    ext = os.path.splitext(upload.filename)[1][1:]  # extension sans dot
    src = os.path.join(TMP_DIR, '%s.%s' % (gen_uuid(), ext))
    upload.save(src)
    try:
        dst_dir = self.file_service.get_image_dir()
        filename = self.image_service.save(src, dst_dir, ext)
        return dict(img_id=filename)
    finally:
        # The staged file is no longer needed whether or not the save
        # succeeded.
        os.unlink(src)
def __init__(self, role_name, description=None, users=None, enabled=True, api_ids=None):
    """Initialize a role; the role id is derived from the role name."""
    self.role_name = role_name
    self.role_id = utils.gen_uuid(role_name)
    self._old_name = None
    self.description = description
    self.enabled = enabled
    # Only override class-level defaults when values were actually given.
    for attr, value in (("users", users), ("api_ids", api_ids)):
        if value is not None:
            setattr(self, attr, value)
def upload_file():
    """Handle an image upload from the registration form.

    Validates the upload, extracts metadata (EXIF creation date, size,
    orientation), writes a 120x120 thumbnail next to the original under a
    "thumbs" subdirectory, and records the image in the database.

    Returns a redirect to the image list on success, or back to the
    upload form on a duplicate name / invalid image.
    """
    image = request.files["image_file"]
    fname = secure_filename(image.filename)
    # Make sure that there isn't another file by that name
    if isduplicate(fname):
        flash("Image already exists!!", "error")
        return redirect(url_for("upload_image_form"))
    fpath = os.path.join(IMAGE_FOLDER, fname)
    image.save(fpath)
    try:
        img_obj = Image.open(fpath)
    except IOError:
        # Fix: flash category was "err" here but "error" everywhere else.
        flash("Not a valid image", "error")
        os.unlink(fpath)
        return redirect(url_for("upload_image_form"))
    imgtype = img_obj.format
    orientation = utils.get_img_orientation(fpath)
    width, height = img_obj.size
    rf = request.form
    keywords = rf["file_keywords"] or fname
    size = os.stat(fpath).st_size
    # Fix: _getexif() returns None for images without EXIF data; the
    # original would raise AttributeError on such files.
    exif = img_obj._getexif()
    created = exif.get(CREATE_DATE_KEY) if exif else None
    updated = datetime.fromtimestamp(os.stat(fpath).st_ctime)
    # Make a thumbnail
    thumb_size = (120, 120)
    img_obj.thumbnail(thumb_size)
    thumb_path_parts = list(os.path.split(fpath))
    thumb_path_parts.insert(-1, "thumbs")
    thumb_path = os.path.join(*thumb_path_parts)
    try:
        img_obj.save(thumb_path, format=imgtype)
    except Exception as e:
        # Best effort: a failed thumbnail should not block the upload.
        print("EXCEPTION", e)
    # Save the info in the database
    pkid = utils.gen_uuid()
    sql = """ insert into image (pkid, keywords, name, orientation, width, height, imgtype, size, created, updated) values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s); """
    vals = (pkid, keywords, fname, orientation, width, height, imgtype,
            size, created, updated)
    with utils.DbCursor() as crs:
        crs.execute(sql, vals)
    return redirect(url_for("list_images"))
def __init__(self, request_iterator):
    """Consume a streamed upload: collect metadata fields and append
    package bytes to a temp zip file under a fresh per-task directory."""
    task_id = utils.gen_uuid()
    self._tmp_package_dir = _APP_TASK_PATH + '/' + task_id
    utils.create_dir(self._tmp_package_dir)
    self.tmp_package_file_path = self._tmp_package_dir + '/package.zip'
    # Each message in the stream carries exactly one of these fields;
    # package content arrives chunked and is appended in order.
    for msg in request_iterator:
        if msg.accessToken:
            self.accessToken = msg.accessToken
        elif msg.appPackageId:
            self.app_package_id = msg.appPackageId
        elif msg.hostIp:
            self.hostIp = msg.hostIp
        elif msg.tenantId:
            self.tenant_id = msg.tenantId
        elif msg.package:
            with open(self.tmp_package_file_path, 'ab') as out:
                out.write(msg.package)
def run_miner():
    """Run the main miner loop. """
    # (Python 2 module: uses print statements.)
    # Each iteration: pull the pending transactions and current chain from
    # the server, add our own mining reward, mine a block on top of the
    # chain head, and submit it if nobody else extended the chain first.
    global blockchain
    global public
    global private
    while True:
        # Load transaction queue and blockchain from server.
        txns = load_transactions()
        blockchain = load_blockchain()
        # Add reward to us yay.
        reward = Transaction(id=gen_uuid(),
                             owner="mined",
                             receiver=public,
                             coins=REWARD,
                             signature=None)
        reward.signature = sign(reward.comp(), private)
        txns.append(reward)
        # Construct a new block.
        b = Block(timestamp=datetime.datetime.now(),
                  transactions=txns,
                  previous_hash=blockchain.head.hash_block())
        # Let's mine this block.
        mine_till_found(b)
        # Is this _the_ new block?
        # or did the server swoop us :(
        # Re-fetch the chain: if the head is unchanged, our block still
        # extends the tip and can be submitted.
        new_chain = load_blockchain()
        if new_chain.head.hash_block() == blockchain.head.hash_block():
            # WE MINED THIS BLOCK YAY.
            # AND WE WIN.
            resp = get_route('add', data=str(b))
            if resp['success']:
                print "Block added!"
                delete_queue(txns)
            else:
                print "Couldn't add block:", resp['message']
        else:
            print "Someone else mined the block before us :("
def __init__(self, username, password, _roles=None, enabled=True, tel=None, email=None):
    """Create a user: hash the password, derive ids, and persist."""
    self.username = username
    self.user_id = utils.gen_uuid(username)
    # Only the hash of the password is ever stored.
    self.hashed_password = utils.hash_pass(password)
    self.enabled = enabled
    # Optional contact details are set only when provided.
    for attr, value in (("tel", tel), ("email", email)):
        if value:
            setattr(self, attr, value)
    self.sign_up_date = datetime.datetime.now()
    # Each user is backed by a consumer record keyed by username.
    self.consumer_id = Consumer(self.username).consumer_id
    if _roles:
        self.roles = _roles
    self.save()
def create_user():
    """Register a new user from the submitted form.

    Requires 'username' and 'pw'; 'user_type' == "super" grants superuser
    status.  Re-renders the registration form on validation or integrity
    errors, otherwise flashes a success message and redirects home.
    """
    form = request.form
    username = form.get("username")
    pw = form.get("pw")
    if not all((username, pw)):
        flash("You must supply a username and password")
        # Fix: the template name was missing its ".html" suffix (the
        # error path below correctly uses "user_reg_form.html").
        return render_template("user_reg_form.html")
    hpw = _hash_pw(pw)
    superuser = form.get("user_type") == "super"
    pkid = utils.gen_uuid()
    with utils.DbCursor() as crs:
        try:
            crs.execute(
                """ INSERT INTO user (pkid, name, pw, superuser) VALUES (%s, %s, %s, %s)""",
                (pkid, username, hpw, superuser))
        except utils.IntegrityError as ee:
            # Typically a duplicate username.
            flash("Oops! %s" % str(ee.args[1]), "error")
            return render_template("user_reg_form.html")
    flash("Successfully registered user '%s'" % username)
    return redirect("/")
def update():
    """Insert or update a frameset from the posted form, then redirect.

    Delete requests are delegated to the delete() view.
    """
    rf = request.form
    if "delete" in rf:
        return delete()
    pkid = rf["pkid"]
    name = rf["name"]
    orientation = rf["orientation"]
    crs = utils.get_cursor()
    if pkid:
        sql = """update frameset set name = %s, orientation = %s where pkid = %s; """
        vals = (name, orientation, pkid)
    else:
        # New frameset
        pkid = utils.gen_uuid()
        sql = """insert into frameset (pkid, name, orientation) values (%s, %s, %s); """
        vals = (pkid, name, orientation)
    crs.execute(sql, vals)
    utils.commit()
    return redirect(url_for("list_framesets"))
def before_request():
    """Log every incoming request: id, method, origin IP, headers, payload.

    Favicon requests are ignored.  The request id is taken from the
    caller's X-Req-Id header when present, otherwise a fresh UUID is
    minted; it is stashed on flask.g for downstream handlers.
    """
    if request.path == "/favicon.ico":
        return
    req_id = request.headers.get("X-Req-Id", "", str)
    # Fix: headers.get() was called with a "" default, so the original
    # `req_id is None` test was dead code.
    if req_id == "":
        req_id = utils.gen_uuid()
    g.setdefault("id", req_id)
    method = request.method
    # Collect the request parameters for logging.
    request_message = ""
    if method == "GET":
        request_message = request.args.to_dict()
    elif method == "POST":
        if request.json is not None:
            request_message = request.json
        elif request.form is not None:
            request_message = request.form
    # Normalize the payload to a single loggable string.
    if isinstance(request_message, (list, tuple, set)):
        request_message = " ".join(request_message)
    elif isinstance(request_message, dict):
        request_message = json.dumps(request_message)
    # Determine the caller's real IP, honouring any proxy-forwarding header.
    real_ip = request.remote_addr
    if request.headers.get("X-Forwarded-For") is not None:
        real_ip = request.headers.get("X-Forwarded-For")
    logger.info(
        "{} Path: {} Method: {} RemoteAddr: {} Headers: {} RequestSize: {} RequestMessage: {} "
        .format(req_id, request.path, request.method, real_ip,
                request.headers.to_wsgi_list(), request.content_length,
                request_message))
def save(self, src, dst_dir, format='jpg', default_format='jpg'):
    '''
    Convert the source image into each configured size and store it.

    :param src: path of the source image file
    :param dst_dir: destination root; one subdirectory is created per
        configured size spec
    :param format: jpg or png; when not a recognized format, falls back
        to ``default_format``
    :param default_format: fallback format; when unusable (None/empty),
        ImageServiceFormatError is raised
    :return: the generated image filename (uuid + extension)
    '''
    if format.lower() not in FORMATS:
        # Fix: the original condition raised whenever a non-empty default
        # was supplied and "fell back" to the empty string otherwise —
        # exactly inverted.  Fall back only when the default is usable.
        if not default_format:
            raise ImageServiceFormatError
        format = default_format
    img_id = gen_uuid()
    filename = '%s.%s' % (img_id, format)
    for spec_name, size in self._specs.items():
        # Re-open per spec: thumbnail() mutates the image in place.
        image = Image.open(src)
        image.thumbnail((size, size))
        path = os.path.join(dst_dir, spec_name)
        if not os.path.exists(path):
            # Fix: 0755 is Python 2 octal syntax; 0o755 is valid in both.
            os.mkdir(path, 0o755)
        image.save(os.path.join(path, filename), quality=self._quality)
    return filename
def update():
    """Create, update, or delete an album based on the posted form."""
    rf = request.form
    rfc = dict(rf)
    # A form with a "delete" key removes the album outright.
    if "delete" in rfc:
        pkid = rfc["pkid"]
        entities.Album.delete(pkid)
        return redirect(url_for("list_albums"))
    pkid = rf["pkid"]
    name = rf["name"]
    orientation = rf["orientation"]
    crs = utils.get_cursor()
    if pkid:
        sql = """update album set name = %s, orientation = %s where pkid = %s; """
        vals = (name, orientation, pkid)
    else:
        # New Album
        pkid = utils.gen_uuid()
        sql = """insert into album (pkid, name, orientation) values (%s, %s, %s); """
        vals = (pkid, name, orientation)
    crs.execute(sql, vals)
    utils.commit()
    return redirect(url_for("list_albums"))
def run_miner():
    """Run the main miner loop. """
    # (Python 2 module: uses print statements.)
    # Variant of the miner that stages a Finney-style double spend: when
    # our balance is high enough, the same signed self-payment is queued
    # three times alongside the normal mining reward.
    global blockchain
    global public
    global private
    while True:
        # Load transaction queue and blockchain from server.
        txns = load_transactions()
        blockchain = load_blockchain()
        # Loc: Check our balance
        balance = get_balance(public)
        print "Current balance", balance
        # Loc: Set up attack
        is_attacking = False
        if balance > 60:
            print "Setting up Finney attack"
            is_attacking = True
            t = Transaction(
                id=gen_uuid(),
                owner=public,
                receiver=
                "6f181e44edfc93de084071e590421e5b083f93da6012d441658b6b31a966ae9c",
                coins=balance,
                signature=None)
            # Sign it.
            t.signature = sign(t.comp(), private)
            # Pay myself a lot!
            # NOTE(review): the identical signed transaction object is
            # appended three times — whether the server de-duplicates by
            # id determines if this actually multiplies the payment.
            for x in range(0, 3):
                txns.append(t)
        # Add reward to us yay.
        reward = Transaction(id=gen_uuid(),
                             owner="mined",
                             receiver=public,
                             coins=REWARD,
                             signature=None)
        reward.signature = sign(reward.comp(), private)
        txns.append(reward)
        # Construct a new block.
        b = Block(timestamp=datetime.datetime.now(),
                  transactions=txns,
                  previous_hash=blockchain.head.hash_block())
        # Let's mine this block.
        mine_till_found(b)
        # Is this _the_ new block?
        # or did the server swoop us :(
        new_chain = load_blockchain()
        if new_chain.head.hash_block() == blockchain.head.hash_block():
            # WE MINED THIS BLOCK YAY.
            # AND WE WIN.
            # Loc: Add in a Finney attack to double spend the coin
            resp = get_route('add', data=str(b))
            if resp['success']:
                print "Block added!"
                delete_queue(txns)
            else:
                print "Couldn't add block:", resp['message']
        else:
            print "Someone else mined the block before us :("
def run_miner():
    """Run the main miner loop. """
    # (Python 2 module: later prints are statements; the tuple-style
    # print(...) calls below print tuples under Python 2.)
    # Attack variant: alternates between two keypairs, rotating to a fresh
    # key whenever a wallet goes negative, and each round mines a block
    # that both (a) "donates" the donor wallet's full balance to a fixed
    # external address and (b) pays the same balance back to ourselves —
    # i.e. the same coins are spent twice inside one block.
    my_address = "2cb4fc5902917e58e531cfbe1d909727aaf331b4856bf8627e09bf8941b69a40"
    my_private = "610af1630bf08b0072d97bdaf71882cd0a2c86e7af72296b4ee73f508b812c28"
    my_address_2 = "a173fd8d2330cc2b4776730891f50099204376217c67b7b23254aca04fbeb5a3"
    my_private_2 = "d0f783f01ac0df1799856964fe74f702763932e1edf3e9d0074646de885d5559"
    public = my_address_2
    private = my_private_2
    donor = None
    while True:
        print("new public", public)
        print("new private", private)
        # NOTE(review): global declarations appear mid-function rather
        # than at the top; `public`/`private` above are locals here.
        global blockchain
        global real_b1
        global fake_b1
        global fake_b2
        blockchain = load_blockchain()
        # Add reward to us yay.
        # my_address_3 = "5adbd7137903135fa2cc5a2de2035a326319e42188a9c6714b26fa016c6ac1bb"
        # my_private_3 = "91f233e1218135b772ddc87a199e6d3cc18233753623f95385dde62e886304c7"
        amount_1 = blockchain.get_wallet_amount(my_address)
        amount_2 = blockchain.get_wallet_amount(my_address_2)
        # amount_3 = blockchain.get_wallet_amount(my_address_3)
        # Pick whichever wallet is still solvent as the "donor"; rotate a
        # fresh keypair in for the other role.
        if amount_1 < 0:
            my_private, my_address = generate_keys()
            public = my_address
            private = my_private
            donor_pub = my_address_2
            donor_private = my_private_2
            donor_amount = amount_2
        else:
            my_private_2, my_address_2 = generate_keys()
            public = my_address_2
            private = my_private_2
            donor_pub = my_address
            donor_private = my_private
            donor_amount = amount_1
        # Add reward to us yay.
        reward = Transaction(
            id = gen_uuid(),
            owner = "mined",
            receiver = public,
            coins = REWARD,
            signature = None
        )
        txns = []
        reward.signature = sign(reward.comp(), private)
        txns.append(reward)
        # Donation 1: donor's entire balance to a fixed external address.
        donation1 = Transaction(
            id = gen_uuid(),
            owner = donor_pub,
            receiver = "3119281c76dc54009925c9208bedc5bd0162c27034a1649fd7e2e5df62dba557",
            coins = donor_amount,
            signature = None
        )
        donation1.signature = sign(donation1.comp(), donor_private)
        # Donation 2: the SAME balance again, back to ourselves
        # (double spend within the one block).
        donation2 = Transaction(
            id = gen_uuid(),
            owner = donor_pub,
            receiver = public,
            coins = donor_amount,
            signature = None
        )
        donation2.signature = sign(donation2.comp(), donor_private)
        txns.append(donation1)
        txns.append(donation2)
        # Construct a new block.
        real_b1 = Block(
            timestamp = datetime.datetime.now(),
            transactions = txns,
            previous_hash = blockchain.head.hash_block()
        )
        mine_till_found(real_b1)
        # Submit only if nobody extended the chain while we mined.
        new_chain = load_blockchain()
        # print "Adding real block now"
        # resp1 = get_route('add', data=str(real_b1))
        # if resp1['success']:
        #     print "Added real block1!"
        # else:
        #     print "Couldn't add block:", resp1['message']
        if new_chain.head.hash_block() == blockchain.head.hash_block():
            print "Adding real block now"
            resp1 = get_route('add', data=str(real_b1))
            if resp1['success']:
                print "Added real block1!"
            else:
                print "Couldn't add block:", resp1['message']
        else:
            print "Someone else mined the block before us :("
def serialize(self, rem, page=-1):
    """Serialise a Resource Map and its Aggregation as an Atom feed.

    The feed carries the aggregation's metadata (title, authors, types,
    links) followed by one entry per aggregated resource (with its proxy,
    when present).  Returns a ReMDocument with whitespace-stripped XML.
    (Python 2 module: uses dict.has_key.)
    """
    aggr = rem._aggregation_
    # Check entire graph is connected
    g = self.merge_graphs(rem)
    # Drop any default (empty-prefix) namespace before building the nsmap.
    if namespaces.has_key(''):
        del namespaces[u'']
    root = Element("feed", nsmap=namespaces)
    #namespaces[''] = myNamespace
    ## Aggregation Info
    e = SubElement(root, 'id')
    e.text = str(aggr.uri)
    if not aggr._dc.title:
        raise OreException(
            "Atom Serialisation requires title on aggregation")
    else:
        e = SubElement(root, 'title')
        e.text = str(aggr._dc.title[0])
    if aggr._dc.description:
        e = SubElement(root, 'subtitle')
        e.text = str(aggr._dc.description[0])
    for who in aggr._dcterms.creator:
        e = SubElement(root, 'author')
        agent = aggr._agents_[who]
        self.make_agent(e, agent)
    for bn in aggr._dcterms.contributor:
        e = SubElement(root, 'contributor')
        agent = aggr._agents_[bn]
        self.make_agent(e, agent)
    for t in aggr._ore.similarTo:
        self.make_link(root, 'related', t, g)
    for t in aggr._dcterms.rights:
        self.make_link(root, 'license', t, g)
    # Aggregation rdf:type values become atom categories, annotated with
    # scheme/label when the graph provides them.
    for t in aggr._rdf.type:
        e = SubElement(root, 'category', term=str(t))
        try:
            scheme = list(g.objects(t, namespaces['rdfs']['isDefinedBy']))[0]
            e.set('scheme', str(scheme))
        except:
            pass
        try:
            label = list(g.objects(t, namespaces['rdfs']['label']))[0]
            e.set('label', str(label))
        except:
            pass
    # Other resource maps describing this aggregation are alternates.
    orms = []
    for orm in aggr._resourceMaps_:
        if orm != rem:
            self.make_link(root, 'alternate', orm.uri, g)
            orms.append(orm.uri)
    for t in aggr._ore.isDescribedBy:
        # check not in orms
        if not t in orms:
            self.make_link(root, 'alternate', t, g)
    self.generate_rdf(root, aggr)
    ## ReM Info
    self.make_link(root, 'self', rem.uri, g)
    e = SubElement(root, 'updated')
    e.text = now()
    # ReM Author
    if rem._dcterms.creator:
        uri = rem._dcterms.creator[0]
        e = SubElement(root, 'generator', uri=str(uri))
        agent = rem._agents_[uri]
        n = agent._foaf.name[0]
        e.text = str(n)
        self.done_triples.append((uri, namespaces['foaf']['name'], n))
    # if no logo, put in nice ORE icon
    e = SubElement(root, 'icon')
    if aggr._foaf.logo:
        e.text = str(aggr._foaf.logo[0])
    elif rem._foaf.logo:
        e.text = str(rem._foaf.logo[0])
    else:
        e.text = "http://www.openarchives.org/ore/logos/ore_icon.png"
    if rem._dc.rights:
        e = SubElement(root, 'rights')
        e.text = rem._dc.rights[0]
    self.generate_rdf(root, rem)
    ## Process Entries
    for (res, proxy) in aggr._resources_:
        entry = SubElement(root, 'entry')
        e = SubElement(entry, 'id')
        # Entry id: the proxy URI when there is one, else a fresh urn:uuid.
        if proxy:
            e.text = str(proxy.uri)
        else:
            e.text = "urn:uuid:%s" % gen_uuid()
        e = SubElement(entry, 'link', rel="alternate", href=str(res.uri))
        # type = dc:format
        fmt = list(g.objects(res.uri, namespaces['dc']['format']))
        if fmt:
            e.set('type', str(fmt[0]))
        if not res._dc.title:
            raise ValueError(
                "All entries must have a title for ATOM serialisation")
        else:
            e = SubElement(entry, 'title')
            e.text = str(res._dc.title[0])
        for t in res._rdf.type:
            e = SubElement(entry, 'category', term=str(t))
            try:
                scheme = list(
                    g.objects(t, namespaces['rdfs']['isDefinedBy']))[0]
                e.set('scheme', str(scheme))
            except:
                pass
            try:
                label = list(g.objects(t, namespaces['rdfs']['label']))[0]
                e.set('label', str(label))
            except:
                pass
        for a in res._dcterms.creator:
            e = SubElement(entry, 'author')
            agent = res._agents_[a]
            self.make_agent(e, agent)
        for a in res._dcterms.contributor:
            e = SubElement(entry, 'contributor')
            agent = res._agents_[a]
            self.make_agent(e, agent)
        if res._dcterms.abstract:
            e = SubElement(entry, 'summary')
            e.text = str(res._dcterms.abstract[0])
        # Not sure about this at object level?
        for oa in res._ore.isAggregatedBy:
            if oa != aggr._uri_:
                e = SubElement(entry, 'link', rel="related", href=str(oa))
        e = SubElement(entry, 'updated')
        e.text = now()
        if proxy and proxy._ore.lineage:
            e = SubElement(entry, 'link', rel="via",
                           href=str(proxy._ore.lineage[0]))
        # Temporarily attach the proxy so generate_rdf can see it.
        res._currProxy_ = proxy
        self.generate_rdf(entry, res)
        res._currProxy_ = None
    # Strip newlines and collapse whitespace in the serialised output.
    data = etree.tostring(root)
    data = data.replace('\n', '')
    data = self.spacesub.sub('', data)
    uri = str(rem._uri_)
    self.done_triples = []
    return ReMDocument(uri, data)
def serialize(self, rem, page=-1):
    """Serialise a Resource Map and its Aggregation as a single Atom entry.

    Builds an atom:entry carrying the ReM identifier, the aggregation's
    title/authors/types/links, a representative content element, source
    metadata, one ore:aggregates link per aggregated resource, and an
    ore:triples section holding the remaining RDF.  Triples emitted as
    Atom constructs are tracked in self.done_triples (reset at the end).

    Returns a ReMDocument with pretty-printed XML.
    """
    aggr = rem._aggregation_
    g = self.merge_graphs(rem)
    # make nsmap better
    nm = g.namespace_manager
    nsmap = {'atom': str(namespaces['atom'])}
    poss = uniq(g.predicates()) + uniq(g.objects(None, RDF.type))
    for pred in poss:
        pf, ns, l = nm.compute_qname(pred)
        nsmap[pf] = ns
    root = Element("{%s}entry" % namespaces['atom'], nsmap=nsmap)
    # entry/id == tag for entry == ReM dc:identifier
    # if not exist, generate Yet Another uuid
    e = SubElement(root, '{%s}id' % namespaces['atom'])
    if rem._dc.identifier:
        dcid = rem._dc.identifier[0]
        e.text = str(dcid)
        self.done_triples.append(
            (rem._uri_, namespaces['dc']['identifier'], dcid))
    else:
        e.text = "urn:uuid:%s" % gen_uuid()
    # entry/title == Aggr's dc:title (falling back to dcterms:title)
    title = aggr._dc.title
    tns = 'dc'
    if not title:
        title = aggr._dcterms.title
        tns = 'dcterms'
    if not title:
        raise OreException(
            "Atom Serialisation requires title on aggregation")
    else:
        e = SubElement(root, '{%s}title' % namespaces['atom'])
        dctit = title[0]
        e.text = str(dctit)
        self.done_triples.append(
            (aggr._uri_, namespaces[tns]['title'], dctit))
    # entry/author == Aggr's dcterms:creator
    for who in aggr._dcterms.creator:
        e = SubElement(root, '{%s}author' % namespaces['atom'])
        agent = aggr._agents_[who]
        self.make_agent(e, agent)
        self.done_triples.append(
            (aggr._uri_, namespaces['dcterms']['creator'], agent._uri_))
    # entry/contributor == Aggr's dcterms:contributor
    for bn in aggr._dcterms.contributor:
        e = SubElement(root, '{%s}contributor' % namespaces['atom'])
        # Fix: the original looked up aggr._agents_[who], reusing the
        # stale loop variable from the creator loop above (the feed
        # serializer uses the contributor's own key here).
        agent = aggr._agents_[bn]
        self.make_agent(e, agent)
        self.done_triples.append(
            (aggr._uri_, namespaces['dcterms']['contributor'], agent._uri_))
    # entry/category[@scheme="(magic)"][@term="(datetime)"]
    for t in aggr._dcterms.created:
        t = t.strip()
        e = SubElement(
            root, '{%s}category' % namespaces['atom'], term=str(t),
            scheme="http://www.openarchives.org/ore/terms/datetime/created"
        )
    for t in aggr._dcterms.modified:
        t = t.strip()
        e = SubElement(
            root, '{%s}category' % namespaces['atom'], term=str(t),
            scheme="http://www.openarchives.org/ore/terms/datetime/modified"
        )
    # entry/category == Aggr's rdf:type
    for t in aggr._rdf.type:
        e = SubElement(root, '{%s}category' % namespaces['atom'], term=str(t))
        try:
            scheme = list(g.objects(t, namespaces['rdfs']['isDefinedBy']))[0]
            e.set('scheme', str(scheme))
            self.done_triples.append(
                (t, namespaces['rdfs']['isDefinedBy'], scheme))
        except:
            pass
        try:
            label = list(g.objects(t, namespaces['rdfs']['label']))[0]
            e.set('label', str(label))
            self.done_triples.append(
                (t, namespaces['rdfs']['label'], label))
        except:
            pass
        self.done_triples.append(
            (aggr._uri_, namespaces['rdf']['type'], t))
    # entry/summary
    if aggr._dc.description:
        e = SubElement(root, '{%s}summary' % namespaces['atom'])
        desc = aggr._dc.description[0]
        e.text = str(desc)
        self.done_triples.append(
            (aggr._uri_, namespaces['dc']['description'], desc))
    # All aggr links (anything not already covered above):
    done = [
        namespaces['rdf']['type'], namespaces['ore']['aggregates'],
        namespaces['dcterms']['creator'], namespaces['dcterms']['contributor'],
        namespaces['dc']['title'], namespaces['dc']['description']
    ]
    for (p, o) in g.predicate_objects(aggr.uri):
        if not p in done:
            if isinstance(o, URIRef):
                self.make_link(root, p, o, g)
                self.done_triples.append((aggr._uri_, p, o))
    # entry/content // link[@rel="alternate"]
    # Do we have a splash page?
    altDone = 0
    atypes = aggr._rdf._type
    possAlts = []
    for (r, p) in aggr.resources:
        mytypes = r._rdf.type
        if namespaces['eurepo']['humanStartPage'] in mytypes:
            altDone = 1
            self.make_link(root, 'alternate', r.uri, g)
            break
        # check if share non Aggregation type
        # eg aggr == article and aggres == article, likely
        # to be good alternate
        for m in mytypes:
            if m != namespaces['ore']['Aggregation'] and \
               m in atypes:
                # Fix: the original appended to `possAlt`, an undefined
                # name (NameError when this branch was reached); the
                # sibling copy of this serializer spells it `possAlts`.
                possAlts.append(r.uri)
    if not altDone and possAlts:
        # XXX more intelligent algorithm here
        self.make_link(root, '{%s}alternate' % namespaces['atom'],
                       possAlts[0], g)
        altDone = 1
    if not altDone and build_html_atom_content:
        e = SubElement(root, '{%s}content' % namespaces['atom'])
        e.set('type', 'html')
        # make some representative html
        # this can get VERY LONG so default to not doing this
        html = ['<ul>']
        for (r, p) in aggr.resources:
            html.append('<li><a href="%s">%s</a></li>' % (r.uri, r.title[0]))
        html.append('</ul>')
        e.text = '\n'.join(html)
    else:
        e = SubElement(root, '{%s}content' % namespaces['atom'])
        e.set('type', 'html')
        e.text = "No Content"
    # entry/link[@rel='self'] == URI-R
    self.make_link(root, 'self', rem._uri_, g)
    # entry/link[@rel='ore:describes'] == URI-A
    self.make_link(root, namespaces['ore']['describes'], aggr._uri_, g)
    ### These are generated automatically in merge_graphs
    # entry/published == ReM's dcterms:created
    if rem._dcterms.created:
        e = SubElement(root, '{%s}published' % namespaces['atom'])
        c = rem._dcterms.created[0]
        md = str(c)
        if md.find('Z') == -1:
            # append Z
            md += "Z"
        e.text = md
        self.done_triples.append(
            (rem._uri_, namespaces['dcterms']['created'], c))
    # entry/updated == ReM's dcterms:modified (falling back to "now")
    e = SubElement(root, '{%s}updated' % namespaces['atom'])
    if rem._dcterms.modified:
        c = rem._dcterms.modified[0]
        md = str(c)
        if md.find('Z') == -1:
            # append Z
            md += "Z"
        e.text = str(md)
        self.done_triples.append(
            (rem._uri_, namespaces['dcterms']['modified'], c))
    else:
        e.text = now()
    # entry/rights == ReM's dc:rights
    if rem._dc.rights:
        e = SubElement(root, '{%s}rights' % namespaces['atom'])
        r = rem._dc.rights[0]
        e.text = str(r)
        self.done_triples.append(
            (rem._uri_, namespaces['dc']['rights'], r))
    # entry/source/author == ReM's dcterms:creator
    if rem._dcterms.creator:
        # Should at least be our generator! (right?)
        src = SubElement(root, '{%s}source' % namespaces['atom'])
        for who in rem._dcterms.creator:
            e = SubElement(src, '{%s}author' % namespaces['atom'])
            agent = rem._agents_[who]
            self.make_agent(e, agent)
            self.done_triples.append(
                (rem._uri_, namespaces['dcterms']['creator'], agent._uri_))
        for who in rem._dcterms.contributor:
            e = SubElement(src, '{%s}contributor' % namespaces['atom'])
            agent = rem._agents_[who]
            self.make_agent(e, agent)
            self.done_triples.append(
                (rem._uri_, namespaces['dcterms']['contributor'],
                 agent._uri_))
        e = SubElement(src, '{%s}generator' % namespaces['atom'],
                       uri=str(libraryUri), version=str(libraryVersion))
        e.text = str(libraryName)
    # Remove aggregation, resource map props already done
    # All of agg res needs to be done
    for (r, p) in aggr.resources:
        self.make_link(root, namespaces['ore']['aggregates'], r.uri, g)
        self.done_triples.append(
            (aggr._uri_, namespaces['ore']['aggregates'], r._uri_))
    # Now create ore:triples
    # and populate with rdf/xml
    trips = SubElement(root, '{%s}triples' % namespaces['ore'])
    self.generate_rdf(trips, g)
    data = etree.tostring(root, pretty_print=True)
    #data = data.replace('\n', '')
    #data = self.spacesub.sub('', data)
    uri = str(rem._uri_)
    self.done_triples = []
    return ReMDocument(uri, data, format='atom', mimeType=self.mimeType)
def serialize(self, rem, page=-1):
    """Serialise a Resource Map and its Aggregation as a single Atom entry.

    Builds an atom:entry carrying the ReM identifier, the aggregation's
    title/authors/types/links, a representative content element, source
    metadata, one ore:aggregates link per aggregated resource, and an
    ore:triples section holding the remaining RDF.  Triples already
    emitted as Atom constructs are tracked in self.done_triples (reset
    before returning).  Returns a pretty-printed ReMDocument.
    """
    aggr = rem._aggregation_
    g = self.merge_graphs(rem)
    # make nsmap better
    nm = g.namespace_manager
    nsmap = {'atom' : str(namespaces['atom'])}
    poss = uniq(g.predicates()) + uniq(g.objects(None, RDF.type))
    for pred in poss:
        pf,ns,l = nm.compute_qname(pred)
        nsmap[pf] = ns
    root = Element("{%s}entry" % namespaces['atom'], nsmap=nsmap)
    # entry/id == tag for entry == ReM dc:identifier
    # if not exist, generate Yet Another uuid
    e = SubElement(root, '{%s}id' % namespaces['atom'])
    if rem._dc.identifier:
        dcid = rem._dc.identifier[0]
        e.text = str(dcid)
        self.done_triples.append((rem._uri_, namespaces['dc']['identifier'], dcid))
    else:
        e.text = "urn:uuid:%s" % gen_uuid()
    # entry/title == Aggr's dc:title (falling back to dcterms:title)
    title = aggr._dc.title
    tns = 'dc'
    if not title:
        title = aggr._dcterms.title
        tns = 'dcterms'
    if not title:
        raise OreException("Atom Serialisation requires title on aggregation")
    else:
        e = SubElement(root, '{%s}title' % namespaces['atom'])
        dctit = title[0]
        e.text = str(dctit)
        self.done_triples.append((aggr._uri_, namespaces[tns]['title'], dctit))
    # entry/author == Aggr's dcterms:creator
    for who in aggr._dcterms.creator:
        e = SubElement(root, '{%s}author' % namespaces['atom'])
        agent = aggr._agents_[who]
        self.make_agent(e, agent)
        self.done_triples.append((aggr._uri_, namespaces['dcterms']['creator'], agent._uri_))
    # entry/contributor == Aggr's dcterms:contributor
    for bn in aggr._dcterms.contributor:
        e = SubElement(root, '{%s}contributor' % namespaces['atom'])
        # NOTE(review): this indexes with `who`, the stale variable from
        # the creator loop above, not this loop's `bn`; the feed-style
        # serializer in this file uses the contributor key here — likely
        # a copy/paste bug to confirm.
        agent = aggr._agents_[who]
        self.make_agent(e, agent)
        self.done_triples.append((aggr._uri_, namespaces['dcterms']['contributor'], agent._uri_))
    # entry/category[@scheme="(magic)"][@term="(datetime)"]
    for t in aggr._dcterms.created:
        t = t.strip()
        e = SubElement(root, '{%s}category' % namespaces['atom'], term=str(t),
                       scheme="http://www.openarchives.org/ore/terms/datetime/created")
    for t in aggr._dcterms.modified:
        t = t.strip()
        e = SubElement(root, '{%s}category' % namespaces['atom'], term=str(t),
                       scheme="http://www.openarchives.org/ore/terms/datetime/modified")
    # entry/category == Aggr's rdf:type
    for t in aggr._rdf.type:
        e = SubElement(root, '{%s}category' % namespaces['atom'], term=str(t))
        try:
            scheme = list(g.objects(t, namespaces['rdfs']['isDefinedBy']))[0]
            e.set('scheme', str(scheme))
            self.done_triples.append((t, namespaces['rdfs']['isDefinedBy'], scheme))
        except:
            pass
        try:
            label = list(g.objects(t, namespaces['rdfs']['label']))[0]
            e.set('label', str(label))
            self.done_triples.append((t, namespaces['rdfs']['label'], label))
        except:
            pass
        self.done_triples.append((aggr._uri_, namespaces['rdf']['type'], t))
    # entry/summary
    if aggr._dc.description:
        e = SubElement(root, '{%s}summary' % namespaces['atom'])
        desc = aggr._dc.description[0]
        e.text = str(desc)
        self.done_triples.append((aggr._uri_, namespaces['dc']['description'], desc))
    # All aggr links (anything not already covered above):
    done = [namespaces['rdf']['type'],
            namespaces['ore']['aggregates'],
            namespaces['dcterms']['creator'],
            namespaces['dcterms']['contributor'],
            namespaces['dc']['title'],
            namespaces['dc']['description']
            ]
    for (p, o) in g.predicate_objects(aggr.uri):
        if not p in done:
            if isinstance(o, URIRef):
                self.make_link(root, p, o, g)
                self.done_triples.append((aggr._uri_, p, o))
    # entry/content // link[@rel="alternate"]
    # Do we have a splash page?
    altDone = 0
    # NOTE(review): `_rdf._type` (leading underscore) differs from the
    # `r._rdf.type` accesses below — confirm it is the intended accessor.
    atypes = aggr._rdf._type
    possAlts = []
    for (r, p) in aggr.resources:
        mytypes = r._rdf.type
        if namespaces['eurepo']['humanStartPage'] in mytypes:
            altDone = 1
            self.make_link(root, 'alternate', r.uri, g)
            break
        # check if share non Aggregation type
        # eg aggr == article and aggres == article, likely
        # to be good alternate
        for m in mytypes:
            if m != namespaces['ore']['Aggregation'] and \
               m in atypes:
                possAlts.append(r.uri)
    if not altDone and possAlts:
        # XXX more intelligent algorithm here
        self.make_link(root, '{%s}alternate' % namespaces['atom'], possAlts[0], g)
        altDone = 1
    if not altDone and build_html_atom_content:
        e = SubElement(root, '{%s}content' % namespaces['atom'])
        e.set('type', 'html')
        # make some representative html
        # this can get VERY LONG so default to not doing this
        html = ['<ul>']
        for (r, p) in aggr.resources:
            html.append('<li><a href="%s">%s</a></li>' % (r.uri, r.title[0]))
        html.append('</ul>')
        e.text = '\n'.join(html)
    else:
        e = SubElement(root, '{%s}content' % namespaces['atom'])
        e.set('type', 'html')
        e.text = "No Content"
    # entry/link[@rel='self'] == URI-R
    self.make_link(root, 'self', rem._uri_, g)
    # entry/link[@rel='ore:describes'] == URI-A
    self.make_link(root, namespaces['ore']['describes'], aggr._uri_, g)
    ### These are generated automatically in merge_graphs
    # entry/published == ReM's dcterms:created
    if rem._dcterms.created:
        e = SubElement(root, '{%s}published' % namespaces['atom'])
        c = rem._dcterms.created[0]
        md = str(c)
        if md.find('Z') == -1:
            # append Z
            md += "Z"
        e.text = md
        self.done_triples.append((rem._uri_, namespaces['dcterms']['created'], c))
    # entry/updated == ReM's dcterms:modified (falling back to "now")
    e = SubElement(root, '{%s}updated' % namespaces['atom'])
    if rem._dcterms.modified:
        c = rem._dcterms.modified[0]
        md = str(c)
        if md.find('Z') == -1:
            # append Z
            md += "Z"
        e.text = str(md)
        self.done_triples.append((rem._uri_, namespaces['dcterms']['modified'], c))
    else:
        e.text = now()
    # entry/rights == ReM's dc:rights
    if rem._dc.rights:
        e = SubElement(root, '{%s}rights' % namespaces['atom'])
        r = rem._dc.rights[0]
        e.text = str(r)
        self.done_triples.append((rem._uri_, namespaces['dc']['rights'], r))
    # entry/source/author == ReM's dcterms:creator
    if rem._dcterms.creator:
        # Should at least be our generator! (right?)
        src = SubElement(root, '{%s}source' % namespaces['atom'])
        for who in rem._dcterms.creator:
            e = SubElement(src, '{%s}author' % namespaces['atom'])
            agent = rem._agents_[who]
            self.make_agent(e, agent)
            self.done_triples.append((rem._uri_, namespaces['dcterms']['creator'], agent._uri_))
        for who in rem._dcterms.contributor:
            e = SubElement(src, '{%s}contributor' % namespaces['atom'])
            agent = rem._agents_[who]
            self.make_agent(e, agent)
            self.done_triples.append((rem._uri_, namespaces['dcterms']['contributor'], agent._uri_))
        e = SubElement(src, '{%s}generator' % namespaces['atom'],
                       uri=str(libraryUri), version=str(libraryVersion))
        e.text = str(libraryName)
    # Remove aggregation, resource map props already done
    # All of agg res needs to be done
    for (r, p) in aggr.resources:
        self.make_link(root, namespaces['ore']['aggregates'], r.uri, g)
        self.done_triples.append((aggr._uri_, namespaces['ore']['aggregates'], r._uri_))
    # Now create ore:triples
    # and populate with rdf/xml
    trips = SubElement(root, '{%s}triples' % namespaces['ore'])
    self.generate_rdf(trips, g)
    data = etree.tostring(root, pretty_print=True)
    #data = data.replace('\n', '')
    #data = self.spacesub.sub('', data)
    uri = str(rem._uri_)
    self.done_triples = []
    return ReMDocument(uri, data, format='atom', mimeType=self.mimeType)
def run_miner():
    """Run the main miner loop.

    Repeatedly (forever):
      1. builds a fresh local chain seeded with a hard-coded genesis-successor
         block (added with cheat=True, which presumably skips proof-of-work
         validation -- TODO confirm against Blockchain.add_block),
      2. fetches the server's chain and pending transactions,
      3. pre-builds `1333 + server.head.height` cheat blocks, each carrying a
         single self-paying mining reward -- presumably to out-length the
         server's chain in a longest-chain race (verify against server rules),
      4. mines one real block on top and submits it to the server via the
         'add' route.

    Uses module globals `blockchain`, `public`, `private` (rebinding
    `blockchain` each iteration).  NOTE: Python 2 only (`print` statements).
    """
    global blockchain
    global public
    global private
    new_reward = REWARD  # NOTE(review): never read after assignment
    while True:
        # Load transaction queue and blockchain from server.
        new = []  # NOTE(review): unused
        blockchain = Blockchain()
        # Seed the local chain with a fixed block: empty transaction list,
        # hard-coded nonce, parented on the genesis block's hash.
        blockchain.add_block(
            Block(
                timestamp=datetime.datetime.now(),
                transactions=[],
                previous_hash=get_genisis().hash_block(),
                nonce=12834
            ),
            cheat=True
        )
        server = load_blockchain()
        txns = load_transactions()
        # Is this _the_ new block?
        # or did the server swoop us :(
        new_chain = load_blockchain()  # NOTE(review): second fetch, result unused
        # Build enough blocks to exceed the server's current chain height.
        num_blocks = 1333 + server.head.height
        for i in range (num_blocks):
            # Each cheat block contains exactly one reward transaction
            # paying REWARD coins from "mined" to our own public key.
            reward = Transaction(
                id = gen_uuid(),
                owner = "mined",
                receiver = public,
                coins = REWARD,
                signature = None
            )
            reward.signature = sign(reward.comp(), private)
            txns = [reward]
            b = Block(
                timestamp = datetime.datetime.now(),
                transactions = txns,
                previous_hash = blockchain.head.hash_block()
            )
            blockchain.add_block(b, cheat=True)
        # Let's mine this block.
        reward = Transaction(
            id = gen_uuid(),
            owner = "mined",
            receiver = public,
            coins = REWARD,
            signature = None
        )
        reward.signature = sign(reward.comp(), private)
        txns = [reward]
        # Construct a new block.
        # Parent is `b`, the last cheat block from the loop above
        # (NameError if num_blocks == 0 -- assumes server height >= -1333).
        b = Block(
            timestamp = datetime.datetime.now(),
            transactions = txns,
            previous_hash = b.hash_block()
        )
        print(blockchain.head.height)
        # Genuinely mine this final block (proof-of-work search).
        mine_till_found(b)
        # WE MINED THIS BLOCK YAY.
        # AND WE WIN.
        resp = get_route('add', data=str(b))
        if resp['success']:
            print "Block added!"
            # Remove the submitted transactions from the pending queue.
            delete_queue(txns)
        else:
            print "Couldn't add block:", resp['message']
def serialize(self, rem, page=-1):
    """Serialize a Resource Map (ReM) and its Aggregation to an Atom feed.

    Builds an lxml `feed` element describing the aggregation (id, title,
    authors, categories, links), the resource map itself, and one `entry`
    per aggregated resource, then returns a ReMDocument wrapping the
    whitespace-stripped XML string.

    :param rem: the resource map to serialize; its `_aggregation_` and
        agent/metadata accessors are read throughout.
    :param page: accepted but never used in this body -- TODO confirm
        whether pagination is handled by a caller/subclass.
    :raises OreException: if the aggregation has no dc:title.
    :raises ValueError: if any aggregated resource has no dc:title.

    Side effects: appends to and then clears `self.done_triples`; mutates
    the module-level `namespaces` dict (deletes the default namespace);
    temporarily sets `_currProxy_` on each resource while emitting its RDF.
    """
    aggr = rem._aggregation_
    # Check entire graph is connected
    g = self.merge_graphs(rem)
    # Atom requires prefixed namespaces; drop any default ('') namespace
    # before using the map for lxml.  (Python 2 only: dict.has_key)
    if namespaces.has_key(''):
        del namespaces[u'']
    root = Element("feed", nsmap=namespaces)
    #namespaces[''] = myNamespace

    ## Aggregation Info
    # feed/id == Aggregation URI
    e = SubElement(root, 'id')
    e.text = str(aggr.uri)
    if not aggr._dc.title:
        raise OreException("Atom Serialisation requires title on aggregation")
    else:
        e = SubElement(root, 'title')
        e.text = str(aggr._dc.title[0])
    if aggr._dc.description:
        e = SubElement(root, 'subtitle')
        e.text = str(aggr._dc.description[0])
    # dcterms:creator/contributor -> atom:author/contributor
    for who in aggr._dcterms.creator:
        e = SubElement(root, 'author')
        agent = aggr._agents_[who]
        self.make_agent(e, agent)
    for bn in aggr._dcterms.contributor:
        e = SubElement(root, 'contributor')
        agent = aggr._agents_[bn]
        self.make_agent(e, agent)
    for t in aggr._ore.similarTo:
        self.make_link(root, 'related', t, g)
    for t in aggr._dcterms.rights:
        self.make_link(root, 'license', t, g)
    # rdf:type -> atom:category, with scheme/label pulled from the graph
    # when rdfs:isDefinedBy / rdfs:label are present (best-effort).
    for t in aggr._rdf.type:
        e = SubElement(root, 'category', term=str(t))
        try:
            scheme = list(g.objects(t, namespaces['rdfs']['isDefinedBy']))[0]
            e.set('scheme', str(scheme))
        except:
            pass
        try:
            label = list(g.objects(t, namespaces['rdfs']['label']))[0]
            e.set('label', str(label))
        except:
            pass
    # Other resource maps for the same aggregation -> rel="alternate"
    orms = []
    for orm in aggr._resourceMaps_:
        if orm != rem:
            self.make_link(root, 'alternate', orm.uri, g)
            orms.append(orm.uri)
    for t in aggr._ore.isDescribedBy:
        # check not in orms
        if not t in orms:
            self.make_link(root, 'alternate', t, g)
    self.generate_rdf(root, aggr)

    ## ReM Info
    self.make_link(root, 'self', rem.uri, g)
    e = SubElement(root, 'updated')
    e.text = now()
    # ReM Author
    if rem._dcterms.creator:
        uri = rem._dcterms.creator[0]
        e = SubElement(root, 'generator', uri=str(uri))
        agent = rem._agents_[uri]
        n = agent._foaf.name[0]
        e.text = str(n)
        # Record the triple as already serialized so generate_rdf skips it.
        self.done_triples.append((uri, namespaces['foaf']['name'], n))
    # if no logo, put in nice ORE icon
    e = SubElement(root, 'icon')
    if aggr._foaf.logo:
        e.text = str(aggr._foaf.logo[0])
    elif rem._foaf.logo:
        e.text = str(rem._foaf.logo[0])
    else:
        e.text = "http://www.openarchives.org/ore/logos/ore_icon.png"
    if rem._dc.rights:
        e = SubElement(root, 'rights')
        e.text = rem._dc.rights[0]
    self.generate_rdf(root, rem)

    ## Process Entries
    # One atom:entry per (resource, proxy) pair in the aggregation.
    for (res, proxy) in aggr._resources_:
        entry = SubElement(root, 'entry')
        # entry/id == proxy URI when available, else a fresh urn:uuid
        e = SubElement(entry, 'id')
        if proxy:
            e.text = str(proxy.uri)
        else:
            e.text = "urn:uuid:%s" % gen_uuid()
        e = SubElement(entry, 'link', rel="alternate", href=str(res.uri))
        # type = dc:format
        fmt = list(g.objects(res.uri, namespaces['dc']['format']))
        if fmt:
            e.set('type', str(fmt[0]))
        if not res._dc.title:
            raise ValueError("All entries must have a title for ATOM serialisation")
        else:
            e = SubElement(entry, 'title')
            e.text = str(res._dc.title[0])
        # Same best-effort category handling as for the aggregation above.
        for t in res._rdf.type:
            e = SubElement(entry, 'category', term=str(t))
            try:
                scheme = list(g.objects(t, namespaces['rdfs']['isDefinedBy']))[0]
                e.set('scheme', str(scheme))
            except:
                pass
            try:
                label = list(g.objects(t, namespaces['rdfs']['label']))[0]
                e.set('label', str(label))
            except:
                pass
        for a in res._dcterms.creator:
            e = SubElement(entry, 'author')
            agent = res._agents_[a]
            self.make_agent(e, agent)
        for a in res._dcterms.contributor:
            e = SubElement(entry, 'contributor')
            agent = res._agents_[a]
            self.make_agent(e, agent)
        if res._dcterms.abstract:
            e = SubElement(entry, 'summary')
            e.text = str(res._dcterms.abstract[0])
        # Not sure about this at object level?
        for oa in res._ore.isAggregatedBy:
            if oa != aggr._uri_:
                e = SubElement(entry, 'link', rel="related", href=str(oa))
        e = SubElement(entry, 'updated')
        e.text = now()
        if proxy and proxy._ore.lineage:
            e = SubElement(entry, 'link', rel="via", href=str(proxy._ore.lineage[0]))
        # generate_rdf reads _currProxy_ while emitting the resource's
        # triples; restore to None afterwards.
        res._currProxy_ = proxy
        self.generate_rdf(entry, res)
        res._currProxy_ = None
    # Serialize and strip newlines / redundant inter-tag whitespace.
    data = etree.tostring(root)
    data = data.replace('\n', '')
    data = self.spacesub.sub('', data)
    uri = str(rem._uri_)
    # Reset the done-triples bookkeeping for the next serialization.
    self.done_triples = []
    return ReMDocument(uri, data)
wallets = (generate_keys(), generate_keys()) json.dump({ "public": wallets[0][1], "private": wallets[0][0] }, open("walletA.json", "w")) json.dump({ "public": wallets[1][1], "private": wallets[1][0] }, open("walletB.json", "w")) print(repr(wallets)) blocksToSubmit = [] #mine initial block reward = Transaction(id=gen_uuid(), owner="mined", receiver=wallets[0][1], coins=REWARD, signature="") lastBlock = construct_and_mine([reward], get_genisis()) blocksToSubmit.append(lastBlock) balances = (10, 0) for i in range(ITERS): weight = randint(10, 20) txns = [ paySomeone(wallets[0][1], wallets[0][0], wallets[1][1], balances[0]) for _ in range(weight) ]