def get(self):
    """Twitter OAuth callback: swap the request token for an access
    token and render a page showing the credentials plus binding steps."""
    twitter = AppEngineTwitter()
    twitter.set_oauth(OAUTH_KEY, OAUTH_SECRET)
    req_token = self.request.get('oauth_token')

    # Recover the token secret persisted when the request token was issued.
    query = OAuthRequestToken.all()
    query.filter('token = ', req_token)
    stored = query.fetch(1)
    # NOTE(review): assumes a matching row exists — stored[0] raises
    # IndexError otherwise; confirm the OAuth flow guarantees this.
    acc_token = twitter.exchange_oauth_tokens(req_token, stored[0].secret)

    if twitter.verify() == 200:
        name = simplejson.loads(twitter.last_response.content)['screen_name']
        out = self.response.out
        out.write(HEADER)
        out.write(JS)
        out.write('''
<div id="container">
<div id="content">
<h2>Congratulation!</h2>
<P>Dear %s,you have got the Access Key and Access Secret.<strong>Please do keep them safely </strong>and finish the binding by the following steps:</p>''' % (name.encode("utf-8")))
        out.write("Access Key:")
        out.write(acc_token['oauth_token'])
        out.write("<br />Access Secret: ")
        out.write(acc_token['oauth_token_secret'])
        # One-click copy form holding the "-v <key> <secret>" bind command.
        out.write('''
<form name="it">
<textarea name="select1" rows="2" cols="120">-v ''' + acc_token['oauth_token'] + " " + acc_token['oauth_token_secret'] + '''</textarea>
<input onclick="copyit('it.select1')" type="button" value="Copy" name="cpy"></input>
</form>
''')
        # Only advertise the robot while the user counter is below capacity.
        counter_service = DBService()
        counter_service.setQuery('TwiCount')
        bound_users = counter_service.getQuery().count()
        if bound_users < 250:
            foot = ('<p>Next Step: Add <b>' + BOT + '</b> to your contact lists on GTalk sending the following command to the TwiTalker robot<br /><b>-v AccessKey AccessSecret</b>(Or just copy the verify infomation in the textarea and send it directly to the TwiTalker robot)</p>')
        else:
            foot = '<p>There is no TwiTalker robot available now.</p> '
        out.write(foot)
        out.write('<p>Visit <a href="faq">FAQ</a> if you need more help</p>')
        out.write(FOOTER)
def handler(event, context):
    """Authorize the requesting user for a REST-style path.

    Reads ``request.path`` / ``request.method`` plus ``user-id`` (and, for
    project routes, ``project-id``) from *event*.

    Returns ``{"status": 200}`` when authorized, 404 when the user is
    unknown, 403 when the user exists but lacks permission, and 500 for
    paths this authorizer does not route.
    """
    db_service = DBService()
    path = event["request"]["path"].split("/")
    path.pop(0)  # path starts with "/", so the first segment is empty
    method = event["request"]["method"]
    authorized = False
    status = 500
    if len(path) == 1:
        # Top-level routes only require that the user exists.
        if path[0] == "projects" or path[0] == "find_user":
            authorized = db_service.user_exists(event["user-id"])
    elif len(path) > 1:
        if path[0] == "projects":
            if len(path) == 2:
                # /projects/<id>: deletion requires ownership, reading
                # only requires edit access.
                if method == "DELETE":
                    authorized = db_service.user_owns_project(
                        event["user-id"], event["project-id"])
                elif method == "GET":
                    authorized = db_service.user_can_edit_project(
                        event["user-id"], event["project-id"])
            elif len(path) == 3:
                if path[2] == "collabs":
                    # Managing collaborators requires ownership; listing
                    # them only requires edit access.
                    if method == "POST" or method == "DELETE":
                        authorized = db_service.user_owns_project(
                            event["user-id"], event["project-id"])
                    elif method == "GET":
                        authorized = db_service.user_can_edit_project(
                            event["user-id"], event["project-id"])
                elif path[2] in ("labels", "meta", "results", "searches"):
                    authorized = db_service.user_can_edit_project(
                        event["user-id"], event["project-id"])
            elif len(path) > 3:
                authorized = db_service.user_can_edit_project(
                    event["user-id"], event["project-id"])
    if authorized:
        status = 200
    elif not db_service.user_exists(event["user-id"]):
        status = 404
    else:
        # BUG FIX: an existing-but-unauthorized user previously fell
        # through with the initial 500; the calling Lambda maps 403 to
        # "Permission denied", so report the denial explicitly.
        status = 403
    return {"status": status}
def get(self):
    """Twitter OAuth callback (Chinese UI): swap the request token for an
    access token and render the credentials plus binding steps."""
    twitter = AppEngineTwitter()
    twitter.set_oauth(OAUTH_KEY, OAUTH_SECRET)
    req_token = self.request.get('oauth_token')

    # Recover the token secret persisted when the request token was issued.
    query = OAuthRequestToken.all()
    query.filter('token = ', req_token)
    stored = query.fetch(1)
    # NOTE(review): assumes a matching row exists — stored[0] raises
    # IndexError otherwise; confirm the OAuth flow guarantees this.
    acc_token = twitter.exchange_oauth_tokens(req_token, stored[0].secret)

    if twitter.verify() == 200:
        name = simplejson.loads(twitter.last_response.content)['screen_name']
        out = self.response.out
        out.write(HEADER)
        out.write(JS)
        out.write('''
<div id="container">
<div id="content">
<h2>验证成功:)</h2>
<P>恭喜你,%s,你已经成功授权TwiTalker并获取验证码和密钥,请<strong>务必首先保管好它们</strong>,并按步骤在Gtalk上添加TwiTalker帐号并完成绑定</p>''' % (name.encode("utf-8")))
        out.write("验证码: ")
        out.write(acc_token['oauth_token'])
        out.write("<br />密钥: ")
        out.write(acc_token['oauth_token_secret'])
        # One-click copy form holding the "-v <key> <secret>" bind command.
        out.write('''
<form name="it">
<textarea name="select1" rows="2" cols="120">-v ''' + acc_token['oauth_token'] + " " + acc_token['oauth_token_secret'] + '''</textarea>
<input onclick="copyit('it.select1')" type="button" value="复制绑定信息" name="cpy"></input>
</form>
''')
        # Only advertise the robot while the user counter is below capacity.
        counter_service = DBService()
        counter_service.setQuery('TwiCount')
        bound_users = counter_service.getQuery().count()
        if bound_users < 250:
            foot = ('<p>下一步: 推荐在GTalk添加' + BOT + '为好友,并向该帐号发送以下格式绑定信息完成绑定:<br /><b>-v 空格 验证码 空格 密钥</b> (也可以直接复制框中信息并向机器人帐号发送)' + '</p>')
        else:
            foot = '<p>目前没有可用机器人</p> '
        out.write(foot)
        out.write('<p>如果你对验证过程仍有疑问,请参考<a href="faq">帮助页面</a></p>')
        out.write(FOOTER)
def make_app():
    """Build the Tornado application.

    Routes /time-entries/* to the task API and everything else to the
    static file handler (serving index.html by default).
    """
    task_route = (r"/time-entries/(.*)", TasksHandler,
                  dict(db_service=DBService()))
    static_route = (r"/(.*)", tornado.web.StaticFileHandler, {
        'path': settings.STATIC_PATH,
        'default_filename': 'index.html'
    })
    return tornado.web.Application(handlers=[task_route, static_route])
def export_blocks(ctx, from_, to, file):
    """Export the blockchain to FILE.

    The chain will be stored in binary format, i.e. as a concatenated list
    of RLP encoded blocks, starting with the earliest block.

    If the file already exists, the additional blocks are appended.
    Otherwise, a new file is created.

    Use - to write to stdout.
    """
    app = EthApp(ctx.obj['config'])
    # Wire up the services needed to read blocks from the local DB.
    for service in (DBService, AccountsService, ChainService):
        service.register_with_app(app)

    # Default to the full chain when bounds are omitted.
    if from_ is None:
        from_ = 0
    head_number = app.services.chain.chain.head.number
    if to is None:
        to = head_number

    # Validate the requested range before touching the database.
    if from_ < 0:
        log.fatal('block numbers must not be negative')
        sys.exit(1)
    if to < from_:
        log.fatal('"to" block must be newer than "from" block')
        sys.exit(1)
    if to > head_number:
        log.fatal('"to" block not known (current head: {})'.format(head_number))
        sys.exit(1)

    log.info('Starting export')
    for number in xrange(from_, to + 1):
        log.debug('Exporting block {}'.format(number))
        # Progress marker every 50k blocks.
        if (number - from_) % 50000 == 0:
            log.info('Exporting block {} to {}'.format(number,
                                                       min(number + 50000, to)))
        block_hash = app.services.chain.chain.index.get_block_by_number(number)
        # bypass slow block decoding by directly accessing db
        file.write(app.services.db.get(block_hash))
    log.info('Export complete')
def export_blocks(ctx, from_, to, file):
    """Export the blockchain to FILE.

    The chain will be stored in binary format, i.e. as a concatenated list
    of RLP encoded blocks, starting with the earliest block.

    If the file already exists, the additional blocks are appended.
    Otherwise, a new file is created.

    Use - to write to stdout.
    """
    app = EthApp(ctx.obj['config'])
    DBService.register_with_app(app)
    AccountsService.register_with_app(app)
    ChainService.register_with_app(app)

    chain = app.services.chain.chain
    head_number = chain.head.number
    # Fill in defaults: genesis up to the current head.
    first = 0 if from_ is None else from_
    last = head_number if to is None else to

    # Reject impossible ranges up front.
    if first < 0:
        log.fatal('block numbers must not be negative')
        sys.exit(1)
    if last < first:
        log.fatal('"to" block must be newer than "from" block')
        sys.exit(1)
    if last > head_number:
        log.fatal('"to" block not known (current head: {})'.format(head_number))
        sys.exit(1)

    log.info('Starting export')
    for number in xrange(first, last + 1):
        log.debug('Exporting block {}'.format(number))
        # Emit a coarse progress message every 50k blocks.
        if (number - first) % 50000 == 0:
            log.info('Exporting block {} to {}'.format(number,
                                                       min(number + 50000, last)))
        block_hash = chain.index.get_block_by_number(number)
        # bypass slow block decoding by directly accessing db
        block_rlp = app.services.db.get(block_hash)
        file.write(block_rlp)
    log.info('Export complete')
def main():
    """Poll MQTT electricity-meter readings and print/persist them."""
    # ---- MQTT section ----
    publisher = Publisher(host='raspberrypi', port=1883)
    publisher.start_client()
    subscriber = Subscriber(host='raspberrypi', port=1883)
    subscriber.start_client()
    old_counter = subscriber.counter
    # NOTE(review): the topic filter ends with a space, so it will not
    # match "/cellar/electricity_meter/#" — confirm whether intentional.
    subscriber.subscribe("/cellar/electricity_meter/# ")

    # ---- database section ----
    # NOTE(review): credentials are hard-coded in the connection URI;
    # move them to configuration / environment variables.
    db_service = DBService(
        db_address='mongodb+srv://angrit:[email protected]/smarthome?retryWrites=true&w=majority')

    # The subscriber bumps `counter` on every received message; poll it.
    while True:
        if old_counter < subscriber.counter:
            topic = subscriber.topic
            label = {
                "/cellar/electricity_meter/X": "magnetic value X : ",
                "/cellar/electricity_meter/Y": "magnetic value Y : ",
                "/cellar/electricity_meter/Z": "magnetic value Z : ",
            }.get(topic)
            if label is not None:
                print(label, subscriber.payload)
                # Persistence disabled for now:
                # db_service.save_data(topic=subscriber.topic, payload=float(subscriber.payload.decode('utf8')))
                subscriber.topic = None
            # (Commented-out bedroom/corner sensor handlers removed;
            # recover them from version control if needed.)
            old_counter = subscriber.counter
        sleep(0.25)
def _build_and_pickle_index():
    """Build the inverted index from the crawl DB and cache it to disk.

    Writes the raw index mapping to ``index.pickle`` and returns it so
    callers do not have to rebuild or reload it.
    """
    db_service = DBService("msuspider.db")
    inverted_index = InvertedIndex(db_service.get_texts())
    inverted_index.create_index()
    with open('index.pickle', 'wb') as f:
        pickle.dump(inverted_index.index, f)
    return inverted_index.index


def main():
    """(Re)build the uncompressed, in-memory and compressed indexes as
    needed, then sanity-check one posting list against the raw index.

    Fix: the build-index-and-pickle sequence was duplicated verbatim in
    the first two branches; it now lives in ``_build_and_pickle_index``.
    """
    # Build the on-disk byte-converted index if it is missing.
    if not os.path.exists('./uncompressed_index'):
        index = _build_and_pickle_index()
        id_service = IDService(index)
        id_service.create_postings_ids()
        id_service.create_word_ids()
        converter = IndexToByteConverter(index)
        converter.build()

    # Load the in-memory index, rebuilding it only when the cache is gone.
    if not os.path.exists('./index.pickle'):
        inv_index = _build_and_pickle_index()
    else:
        with open('index.pickle', 'rb') as f:
            inv_index = pickle.load(f)

    # Build the compressed index if it is missing.
    if not os.path.exists('./compressed_index'):
        index_compressor = IndexCompressor(inv_index)
        index_compressor.build_delta()

    # Sanity check: byte-converted postings should match the raw index.
    converter = IndexToByteConverter()
    print(converter.get_word_postings('ректор')[1])
    print(inv_index['ректор'][1])
def main():
    """Entry point: configure logging, register seed URLs, run the spider."""
    logging.basicConfig(filename='./log/crawler.log',
                        level=logging.DEBUG,
                        format='%(asctime) -15s %(levelname)s:%(message)s')
    args = build_parser().parse_args()
    db_service = DBService(user=args.user,
                           password=args.password,
                           host=args.host,
                           dbname=args.database)
    store = Store(args.root_dir)

    # Collect the network location of every parseable seed URL; malformed
    # URLs are logged and skipped rather than aborting the run.
    domains = []
    for url in args.urls:
        try:
            parsed = urlparse(url)
        except ValueError as e:
            logging.warning(str(e))
            continue
        domains.append(parsed.netloc)

    db_service.add_base(domains)
    db_service.add_url(args.urls)
    Spider(db_service, store).spider()
import json
import os

from book import Book
from db_service import DBService

TABLE_NAME = os.environ.get('TABLE', "books")

db_service = DBService(TABLE_NAME)

# Dispatch table: HTTP method -> CRUD operation on the books table.
strategy = {
    "POST": db_service.create_book,
    "PUT": db_service.update_book,
    "GET": db_service.get_book,
    "DELETE": db_service.delete_book,
}


def handler(event, _):
    """Lambda entry point: route the request to the matching CRUD op.

    Builds a Book from the JSON body, dispatches on ``httpMethod`` and
    returns an API-Gateway-style ``{"statusCode", "body"}`` response.
    """
    body = json.loads(event["body"])
    print(body)
    book = create_book(body)
    operation = strategy.get(event.get("httpMethod"))
    # BUG FIX: an unsupported/missing method previously made this a
    # TypeError ("'NoneType' object is not callable") and surfaced as a
    # 500; reject it explicitly instead.
    if operation is None:
        return {"statusCode": 405, "body": json.dumps("Method not allowed")}
    resp = operation(book)
    return {"statusCode": 200, "body": json.dumps(resp)}


def create_book(body):
    """Build a Book from the request body; ``description`` is optional."""
    author = body["author"]
    title = body["title"]
    description = body.get("description")
    return Book(author=author, title=title, description=description)
def import_blocks(ctx, file):
    """Import blocks from FILE.

    Blocks are expected to be in binary format, i.e. as a concatenated list
    of RLP encoded blocks.

    Blocks are imported sequentially. If a block can not be imported (e.g.
    because it is badly encoded, it is in the chain already or its parent is
    not in the chain) it will be ignored, but the process will continue.
    Sole exception: If neither the first block nor its parent is known,
    importing will end right away.

    Use - to read from stdin.
    """
    app = EthApp(ctx.obj['config'])
    for service in (DBService, AccountsService, ChainService):
        service.register_with_app(app)
    chain = app.services.chain
    assert chain.block_queue.empty()
    raw = file.read()
    app.start()

    def iter_blocks():
        """Yield TransientBlocks decoded from `raw` (None for bad ones)."""
        offset = 0
        total = len(raw)
        while offset < total:
            try:
                item, next_offset = rlp.codec.consume_item(raw, offset)
            except rlp.DecodingError:
                # We cannot resynchronise after a bad encoding — abort.
                log.fatal('invalid RLP encoding', byte_index=offset)
                sys.exit(1)
            try:
                if not isinstance(item, list) or len(item) != 3:
                    raise rlp.DeserializationError('', item)
                yield eth_protocol.TransientBlock(item)
            except (IndexError, rlp.DeserializationError):
                # A single malformed block does not stop the import.
                log.warning('not a valid block', byte_index=offset)
                yield None
            offset = next_offset

    log.info('importing blocks')

    # Bail out early if the first block cannot link into our chain at all.
    first_block = next(iter_blocks())
    if first_block is None:
        log.fatal('first block invalid')
        sys.exit(1)
    linked = (chain.knows_block(first_block.header.hash)
              or chain.knows_block(first_block.header.prevhash))
    if not linked:
        log.fatal('unlinked chains',
                  newest_known_block=chain.chain.head.number,
                  first_unknown_block=first_block.header.number)
        sys.exit(1)

    # Queue every decodable block; skip the invalid ones.
    for position, block in enumerate(iter_blocks()):
        if block is None:
            log.warning('skipping block', number_in_file=position)
            continue
        log.debug('adding block to queue',
                  number_in_file=position,
                  number_in_chain=block.header.number)
        chain.add_block(block, None)  # None stands in for the peer proto

    # Let the chain service drain its queue before shutting down.
    while not chain.block_queue.empty():
        gevent.sleep()
    app.stop()
    log.info('import finished', head_number=chain.chain.head.number)
return [ "%.2f" % (1 - sum), "%.2f" % bet_1, "%.2f" % bet_2, "%.2f" % coefs[0], "%.2f" % coefs[1] ] return [] def scrap_matches(matches_url, game): matches = list() urls = get_urls(matches_url, root_url) for url in urls: match = scrap_match(url, game) if match: matches.append(match) return matches # Test if __name__ == "__main__": dota_2_matches = scrap_matches(dota_matches_url, DOTA) csgo_matches = scrap_matches(cs_matches_url, CSGO) dota_2_matches.extend(csgo_matches) db_service = DBService() db_service.update_matches(dota_2_matches) print(db_service.get_matches(DOTA)) print(db_service.get_matches(CSGO))
def Main():
    """Refresh the database with the latest aggregated graph-API data."""
    aggregator = Aggregator(graph_api['TOKEN'])
    aggregator.update(DBService())
def handler(event, context):
    """Route a validated API event to the matching DBService operation.

    Flow: validate the event shape, delegate authorization to the
    'slrauthorizeuser-fabi' Lambda, then dispatch on path depth and HTTP
    method. Returns an API-Gateway-style {"statusCode", "body"} dict;
    unauthorized or malformed requests short-circuit with 4xx/5xx.
    """
    validator = EventValidator(event)
    if not validator.validate_event():
        return {"statusCode": 400, "body": "Bad Request"}
    path = event["request"]["path"].split("/")
    path.pop(0)  # path starts with "/", so the first segment is empty
    method = event["request"]["method"]
    # Build the request for the authorization Lambda; project routes also
    # carry the project id.
    auth_request = {
        "request": event["request"],
        "user-id": event["queryparams"]["user-id"]
    }
    if len(path) >= 2:
        auth_request["project-id"] = event["urlparams"]["project-id"]
    boto3_client = boto3.client('lambda', region_name='eu-central-1')
    auth_response = boto3_client.invoke(FunctionName='slrauthorizeuser-fabi',
                                        InvocationType='RequestResponse',
                                        Payload=json.dumps(auth_request))
    auth_response = json.loads(auth_response["Payload"].read().decode())
    # Map the authorizer's status straight onto an HTTP error response.
    if auth_response["status"] == 403:
        return {"statusCode": 403, "body": "Permission denied"}
    elif auth_response["status"] == 404:
        return {"statusCode": 404, "body": "User not found"}
    elif auth_response["status"] == 500:
        return {"statusCode": 500, "body": "Server Error"}
    db_service = DBService()
    url_params = event["urlparams"]
    query_params = event["queryparams"]
    if "payload" in event:
        payload = event["payload"]
    else:
        payload = {}
    result = "Error"  # fallback body when no branch below matches
    if len(path) == 1:
        # /projects or /find_user
        if path[0] == "projects":
            if method == "POST":
                # Handled in different Lambda
                pass
            elif method == "GET":
                # Handled in different Lambda
                pass
        elif path[0] == "find_user":
            # Search by username and/or display name; empty strings are
            # treated the same as absent parameters.
            if "username" in query_params and query_params["username"] != "":
                if "name" in query_params and query_params["name"] != "":
                    result = db_service.find_user(query_params["username"],
                                                  query_params["name"])
                else:
                    result = db_service.find_user(
                        username=query_params["username"])
            elif "name" in query_params and query_params["name"] != "":
                result = db_service.find_user(name=query_params["name"])
    elif len(path) == 3:
        # /projects/<project-id>/<collection>
        if path[2] == "results":
            if method == "GET":
                if "filter" in query_params and query_params["filter"] != "":
                    result = db_service.get_all_results_in_project(
                        url_params["project-id"], query_params["filter"])
                else:
                    result = db_service.get_all_results_in_project(
                        url_params["project-id"])
            elif method == "POST":
                result = db_service.add_result_to_project(
                    url_params["project-id"], payload["result-id"])
        # NOTE(review): plain `if` (not `elif`) — harmless since path[2]
        # holds a single value, but inconsistent with the branches below.
        if path[2] == "collabs":
            if method == "GET":
                result = db_service.get_collabs(url_params["project-id"])
            elif method == "POST":
                result = db_service.add_collab_to_project(
                    url_params["project-id"], payload["user-id"])
            elif method == "DELETE":
                result = db_service.remove_collab_from_project(
                    url_params["project-id"], query_params["del-id"])
        elif path[2] == "labels":
            if method == "GET":
                result = db_service.get_labels_in_project(
                    url_params["project-id"])
            elif method == "POST":
                result = db_service.add_label_to_project(
                    url_params["project-id"], payload)
            elif method == "DELETE":
                result = db_service.remove_label_from_project(
                    url_params["project-id"], query_params["label-id"])
            elif method == "PUT":
                result = db_service.update_label_in_project(
                    url_params["project-id"], query_params["label-id"],
                    payload)
        elif path[2] == "searches":
            if method == "POST":
                result = db_service.add_search_to_project(
                    url_params["project-id"], payload["search-id"],
                    payload["add-results"])
            elif method == "DELETE":
                result = db_service.remove_search_from_project(
                    url_params["project-id"], query_params["search-id"])
        elif path[2] == "meta":
            if method == "PUT":
                result = db_service.change_meta_info(url_params["project-id"],
                                                     payload)
    elif len(path) > 3:
        # /projects/<project-id>/results/<result-id>[/labels|/comments]
        if len(path) == 4 and path[2] == "results":
            if method == "DELETE":
                result = db_service.remove_result_from_project(
                    url_params["project-id"], url_params["result-id"])
            elif method == "GET":
                result = db_service.get_result_in_project(
                    url_params["project-id"], url_params["result-id"])
        elif len(path) > 4:
            if path[4] == "labels":
                if method == "GET":
                    # Not considered necessary
                    pass
                elif method == "POST":
                    result = db_service.add_label_to_result(
                        url_params["project-id"], url_params["result-id"],
                        payload["label-id"])
                elif method == "DELETE":
                    result = db_service.remove_label_from_result(
                        url_params["project-id"], url_params["result-id"],
                        query_params["label-id"])
            elif path[4] == "comments":
                if method == "GET":
                    result = db_service.get_comments_for_result(
                        url_params["project-id"], url_params["result-id"])
                elif method == "POST":
                    result = db_service.add_comment_to_result(
                        url_params["project-id"], url_params["result-id"],
                        query_params["user-id"], payload)
                elif method == "DELETE":
                    result = db_service.delete_comment_from_result(
                        url_params["project-id"], url_params["result-id"],
                        query_params["comment-id"])
    # json_util handles Mongo-specific types; round-tripping through json
    # leaves the gateway a plain JSON-serializable structure.
    return {"statusCode": 200, "body": json.loads(json_util.dumps(result))}
from celery import Celery
from celery.schedules import crontab
from urls import *
from games import *
from db_service import DBService
from scrap_data import scrap_matches

# Start the scheduler with: celery -A tasks beat
app = Celery('tasks',
             backend='redis://localhost:6379',
             broker='redis://localhost:6379/0')
app.timezone = 'Europe/Moscow'

db_service = DBService.inst()


@app.on_after_configure.connect
def setup_periodic_tasks(sender, **kwargs):
    # NOTE(review): crontab(minute=5) fires once per hour (at :05); if a
    # five-minute cadence was intended, use crontab(minute='*/5').
    sender.add_periodic_task(crontab(minute=5), save_matches.s())


@app.task
def save_matches():
    """Scrape current Dota 2 and CS:GO matches and persist them."""
    matches = list()
    for source_url, game in ((dota_matches_url, DOTA), (cs_matches_url, CSGO)):
        matches.extend(scrap_matches(source_url, game))
    db_service.update_matches(matches)
    print(matches)