def get_shop_restaurants_test(self, lat, lng, shopId, session):
    """Fetch one restaurant's detail page from the ele.me REST API.

    :param lat: latitude of the client location
    :param lng: longitude of the client location
    :param shopId: restaurant id to query
    :param session: requests.Session used to perform the GET
    :return: ``(status_code, json_body)`` on success, ``None`` on any failure
             (the proxy is rotated on connect timeout)
    """
    # BUG FIX: the URL literal began with a space, producing an invalid
    # request target.
    url = 'https://restapi.ele.me/shopping/restaurant/{}?extras[]=activities&extras[]=flavors&extras[]=albums&extras[]=videos&extras[]=coupon&extras[]=qualification&latitude={}&longitude={}'.format(
        shopId, lat, lng)
    headers = copy(self.headers)
    # sneer() returns the three anti-crawler tokens for this URI.
    result = self.sneer(url)
    headers.update({
        'x-shard': 'loc={},{}'.format(lng, lat),
        'x-eleme-requestid': '{}|{}'.format(tool.gen_uuid().upper().replace('-', ''), tool.get_timestamp()),
        'random': '{}'.format(result[0]),
        'deadpoolcontent': '{}'.format(result[1]),
        'deadpool': '{}'.format(result[2]),
    })
    try:
        resp = session.get(url, headers=headers, proxies=self.proxy, timeout=3)
        return resp.status_code, resp.json()
    except requests.exceptions.ConnectTimeout:
        # Rotate the proxy and let the caller retry.
        self.proxy = tool.get_proxy(self.local_proxy)
        return None
    except Exception:
        # BUG FIX: was a bare ``except`` with a message copy-pasted from
        # login_by_mobile.
        log.exception('get_shop_restaurants_test error.')
        return None
def get_captcha(self, mobile, lat, lng):
    """Request a login captcha for *mobile* from the ele.me API.

    :param mobile: phone number used as the captcha seed string
    :param lat: latitude (only used for the ``x-shard`` header)
    :param lng: longitude (only used for the ``x-shard`` header)
    :return: parsed JSON response, or ``None`` on failure
    """
    url = 'https://restapi.ele.me/eus/v4/captchas?captcha_str={}'.format(mobile)
    headers = copy(self.headers)
    result = self.sneer(url)
    headers.update({
        'x-shard': 'loc={},{}'.format(lng, lat),
        'x-eleme-requestid': '{}|{}'.format(tool.gen_uuid().upper().replace('-', ''), tool.get_timestamp()),
        'ex_r': '{}'.format(result[0]),
        'ex_dc': '{}'.format(result[1]),
        'ex_d': '{}'.format(result[2]),
    })
    # Captcha endpoint must not carry a content-type header.
    # ROBUSTNESS: default added so a missing key no longer raises KeyError.
    headers.pop('content-type', None)
    try:
        resp = self.s.get(url, headers=headers, proxies=self.proxy, timeout=3)
        return resp.json()
    except requests.exceptions.ConnectTimeout:
        self.proxy = tool.get_proxy(self.local_proxy)
        return None
    except Exception:
        # Narrowed from a bare ``except`` so SystemExit/KeyboardInterrupt
        # are not swallowed.
        log.exception('get_captcha error.')
        return None
def get_restaurants_test(self, lat, lng, city_id, order_by, offset, seesion):
    """Fetch one page (20 items) of the restaurant list for a location.

    :param lat: latitude
    :param lng: longitude
    :param city_id: ele.me city id
    :param order_by: server-side sort order code
    :param offset: paging offset
    :param seesion: requests.Session to use (name kept — typo of "session",
                    but renaming would break keyword callers)
    :return: ``(status_code, json_body)`` on success, ``None`` on failure
    """
    url = 'https://restapi.ele.me/shopping/v3/restaurants?extras[]=identification&extras[]=coupon&' \
          'latitude={}&longitude={}&city_id={}&rank_id={}&network_operator=&network=WIFI&order_by={}&' \
          'extra_filters=home&os=Android%2F{}&deivce={}&weather_code=CLEAR_DAY&offset={}&limit=20'.format(
        lat, lng, city_id, tool.gen_random_af(32), order_by,
        self.dv_params.get('RELEASE'), parse.quote(self.dv_params.get('MODEL')), offset)
    headers = copy(self.headers)
    result = self.sneer(url)
    headers.update({
        'x-shard': 'loc={},{}'.format(lng, lat),
        'x-eleme-requestid': '{}|{}'.format(tool.gen_uuid().upper().replace('-', ''), tool.get_timestamp()),
        'ex_r': '{}'.format(result[0]),
        'ex_dc': '{}'.format(result[1]),
        'ex_d': '{}'.format(result[2]),
    })
    try:
        resp = seesion.get(url, headers=headers, proxies=self.proxy, timeout=10)
        return resp.status_code, resp.json()
    except requests.exceptions.ConnectTimeout:
        self.proxy = tool.get_proxy(self.local_proxy)
        return None
    except Exception:
        # BUG FIX: bare ``except`` with a message copy-pasted from
        # login_by_mobile.
        log.exception('get_restaurants_test error.')
        return None
def do_replace_literals(script, depth):
    """Recursively replace literals in *script* with particle-adjusted forms.

    Each literal found by find_literals() is replaced in place; numbered
    placeholders of the form ``%{N}`` (1-based index of the literal) are
    replaced with the same substitution.  Recurses until no literals remain
    or the depth limit (10) is hit.

    :param script: text being rewritten
    :param depth: current recursion depth; guards against infinite expansion
    :return: the rewritten script (possibly unchanged on error/limit)
    """
    if depth > 10:
        # Recursion guard: substitutions may themselves introduce literals.
        log.error('depth exceed')
        return script
    literals = find_literals(script)
    if not literals:
        return script
    for i, literal in enumerate(literals):
        try:
            rep = Literal(literal).replace()
        except Exception as e:
            # On any replacement failure, abort and return the script as-is.
            log.exception(e)
            log.error(f'failed to replace literal: {literal}')
            return script
        if literal not in script:
            # An earlier substitution may already have consumed this literal.
            continue
        # insert normal literal
        script = replace_literal_with_particle(script, literal, rep)
        # insert numbered literal (``%{N}`` placeholders, 1-based)
        numbered_literal = '%{' + str(i + 1) + '}'
        while numbered_literal in script:
            script = replace_literal_with_particle(script, numbered_literal, rep)
    # NOTE(review): logged at error level although it looks like debug
    # output — confirm intent before changing.
    log.error(f'script rl: {script}')
    return do_replace_literals(script, depth + 1)
def get_immutable_data( data_key ):
    """
    Given the hash of the data, go through the list of
    immutable data handlers and look it up.

    Return the data on success, or None if no handler
    produced data matching the hash.
    """
    global storage_handlers
    for handler in storage_handlers:
        if not hasattr( handler, "get_immutable_handler" ):
            continue
        data = None
        try:
            data = handler.get_immutable_handler( data_key )
        except Exception as e:
            # BUG FIX: 'except Exception, e' is Python-2-only syntax and a
            # SyntaxError under Python 3; 'as e' works on both.
            log.exception( e )
            continue
        if data is None:
            continue
        # validate: the content must hash back to the requested key
        data_hash = get_data_hash( data )
        if data_hash != data_key:
            # nope
            log.error("Invalid data hash")
            continue
        return data
    # every handler failed or returned mismatching data
    return None
def send(to: str, e_str: str):
    """Email an exception report via smtp.163.com over SSL.

    :param to: recipient address
    :param e_str: exception text; crudely HTML-sanitized and substituted
                  into the HTML template before sending
    """
    msg = MIMEMultipart()
    msg['From'] = _format('Tencent Server <%s>' % email_from_addr)
    msg['To'] = _format('Exception Receive <%s>' % to)
    msg['Subject'] = Header('Spider Exception', 'utf-8').encode()
    # Crude sanitization: angle brackets become parentheses so the traceback
    # cannot inject markup; newlines become <br> for HTML rendering.
    e_str = e_str.replace('<', '(')
    e_str = e_str.replace('>', ')')
    e_str = e_str.replace('\n', '<br>')
    # NOTE(review): looks like a space -> non-breaking-space (U+00A0)
    # replacement to preserve indentation in HTML — confirm the second
    # argument really is U+00A0 and not a no-op.
    e_str = e_str.replace(' ', ' ')
    content = email_content_template.replace('__exception__', e_str)
    # Timestamp pinned to UTC+8 regardless of host timezone.
    content = content.replace(
        '__datetime__',
        datetime.now(timezone(timedelta(hours=8))).strftime('%Y-%m-%d %H:%M:%S'))
    msg.attach(MIMEText(content, 'html', 'utf-8'))
    # Port 994 is 163.com's SMTP-over-SSL port.
    server = smtplib.SMTP_SSL('smtp.163.com', 994)
    try:
        server.set_debuglevel(1)
        server.login(user=email_from_addr, password=email_password)
        server.send_message(msg)
    except BaseException as e:
        # Best-effort: a failed notification email must not kill the caller.
        log.exception(e)
        log.error('[eMail] Send failed')
    finally:
        server.quit()
def get_immutable_data(data_key):
    """
    Given the hash of the data, go through the list of
    immutable data handlers and look it up.

    Return the data on success, or None if no handler
    produced data matching the hash.
    """
    global storage_handlers
    for handler in storage_handlers:
        if not hasattr(handler, "get_immutable_handler"):
            continue
        data = None
        try:
            data = handler.get_immutable_handler(data_key)
        except Exception as e:
            # BUG FIX: 'except Exception, e' is Python-2-only syntax and a
            # SyntaxError under Python 3.
            log.exception(e)
            continue
        if data is None:
            continue
        # validate: the content must hash back to the requested key
        data_hash = get_data_hash(data)
        if data_hash != data_key:
            # nope
            log.error("Invalid data hash")
            continue
        return data
    # every handler failed or returned mismatching data
    return None
def put_immutable_data(data_text, txid):
    """
    Given a string of data (which can either be data or a route), store it into our immutable data stores.
    Do so in a best-effort manner--this method only fails if *all* storage providers fail.

    Return the hash of the data on success
    Return None on error
    """
    global storage_handlers
    data_hash = get_data_hash(data_text)
    successes = 0
    for handler in storage_handlers:
        # BUG FIX: was getattr(handler, ...), which raises AttributeError
        # when the attribute is missing instead of skipping the handler.
        if not hasattr(handler, "put_immutable_handler"):
            continue
        rc = False
        try:
            rc = handler.put_immutable_handler(data_hash, data_text, txid)
        except Exception as e:
            # BUG FIX: 'except Exception, e' is Python-2-only syntax.
            log.exception(e)
            continue
        if not rc:
            log.error("Failed to replicate with '%s'" % handler.__name__)
        else:
            successes += 1
    # BUG FIX: honour the documented contract — the function never returned
    # anything. Hash on at least one success, None when all providers failed.
    return data_hash if successes > 0 else None
def get_shop_restaurants_menu(self, lat, lng, shopId, session):
    """Fetch a restaurant's menu from the ele.me REST API.

    :param lat: latitude (only used for the ``x-shard`` header)
    :param lng: longitude (only used for the ``x-shard`` header)
    :param shopId: restaurant id whose menu is requested
    :param session: requests.Session used to perform the GET
    :return: parsed JSON menu, or ``None`` on failure
    """
    # BUG FIX: the URL literal began with a space, producing an invalid
    # request target.
    url = 'https://restapi.ele.me/shopping/v2/menu?restaurant_id={}'.format(shopId)
    headers = copy(self.headers)
    result = self.sneer(url)
    headers.update({
        'x-shard': 'loc={},{}'.format(lng, lat),
        'x-eleme-requestid': '{}|{}'.format(tool.gen_uuid().upper().replace('-', ''), tool.get_timestamp()),
        'random': '{}'.format(result[0]),
        'deadpoolcontent': '{}'.format(result[1]),
        'deadpool': '{}'.format(result[2]),
    })
    try:
        resp = session.get(url, headers=headers, proxies=self.proxy, timeout=3)
        return resp.json()
    except requests.exceptions.ConnectTimeout:
        self.proxy = tool.get_proxy(self.local_proxy)
        return None
    except Exception:
        # BUG FIX: bare ``except`` with a message copy-pasted from
        # login_by_mobile.
        log.exception('get_shop_restaurants_menu error.')
        return None
def put_immutable_data( data_text, txid ):
    """
    Given a string of data (which can either be data or a route), store it into our immutable data stores.
    Do so in a best-effort manner--this method only fails if *all* storage providers fail.

    Return the hash of the data on success
    Return None on error
    """
    global storage_handlers
    data_hash = get_data_hash( data_text )
    successes = 0
    for handler in storage_handlers:
        # BUG FIX: was getattr(handler, ...), which raises AttributeError
        # when the attribute is missing instead of skipping the handler.
        if not hasattr( handler, "put_immutable_handler" ):
            continue
        rc = False
        try:
            rc = handler.put_immutable_handler( data_hash, data_text, txid )
        except Exception as e:
            # BUG FIX: 'except Exception, e' is Python-2-only syntax.
            log.exception( e )
            continue
        if not rc:
            log.error("Failed to replicate with '%s'" % handler.__name__)
        else:
            successes += 1
    # BUG FIX: honour the documented contract — the function never returned
    # anything. Hash on at least one success, None when all providers failed.
    return data_hash if successes > 0 else None
def start_transmission():
    """Prepare the log file and directory ownership, then start the
    transmission-daemon service.

    All shell commands run with check=True, so any non-zero exit raises
    CalledProcessError and aborts the sequence.

    :return: True when the daemon started, False on any failure
    """
    # NOTE(review): every command is run with shell=True on strings built
    # from config values — safe only if config paths are trusted; confirm.
    try:
        """ create transmission-log """
        if not os.path.exists('/usr/log/transmission.log'):
            call = 'touch /usr/log/transmission.log'
            process = subprocess.run(call, shell=True, check=True,
                                     stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            call = 'chown -R debian-transmission:debian-transmission /usr/log'
            process = subprocess.run(call, shell=True, check=True,
                                     stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        """ update ACL for transmission access """
        call = 'chown -R debian-transmission:debian-transmission ' + config['APPLICATION_PATH_TORRENTS']
        process = subprocess.run(call, shell=True, check=True,
                                 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        call = 'chown -R debian-transmission:debian-transmission ' + config['APPLICATION_PATH_OTRKEYS']
        process = subprocess.run(call, shell=True, check=True,
                                 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        call = 'chown -R debian-transmission:debian-transmission ' + config['APPLICATION_PATH_VIDEOS']
        process = subprocess.run(call, shell=True, check=True,
                                 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        """ restart transmission service """
        call = 'service transmission-daemon start'
        log.debug(call)
        process = subprocess.run(call, shell=True, check=True,
                                 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        # Give the daemon a moment to come up before reporting success.
        time.sleep(5)
        log.info('init transmission-deamon finished. Returns {!s}'.format(
            process.stdout.decode(encoding='utf-8')))
        return True
    except subprocess.CalledProcessError as e:
        # A specific command failed; its stderr is included for diagnosis.
        log.error('init transmission-deamon failed with cmd:{!s} because {!s}'.format(e.cmd, e.stderr))
        return False
    except:
        log.exception('start transmission failed... ')
        return False
def getting_data(aids: List[int]) -> MutableMapping[str, str]:
    """Download danmaku (bullet-comment) data for each Bilibili video id.

    For every aid the page list (cids) is fetched and saved as JSON; each
    cid's danmaku XML is then fetched and saved under ``data-temp/``.

    :param aids: Bilibili video ids to process
    :return: mapping of remote file name -> local file path for all saved files
    """
    log.info('Total aids: %s' % aids.__len__())
    file_map: MutableMapping[str, str] = {}
    for i, aid in enumerate(aids):
        resAllCids: HTTPResponse = selfusepy.get(
            'https://www.bilibili.com/widget/getPageList?aid=' + str(aid))
        try:
            data: str = str(resAllCids.data, encoding='utf-8')
            danmakuCids: List[AvDanmakuCid] = selfusepy.parse_json_array(data, AvDanmakuCid())
            log.info('[Start] i: %s, aid: %s, cids length: %s' % (i, aid, danmakuCids.__len__()))
            go_on, times = is_req_danmaku(aid, danmakuCids.__len__())
            log.info('len: %s, req times:%s. go_on: %s' % (danmakuCids.__len__(), times, go_on))
            if not go_on:  # if a video has a large number of cids, only fetch a limited amount
                log.info('[Continue] do not get cids. aid: %s' % aid)
                continue
            json_file_name = '%s/danmaku/%s.json' % (config.date, aid)
            json_file_path = 'data-temp/%s' % json_file_name
            file_map[json_file_name] = json_file_path
            _file.save(data, json_file_path)
            # Throttle between requests to avoid rate limiting.
            time.sleep(2)
            for j, cidE in enumerate(danmakuCids):
                log.info('[Request] j: %s, cid: %s' % (j, cidE.cid))
                res: HTTPResponse = selfusepy.get(
                    'https://api.bilibili.com/x/v1/dm/list.so?oid=' + str(cidE.cid),
                    head=chromeUserAgent)
                # File name includes a nanosecond timestamp to avoid collisions.
                xlm_file_name = '%s/danmaku/%s-%s-%s.xml' % (config.date, aid, cidE.cid, time.time_ns())
                xlm_file_path = 'data-temp/%s' % xlm_file_name
                file_map[xlm_file_name] = xlm_file_path
                _file.save(str(res.data, encoding='utf-8').replace('\\n', ''), xlm_file_path)
                time.sleep(2)
            log.info('[Done] i: %s, aid: %s' % (i, aid))
        except BaseException as e:
            # Best-effort: log, email the traceback, back off, move to next aid.
            log.exception(e)
            log.error(resAllCids.data)
            log.error(aid)
            import traceback, _email
            from config import email_to_addr
            _email.send(email_to_addr, traceback.format_exc() + '\naid: %d' % aid)
            time.sleep(10)
    return file_map
def determine_particle(word: str, particle: str) -> str:
    """Attach *particle* to *word*, resolving variant particle forms.

    Particles written with a leading ``(`` denote a variant form that must
    be resolved against the word via ``tossi.postfix``; any other particle
    is appended verbatim.  When tossi rejects the pair, falls back to plain
    concatenation.
    """
    fallback = word + particle
    if not particle.startswith('('):
        return fallback
    try:
        resolved = tossi.postfix(word, particle)
    except ValueError as err:
        log.exception(err)
        return fallback
    log.debug(f'tossi: {word} / {particle} -> {resolved}')
    return resolved
def get_value(hash: str) -> (int, str):
    """Look up all crc32 preimages stored for a hash.

    :param hash: crc32 value as a hex string (parsed with base 16)
    :return: (row_count, comma-joined values) on success; returns None
             implicitly when the query raises (the error is logged)
    """
    # The hex string is converted to int before %-interpolation, so this
    # particular format string is not injectable.
    sql: str = 'select value from crc32 where hash = %s' % int(hash, 16)
    conn = engine.connect()
    try:
        res: ResultProxy = conn.execute(sql)
        return res.rowcount, ','.join('%s' % item[0] for item in res.fetchall())
    except BaseException as e:
        log.error('sql: %s' % sql)
        log.exception(e)
    finally:
        # Always release the connection back to the engine.
        conn.close()
def __sql_select_id(self, sql):
    """
    Internal helper: execute a SELECT and fetch every row.

    :param sql: str: SQL statement to execute
    :return: list of fetched rows, or [] when the query fails
    """
    try:
        self.cursor.execute(sql)
        values = self.cursor.fetchall()
        return values
    except Exception:
        # BUG FIX: narrowed from a bare ``except`` so KeyboardInterrupt /
        # SystemExit are no longer swallowed; DB errors still degrade to [].
        log.exception('sql_select error.sql:{}'.format(sql))
        return []
def __sql_select_all_by_id(self, id_):
    """
    Internal helper: fetch the (strKey, strValue) pairs for one record.

    :param id_: row/sequence whose first element is the numeric database Id
                (assumed — TODO confirm against callers)
    :return: list of (strKey, strValue) rows, or [] when the query fails
    """
    # NOTE(review): f-string SQL is injection-prone in general; acceptable
    # only while id_[0] is always an internally produced integer — confirm.
    sql = f'select strKey, strValue from {self.tbl_name} where Id = {id_[0]}'
    try:
        self.cursor.execute(sql)
        values = self.cursor.fetchall()
        return values
    except Exception:
        # BUG FIX: narrowed from a bare ``except`` so KeyboardInterrupt /
        # SystemExit are no longer swallowed.
        log.exception(f'sql_select_all_by_id error.id_:{id_}')
        return []
def mergemessage(self, message):
    """Merge a QueueMessage's attributes and its parsed content into self.

    Copies every attribute of *message* onto this instance, then parses
    ``message.content`` as a Python literal dict and copies its items too.
    A parse failure is logged and otherwise ignored.
    """
    if isinstance(message, QueueMessage):
        # merge queue message vars
        for key, value in vars(message).items():
            setattr(self, key, value)
        # parse message content
        try:
            content = literal_eval(message.content)
            for key, value in content.items():
                setattr(self, key, value)
        except Exception:
            # BUG FIX: narrowed from a bare ``except`` and dropped the
            # dead ``pass`` that followed the log call.
            log.exception('cant parse message {} into attributes.'.format(message.content))
def sneer(self, param_url):
    """Ask the local anti-crawler token service for this URL's tokens.

    The path (plus query, if any) of *param_url* is POSTed to the sidecar
    service on port 60002, which answers with a ';'-separated triple.
    Retries forever until a well-formed triple arrives.

    :param param_url: full request URL whose path+query is the token seed
    :return: list of three token strings
    """
    # Parse once instead of three times (was re-parsed per attribute).
    parsed = parse.urlparse(param_url)
    uri = parsed.path
    if parsed.query:
        uri += '?' + parsed.query
    url = 'http://127.0.0.1:60002/sneer'
    while True:
        try:
            resp = self.local_60002.post(url, data=uri)
            result = resp.text
            result = result.split(';')
            if len(result) == 3:
                return result
            log.info('sneer error, continue.{}'.format(result))
        except Exception:
            # Narrowed from a bare ``except``; back off before retrying.
            log.exception('127.0.0.1 requests error.')
            time.sleep(2)
            continue
def put_mutable_data(data, privatekey):
    """
    Given the unserialized data, store it into our mutable data stores.
    Do so in a best-effor way.  This method only fails if all storage providers fail.

    If the data is not signed, then it will be signed with the given private key.

    Return True on success
    Return False on error
    """
    data_id = data['id']
    data_text = data['data']  # kept: also validates that 'data' is present
    ver = data['ver']
    sig = data.get('sig', None)
    if sig is None:
        sig = sign_mutable_data(data, privatekey)
        data['sig'] = sig
    data_json = json_stable_serialize(data)
    successes = 0
    for handler in storage_handlers:
        if not hasattr(handler, "put_mutable_handler"):
            continue
        rc = False
        try:
            rc = handler.put_mutable_handler(data_id, ver, sig, data_json)
        except Exception as e:
            # BUG FIX: 'except Exception, e' is Python-2-only syntax.
            log.exception(e)
            continue
        if not rc:
            log.error("Failed to replicate with '%s'" % handler.__name__)
        else:
            successes += 1
    # BUG FIX: honour the documented True/False contract — the function
    # previously never returned anything.
    return successes > 0
def put_mutable_data( data, privatekey ):
    """
    Given the unserialized data, store it into our mutable data stores.
    Do so in a best-effor way.  This method only fails if all storage providers fail.

    If the data is not signed, then it will be signed with the given private key.

    Return True on success
    Return False on error
    """
    data_id = data['id']
    data_text = data['data']  # kept: also validates that 'data' is present
    ver = data['ver']
    sig = data.get('sig', None)
    if sig is None:
        sig = sign_mutable_data( data, privatekey )
        data['sig'] = sig
    data_json = json_stable_serialize( data )
    successes = 0
    for handler in storage_handlers:
        if not hasattr( handler, "put_mutable_handler" ):
            continue
        rc = False
        try:
            rc = handler.put_mutable_handler( data_id, ver, sig, data_json )
        except Exception as e:
            # BUG FIX: 'except Exception, e' is Python-2-only syntax.
            log.exception( e )
            continue
        if not rc:
            log.error("Failed to replicate with '%s'" % handler.__name__)
        else:
            successes += 1
    # BUG FIX: honour the documented True/False contract — the function
    # previously never returned anything.
    return successes > 0
def part_upload(filepath: str, key: str) -> bool:
    """Upload a local file to S3 as a multipart upload, 50 MB per part,
    with up to 10 parts uploading concurrently.

    :param filepath: local file to upload
    :param key: destination object key in the configured bucket
    :return: True when the multipart upload completed, False after abort
    """
    res: dict = s3_client.create_multipart_upload(Bucket=bucket, Key=key)
    upload_id = res['UploadId']
    log.info(upload_id)
    pool = Pool(processes=10)
    # NOTE(review): ``res`` is intentionally rebound from the create-upload
    # response dict to the set of async results — confusing but harmless.
    res: Set[ApplyResult] = set()
    parts: dict = {'Parts': list()}
    try:
        with open(filepath, 'r+b') as f:
            i: int = 1  # S3 part numbers are 1-based
            while True:
                data = f.read(50 * 1024 * 1024)  # 50mb each part
                if data == b'':
                    break
                res.add(
                    pool.apply_async(func=upload, args=(
                        data,
                        upload_id,
                        key,
                        i,
                    )))
                i += 1
        pool.close()
        pool.join()
        # Collect the per-part ETag descriptors produced by the workers.
        for item in res:
            v = item.get()
            parts.get('Parts').append(v)
        s3_client.complete_multipart_upload(Bucket=bucket,
                                            Key=key,
                                            UploadId=upload_id,
                                            MultipartUpload=parts)
        return True
    except Exception as e:
        # On any failure, abort the multipart upload so S3 does not keep
        # billing for orphaned parts.
        log.exception(e)
        log.warn("Abort %s" % abort_part_upload(key, upload_id))
        return False
def delete_immutable_data( data_hash, txid ):
    """
    Given the hash of the data, the private key of the user,
    and the txid that deleted the data's hash from the blockchain,
    delete the data from all immutable data stores.
    """
    global storage_handlers
    for handler in storage_handlers:
        if not hasattr( handler, "delete_immutable_handler" ):
            continue
        try:
            handler.delete_immutable_handler( data_hash, txid )
        except Exception as e:
            # BUG FIX: 'except Exception, e' is Python-2-only syntax and a
            # SyntaxError under Python 3.
            log.exception( e )
            continue
def delete_immutable_data(data_hash, txid):
    """
    Given the hash of the data, the private key of the user,
    and the txid that deleted the data's hash from the blockchain,
    delete the data from all immutable data stores.
    """
    global storage_handlers
    for handler in storage_handlers:
        if not hasattr(handler, "delete_immutable_handler"):
            continue
        try:
            handler.delete_immutable_handler(data_hash, txid)
        except Exception as e:
            # BUG FIX: 'except Exception, e' is Python-2-only syntax and a
            # SyntaxError under Python 3.
            log.exception(e)
            continue
def drop_added_entries(conn):
    """Best-effort removal of every entry, backend and replica that was
    added through *conn* during a test run.

    :param conn: LDAP connection tracking added_entries / added_backends /
                 added_replicas lists
    """
    while conn.added_entries:
        try:
            e = conn.added_entries.pop()
            # BUG FIX: the log line printed conn.added_backends while
            # removing an entry; log the entry actually being removed.
            log.info("removing entry %r" % e)
            conn.delete_s(e)
        except ldap.NOT_ALLOWED_ON_NONLEAF:
            log.error("Entry is not a leaf: %r" % e)
        except ldap.NO_SUCH_OBJECT:
            log.error("Cannot remove entry: %r" % e)
    log.info("removing backends %r" % conn.added_backends)
    for suffix in conn.added_backends:
        try:
            drop_backend(conn, suffix)
        except Exception:
            # Narrowed from a bare ``except``; cleanup stays best-effort.
            log.exception("error removing %r" % suffix)
    for r in conn.added_replicas:
        try:
            drop_backend(conn, suffix=None, bename=r)
        except Exception:
            log.exception("error removing %r" % r)
def mobile_send_code_captcha(self, mobile, lat, lng, captcha_hash, captcha_value):
    """Request an SMS login code, supplying a solved captcha.

    :param mobile: phone number to receive the SMS code
    :param lat: latitude (sent in the payload and the ``x-shard`` header)
    :param lng: longitude (sent in the payload and the ``x-shard`` header)
    :param captcha_hash: hash identifying the captcha challenge
    :param captcha_value: user-solved captcha text
    :return: parsed JSON response, or ``None`` on failure
    """
    url = 'https://restapi.ele.me/eus/login/mobile_send_code'
    headers = copy(self.headers)
    result = self.sneer(url)
    headers.update({
        'x-shard': 'loc={},{}'.format(lng, lat),
        'x-eleme-requestid': '{}|{}'.format(tool.gen_uuid().upper().replace('-', ''), tool.get_timestamp()),
        'ex_r': '{}'.format(result[0]),
        'ex_dc': '{}'.format(result[1]),
        'ex_d': '{}'.format(result[2]),
    })
    # The API expects a raw JSON string body (not form-encoded).
    data = '{"mobile":"%s","captcha_hash":"%s","captcha_value":"%s","latitude":%s,"longitude":%s,"via_audio":false}' % (
        str(mobile),
        str(captcha_hash),
        str(captcha_value),
        str(lat),
        str(lng),
    )
    try:
        resp = self.s.post(url, headers=headers, data=data, proxies=self.proxy, timeout=3)
        return resp.json()
    except requests.exceptions.ConnectTimeout:
        self.proxy = tool.get_proxy(self.local_proxy)
        return None
    except Exception:
        # Narrowed from a bare ``except``.
        log.exception('mobile_send_code_captcha error.')
        return None
def get_mutable_data_route(data_id, route_hash):
    """
    Given a data ID, go fetch its route.  Verify that it matches the
    given hash.

    Return a dict with the route information on success.
    Return None on error.
    """
    global storage_handlers
    for handler in storage_handlers:
        if not hasattr(handler, "get_immutable_handler"):
            continue
        route_json = None
        route = None
        try:
            route_json = handler.get_immutable_handler(route_hash)
        except Exception as e:
            # BUG FIX: 'except Exception, e' is Python-2-only syntax.
            log.exception(e)
            continue
        if route_json is None:
            continue
        if get_data_hash(route_json) != route_hash:
            log.error("Invalid route: hash mismatch")
            continue
        # it had better be a JSON doc we can use
        try:
            route = mutable_data_route_parse(route_json)
        except Exception as e:
            log.exception(e)
            continue
        # BUG FIX: the successfully parsed route was never returned.
        return route
    # no handler produced a valid route
    return None
def get_mutable_data_route( data_id, route_hash ):
    """
    Given a data ID, go fetch its route.  Verify that it matches the
    given hash.

    Return a dict with the route information on success.
    Return None on error.
    """
    global storage_handlers
    for handler in storage_handlers:
        if not hasattr( handler, "get_immutable_handler" ):
            continue
        route_json = None
        route = None
        try:
            route_json = handler.get_immutable_handler( route_hash )
        except Exception as e:
            # BUG FIX: 'except Exception, e' is Python-2-only syntax.
            log.exception(e)
            continue
        if route_json is None:
            continue
        if get_data_hash( route_json ) != route_hash:
            log.error("Invalid route: hash mismatch")
            continue
        # it had better be a JSON doc we can use
        try:
            route = mutable_data_route_parse( route_json )
        except Exception as e:
            log.exception(e)
            continue
        # BUG FIX: the successfully parsed route was never returned.
        return route
    # no handler produced a valid route
    return None
def make_mutable_urls(data_id):
    """
    Given a data ID for mutable data, get a list of URLs to it
    by asking the storage handlers.

    Return the list of URLs (possibly empty).
    """
    global storage_handlers
    urls = []
    for handler in storage_handlers:
        if not hasattr(handler, "make_mutable_url"):
            continue
        new_url = None
        try:
            new_url = handler.make_mutable_url(data_id)
        except Exception as e:
            # BUG FIX: 'except Exception, e' is Python-2-only syntax.
            log.exception(e)
            continue
        urls.append(new_url)
    # BUG FIX: the collected URLs were never returned.
    return urls
def make_mutable_urls( data_id ):
    """
    Given a data ID for mutable data, get a list of URLs to it
    by asking the storage handlers.

    Return the list of URLs (possibly empty).
    """
    global storage_handlers
    urls = []
    for handler in storage_handlers:
        if not hasattr(handler, "make_mutable_url"):
            continue
        new_url = None
        try:
            new_url = handler.make_mutable_url( data_id )
        except Exception as e:
            # BUG FIX: 'except Exception, e' is Python-2-only syntax.
            log.exception(e)
            continue
        urls.append( new_url )
    # BUG FIX: the collected URLs were never returned.
    return urls
def get_restaurants_all_id(self, lat, lng, city_id, order_by, categorys, offset, seesion):
    """Fetch one page of the restaurant list for a category filter,
    skipping URLs already seen (dedup via self.r.has_url).

    :param categorys: pre-encoded category query fragment (must end with '&')
    :param seesion: requests.Session to use (typo of "session" kept — part
                    of the public signature)
    :return: parsed JSON on success, ``None`` on duplicate URL or failure
    """
    url = 'https://restapi.ele.me/shopping/v3/restaurants?extras[]=coupon&latitude={}&longitude={}&city_id={}&rank_id=&business_flag=0&order_by={}&{}offset={}&limit=20'.format(
        lat, lng, city_id, order_by, categorys, offset)
    # has_url() returning False means the URL was already crawled.
    Flag = self.r.has_url(url)
    if not Flag:
        log.info('重复的url')
        return None
    log.info(url)
    headers = copy(self.headers)
    result = self.sneer(url)
    headers.update({
        'x-shard': 'loc={},{}'.format(lng, lat),
        'x-eleme-requestid': '{}|{}'.format(tool.gen_uuid().upper().replace('-', ''), tool.get_timestamp()),
        'ex_r': '{}'.format(result[0]),
        'ex_dc': '{}'.format(result[1]),
        'ex_d': '{}'.format(result[2]),
    })
    try:
        resp = seesion.get(url, headers=headers, proxies=self.proxy, timeout=10)
        return resp.json()
    except requests.exceptions.ConnectTimeout:
        self.proxy = tool.get_proxy(self.local_proxy)
        return None
    except Exception:
        # BUG FIX: bare ``except`` with a message copy-pasted from
        # login_by_mobile.
        log.exception('get_restaurants_all_id error.')
        return None
def get_restaurants(self, lat, lng, city_id, order_by, offset):
    """Fetch one page (20 items) of the restaurant list for a location.

    :param lat: latitude
    :param lng: longitude
    :param city_id: ele.me city id
    :param order_by: server-side sort order code
    :param offset: paging offset
    :return: parsed JSON on success, ``None`` on failure
    """
    # BUG FIX: two hard-coded debug URLs immediately overwrote the computed
    # URL, so every parameter of this method was silently ignored.  The
    # leftover overrides have been removed.
    url = 'https://restapi.ele.me/shopping/v3/restaurants?extras[]=identification&extras[]=coupon&' \
          'latitude={}&longitude={}&city_id={}&rank_id={}&network_operator=&network=WIFI&order_by={}&' \
          'extra_filters=home&os=Android%2F{}&deivce={}&weather_code=CLEAR_DAY&offset={}&limit=20'.format(
        lat, lng, city_id, tool.gen_random_af(32), order_by,
        self.dv_params.get('RELEASE'), parse.quote(self.dv_params.get('MODEL')), offset)
    headers = copy(self.headers)
    result = self.sneer(url)
    headers.update({
        'x-shard': 'loc={},{}'.format(lng, lat),
        'x-eleme-requestid': '{}|{}'.format(tool.gen_uuid().upper().replace('-', ''), tool.get_timestamp()),
        'ex_r': '{}'.format(result[0]),
        'ex_dc': '{}'.format(result[1]),
        'ex_d': '{}'.format(result[2]),
    })
    try:
        resp = self.s.get(url, headers=headers, proxies=self.proxy, timeout=3)
        return resp.json()
    except requests.exceptions.ConnectTimeout:
        self.proxy = tool.get_proxy(self.local_proxy)
        return None
    except Exception:
        # BUG FIX: bare ``except`` with a message copy-pasted from
        # login_by_mobile.
        log.exception('get_restaurants error.')
        return None
def login_by_mobile(self, token, sms_code, lat, lng):
    """Complete the SMS login and return the session cookies.

    :param token: validate_token obtained from mobile_send_code
    :param sms_code: code the user received via SMS
    :param lat: latitude (sent in payload and ``x-shard`` header)
    :param lng: longitude (sent in payload and ``x-shard`` header)
    :return: ``self.s.cookies`` on success, ``None`` on failure
    """
    url = 'https://restapi.ele.me/eus/login/login_by_mobile'
    headers = copy(self.headers)
    result = self.sneer(url)
    headers.update({
        'x-shard': 'loc={},{}'.format(lng, lat),
        'x-eleme-requestid': '{}|{}'.format(tool.gen_uuid().upper().replace('-', ''), tool.get_timestamp()),
        'ex_r': '{}'.format(result[0]),
        'ex_dc': '{}'.format(result[1]),
        'ex_d': '{}'.format(result[2]),
    })
    # The API expects a raw JSON string body.
    data = '{"validate_token":"%s","validate_code":"%s","latitude":%s,"longitude":%s}' % (
        str(token),
        str(sms_code),
        str(lat),
        str(lng),
    )
    try:
        # The response body is not needed; logging in sets cookies on self.s.
        self.s.post(url, headers=headers, data=data, proxies=self.proxy, timeout=3)
        return self.s.cookies
    except requests.exceptions.ConnectTimeout:
        self.proxy = tool.get_proxy(self.local_proxy)
        return None
    except Exception:
        # Narrowed from a bare ``except``.
        log.exception('login_by_mobile error.')
        return None
def get_shop_mini(self, shopId):
    """Query the mini-app cart endpoint for a shop.

    :param shopId: restaurant id placed into the cart payload
    :return: parsed JSON response, or ``None`` on failure
    """
    url = 'https://restapi.ele.me/booking/v2/cart_client'
    data = {
        "add_on_type": 0,
        "come_from": "mini_app",
        "extra_action": [],
        "geohash": "",
        "operating_entities": [],
        "operating_packages": [],
        "operating_tying_entities": [],
        "restaurant_id": shopId,
        "scene": 0,
        "tying_supervip": 0,
        "user_id": 0
    }
    try:
        resp = requests.post(url, data=data)
        return resp.json()
    except requests.exceptions.ConnectTimeout:
        self.proxy = tool.get_proxy(self.local_proxy)
        return None
    except Exception:
        # BUG FIX: bare ``except`` with a message copy-pasted from
        # login_by_mobile.
        log.exception('get_shop_mini error.')
        return None
def delete_mutable_data( data_id, privatekey ):
    """
    Given the data ID, route hash, user private key, and transaction ID
    of the blockchain transaction that deleted the data route, go delete
    both the data route and the mutable data it points to.
    """
    global storage_handlers
    sigb64 = sign_raw_data( data_id, privatekey )
    # remove data
    for handler in storage_handlers:
        if not hasattr( handler, "delete_mutable_handler" ):
            continue
        try:
            handler.delete_mutable_handler( data_id, sigb64 )
        except Exception as e:
            # BUG FIX: 'except Exception, e' is Python-2-only syntax and a
            # SyntaxError under Python 3.
            log.exception( e )
            continue
def delete_mutable_data(data_id, privatekey):
    """
    Given the data ID, route hash, user private key, and transaction ID
    of the blockchain transaction that deleted the data route, go delete
    both the data route and the mutable data it points to.
    """
    global storage_handlers
    sigb64 = sign_raw_data(data_id, privatekey)
    # remove data
    for handler in storage_handlers:
        if not hasattr(handler, "delete_mutable_handler"):
            continue
        try:
            handler.delete_mutable_handler(data_id, sigb64)
        except Exception as e:
            # BUG FIX: 'except Exception, e' is Python-2-only syntax and a
            # SyntaxError under Python 3.
            log.exception(e)
            continue
def mobile_login():
    """Acquire a phone number and request an SMS login code, retrying until
    the API hands back a validate_token.

    Blocked numbers (API responds with a 'message') are blacklisted via
    wugui before retrying.

    :return: (response dict with 'validate_token', mobile, mobile_token)
    """
    while True:
        mobile, mobile_token = wugui.get_phone()
        if not mobile:
            # BUG FIX: log.exception() outside an ``except`` block logs a
            # bogus "NoneType: None" traceback; use log.error instead.
            log.error('token requests error.')
            continue
        if not mobile_token:
            log.error('token requests error.')
            continue
        resp = ele.mobile_send_code(mobile, lat, lng)
        print(resp)
        if resp.get('message'):
            time.sleep(2)
            # number is blocked — blacklist it and fetch another
            wugui.lahei_Mobile(mobile, mobile_token)
            continue
        elif resp.get('validate_token'):
            return resp, mobile, mobile_token
        else:
            log.error('token requests error.')
            time.sleep(3)
            continue
# NOTE(review): this line is a fragment — it begins with the tail of a
# duplicate of get_value() (its ``def`` line is not in view; the bare
# ``return`` proves it belongs inside a function) fused with the module's
# ``__main__`` crc32 table-population script.  Left byte-identical because
# reformatting would require guessing the missing definition header.
conn = engine.connect() try: res: ResultProxy = conn.execute(sql) return res.rowcount, ','.join('%s' % item[0] for item in res.fetchall()) except BaseException as e: log.error('sql: %s' % sql) log.exception(e) finally: conn.close() if __name__ == '__main__': hashes: List[crc32DO] = [] conn = engine.connect() for i in range(3_1908_0001, 5_0000_0000): hashes.append(crc32DO(binascii.crc32(str(i).encode("utf-8")), i)) if i % 6_0000 == 0: log.info('i: %s' % i) sql: str = "insert into crc32(hash, value) values %s" % (', '.join('%s' % item.__str__() for item in hashes)) hashes.clear() try: conn.execute(sql) except BaseException as e: log.exception(e) exit(0) log.info('Done')
def get_mutable_data( data_route, ver_min=None, ver_max=None, ver_check=None ):
    """
    Given a data's route, go fetch the data.

    Optionally verify that the version ('ver') in the data returned is within
    [ver_min, ver_max], or no less than ver_min, or no greater than ver_max.

    Optionally evaluate version with ver_check, which takes the data
    structure and returns true if the version is valid.

    Return a mutable data dict on success
    Return None on error
    """
    global storage_handlers
    data_id = data_route['id']  # kept: validates presence of the 'id' key
    data_urls = data_route['urls']
    data_pubkey = data_route.get('pubkey', None)
    for storage_handler in storage_handlers:
        if not hasattr(storage_handler, "get_mutable_handler"):
            continue
        for url in data_urls:
            data_json = None
            data = None
            try:
                data_json = storage_handler.get_mutable_handler( url )
            except UnhandledURLException as uue:
                # BUG FIX: 'except X, e' is Python-2-only syntax.
                # handler doesn't handle this URL
                continue
            except Exception as e:
                log.exception( e )
                continue
            if data_json is None:
                # no data
                continue
            # parse it, but don't decode it yet
            data = mutable_data_parse( data_json, decode=False )
            if data is None:
                log.error("Unparseable data")
                continue
            # if the route includes a public key, verify the signature
            # (comment fixed: the code checks 'pubkey', not a private key)
            if data_pubkey is not None:
                rc = verify_mutable_data( data, data_pubkey )
                if not rc:
                    log.error("Invalid signature")
                    continue
            # can decode the data now, since we've checked the sig
            mutable_data_decode( data )
            # verify ver, if need be
            if ver_min is not None:
                if data['ver'] < ver_min:
                    continue
            if ver_max is not None:
                if data['ver'] > ver_max:
                    continue
            if ver_check is not None:
                rc = ver_check( data )
                if not rc:
                    continue
            return data
    # no handler produced acceptable data
    return None