def getAudit(self):
    """Collect the iOS/Android review and share switches into one dict.

    Reads four single-row control tables (id == 1 each) and returns
    {'isAudit', 'iOSMaJiaAudit', 'iOSShare', 'androidShare'}.
    On a query failure the session is rolled back and an empty dict is
    returned (the original left ``allStatus`` unbound and raised
    NameError on the error path).
    """
    allStatus = {}  # fix: defined up-front so the error path can still return
    try:
        status = self.session.query(IsAudit).filter(IsAudit.id == 1).all()
        status_maJia = self.session.query(IsAuditPro).filter(IsAuditPro.id == 1).all()
        moments_control = self.session.query(MomentsControl).filter(MomentsControl.id == 1).all()
        moments_control_android = self.session.query(MomentsControlAndroid).filter(MomentsControlAndroid.id == 1).all()
        statusResult = Serializer(status, many=True).data
        statusPro = Serializer(status_maJia, many=True).data
        s_mc = Serializer(moments_control, many=True).data
        s_mca = Serializer(moments_control_android, many=True).data
        allStatus = {
            'isAudit': statusResult[0]['is_audit'],      # iOS review switch
            'iOSMaJiaAudit': statusPro[0]['is_audit'],   # iOS "majia" build review switch
            'iOSShare': s_mc[0]['control_status'],       # iOS share switch
            'androidShare': s_mca[0]['control_status'],  # Android share switch
        }
    except Exception:  # narrowed from bare except; keep best-effort behavior
        self.session.rollback()
        self.session.close()
    return allStatus
def fit_implementation(project_name, version, registry_add):
    """Fit the model for *project_name* at *version* and persist the result.

    Returns the fit log messages (list), ``{}`` when the estimator produced
    none, ``{'exception': ...}`` on a fit failure, or ``{'error': True}``
    when the stored model description could not be read.
    """
    path = Serializer().get_folder_name(project_name)
    model_json = Serializer().read_model(path, version)  # renamed: avoid shadowing the json module
    if 'error' in model_json:
        return {'error': True}
    estimator, model, version = get_estimator(path, model_json, version)
    messages = []
    try:
        producer = estimator.fit(model)
        # estimators may stream log messages via a generator or return them whole
        if inspect.isgenerator(producer):
            for message in producer:
                messages.append(message)
        else:
            messages = producer
        Serializer().save_changes(path, version, model, add=False)
        Serializer().save_fit_logs(path, version, messages)
        if model.get_dashboard():
            Serializer().set_dashboard(path, model.get_dashboard())
    except Exception:  # narrowed from bare except; report instead of crashing
        return {'exception': traceback.format_exc()}
    if model.registered_instance is not None:
        registry_add(project_name, model, model.registered_instance)
    return messages if messages is not None else {}
def expectRepeat(self, tokens, level=0, debug=False): yield Serializer(True, [], None) # if we get to this point we need at least one instance of this pattern. index = tokens.getIndex() exhausted = tokens.isExhausted() for value in self.children[0].expect(tokens, level + 1, debug): if value and (not tokens.isExhausted()): newIndex = tokens.getIndex() newExhaust = tokens.isExhausted() tValue = value.transform(lambda x: [x]) # create a new instance of the repeat pattern # since the first thing this does is yield True, # we don't do it here. for newValue in self.expectRepeat(tokens, level, debug): if newValue: tNewValue = newValue.transform( lambda x: tValue.getArgs() + x) yield tNewValue tokens.setIndex(newIndex, newExhaust) elif value and tokens.isExhausted(): tValue = value.transform(lambda x: [x]) yield tValue tokens.setIndex(index, exhausted) err = "Unknown Error in expectRepeat" if not value: err = value.getError() else: if not newValue: err = newValue.getError() yield Serializer(False, None, err)
def on_get(self, request, response):
    """Report the active predictions model for the requested project."""
    # reject callers without a valid API token
    if not check_token(request):
        response.media = {'error': True, 'message': 'Not authorized'}
        return
    payload = json.loads(request.stream.read().decode())
    project_name = payload['project'] if 'project' in payload else ''
    result = Serializer().read_project(
        Serializer().get_folder_name(project_name))
    # pass the serializer's error payload through untouched
    if 'predictions_model' in result:
        response.media = {"predictions_model": result['predictions_model']}
    else:
        response.media = result
def prepare_different_items_list(self, rest_client, headers):
    """Match commodities from the configured XML feed against Magento products.

    Downloads the XML at config['global']['xml_url'], indexes the Magento
    product list by SKU, and returns the linked product for every commodity
    whose SKU exists in Magento. Terminates the process (exit code -1) when
    the feed cannot be fetched.
    """
    serializer = Serializer()
    url = self.config["global"]["xml_url"]
    xml = rest_client.send_get_binary(url, None, None)
    if xml[1] != 200:
        # lazy %-style logging args instead of eager string formatting
        logging.info("No file found under the URI: %s, exiting now...", str(url))
        # equivalent to exit(-1) without relying on the `site` builtin
        raise SystemExit(-1)
    commodities = serializer.deserialize_xml(str(xml[0], "utf8"))
    # index Magento products by SKU once, for O(1) lookups below
    magento_product_dict = {
        product["sku"]: product
        for product in self.get_product_list(headers)[0]["items"]
    }
    read_item_list = []
    for commodity in commodities:
        sku = commodity[1].text
        if not sku:
            continue  # commodity has no SKU element text; nothing to match
        if sku in magento_product_dict:
            result_magento_product = self.link_products(
                magento_product_dict[sku], commodity)
            if result_magento_product:
                read_item_list.append(result_magento_product)
        else:
            logging.warning(
                "There is no product of sku {0} in Magento database.".
                format(sku))
    return read_item_list
def __init__(self, public_key, private_key):
    """Set up the wallet, register the miner, and prepare the menu text."""
    # keep both key objects plus a UTF-8 export of the public key
    self.public_key = public_key
    self.private_key = private_key
    self.public_key_string = public_key.exportKey().decode("utf-8")

    # the wallet is keyed on the exported public key string
    self.wallet = Wallet(self.public_key_string)

    # look up an existing pseudonym, or prompt for a new one, then register
    pseudonym = resolve_pseudonym(self.public_key_string)
    if pseudonym == "":
        pseudonym = input("\nChoose a pseudonym: ")
    else:
        print(
            "\nThis key has already been registered.\nRegistered pseudonym: "
            + pseudonym)
    self.miner = Miner(pseudonym, self.public_key_string)
    self.s = Serializer()

    # interactive menu text shown to the user
    self.menu = (
        "\nchoose an number:"
        "\n1.) mine"
        "\n2.) check balance"
        "\n3.) make transaction"
        "\nchoice: "
    )
def do_premine(self, args):
    """Start auto mining process.

    Mines N blocks (``args`` parsed as an int, default 5), submitting a
    random self-transaction and mining a block each iteration. (The
    original had this description as a stray no-op string literal in the
    body; it is now a proper docstring.)
    """
    print("\033[0;37;40m")
    print("Starting mining")
    # three self addresses (derived from the same miner key) rotated per tx
    addresses = [wallet.gen_address(wallet.wif_to_privkey(self.chain.miner_wif)),
                 wallet.gen_address(wallet.wif_to_privkey(self.chain.miner_wif)),
                 wallet.gen_address(wallet.wif_to_privkey(self.chain.miner_wif))]
    N = 5 if args == "" else int(args)
    for i in range(N):
        # refresh the UTXO pool before every transaction except the first
        if i != 0:
            self.chain.utxo_pool.update_pool([])
        tx = form_tx.form_transaction(self.chain.address, addresses[i % 3],
                                      70 + i * i, self.chain.utxo_pool,
                                      self.chain.miner_wif)
        self.chain.submit_tx(self.server_port + "/transaction/new",
                             Serializer().serialize(tx))
        self.chain.mine()
    print("Stopping mining")
def on_get(self, request, response, token):
    """Render the dashboard stored for the project identified by *token*."""
    # neither API token nor user token -> serve the login page instead
    if not check_token(request) and not check_user_token(request):
        response.content_type = falcon.MEDIA_HTML
        response.status = falcon.HTTP_200
        with open('login.html', 'r') as htmlfile:
            response.data = htmlfile.read().encode()
        return
    project = Serializer().read_project(
        os.path.join('saved_models', token))
    if not 'dashboard' in project:
        return
    dashboard = project['dashboard']
    response.content_type = falcon.MEDIA_HTML
    response.status = falcon.HTTP_200
    if not dashboard:
        return
    if len(dashboard) == 2:
        # [html, css] pair — inline the stylesheet into the page head
        styled_head = "<head><style>" + dashboard[1] + "</style></head>"
        page = (visdown_start.replace("<head></head>", styled_head)
                + dashboard[0] + visdown_end)
        response.data = page.encode()
    elif is_svg(dashboard):
        # we have an image
        response.data = (wrap_image(dashboard) if dashboard else '').encode()
    else:
        # raw HTML dashboard
        response.data = dashboard.encode()
def updateUTXO():
    """Refresh the UTXO set from the latest block in db/blk.json.

    Inputs spent by the block's transactions are queued for deletion and
    every output is queued for insertion, keyed by the transaction id
    (double SHA-256 of the serialized tx, byte-reversed, hex-encoded).
    """
    db = TinyDB('db/blk.json')
    last_block_txs = db.all()[-1]['Transactions']
    dlt = []  # spent outpoints to remove
    apd = []  # new unspent outputs to add
    for tx in last_block_txs:
        serialized = Serializer(Param(tx)).make()
        # renamed from `hash` to avoid shadowing the builtin
        txid = (sha256(sha256(
            binascii.unhexlify(serialized)).digest()).digest()[::-1]).hex()
        for tx_in in tx['tx_in']:
            # coinbase inputs (all-zero previous txid) spend nothing
            if tx_in['Previous txid'] != "0000000000000000000000000000000000000000000000000000000000000000":
                dlt.append({
                    'txid': tx_in['Previous txid'],
                    'index': tx_in['Previous Tx Index']
                })
        for out_index, tx_out in enumerate(tx['tx_out']):
            apd.append({
                'value': tx_out['value'],
                'txid': txid,
                'index': out_index,
                'Public_Script': tx_out['Public Script']
            })
    deleteUTXO(dlt)
    appendUTXO(apd)
def expectConcat(self, tokens, startChild, level=0, debug=False):
    # Matches the concatenation of children[startChild:] against the token
    # stream, yielding one Serializer per successful complete parse and a
    # final failure Serializer carrying the last error seen.
    index = tokens.getIndex()
    exhausted = tokens.isExhausted()
    child = self.children[startChild]
    for value in child.expect(tokens, level, debug):
        if value:
            tValue = value.transform(lambda x: [x])  # wrap the single match in a list
            if startChild + 1 == len(self):
                # last child: this match completes the concatenation
                yield tValue
            else:
                # recurse to match the remaining children after this match
                for childValue in self.expectConcat(
                        tokens, startChild + 1, level, debug):
                    if childValue:
                        tChildValue = childValue.transform(
                            lambda x: tValue.getArgs() + x)
                        yield tChildValue
                    # rewind before trying the next continuation
                    tokens.setIndex(index, exhausted)
        # backtrack fully before this child's next alternative
        tokens.setIndex(index, exhausted)
    # NOTE(review): `value`/`childValue` can be unbound here if the loops
    # above never produced a (truthy) result — presumably child.expect
    # always yields at least once; confirm.
    err = "Unknown Error in expectConcat"
    if not value:
        err = value.getError()
    else:
        if not childValue:
            err = childValue.getError()
    yield Serializer(False, None, err)
def mine(self):
    """Mine one block: genesis when the chain is empty, otherwise bundle
    up to three pending transactions plus a coinbase paying reward + fees."""
    if self.height() == 0:
        return self.genesis_block()
    txs = pending_pool.get_first3()
    # each included transaction contributes a flat fee to the miner
    fee = len(txs) * tx_fee if len(txs) > 0 else 0
    print(f'fee = {fee}')
    coinbase_tx = form_coinbase(self.address, self.miner_wif,
                                self.get_current_reward() + fee)
    # coinbase always comes first in the block's tx list
    txs.insert(0, Serializer().serialize(coinbase_tx))
    block = Block(time.time(), self.prev_hash(), txs, 0, self.bits).mine()
    block.height = self.height()
    print("Congratulations! Block " + block.toJSON() + " was mined!")
    self.db.insert({
        'Block Size': 0xffffffff,
        'Version': 1,
        'Previous Block Hash': block.prev_hash,
        'Merkle Root': block.merkle,
        'Timestamp': int(block.timestamp),
        'Difficulty Target': hex(block.bits),
        'Nonce': block.nonce,
        'Transaction Counter': len(block.txs),
        'Transactions': block.txs
    })
    # retarget difficulty every five blocks
    if self.height() % 5 == 0:
        self.recalculate_bits()
    self.utxo_pool.update_pool(txs)
def start_workflow(shared_state, start_date, review_number=0):
    """Run one download workflow; mark the job FAILED on any exception."""
    db_connection = setup_db().connect()
    logger = Logger(db_connection)
    shared_state.job_id = None
    shared_state.completed = False

    # optional cap on downloads; unlimited (None) when the env var is unset
    max_downloads = environ.get('MAX_DOWNLOADS')
    max_downloads = int(max_downloads) if max_downloads is not None else None
    max_upload_workers = int(environ.get('MAX_UPLOADERS', 20))
    allow_repeat = environ.get('ALLOW_REPEAT', 'FALSE') == 'TRUE'

    try:
        workflow = Workflow(
            db_connection, logger, start_date, max_downloads,
            max_upload_workers, allow_repeat)
        workflow.start(shared_state)
    except Exception:
        logger.exception()
        # record the failure against the job, if one was created
        if shared_state.job_id is not None:
            Serializer(db_connection, job).put(shared_state.job_id, {
                'status': JobStatus.FAILED,
            })
def do_send(self, arg):
    'Send to <% Recipient Address%> some <% Amount%>, takes 2 arguments.Use address file and pre generated (or import) private key.Amount precise is from 1 to 50000'
    # read the sender address; `with` guarantees the handle is closed
    try:
        with open('address', 'r') as f:
            sender = f.read()
    except FileNotFoundError:
        print("Address file cant be found.")
        return False
    args = arg.split(' ')
    # need recipient + amount, a loaded private key, and amount in [1, 50000]
    if len(args) < 2 or self.privkey is None or int(args[1]) < 1 \
            or int(args[1]) > 50000:
        print("Invalid action.Use help for information")
        return False
    trans = Transaction(sender, args[0], args[1])
    hashed = trans.transactionhash()  # hash transaction
    sig, verkey = wallet.signmessage(hashed, self.privkey)
    if tx_validator(trans, verkey, sig, hashed, self.pubkey) == False:
        print("Invalid transaction")
        return False
    self.serialize = Serializer(trans.sender, trans.recipient,
                                trans.amount,
                                wallet.getnewpubkey(self.privkey, 0), sig)
    print(self.serialize.serialize)
def blockReturnHash():
    """Find a block by its header hash (POSTed as form field 'data').

    Recomputes each stored block's header hash; on a match, attaches the
    per-transaction hashes and returns the block as JSON (200), otherwise
    a 404 "not find" payload.
    """
    target = request.form['data']  # renamed from `hash` to avoid shadowing the builtin
    db = TinyDB('db/blk.json')
    response = app.response_class(response=json.dumps({"status": "not find"}),
                                  status=404,
                                  mimetype='application/json')
    for block in db.all():
        header = block["Block Header"]
        # header hash = SHA-256 over the serialized 80-byte-style header fields
        header_hash = sha256(
            (header['Version'].to_bytes(4, "little")
             + binascii.unhexlify(header['Previous Block Hash'])
             + binascii.unhexlify(header['Merkle Root'])
             + header['Timestamp'].to_bytes(4, "little")
             + binascii.unhexlify(header["Difficulty Target"])[::-1]
             + header['Nonce'].to_bytes(4, "little"))).hexdigest()
        if header_hash == target:
            # include each transaction's hash alongside the block data
            tranHashLst = []
            for tx in block['Transactions']:
                raw = Serializer(Param(tx)).make()
                tranHashLst.append(sha256(binascii.unhexlify(raw)).hexdigest())
            block["Transaction Data"] = tranHashLst
            response = app.response_class(response=json.dumps(block),
                                          status=200,
                                          mimetype='application/json')
            break
    return response
def do_mine(self, arg):
    "Start mining process."
    while True:
        # pull pending transactions from the mempool file
        with open('mempool', 'r') as f:
            nf = f.readlines()
        if len(nf) < 3:
            print("Stopped.Required at least 3 valid transaction")
            return False
        trans = nf[-3:]
        # build and sign the coinbase transaction paying the miner
        with open('minerkey', 'r') as f:
            minerkey = f.read()
        minerkey = wiftopriv(minerkey)
        miner = CoinbaseTransaction()
        pubkey = getnewpubkey(minerkey, 1)
        miner.recipient = (getnewaddress(pubkey)).decode()
        hashed = miner.transactionhash()
        sig, verkey = signmessage(hashed, minerkey)
        if tx_validator(miner, verkey, sig, hashed, pubkey) == False:
            print("Invalid transaction")
            return False
        serialize = Serializer(miner.sender, miner.recipient,
                               miner.amount, getnewpubkey(minerkey, 0), sig)
        trans.append(serialize.serialize + '\n')
        newb = Block(self.lasthash, trans)
        if newb.validatesall() == True and self.bl.mine(newb) != False:
            # append to chain via the local node's /mine endpoint
            block = {
                'timestamp': newb.timestamp,
                'nonce': newb.nonce,
                'previous_hash': self.lasthash,
                'transactions': newb.transactions,
                'merkle_root': newb.merkle_root,
                'hash': newb.blockhash()
            }
            url_tn = "http://127.0.0.1:5000/mine"
            try:
                r = requests.post(url=url_tn, json=block)
                if r.ok:
                    print("Block is added to blockchain!")
            except requests.RequestException:  # narrowed from bare except
                print("Error.Check if the server is running")
                return False
            # drop the three mined transactions from the mempool
            with open('mempool', 'w') as f:
                f.write(''.join(nf[:-3]))
            self.lasthash = newb.blockhash()
        else:
            return False
def implementation_predict(project_name, values, version, registry_add):
    """Run a prediction for *project_name* at *version* with *values*.

    Returns the estimator's prediction result, ``{'exception': ...}`` on a
    prediction failure, or ``{'error': True}`` when the stored model
    description could not be read.
    """
    path = Serializer().get_folder_name(project_name)
    model_json = Serializer().read_model(path, version)  # renamed: avoid shadowing the json module
    if 'error' in model_json:
        return {'error': True}
    estimator, model, version = get_estimator(path, model_json, version)
    try:
        result = estimator.predict(model, values)
        Serializer().save_changes(path, version, model, add=True)
        if model.get_dashboard():
            Serializer().set_dashboard(path, model.get_dashboard())
    except Exception:  # narrowed from bare except; report instead of crashing
        return {'exception': traceback.format_exc()}
    if model.registered_instance is not None:
        registry_add(project_name, model, model.registered_instance)
    return result
def __init__(self, problem, config):
    """Load the config file next to this module, then reset stats and run."""
    self.abs_path = os.path.dirname(os.path.abspath(__file__))
    self.serializer = Serializer()
    self.config_file = config
    # config path is resolved relative to this module's directory
    config_path = self.abs_path + "/" + config
    self.config = self.serializer.read_config(config_path)
    self.ik_solution_generator = IKSolutionGenerator()
    self.clear_stats(problem)
    self.run(problem)
def on_post(self, request, response):
    """Persist a posted model description and optional dataset."""
    # reject callers without a valid API token
    if not check_token(request):
        response.media = {'error': True, 'message': 'Not authorized'}
        return
    body = json.loads(request.get_param('data'))
    dataset = request.get_param('dataset')
    # body['rewrite'] controls whether an existing model is overwritten
    response.media = Serializer().save_model(body, dataset, body['rewrite'])
def tx_new():
    """Accept a serialized transaction and append it to the pending pool.

    The payload arrives JSON-quoted, so the surrounding quotes are stripped
    before the round-trip integrity check (deserialize then re-serialize
    must reproduce the input exactly).
    """
    serialized_tx = request.data.decode()[1:-1]
    deserial = Deserializer().deserialize(serialized_tx)
    if Serializer().serialize(deserial) != serialized_tx:
        print("Cant append invalid transaction to pending pool")
        # fix: the original printed this warning but then appended the
        # invalid transaction to the pool anyway
        return ""
    pending_pool.get_tx(serialized_tx)
    print("Serialized transaction: " + str(serialized_tx) +
          " was added to the pending pool.")
    return ""
def get_estimator(path, json, version=None):
    """Build (estimator, model, version) from a stored model description.

    ``json`` is the dict read from the project's model file (NOTE: the
    parameter shadows the stdlib module name; kept as-is since it is part
    of the function's interface).

    SECURITY NOTE(review): ``exec`` runs the stored implementation source
    with full privileges — safe only if model files are trusted input.
    """
    sandbox = dict()
    # make each declared import available to the exec'd implementation
    for short_name, module in json['model']['imports']:
        sandbox[short_name] = importlib.import_module(module)
    exec(json['implementation'], sandbox)
    # effective version: explicit argument > stored version > project default
    version = version if version is not None else json['version']
    if version is None:
        version = Serializer().read_project(path)['predictions_model']
    model = rv.Model(json['model']['name'])
    model.server_path = os.path.join(path, 'model_' + str(version) + '.data')
    model.settings = json['model']['settings']
    # attach the pickled dataset for this version when one exists
    data_path = Serializer().actual_dataset_file(path, version)
    if data_path:
        model.set_dataset(pd.read_pickle(data_path, compression='bz2'))
    estimator = (sandbox[json['class_name']])()
    # some implementations return a factory; call once more for an instance
    if callable(estimator):
        estimator = estimator()
    return estimator, model, version
def __init__(self, recipient, amount=50):
    """Coinbase transaction paying *recipient* from the all-zero sender.

    Reads the miner's private key from the 'minerkey' file, signs the
    transaction, and caches its serialized form in ``self.ser``.
    """
    super().__init__('0'*35, recipient, amount)
    try:
        # `with` guarantees the key file is closed (original leaked the handle)
        with open('minerkey', 'r') as f:
            pk = f.readline().rstrip('\n')
        pk_hex = w.decode_hex(pk)
        self.singin(pk_hex)
        self.ser = Serializer(self).ser
    except IOError:
        print("Error: can\'t find file or read data")
def __init__(self, pseudonym, public_key):
    """Register this miner with the local registry service."""
    self.public_key = public_key
    self.serializer = Serializer()
    payload = {
        "pseudonym": pseudonym,
        "public_key": public_key,
    }
    # the service answers with an empty body on success, error text otherwise
    reply = requests.post("http://localhost:5000/register_miner", data=payload)
    if reply.text != "":
        raise ValueError(reply.text)
async def longPoll(self, server_id, ban):
    """Long-poll for up to ~6 minutes (120 x 3s) until data is available."""
    serializer = Serializer()
    respdata = [False]
    for _ in range(120):
        check = await serializer.longpollGet(server_id, ban)
        # anything other than literal False is a real result — return it
        # (kept as `!= False` so falsy-but-valid payloads still pass through)
        if check != False:
            respdata = check
            return JSONResponse(respdata)
        await asyncio.sleep(3)
    # timed out: report the default "no data" payload
    return JSONResponse(respdata)
def run_downloader(db_connection, logger):
    """Supervise the daily download jobs until roughly 23:30 local time.

    Starts the main job, then keeps launching "past date" jobs whenever
    the current job process exits, marking any job that died without
    completing as FAILED.
    """
    manager = Manager()
    shared_state = manager.Namespace()
    shared_state.default_date = get_default_date()
    shared_state.past_date = None
    job_serializer = Serializer(db_connection, job)
    # TODO: Check if today's job is already running.
    # If so, just go with missed jobs.
    logger.info('Starting the main workflow')
    p = start_main_job(shared_state)
    # JUST_MAIN short-circuits the supervisor: wait for main job, then stop
    if environ.get('JUST_MAIN', False):
        p.join()
        return
    # hard cutoff for launching new jobs: today at 23:30
    end_time = datetime.now()\
        .replace(hour=23, minute=30, second=0, microsecond=0)
    while True:
        sleep(5)
        p.join(timeout=0)  # reap the child without blocking
        if not p.is_alive():
            job_id = shared_state.job_id
            completed = shared_state.completed
            # the child exited without marking itself done -> record failure
            if job_id is not None and not completed:
                job_serializer.put(job_id, {
                    'status': JobStatus.FAILED,
                })
                if p.exitcode != 0:
                    logger.error('Job exited unexpectedly',
                                 f'Exit code: {p.exitcode}\nJob id: {job_id}')
            if datetime.now() >= end_time:
                break
            p = start_past_job(shared_state)
            # no more past dates to process
            if p is None:
                break
        elif datetime.now() >= end_time:
            job_id = shared_state.job_id
            completed = shared_state.completed
            if job_id is not None and not completed:
                # Time to end.
                # after terminate, the next iteration sees the process dead
                # and the cutoff reached, so the loop exits then
                p.terminate()
                job_serializer.put(job_id, {
                    'status': JobStatus.FAILED,
                })
    logger.info('All jobs finished.')
def cmd_looper(self, input_handler, current_cmd, output):
    """Run the active Cmd session, then dispatch on the command it set.

    The loop is implemented by tail-recursing into itself after each
    handled command; it returns only on "do_quit".
    NOTE(review): recursion depth grows with every command — very long
    sessions could eventually hit the interpreter recursion limit.
    """
    current_cmd.cmdloop(intro=output)
    user_command = current_cmd.current_command
    # the "ethans" session is cmd_b; the other branch drives cmd_a
    is_ethans = current_cmd == input_handler.cmd_b
    # is_azezs = current_cmd == input_handler.cmd_b
    # CMD Switcher
    if user_command == "do_switch_cmd":
        if is_ethans:
            current_cmd = input_handler.cmd_a
        else:
            current_cmd = input_handler.cmd_b
        current_cmd.current_command = ""
        self.cmd_looper(input_handler, current_cmd, "CMD Switched")
    # JS file checker
    if user_command == "do_create_uml":
        current_cmd.current_command = ""
        output = "UML Diagram generated ./uml.png"
        output_intro = "Make sure you select at least one "
        output_error = "Javascript file. There was a type error: "
        type_error_output = output_intro + output_error
        if is_ethans:
            try:
                my_data = self.is_file_or_dir_b(current_cmd.user_args)
                self.handle_javascript(my_data, "b")
            except TypeError as type_error:
                output = type_error_output + str(type_error)
        else:
            try:
                my_data = self.is_file_or_dir_a(current_cmd.user_args)
                self.handle_javascript(my_data, "a")
            except TypeError as t:
                output = type_error_output + str(t)
        self.cmd_looper(input_handler, current_cmd, output)
    if user_command == "do_deserialize":
        my_serializer = Serializer()
        # WRAP in try / catch
        deserialize_args = current_cmd.user_args
        if is_ethans:
            my_serializer.deserializer_b(deserialize_args)
        else:
            my_serializer.deserializer_a(deserialize_args)
        self.cmd_looper(input_handler, current_cmd, "")
    # Quitter
    if user_command == "do_quit":
        return
def get_signed_scriptSig(spendable_output, inputs_outpoint_dict, outputs, wif):
    """Produce the signed scriptSig for *spendable_output*.

    Temporarily sets this input's scriptSig to the spent output's
    scriptPubKey, serializes the resulting transaction, double-SHA256s it
    and signs the digest with *wif*.
    """
    scriptSig = spendable_output.output.scriptPubKey
    # set only this input scriptPubKey
    inputs_outpoint_dict[spendable_output].scriptSig = scriptSig
    inputs = list(inputs_outpoint_dict.values())
    tx = Transaction(1, inputs, outputs, 0)
    serial = Serializer().serialize(tx)
    # message = double SHA-256 of the raw serialized transaction
    digest = sha256(sha256(unhexlify(serial)).digest()).hexdigest()
    return script.get_scriptSig(wif, digest)
def __init__(self, db_connection, logger, date, max_downloads=None,
             max_upload_workers=20, allow_repeat=False):
    """Wire up one day's download workflow.

    :param db_connection: open DB connection shared by the serializers
    :param logger: project Logger used for progress reporting
    :param date: the day to fetch; the search window is [date, date + 1 day)
    :param max_downloads: optional cap on total downloads (None = unlimited)
    :param max_upload_workers: size of the parallel upload worker pool
    :param allow_repeat: whether already-seen granules may be re-processed
    """
    self.max_downloads = max_downloads
    self.max_upload_workers = max_upload_workers
    self.total_downloads = 0
    self.upload_queue = Queue()
    self.lock = Lock()
    self.allow_repeat = allow_repeat
    # Setup the database connection
    self.db_connection = db_connection
    self.job_serializer = Serializer(self.db_connection, job)
    self.granule_serializer = Serializer(self.db_connection, granule)
    self.logger = logger
    self.date = date
    self.logger.info('Creating a workflow')
    # Downloader that handles asynchronous downloads.
    self.downloader = Downloader(
        on_download_error=self._on_download_error,
        on_download_complete=self._on_download_complete,
        callback_args=(self.lock, ))
    # Copernicus Search API
    self.logger.info('Creating copernicus API connector')
    # one-day window starting at self.date
    end_date = (self.date + timedelta(days=1))
    start_date = self.date  # self._get_start_date(self.date, end_date)
    self.copernicus = Copernicus(
        start_date=start_date.isoformat(),
        end_date=end_date.isoformat(),
        rows_per_query=30,
    )
def implementation_tasks():
    """Run the scheduled task hook for every project that enables it.

    For each project with a truthy 'schedule' flag, loads the active model
    version and invokes the estimator's optional ``task`` callable; any
    failure is logged and the remaining projects still run.
    """
    for project in Serializer().list_projects():
        settings = project['project']
        # skip projects without an enabled schedule
        if 'schedule' not in settings or not settings['schedule']:
            continue
        version = settings['predictions_model']
        path = Serializer().get_folder_name(settings['name'])
        model_json = Serializer().read_model(path, version)  # renamed: avoid shadowing the json module
        if 'error' in model_json:
            continue
        estimator, model, version = get_estimator(path, model_json, version)
        try:
            # `task` is an optional hook on the estimator
            task = getattr(estimator, 'task', None)
            if callable(task):
                task(model)
                Serializer().save_changes(path, version, model, add=False)
                if model.get_dashboard():
                    Serializer().set_dashboard(path, model.get_dashboard())
        except Exception:  # narrowed from bare except; keep other projects running
            Serializer().log_error()
def test_serializer_for_no_chinese_split():
    """With do_chinese_split=False the serializer lowercases, splits on
    punctuation, and tokenizes CJK text per character."""
    serializer = Serializer(do_chinese_split=False)

    english = "\nI\'m his pupp\'peer, and i have a ball\t"
    mixed = '\t叫Stam一起到nba打篮球\n'
    dated = '\n\n现在时刻2014-04-08\t\t'

    assert serializer.serialize(english) == [
        'i', "'", 'm', 'his', 'pupp', "'", 'peer', ',',
        'and', 'i', 'have', 'a', 'ball']
    assert serializer.serialize(mixed) == [
        '叫', 'stam', '一', '起', '到', 'nba', '打', '篮', '球']
    assert serializer.serialize(dated) == [
        '现', '在', '时', '刻', '2014', '-', '04', '-', '08']
def setUp(self):
    """Build a fresh Controller with all of its collaborators per test."""
    # individual collaborators
    self.parser = DataParser()
    self.cmd_view = CmdView()
    self.file_reader = FileReader()
    self.validator = Validator()
    self.db = Database("test.db")
    self.vis = Visualiser()
    self.val = Validator()
    self.serial = Serializer()
    # controller under test, wired to the collaborators above
    self.controller = Controller(
        self.cmd_view,
        self.file_reader,
        self.parser,
        self.validator,
        self.db,
        self.vis,
        self.serial,
    )
    self.init()