def __init__(self, binary):
    """Set up the AEG pipeline stages for *binary*."""
    # Resolve once so every stage sees the same absolute path,
    # independent of the caller's working directory.
    path = os.path.abspath(binary)
    self.binary = path
    self.bug_find = BugFind(path)
    self.dba = DBA(path)
    self.exploit = Exploit(path)
    self.verify = Verify(path)
def run_flow(profiles, conv, test_id, conf, profile):
    """Run one conversation test flow, then its verification sequence.

    :param profiles: module/object providing PROFILEMAP for operation setup
    :param conv: conversation object carrying the flow definition
    :param test_id: identifier of this test (also printed as a banner)
    :param conf: configuration object stored on the conversation
    :param profile: profile passed to each operation
    :return: None
    """
    print(20 * "=" + test_id + 20 * "=")
    conv.test_id = test_id
    conv.conf = conf
    for item in conv.flow["sequence"]:
        if isinstance(item, tuple):
            cls, funcs = item
        else:
            cls = item
            funcs = {}
        _oper = cls(conv, profile, test_id, conf, funcs)
        _oper.setup(profiles.PROFILEMAP)
        _oper()
    # A flow without a "tests" entry is fine; anything else propagates.
    # BUG FIX: removed the original's no-op `except Exception as err: raise`
    # clause, which re-raised unchanged and only obscured intent.
    try:
        if conv.flow["tests"]:
            _ver = Verify(check_factory, message_factory, conv)
            _ver.test_sequence(conv.flow["tests"])
    except KeyError:
        pass
    return None
def run_flow(profiles, conv, test_id, conf, profile):
    """Run one conversation test flow, then its verification sequence.

    :param profiles: module/object providing PROFILEMAP for operation setup
    :param conv: conversation object carrying the flow definition
    :param test_id: identifier of this test (also printed as a banner)
    :param conf: configuration object stored on the conversation
    :param profile: profile passed to each operation
    :return: None
    """
    print(20 * "=" + test_id + 20 * "=")
    conv.test_id = test_id
    conv.conf = conf
    for item in conv.flow["sequence"]:
        if isinstance(item, tuple):
            cls, funcs = item
        else:
            cls = item
            funcs = {}
        _oper = cls(conv, profile, test_id, conf, funcs)
        _oper.setup(profiles.PROFILEMAP)
        _oper()
    # A flow without a "tests" entry is fine; anything else propagates.
    # BUG FIX: removed the original's no-op `except Exception as err: raise`
    # clause, which re-raised unchanged and only obscured intent.
    try:
        if conv.flow["tests"]:
            _ver = Verify(check_factory, message_factory, conv)
            _ver.test_sequence(conv.flow["tests"])
    except KeyError:
        pass
    return None
def checkAllSyntax(self, triples):
    """Testmethod for Syntax - should be deleted later on

    :param triples: iterable of file paths whose RDF syntax is checked
    """
    for path in triples:  # renamed: `file` shadowed the builtin
        # BUG FIX: original printed 'Checking' + file with no separating
        # space, producing e.g. "Checkingdata.nt".
        print('Checking ' + path)
        verify = Verify(path)
        verify.checkRDFsyntax()
def verifyAllLinks(self, triples):
    """
    :param triples: A list of all absolute paths for .nt files
    :return: Errors
    """
    for nt_path in triples:
        print('Verifying ' + nt_path)
        Verify(nt_path).verifyLinks()
def manage_http_verify(self, part, interval=60):
    """Run the HTTP proxy-verification job every *interval* seconds, forever.

    DOC FIX: the original docstring documented a nonexistent ``id`` parameter.

    :param part: partition/segment of proxies to verify (forwarded to
        Verify.run_verify_http)
    :param interval: seconds to sleep between verification runs
    """
    verify = Verify()
    while True:
        verify.run_verify_http(part)
        time.sleep(interval)
def _verify(self):
    """Start post-erase verification for the method selected in the UI."""
    # BUG FIX: the original used b'\0x00' / b'\0xff', which are FOUR-byte
    # strings (NUL, 'x', '0', '0') — not the single overwrite byte that was
    # actually written to the device.
    if self.method == 'Zero Overwrite':
        pattern = b'\x00'
    elif self.method == 'One Overwrite':
        pattern = b'\xff'
    else:
        pattern = 'random'
    self.ui.erase_progress.setValue(100)
    self.verify = Verify(self.device, pattern, self.ui.verify_percent.value())
    self.verify.finished.connect(self._finished)
    self.verify.start()
    self.verify.CURRENT_DATA.connect(self.ui.verify_progress.setValue)
    self.verify.CURRENT_TIME.connect(self.ui.verify_duration_label.setText)
def __init__(self, caller, calling, log_level=3):
    """Initialise pjsip and register the caller and calling accounts.

    :param caller: config dict for the account placing calls
    :param calling: config dict for the account receiving calls
    :param log_level: NOTE(review): currently unused — pj log level is
        hard-coded to 7 below; confirm whether it should be wired through.
    """
    self.verify = Verify()
    if not self.verify.setup():
        sys.exit(1)
    self.lib = pj.Lib()
    self.lib.init(
        ua_cfg=self.ua_cfg,
        log_cfg=pj.LogConfig(
            level=7,
            # IDIOM FIX: renamed lambda params — the originals shadowed the
            # builtins `str` and `len`.
            callback=lambda lvl, text, length: logging.debug(text.strip())),
        media_cfg=self.media_cfg)
    self.lib.start(with_thread=True)
    self.caller_ddi, self.caller_account, self.caller_cb, self.caller_cfg = \
        self.register(caller, default=True)
    self.calling_ddi, self.calling_account, self.calling_cb, self.calling_cfg = \
        self.register(calling)
def add_new_block(self, block: Block) -> bool:
    """Validate *block*; on success append it to the chain and broadcast it.

    :return: True if the block verified and was added, else False.
    """
    if not Verify.verify_new_block(block):
        return False
    self.__blc.add_block(block)
    # Broadcast the accepted block to peers.
    NetworkRouting.get_instance().add_a_msg(block)
    return True
class SimpleAEG(object):
    """Minimal automatic-exploit-generation pipeline.

    Stages: find a crashing path, analyze it, generate a payload, verify it,
    and write a working payload to ``<binary>-exploit``.
    """

    def __init__(self, binary):
        self.binary = os.path.abspath(binary)
        self.bug_find = BugFind(self.binary)
        self.dba = DBA(self.binary)
        self.exploit = Exploit(self.binary)
        self.verify = Verify(self.binary)

    def attack(self):
        """Run the full pipeline.

        :return: True if a verified exploit was written, False otherwise.
        """
        l.info("Preparation...")
        found_path = self.bug_find.find()
        if found_path is None:
            l.info("No exploitation found")
            # BUG FIX: the original fell through here and passed None into
            # self.dba.analyze(); bail out instead.
            return False
        dba_result = self.dba.analyze(found_path)
        l.info("Attempting to create exploit")
        payload = self.exploit.generate(found_path, dba_result)
        if not payload:
            l.info('Cannot generate any payload')
            return False
        if self.verify.verify(payload):
            filename = '%s-exploit' % self.binary
            # NOTE(review): opened in text mode — confirm payload is str,
            # not bytes.
            with open(filename, 'w') as f:
                f.write(payload)
            l.info('Payload generated in %s' % filename)
            l.info('Exploitation completed')
            return True
        l.info('Failed, quit...')
        # BUG FIX: explicit boolean instead of falling off the end (None).
        return False
def add_trans(self, *transes: Transaction) -> bool:
    """Add transactions to the pool (verifying each first).

    :return: True if at least one transaction was accepted.
    """
    added = False
    for tx in transes:
        # Skip duplicates and anything that fails verification.
        if self.trans_cache.contain(tx):
            continue
        if not Verify.verify_new_transaction(tx):
            continue
        self.trans_cache.put(tx)
        added = True
    return added
def add_new_block(self, block: Block) -> bool:
    """Validate *block*; if it verifies, persist it and broadcast it.

    :return: True if the block was accepted, else False.
    """
    if not Verify.verify_new_block(block):
        return False
    self.__blc.add_block(block)
    self.sqlite_driver.insert_block(block)
    # Wrap the block in a PUT message for the blockchain layer, then wrap
    # that again for the network layer before sending to the local router.
    inner = Message(recieve="B", type_="PUT", data=str(block))
    outer = Message(recieve="N", type_="PUT", data=str(inner))
    Node("localhost", NETWORK_ROUTING_PORT).send_msg(outer)  # broadcast
    return True
def lambda_handler(event, context):
    """AWS Lambda entry point: verify the named file and return its messages.

    :param event: dict with a 'file' key naming the file to process
    :param context: Lambda context object (unused)
    :return: list of messages produced by Verify(...).process()
    """
    file_name = event['file']
    # IDIOM FIX: the original looped over the messages only to append each
    # one to a fresh list — list() does exactly that.
    return list(Verify(file_name).process())
def add_trans(self, *transes: Transaction) -> bool:
    """Add transactions to the pool after verification (thread-safe).

    :return: True if at least one transaction was accepted.
    """
    with self.__safe_occupy():
        result = False
        for trans in transes:
            # Skip duplicates and anything that fails verification.
            if trans not in self.trans_cache and Verify.verify_new_transaction(
                    trans):
                self.trans_cache.add(trans)
                with self.lock:
                    self.consum_num += 1
                    # Wake one thread blocked in get, if any.
                    # NOTE(review): checking < 1 AFTER the increment means the
                    # counter was negative before — presumably a blocked getter
                    # decremented it below zero; confirm against get().
                    if self.consum_num < 1:
                        self.consum_cond.notify(1)
                result = True
        return result
def recv_broad_transaction():
    """Worker loop: receive transactions/blocks from the mailbox and
    re-broadcast or accept them. Runs until self.server_flag is cleared
    (closure over the enclosing instance)."""
    while self.server_flag:
        # Blocks here until a message arrives.
        node, msg = M_mailbox.get()
        if msg.type == "PUT":
            if msg.command == "TRANS":
                trans = Transaction.load(msg.data)
                # Avoid immediately re-broadcasting the transaction we
                # just handled (trans_later is the last one seen).
                if trans != self.trans_later and self.add_trans(trans):
                    self.trans_later = trans
                    NetworkRouting.get_instance().broad_a_msg(
                        msg)  # broadcast the transaction
            elif msg.command == "BLOCK":
                # Another node mined first — stop mining, accept its block.
                block = Block.load(msg.data)
                # NOTE(review): accepting only when get_top_hash() equals
                # block.get_hash() means the block must already be the chain
                # tip — confirm this ordering is intended.
                if self.block_later != block and Verify.verify_new_block(
                        block) and FullBlockChain.get_instance(
                        ).get_top_hash() == block.get_hash():
                    self.accept_block(block)
def quotas_usage(request, project_id, resource):
    """Django view: return usage records for *resource* in *project_id*.

    Expects ``start_time`` / ``end_time`` query parameters (epoch seconds)
    and an ``X-Auth-Token`` header validated against Keystone.
    """
    token = request.META.get('HTTP_X_AUTH_TOKEN')
    mongodb_info = setting.mongodb_info
    store = MongoDB(mongodb_info['user'], mongodb_info['password'],
                    mongodb_info['host'], mongodb_info['port'],
                    mongodb_info['database'])
    v = Verify()
    v.set_request(KEY_STONE_HOST['host'], KEY_STONE_HOST['port'])
    v.set_tenantname(project_id)
    # Guard clause: reject invalid tokens with Keystone's response body.
    if not v.is_token_available(token):
        return HttpResponse(v.get_request_data())
    start_time = int(request.GET.get('start_time'))
    end_time = int(request.GET.get('end_time'))
    records = store.load(resource, project_id, start_time, end_time)
    return HttpResponse(json.dumps(records), content_type="application/json")
def getLoginVCode(self):
    """Show the login captcha dialog; return the clicked points, or ''
    if the image could not be shown."""
    dialog = Verify()
    if not dialog.showImage("login", "sjrand"):
        return ""
    return dialog.getPoints()
# -------------------------------------------------------
"""
Instead of using a session directly to store state between user requests and
responses from the CLI, we're going to simplify the process since this is not
the final API construction. Instead of a database, we're going to use a simple
global dict to create key-value pairs for users that log in with their JWT.
Within these key-value pairs will be more dicts, storing information about a
current request for a CWL run. Once that CWL run executes or fails, we delete
that key from the dict; this will help manage memory. Note that every time the
app is restarted this pseudo-database is blown away, which is entirely
inefficient for any type of production environment.
"""
# TYPO FIX in the module note above: "theh" -> "the".
gstor = {}  # pseudo-database: {user key: {current CWL-run request state}}
verifier = Verify()  # token verifier


# Method to verify user
def verify(token):
    """Verify a token and return claims

    :param str token: JSON Web Token string"""
    client_id = conf['client_id']
    keys_url = conf['keys_url']
    return verifier.verify_token(token, keys_url, client_id)


# -------
# ROUTES
# ------
'''
# Standalone driver (Python 2): load DAM.config from the run path and run
# Verify against the configured user and rule lists. The leading ''' closes
# a module docstring opened above this chunk.
import json, sys
from verify import Verify

# Optional first CLI argument overrides the default run path.
if len(sys.argv) > 1:
    RUNPATH = sys.argv[1]
else:
    RUNPATH = '../bin'
LOGPATH = RUNPATH + '/logs'
CONFIGFILE = RUNPATH + '/DAM.config'
# NOTE(review): the config file handle is never closed — harmless for a
# short-lived script, but a with-block would be cleaner.
config_data = open(CONFIGFILE).read()
data = json.loads(config_data)
# json.dumps(...).strip('"') unquotes each JSON string value.
USERLIST = [json.dumps(u).strip('"') for u in data["userlist"]]
RULELIST = [json.dumps(r).strip('"') for r in data["rulelist"]]
SERVER = json.dumps(data["server"]).strip('"')
print 'Runpath: ', RUNPATH
print 'Logpath: ', LOGPATH
print 'Server from config file: ', SERVER
print 'Userlist from config file: ', USERLIST
print 'Rulelist from config file: ', RULELIST
v = Verify(LOGPATH, SERVER)
v.run(USERLIST, RULELIST)
class Main:
    # NOTE(review): all logic lives in this __main__ guard INSIDE the class
    # body, so it executes at class-definition time when run as a script.
    if __name__ == '__main__':
        # Run the settings script
        settings = tools.settings.Settings()
        # Run the outputSaver script
        output_saver = tools.outputSaver.OutputSaver()
        parser = argparse.ArgumentParser(
            description=
            'MAD downloads anime from CrunchyRoll, WCOStream and other websites.'
        )
        parser.add_argument('--version', action='store_true',
                            help='Shows version and exits.')
        required_args = parser.add_argument_group('Required Arguments :')
        required_args.add_argument('-i', '--input', nargs=1,
                                   help='Inputs the URL to anime.')
        parser.add_argument('-p', '--password', nargs=1,
                            help='Indicates password for a website.')
        parser.add_argument('-u', '--username', nargs=1,
                            help='Indicates username for a website.')
        parser.add_argument('-r', '--resolution', nargs=1,
                            help='Inputs the resolution to look for.',
                            default='720')
        parser.add_argument('-l', '--language', nargs=1,
                            help='Selects the language for the show.',
                            default='Japanese')
        parser.add_argument('-se', '--season', nargs=1,
                            help='Specifies what season to download.',
                            default='All')
        parser.add_argument(
            '--skip', action='store_true',
            help='skips the video download and downloads only subs.')
        parser.add_argument('-nl', '--nologin', action='store_true',
                            help='Skips login for websites.')
        parser.add_argument(
            '-o', '--output', nargs=1,
            help='Specifies the directory of which to save the files.')
        parser.add_argument('-n', '--newest',
                            help='Get the newest episode in the series.',
                            action='store_true')
        parser.add_argument(
            '-rn', '--range', nargs=1,
            help='Specifies the range of episodes to download.',
            default='All')
        parser.add_argument(
            "-v", "--verbose",
            help="Prints important debugging messages on screen.",
            action="store_true")
        parser.add_argument(
            '-x', '--exclude', nargs=1,
            help='Specifies the episodes to not download (ie ova).',
            default=None)
        parser.add_argument('--search', action='store_true',
                            help='Search for a show.')
        parser.add_argument('--gui', action='store_true',
                            help='Start the GUI')
        args = parser.parse_args()
        # Extra state carried on the args namespace for downstream modules.
        args.logger = False
        args.skipper = False
        args.settings = settings
        args.outputsaver = output_saver
        if args.search:
            run_search = tools.search.Search()
            array = run_search.start()
            for item in array:
                print(item)
            exit(1)
        if args.gui:
            run_gui = tools.gui.Gui()
            exit(1)
        if args.verbose:
            logging.basicConfig(format='%(levelname)s: %(message)s',
                                filename="Error Log.log",
                                level=logging.DEBUG)
            logging.debug('You have successfully set the Debugging On.')
            logging.debug("Arguments Provided : {0}".format(args))
            logging.debug("Operating System : {0} - {1} - {2}".format(
                platform.system(), platform.release(), platform.version()))
            logging.debug("Python Version : {0} ({1})".format(
                platform.python_version(), platform.architecture()[0]))
            args.logger = True
        if args.version:
            print("Current Version: {0}".format(__version__))
            exit()
        if args.skip:
            print("Will be skipping video downloads")
            args.skipper = True
        if args.nologin:
            # Dummy credentials so downstream login code has something to use.
            args.username = ['username']
            args.password = ['password']
        if args.input is None:
            # NOTE(review): args.input is None here, so args.input[0] ALWAYS
            # raises TypeError and the except prints the usage hint — the
            # saved-URL lookup can never succeed. The condition looks
            # inverted; confirm before changing.
            try:
                if args.outputsaver.get_show_url(args.input[0]) is not None:
                    args.input[0] = args.outputsaver.get_show_url(
                        args.input[0])
            except TypeError as e:
                print(
                    "Please enter the required argument (Input -i). Run __main__.py --help"
                )
                exit(1)
        else:
            # Unwrap the single-element lists produced by nargs=1.
            if type(args.username) == list:
                args.username = args.username[0]
            else:
                args.username = False
            if type(args.password) == list:
                args.password = args.password[0]
            else:
                args.password = False
            if type(args.resolution) == list:
                if "," in args.resolution[0]:
                    args.resolution = args.resolution[0].split(',')
                else:
                    args.resolution = args.resolution[0]
            if type(args.language) == list:
                args.language = args.language[0]
            if type(args.range) == list:
                args.range = args.range[0]
            if type(args.season) == list:
                args.season = args.season[0]
            if type(args.output) == list:
                args.output = args.output[0]
        # Lets check if the url is a website we support and if it requires a
        # username and password
        verify = Verify(args.__dict__)
        if verify.isVerified():
            # It is a website we support. Lets use it
            if verify.getWebsite() == 'WCO':
                sites.wcostream.WCOStream(args.__dict__)
            if verify.getWebsite() == 'Crunchyroll':
                sites.crunchyroll.Crunchyroll(args.__dict__)
class SIPCallRecordVerify:
    """Register two SIP accounts (caller and calling) via pjsip and place
    periodic test calls whose recordings are to be verified. (Python 2.)"""

    # Shared pjsip user-agent configuration for all instances.
    ua_cfg = pj.UAConfig()
    ua_cfg.max_calls = 10
    ua_cfg.nameserver = ["8.8.8.8"]
    ua_cfg.user_agent = "SIPCallRecordVerify"

    media_cfg = pj.MediaConfig()
    media_cfg.channel_count = 8
    media_cfg.max_media_ports = 8

    def __init__(self, caller, calling, log_level=3):
        # NOTE(review): log_level is unused — pj log level is hard-coded to 7.
        self.verify = Verify()
        if not self.verify.setup():
            sys.exit(1)
        self.lib = pj.Lib()
        self.lib.init(
            ua_cfg=self.ua_cfg,
            log_cfg=pj.LogConfig(
                level=7,
                callback=lambda level, str, len: logging.debug(str.strip())),
            media_cfg=self.media_cfg)
        self.lib.start(with_thread=True)
        self.caller_ddi, self.caller_account, self.caller_cb, self.caller_cfg = self.register(
            caller, default=True)
        self.calling_ddi, self.calling_account, self.calling_cb, self.calling_cfg = self.register(
            calling)

    def register(self, config, default=False):
        """Create a UDP transport and register one SIP account from *config*.

        :param config: dict with 'ddi', 'uri', 'domain', 'username',
            'password', 'proxy' entries (values coerced to str below)
        :param default: make this the pjsip default account
        :return: (ddi, account, account_callback, account_config)
        """
        for k, v in config.iteritems():
            config[k] = str(v)  # pjsip expects plain strings
        ddi = config['ddi']
        logging.info("Creating transport for %s" % (config['uri']))
        transport = self.lib.create_transport(pj.TransportType.UDP)
        logging.info(
            "Listening on %s:%d for %s" %
            (transport.info().host, transport.info().port, config['uri']))
        logging.info("Attempting registration for %s" % config['uri'])
        account_cfg = pj.AccountConfig(domain=config['domain'],
                                       username=config['username'],
                                       password=config['password'],
                                       proxy=config['proxy'])
        account_cfg.id = config['uri']
        account = self.lib.create_account(acc_config=account_cfg,
                                          set_default=default)
        account.set_transport(transport)
        account_cb = AccountHandler(account)
        account.set_callback(account_cb)
        account_cb.wait()  # block until registration completes
        logging.info("%s registered, status: %s (%s)" %
                     (config['uri'], account.info().reg_status,
                      account.info().reg_reason))
        return (ddi, account, account_cb, account_cfg)

    def start_caller(self, audiotest, interval=300):
        """Place a call every *interval* seconds and play *audiotest* into it.

        Loops forever; exits only on a pjsip error.
        """
        try:
            call = None
            end = time.time() + interval
            while True:
                if call:
                    # Wait out any call still in progress.
                    while call.is_valid():
                        logging.info("Call in progress")
                        sleep(1)
                        continue
                remaining = end - time.time()
                logging.info("Seconds until next call: %d" % remaining)
                if time.time() <= end:
                    sleep(1)
                    continue
                end = time.time() + interval
                logging.info("Making call")
                # NOTE(review): proxy[0][4:-3] strips a fixed prefix/suffix
                # from the proxy entry to get the host — confirm the format.
                call, callhandler = self.caller_cb.new_call(
                    "%s@%s" %
                    (self.calling_ddi, self.caller_cfg.proxy[0][4:-3]))
                if call:
                    # Spin until the call is answered.
                    while call.info().state != pj.CallState.CONFIRMED:
                        logging.info("Looping call state check with %s" %
                                     call.info().state)
                        sleep(1)
                        continue
                    sleep(1)
                    callhandler.play_file(audiotest['filename'], True)
                    # TODO: Fetch recording, convert to text, validate.
                    sleep(1)
                    call.hangup()
                    sleep(1)
        except pj.Error, e:
            logging.error("Exception: " + str(e))
def manage_https_verify(self, interval=60):
    """Run the HTTPS proxy-verification job every *interval* seconds, forever."""
    checker = Verify()
    while True:
        checker.run_verify_https()
        time.sleep(interval)
def create_flow(self, process_id):
    """Build and configure a Flow instance from the config file.

    :param process_id: id of the worker process; selects its input subdir
    :return: a fully configured Flow
    """
    flow = Flow()  # new flow instance
    config = self.get_config()
    # Check that every required configuration entry exists; exit otherwise.
    if not config.get("common")["inputdir"]:
        logging.error("ERROR>>no inputdir in %s<< " % self.config_file)
        sys.exit()
    self.input_dir = config.get("common")["inputdir"]
    if not config.get("common")["input_rule_exp"]:
        logging.error("ERROR>>no input_rule_exp in %s<< " % self.config_file)
        sys.exit()
    self.match_expr = config.get("common")["input_rule_exp"]
    if not config.get("common")["redopath"]:
        logging.error("ERROR>>no redopath in %s<< " % self.config_file)
        sys.exit()
    redo_path = config.get("common")["redopath"]
    if not config.get("common")["fieldlen"]:
        logging.error("ERROR>>no fieldlen in %s<< " % self.config_file)
        sys.exit()
    fieldlen = config.get("common")["fieldlen"]
    if not config.get("common")["line_limit"]:
        logging.error("ERROR>>no line_limit in %s<< " % self.config_file)
        sys.exit()
    line_limit = config.get("common")["line_limit"]
    # NOTE(review): unreachable in practice — an empty line_limit already
    # exits above; kept to preserve the original's defaulting intent.
    if line_limit == "":
        line_limit = 20000
    if not config.get("common")["rules"]:
        logging.error('ERROR>>no rules in config<<')
        sys.exit()
    rule_list = config.get("common")["rules"].split(",")
    self.batch_size = config.get("common")["batchsize"]
    if not config.get("common")["bakpath"]:
        logging.error('ERROR>>no bakpath in config<<')
        sys.exit()
    bak_path = config.get("common")["bakpath"]
    flow.set_fieldlen(fieldlen)
    flow.set_line_limit(int(line_limit))
    flow.set_process_id(process_id)
    flow.set_redo_path(redo_path)
    flow.set_bak(bak_path)
    output_dirs = {}
    # Check that every rule has an output directory configured.
    for rule in rule_list:
        output_dir = config.get(rule)["destdir"]
        if output_dir == "":
            logging.error("rule:%s no destdir" % rule)
            sys.exit()
        # BUG FIX: the original rebound `output_dirs = {rule: output_dir}`
        # each iteration, so only the LAST rule's destdir survived and the
        # other rules' paths were never verified. Accumulate instead.
        output_dirs[rule] = output_dir
    # Verify that all configured paths exist before proceeding.
    all_path = {
        'inputdir': self.input_dir,
        'redopath': redo_path,
        'bakpath': bak_path
    }
    all_path.update(output_dirs)
    self.output_dirs = all_path
    verify = Verify(all_path)
    if not verify.check_path():
        sys.exit()
    self.process_input_dir = self.input_dir + "/" + process_id
    flow.set_dir(self.process_input_dir)
    for rule_name in rule_list:
        _config = {'rulename': rule_name}
        rule_items = config.get(rule_name)
        _config.update(rule_items)
        flow.add_rule(Rule(_config))  # returns a fields set
    flow.config = config
    return flow
def do_backup(self):
    """Execute one backup run: copy the configured folders, optionally
    verify the result, then handle notifications and optional shutdown."""
    self.start_time = datetime.now()
    self.nfiles = 0
    self.nfolders = 0
    self.bytes = 0
    success = False
    message = ""
    self.backup_folder = os.path.join(
        self.backup.name,
        self.start_time.strftime(const.DateTimeFormat) + " " + self.type)
    if not self.dry_run:
        self.run_id = self.db.start_run(self.backup.name, self.backup.store,
                                        self.type, self.start_time)
    msg = _("Backup {server}/{backup}/{type} beginning").format(
        server=utils.get_hostname(), backup=self.backup.name, type=self.type)
    if self.dry_run:
        msg += _(" (Dry Run)")
    log.info(msg)
    self.db.save_message(msg)
    if self.orig_type != self.type:
        # The backup type was switched
        self.db.save_message(
            _("NOTE: Backup type switched to {newtype} from {oldtype}").format(
                newtype=self.type, oldtype=self.orig_type))
    # After here we have a run set up in the database, and can begin
    # logging errors.
    try:
        # Check that if ENCRYPTION is enabled, that there is a password
        # defined.
        if self.backup.encrypt and not self.config.data_passphrase:
            raise Exception("Backup encryption required, but no passphrase has been configured. Backup cancelled.")
        self.prepare_store()
        # Prepare output/destinations/encryption
        self.prepare_output()
        try:
            # Now we actually DO the backup, for each listed folder
            for folder in self.backup.include_folders:
                self.recursive_backup_folder(folder)
            log.debug("Committing saved fs entries...")
            self.db.fs_saved_commit()
            log.debug("Closing...")
            self.close_output(success=True)
            #raise Exception("Test Exception")
        except Exception as e:
            log.warn("Exception during backup:", str(e))
            # We are going to fail. But we need to try and close
            # whatever we can. Closing may fail, but in this case
            # we ignore that error.
            try:
                self.close_output(success=False)
            except:
                pass
            raise e
        if self.backup.verify and not self.dry_run:
            log.info("Starting verify phase")
            msg = _("Backup {server}/{backup}/{type} verification starting").format(
                server=utils.get_hostname(), backup=self.backup.name,
                type=self.type)
            self.db.save_message(msg)
            v = Verify(self.backup.name, self.start_time)
            v.run()  # raises on verification failure, landing in except below
            msg = _("Backup {server}/{backup}/{type} verification succeeded").format(
                server=utils.get_hostname(), backup=self.backup.name,
                type=self.type)
            self.db.save_message(msg)
        # self.do_verify()
        # Messaging...
        # If its a dry run, the command line specifies messaging.
        # Otherwise both the command line AND backup spec do.
        if not self.dry_run:
            self.db.update_run_status(const.StatusSuccess)
        message = _("Backup {server}/{backup}/{type} completed").format(
            server=utils.get_hostname(), backup=self.backup.name,
            type=self.type)
        if self.dry_run:
            message += " " + _("(Dry Run)")
        success = True
        if not self.dry_run:
            self.db.save_message(message)
    except Exception as e:
        log.error("Exception in backup. Recording. ", e)
        message = _("Backup {server}/{backup}/{type} failed. {error}").format(
            server=utils.get_hostname(), backup=self.backup.name,
            type=self.type, error=str(e))
        success = False
        if not self.dry_run:
            self.db.update_run_status(const.StatusFailed)
            # After a failed backup - we must remove the backup data
            # because it cannot be trusted.
            run = self.db.run_details(self.run_id)
            # Delete the remote data
            log.debug("Attempting to delete remote run data")
            self.store.delete_run_data(run)
            # Delete the entries in the database (but not the failed run
            # itself). This means the messages will persist, so we can see
            # the usage.
            log.debug("Attempting to delete DB run data")
            self.db.delete_run_versions(self.run_id)
            self.db.save_message(message)
    if self.options.message or (self.backup.notify_msg and not self.dry_run):
        try:
            from lib.dlg import Notify
            Notify(const.AppTitle, message)
        except Exception as e:
            # This one is not really an error... there is probably no-one
            # logged in.
            msg = _("Unable to send notification message (no-one logged in)")
            # NOTE(review): saves `message` (the run result) rather than the
            # `msg` just built — looks like a bug; confirm intent.
            if not self.dry_run:
                self.db.save_message(message)
            log.info(msg)
    if self.options.email or (self.backup.notify_email and not self.dry_run):
        try:
            self.send_email(success, message)
        except Exception as e:
            msg = _("Unable to email notification message: {error}").format(
                error=str(e))
            # NOTE(review): same `message` vs `msg` question as above.
            if not self.dry_run:
                self.db.save_message(message)
            log.error(msg)
    if self.options.shutdown or (self.backup.shutdown_after and not self.dry_run):
        try:
            # Ask via zenity; auto-confirm on timeout (status 5).
            cmd = ["zenity", "--question",
                   "--ok-label", _("Shutdown Now"),
                   "--cancel-label", _("Cancel Shutdown"),
                   "--text",
                   _("Backup {backup} complete. Computer will shut down in 2 minutes").format(backup=self.backup.name),
                   "--timeout", "120"]
            status = subprocess.call(cmd)
            log.debug("Shutdown query. status=%d" % status)
            if status == 0 or status == 5:
                print("Running shutdown")
                subprocess.Popen(["shutdown", "-P", "now"])
                print("Done running shutdown")
        except Exception as e:
            msg = _("Unable to shutdown PC: {error}").format(
                error=str(e))
            if not self.dry_run:
                self.db.save_message(message)
            log.error(msg)
class PyRaseController:
    """Qt controller for the pyrase secure-erase UI.

    Wires the device/method combo boxes, runs the chosen overwrite via
    ErasureFactory, then verifies the wipe with Verify.
    """

    DEFAULT_METHODS = [
        'Zero Overwrite',
        'One Overwrite',
        'Random Overwrite',
        'DOD Overwrite',
    ]

    def __init__(self):
        loader = QUiLoader()
        self.ui = loader.load('pyrase.ui')
        self.overwrite = None
        self.verify = None
        self.device = None
        self.method = None
        self.devices = Utils.get_block_devices()
        self._add_default_methods_to_method_box()
        self._add_devices_to_device_box()
        self.ui.device_box.currentIndexChanged.connect(self._set_device)
        self.ui.close_button.clicked.connect(self.ui.close)
        self.ui.erase_button.clicked.connect(self._erase)

    def _add_default_methods_to_method_box(self):
        """Populate the method combo box with the supported wipe methods."""
        self.ui.method_box.clear()
        for method in self.DEFAULT_METHODS:
            self.ui.method_box.addItem(method)

    def _add_devices_to_device_box(self):
        """Populate the device combo box with detected block devices."""
        for device in self.devices:
            self.ui.device_box.addItem(device)

    def _start_overwrite(self, overwrite):
        """Start *overwrite* and wire its progress signals to the UI.

        Extracted to replace the four identical copies of this wiring that
        the original _erase carried, one per method branch.
        """
        self.overwrite = overwrite
        self.overwrite.finished.connect(self._verify)
        self.overwrite.start()
        self.overwrite.CURRENT_DATA.connect(self.ui.erase_progress.setValue)
        self.overwrite.CURRENT_TIME.connect(
            self.ui.erase_duration_label.setText)
        self.overwrite.CURRENT_PASS.connect(self.ui.pass_value_label.setText)

    def _erase(self):
        """Launch the erasure selected in the UI and lock the buttons."""
        self.method = self.ui.method_box.currentText()
        self.ui.erase_button.setEnabled(False)
        self.ui.close_button.setEnabled(False)
        # Dispatch table replaces the original's four copy-pasted branches;
        # unknown methods are ignored, matching the original if/elif chain.
        factories = {
            'Zero Overwrite': ErasureFactory.zero_overwrite,
            'One Overwrite': ErasureFactory.one_overwrite,
            'Random Overwrite': ErasureFactory.random_overwrite,
            'DOD Overwrite': ErasureFactory.dod_overwrite,
        }
        factory = factories.get(self.method)
        if factory is not None:
            self._start_overwrite(factory(self.device))

    def _verify(self):
        """Verify the wipe once the overwrite thread finishes."""
        # BUG FIX: the original used b'\0x00' / b'\0xff' — FOUR-byte strings
        # (NUL, 'x', '0', '0'), not the single byte actually written.
        if self.method == 'Zero Overwrite':
            pattern = b'\x00'
        elif self.method == 'One Overwrite':
            pattern = b'\xff'
        else:
            pattern = 'random'
        self.ui.erase_progress.setValue(100)
        self.verify = Verify(self.device, pattern,
                             self.ui.verify_percent.value())
        self.verify.finished.connect(self._finished)
        self.verify.start()
        self.verify.CURRENT_DATA.connect(self.ui.verify_progress.setValue)
        self.verify.CURRENT_TIME.connect(self.ui.verify_duration_label.setText)

    def _finished(self):
        """Re-enable the UI when verification completes."""
        self.ui.verify_progress.setValue(100)
        # NOTE(review): erasure_status is computed but never displayed —
        # presumably it should be shown somewhere in the UI; confirm.
        if self.verify.success:
            erasure_status = 'Passed'
        else:
            erasure_status = 'Failed'
        self.ui.close_button.setEnabled(True)

    def _set_device(self):
        """Extract the raw device path from the combo-box text."""
        if '/dev/sd' in self.ui.device_box.currentText():
            self.device = self.ui.device_box.currentText()[:8]
        elif '/dev/nvm' in self.ui.device_box.currentText():
            self.device = self.ui.device_box.currentText()[:12]
# Standalone smoke test (Python 2): verify a fixed .docx and print every
# message the verifier produces.
from verify import Verify

fileName = '/media/Tcc_Tatiane.docx'
messages = Verify(fileName).process()
for value in messages:
    print value.content
    print value.detail
b1.add_transaction(t1) # 添加矿工奖励交易 mt1 = Transaction() mt1.add_output(TransOutput(Btc(MINING_BTCS), key1.get_address())) b1.set_head_transaction(mt1) b1.set_index(1) # 挖矿 b1.find_randnum() # 添加区块 bc.add_block(b1) # key1向key2转账 t2 = Transaction() t2.add_input(TransInput(1, 1, 1)) t2.add_output(TransOutput(Btc("23.567"), key2.get_address())) t2.sign_transaction(key1) if not Verify.verify_transaction(t2): print("交易有问题") b2 = Block(pre_hash=b1.get_hash()) b2.add_transaction(t2) mt2 = Transaction() mt2.add_output(TransOutput(Btc(MINING_BTCS), key2.get_address())) # 计算交易费 fee = bc.compute_block_fee(b2) mt2.add_output(TransOutput(fee, key2.get_address())) b2.set_head_transaction(mt2) b2.set_index(2) b2.find_randnum() if not Verify.verify_block_depth(b2): print("区块有问题") bc.add_block(b2)