def setup(self):
    """Attach our security logging profile to the virtual server via PATCH."""
    payload = AttrDict()
    payload.securityLogProfiles = [self.logging_profile_name]
    return self.rstifc.patch(self.virtual.selfLink, payload)
def setup(self):
    """PATCH the virtual server so it references the security logging profile."""
    profiles = []
    profiles.append(self.logging_profile_name)
    payload = AttrDict()
    payload.securityLogProfiles = profiles
    resp = self.rstifc.patch(self.virtual.selfLink, payload)
    return resp
def __init__(self, *args, **kwargs):
    """Seed the default payload skeleton for an access-group task."""
    super(AccessGroupTask, self).__init__(*args, **kwargs)
    # Default access properties; the group name is intentionally left None.
    props = AttrDict()
    props.setdefault("cm:access:import-shared", True)
    props.setdefault("cm:access:access-group-name")
    self.setdefault("properties", props)
    # Placeholder device reference to be filled in by the caller.
    device_link = AttrDict()
    device_link.setdefault("link", None)
    self.setdefault("deviceReference", device_link)
    self.setdefault("snapshotWorkingConfig", False)
    self.setdefault("clusterName", None)
def emit(self, record):
    """Buffer a formatted log record and persist the log tail to task meta.

    :param record: a ``logging.LogRecord``.
    """
    item = AttrDict()
    item.name = record.name
    item.levelname = record.levelname
    # record.getMessage() applies %-style args; record.message only exists
    # after a Formatter has run, so using it here can raise AttributeError.
    # (Matches the sibling emit() implementation in this file.)
    item.message = record.getMessage()[:MAX_LOG_LINE]
    item.timestamp = time.strftime('%b %d %H:%M:%S',
                                   time.localtime(record.created))
    self.buffer.append(item)
    self.tip += 1
    self.task.save_meta(logs=list(self.buffer), tip=self.tip)
def add_result(self, test):
    """Record one test outcome in TestRail under its (possibly new) case."""
    status_id = STATUS[test.result]
    comment = test.traceback
    case_id = self.get_or_create_case(test)
    if not case_id:
        return
    self.case_ids.append(case_id)
    # First widen the run to include this case...
    run_params = AttrDict()
    run_params.case_ids = self.case_ids
    run_params.config_ids = []
    self.api.update_run(self.run_id, run_params)
    # ...then post the result itself.
    result_params = AttrDict()
    result_params.status_id = status_id
    if comment:
        result_params.comment = comment
    return self.api.add_result_for_case(self.run_id, case_id, result_params)
def setup(self):
    """POST a new LTM virtual server carrying the configured profiles."""
    payload = AttrDict()
    payload.name = self.name
    payload.destination = self.destination
    payload.partition = self.partition
    entries = []
    for profile_name in self.profiles:
        entry = AttrDict()
        entry.name = profile_name
        entries.append(entry)
    payload.profiles = entries
    return self.rstifc.post(URL_TM_LTM_VIRTUAL, payload)
def setup(self):
    """Snapshot the monitored file's stat() on every device before the test."""
    self._pre_stats = AttrDict()
    for device in self.devices:
        with SSHInterface(device=device) as ifc:
            self._pre_stats[device] = ifc.api.stat(self.filename)
    return self
def setup(self):
    """Return id/state/instance-status/system-status for one EC2 instance."""
    # NOTE(review): access_key is passed self.key_id here — looks like it
    # should be a distinct access-key attribute; confirm against callers.
    info = AttrDict()
    instance = get_instance_by_id(self.iid, ifc=self.ifc, device=self.device,
                                  region=self.region, key_id=self.key_id,
                                  access_key=self.key_id)
    info['id'] = instance.id
    info['state'] = None
    info['istate'] = None
    info['sstate'] = None
    if instance.state != "running":
        # Not running: only the coarse state is meaningful.
        info['state'] = instance.state
    else:
        statuses = self.api.get_all_instance_status([self.iid])
        if statuses:
            istatus = statuses[0]
            LOG.debug("Istatus was: {0}".format(istatus))
            info['state'] = istatus.state_name
            info['istate'] = str(istatus.instance_status)
            info['sstate'] = str(istatus.system_status)
    return info
def tester_icontrol_post():
    """Handles icontrol tester requests."""
    request_data = AttrDict(bottle.request.json)
    opts = AttrDict()
    opts.username = ADMIN_USERNAME
    opts.password = request_data.password
    opts.json = True
    task = ictester.delay(address=request_data.address.strip(),
                          method=request_data.method,  # @UndefinedVariable
                          options=opts,
                          params=request_data.arguments,
                          user_input=request_data)
    link = app.router.build('status', task_id=task.id)
    return dict(status=task.status, id=task.id, link=link)
def setup(self):
    """Create an ASM logging profile that points at the logging BIG-IQ.

    Builds a user-defined log format enumerating the ASM event fields,
    enables local + remote storage, adds request filters, and registers the
    first logging BIG-IQ (port 8514) as the remote server.
    """
    payload = AttrDict()
    payload.name = self.logging_profile_name
    payload.application = AttrDict()
    payload.application[self.logging_profile_name] = AttrDict()
    payload_log_profile = payload.application[self.logging_profile_name]
    # User-defined format: key=\"%field%\" pairs consumed by the log daemon.
    payload_log_profile.format = AttrDict()
    payload_log_profile.format.type = "user-defined"
    payload_log_profile.format.userString = (
        """unit_hostname=\\\"%unit_hostname%\\\",management_ip_address="""
        + """\\\"%management_ip_address%\\\",http_class_name=\\\"%http_class_name%\\\",web_application_name=\\\"%http_class_name%"""
        + """\\\",policy_name=\\\"%policy_name%\\\",policy_apply_date=\\\"%policy_apply_date%\\\",violations=\\\"%violations%\\\","""
        + """support_id=\\\"%support_id%\\\",request_status=\\\"%request_status%\\\",response_code=\\\"%response_code%\\\",ip_client="""
        + """\\\"%ip_client%\\\",route_domain=\\\"%route_domain%\\\",method=\\\"%method%\\\",protocol=\\\"%protocol%\\\",query_string="""
        + """\\\"%query_string%\\\",x_forwarded_for_header_value=\\\"%x_forwarded_for_header_value%\\\",sig_ids=\\\"%sig_ids%\\\",sig_names="""
        + """\\\"%sig_names%\\\",date_time=\\\"%date_time%\\\",severity=\\\"%severity%\\\",attack_type=\\\"%attack_type%\\\",geo_location="""
        + """\\\"%geo_location%\\\",ip_address_intelligence=\\\"%ip_address_intelligence%\\\",username=\\\"%username%\\\",session_id="""
        + """\\\"%session_id%\\\",src_port=\\\"%src_port%\\\",dest_port=\\\"%dest_port%\\\",dest_ip=\\\"%dest_ip%\\\",sub_violations="""
        + """\\\"%sub_violations%\\\",virus_name=\\\"%virus_name%\\\",uri=\\\"%uri%\\\",request=\\\"%request%\\\",violation_details="""
        + """\\\"%violation_details%\\\",header=\\\"%headers%\\\",response=\\\"%response%\\\"""")
    payload_log_profile.guaranteeLogging = "enabled"
    payload_log_profile.guaranteeResponseLogging = "enabled"
    payload_log_profile.localStorage = "enabled"
    payload_log_profile.logicOperation = "and"
    payload_log_profile.maximumEntryLength = "64k"
    payload_log_profile.maximumHeaderSize = "any"
    payload_log_profile.maximumQuerySize = "any"
    payload_log_profile.maximumRequestSize = "any"
    payload_log_profile.protocol = "tcp"
    payload_log_profile.remoteStorage = "remote"
    payload_log_profile.reportAnomalies = "disabled"
    payload_log_profile.responseLogging = "all"
    # Log all request types and search everything.
    payload_log_profile.filter = []
    filter_hash = AttrDict()
    filter_hash.name = "request-type"
    filter_hash.values = ["all"]
    payload_log_profile.filter.append(filter_hash)
    filter_hash = AttrDict()
    filter_hash.name = "search-all"
    # NOTE(review): no 'values' on the search-all filter — presumably the
    # API accepts a bare filter name here; confirm against the TMOS docs.
    payload_log_profile.filter.append(filter_hash)
    # Remote log destination: first logging BIG-IQ, syslog port 8514.
    payload_log_profile.servers = []
    server = AttrDict()
    server.name = "%s:8514" % self.logging_bigiqs[0].get_address()
    payload_log_profile.servers.append(server)
    resp = self.rstifc.post(URL_TM_LOG_PROFILE, payload)
    return resp
def add_post():
    """Queue an async addition of two numbers; return task tracking info."""
    LOG.info("ADD: called")
    data = AttrDict(bottle.request.json)
    result = add.delay(data.number_1 or 0,
                       data.number_2 or 0,
                       user_input=data)  # @UndefinedVariable
    link = common_app.router.build('status', task_id=result.id)
    response = dict(status=result.status, id=result.id, link=link)
    LOG.info("ADD: Result: " + str(response))
    return response
def validate():
    """Run the type-specific validator; reply 406 with a message when invalid."""
    data = AttrDict(bottle.request.json)
    bottle.response.add_header('Cache-Control', 'no-cache')
    is_valid = validators[data.type](**data)
    if is_valid is True:
        # Valid input: nothing to report (implicit None, HTTP 200).
        return
    bottle.response.status = 406
    return dict(message=is_valid)
def __init__(self, options, *filenames):
    """Capture importer options (overridden by TESTRAIL defaults) and files."""
    opts = AttrDict(options)
    opts.update(TESTRAIL)
    self.options = opts
    self.filenames = filenames
    self.case_ids = []
    super(TestrailFileImporter, self).__init__()
def add_test_run(self, run, meta):
    """Create a TestRail run under our suite. ``meta`` is currently unused."""
    run_params = AttrDict()
    run_params.suite_id = self.suite_id
    run_params.name = run[3]          # run tuple: index 3 = name
    run_params.description = run[2]   # run tuple: index 2 = description
    run_params.include_all = False
    run_params.case_ids = []
    return self.api.add_run(self.project_id, run_params)
def setup(self):
    """PATCH the ASM policy so it references our virtual server."""
    payload = AttrDict()
    payload.virtualServers = [self.virtual.fullPath]
    resp = self.rstifc.patch(self.policy.selfLink, payload)
    # TODO: put in test — the LTM policy must be deleted/deactivated first,
    # and the ASM policy can only be deleted after the LTM policy is gone
    # (garbage-collection bookkeeping lives in the test).
    return resp
def simple_decrypter_post():
    """Decrypt a base64-encoded, DES-obfuscated input posted as JSON.

    Returns ``dict(input=...)`` with either the decrypted text or, on any
    failure, the error message.
    """
    from Crypto.Cipher import DES
    from base64 import b64decode
    unpad = lambda s: s[0:-ord(s[-1])]  # strip PKCS#5-style padding @IgnorePep8
    data = AttrDict(bottle.request.json)
    try:
        i = data.input.decode('unicode_escape')
        ret = unpad(DES.new('GhVJDUfx').decrypt(b64decode(i)))
    except Exception as e:  # 'as' form works on Python 2.6+ and 3
        return dict(input=str(e))
    # FIX: the decrypted value was computed but never returned (the function
    # implicitly returned None on success).
    return dict(input=ret)
def __init__(self, *args, **kwargs):
    """Seed defaults for an APM AAA Kerberos resource."""
    super(ApmAaaKerberos, self).__init__(*args, **kwargs)
    self.setdefault('name', "kerberos_%s" % generate_hash())
    self.setdefault('generation', 693)
    self.setdefault('authRealm', "*****@*****.**")
    self.setdefault('keytabFileObj', None)
    self.setdefault('keytabFileObjReference', AttrDict())
    self.setdefault('locationSpecific', 'true')
    self.setdefault('serviceName', 'HTTP/test.lab')
def __init__(self, options, address=None, params=None):
    """Set up TestRail importer options.

    ``address`` and ``params`` are accepted for interface compatibility but
    not used here.
    """
    opts = AttrDict(options)
    opts.port = 3307
    opts.db = 'test_runs'
    opts.update(TESTRAIL)
    self.options = opts
    self.case_ids = []
    super(TestrailImporter, self).__init__()
def callback():
    """Collect the bytes appended to the file on each device since setup()."""
    ret = AttrDict()
    self._post_stats = AttrDict()
    for device in self.devices:
        with SSHInterface(device=device) as ifc:
            ssh = ifc.api
            self._post_stats[device] = ssh.stat(self.filename)
            # Bytes written since the pre-test snapshot.
            delta = (self._post_stats[device].st_size
                     - self._pre_stats[device].st_size)
            LOG.debug('delta: %d', delta)
            resp = ssh.run('tail --bytes={0} {1}'.format(delta, self.filename))
            ret[device] = resp.stdout
    return self.testcb(ret, self._post_stats)
def retrieve_runtime(self, restifc):
    """Fetch JVM runtime stats; on REST failure fill every field with a marker.

    :param restifc: REST interface whose ``api.get`` returns a mapping of
        runtime fields.
    :return: AttrDict of ``self.jvm_stats`` fields.
    """
    return_dict = AttrDict()
    try:
        restcall = restifc.api.get(DiagnosticsRuntime.URI)
        for field in self.jvm_stats:
            return_dict[field] = restcall[field]
    # FIX: narrowed from a bare 'except:' which also swallowed
    # SystemExit/KeyboardInterrupt; error-marker fallback is unchanged.
    except Exception:
        for field in self.jvm_stats:
            return_dict[field] = "REST API Error!"
    return return_dict
def wait_status(self, rest, resource, loop=None, check_no_pending_conflicts=False,
                *args, **kwargs):
    """Poll a task resource until it leaves its transitional states.

    :param rest: REST interface used for GET/PATCH.
    :param resource: object with a ``selfLink`` to poll.
    :param loop: optional callable returning current status (defaults to a
        GET of ``resource.selfLink``).
    :param check_no_pending_conflicts: when True, return whether NO pending
        conflicts were seen instead of the final status object.
    :raises TaskError: when the task finishes in an unexpected step/status.
    """
    def get_status():
        return rest.get(resource.selfLink)
    if loop is None:
        loop = get_status
    # Wait until the task leaves NEW/STARTED/PENDING_UPDATE_TASK.
    ret = wait(loop, condition=lambda x: x.status not in ('NEW', 'STARTED', 'PENDING_UPDATE_TASK'),
               progress_cb=lambda x: 'Status: {0}'.format(x.status),
               *args, **kwargs)
    msg = json.dumps(ret, sort_keys=True, indent=4, ensure_ascii=False)
    if "currentStep" in ret.keys():
        pending_conflicts = 0
        if check_no_pending_conflicts and ret.currentStep in ('PENDING_CONFLICTS', 'PENDING_CHILD_CONFLICTS'):
            pending_conflicts = 1
        # Resolve 'PENDING_CONFLICTS' when the resolution to a conflict is 'NONE'.
        if ret.status == 'FINISHED' and ret.currentStep in ('PENDING_CONFLICTS', 'PENDING_CHILD_CONFLICTS'):
            for conflict in ret.conflicts:
                if conflict.resolution == 'NONE':
                    # Force the BIG-IQ side to win, restart the task, and
                    # recurse until the conflict state clears.
                    conflict.resolution = "USE_BIGIQ"
                    payload = AttrDict()
                    payload.conflicts = [conflict]
                    payload.status = "STARTED"
                    resp = rest.patch(ret.selfLink, payload)
                    self.wait_status(rest, resp, interval=2, timeout=90,
                                     timeout_message="Patch PENDING_CONFLICTS timed out after 60s.")
                else:
                    raise TaskError("DMA has pending conflicts to resolve. Task failed:\n%s" % msg)
        # Used in asm deploy
        elif ret.status == 'FINISHED' and ret.currentStep in ('DISTRIBUTE_CONFIG',):
            pass
        elif ret.status != 'FINISHED' or ret.currentStep != 'DONE':
            raise TaskError("Either '%s' != 'FINISHED' or '%s' != 'DONE'. Task failed:\n%s"
                            % (ret.status, ret.currentStep, msg))
    else:
        # Tasks without a currentStep only expose a terminal status.
        if ret.status not in ('COMPLETED', 'FINISHED'):
            raise TaskError("'%s' not in ('COMPLETED', 'FINISHED'). Task failed:\n%s" % (ret.status, msg))
    # NOTE(review): pending_conflicts is unbound here when the response has
    # no 'currentStep' and check_no_pending_conflicts=True — confirm callers
    # never combine those.
    if check_no_pending_conflicts:
        return pending_conflicts == 0
    return ret
def config_post():
    """Handles confgen requests."""
    data = AttrDict(bottle.request.json)
    options = AttrDict(data)
    options.provision = ','.join(data.provision)
    options.irack_address = CONFIG.irack.address
    options.irack_username = CONFIG.irack.username
    options.irack_apikey = CONFIG.irack.apikey
    options.no_sshkey = True
    if options.clean:
        # A clean run drops network/provision/timezone customization.
        for key in ('selfip_internal', 'selfip_external', 'provision',
                    'timezone'):
            options[key] = None
    result = confgen.delay(address=data.address.strip(),  # @UndefinedVariable
                           options=options,
                           user_input=data)
    link = app.router.build('status', task_id=result.id)
    return dict(status=result.status, id=result.id, link=link)
def add_test_run(self, run, url):
    """Create a TestRail run named after ``run``; ``url`` becomes its description."""
    run_params = AttrDict()
    run_params.suite_id = self.suite_id
    run_params.name = run.get('name')
    run_params.description = url
    run_params.include_all = False
    run_params.case_ids = []
    return self.api.add_run(self.project_id, run_params)
def simple_decrypter_post():
    """Decrypt a base64-encoded, DES-obfuscated input posted as JSON.

    Returns ``dict(input=...)`` with either the decrypted text or, on any
    failure, the error message (which is also logged).
    """
    LOG.info("DEOBFUSCATOR: Called")
    from Crypto.Cipher import DES
    from base64 import b64decode
    unpad = lambda s: s[0:-ord(s[-1])]  # strip PKCS#5-style padding @IgnorePep8
    data = AttrDict(bottle.request.json)
    try:
        i = data.input.decode('unicode_escape')
        ret = unpad(DES.new('GhVJDUfx').decrypt(b64decode(i)))
    except Exception as e:  # 'as' form works on Python 2.6+ and 3
        result = dict(input=str(e))
        LOG.info("DEOBFUSCATOR: Exception when decoding: " + str(result))
        return result
    # FIX: the decrypted value was computed but never returned (the function
    # implicitly returned None on success).
    return dict(input=ret)
class DebugTask(celery.Task):
    """Celery base task that mirrors per-task metadata (log tail) into the
    result backend and installs a memory log handler around each run.
    """
    abstract = True
    # NOTE: class-level, so meta is shared by all instances of this task class.
    _meta = AttrDict()

    def AsyncResult(self, task_id):
        """Get AsyncResult instance for this kind of task.

        :param task_id: Task id to get result for.
        """
        return MyAsyncResult(task_id, backend=self.backend, task_name=self.name)

    def clear_meta(self):
        """Reset the shared meta dict before a new run."""
        self._meta.clear()

    def save_meta(self, **kwargs):
        """Merge kwargs into meta and push it to the backend under this task's id."""
        self._meta.update(**kwargs)
        self.backend.set(MEMCACHED_META_PREFIX + self._id, self._meta)

    def on_failure(self, exc, task_id, args, kwargs, einfo):
        # Eager mode bypasses the worker, so record the failure ourselves.
        if self.request.is_eager:
            self.backend.mark_as_failure(task_id, exc, einfo.traceback)

    def on_success(self, retval, task_id, args, kwargs):
        # Eager mode bypasses the worker, so record the success ourselves.
        if self.request.is_eager:
            self.backend.mark_as_done(task_id, retval)

    def __call__(self, *args, **kwargs):
        """Run the task with a temporary in-memory log handler attached."""
        LOG.info("TASKS-Running Shiraz DebugTask _call")
        # XXX: See https://github.com/celery/celery/issues/1709
        # This hack is required to allow ansible to run tasks otherwise it silently fails.
        current_process()._config['daemon'] = False
        self._id = self.request.id
        if not self.request.is_eager:
            self.update_state(state=celery.states.STARTED)
        # LOG.setLevel(level)
        if self.request.is_eager:
            logging.basicConfig(level=logging.INFO)
        self.clear_meta()
        handler = MyMemoryHandler(task=self, level=logging.INFO, capacity=2000)
        root_logger = logging.getLogger()
        root_logger.addHandler(handler)
        try:
            return super(DebugTask, self).__call__(*args, **kwargs)
        finally:
            # Always detach the handler, even if the task raised.
            root_logger.removeHandler(handler)
def setup(self):
    """Import an ASM policy from a local file and return the imported policy.

    The policy can only be deleted after it is deactivated and disassociated
    from the LTM virtual (handled in assign_asm_policy_to_virtual).
    """
    # FIX: read the policy file via a context manager so the handle is
    # closed promptly (original leaked the open file object).
    with open(self.file_path) as f:
        policy_body = f.read()
    # Encode the policy with base64 as required by the import endpoint.
    policy_body = base64.b64encode(policy_body)
    payload = AttrDict()
    payload.file = policy_body
    payload.name = self.name
    payload.isBase64 = True
    resp = self.rstifc.post(URL_TM_ASM_IMPORT_POLICY, payload)
    AsmTask().wait_status(self.rstifc, resp, interval=2, timeout=90,
                          timeout_message="Import policy timed out after {0}, "
                                          "last status is {1.status}, "
                                          "result is \"{1.result}\"")
    resp = self.rstifc.get(resp.selfLink)
    policy_selflink = resp.result.policyReference.link
    return self.rstifc.get(policy_selflink)
def setup(self): s = self.api # To Do: Validate xpath el rlist = [] using = None container = [] try: if self.xpath: using = "xpath" container = s.find_elements_by_xpath(self.xpath) elif self.css: using = "css" container = s.find_elements_by_css_selector(self.css) elif self.did: using = "id" container = s.find_elements_by_id(self.did) for el in container: dic_per_tag = AttrDict() if self.attr != []: for a_id in self.attr: dic_per_tag[a_id] = el.get_attribute(a_id) if self.prop != []: for p_id in self.prop: if p_id == 'text': if self.use_js: text = s.execute_script( "return arguments[0].innerHTML", el) else: text = el.text dic_per_tag['text'] = text elif p_id == 'id': dic_per_tag['id'] = el.id elif p_id == 'tag_name': dic_per_tag['tag'] = el.tag_name elif p_id == 'is_displayed': dic_per_tag['is_displayed'] = el.is_displayed() elif p_id == 'is_enabled': dic_per_tag['is_enabled'] = el.is_enabled() else: dic_per_tag[p_id] = el.get_attribute(p_id) rlist.append(dic_per_tag) LOG.debug('/webel_grab/.list returned: {0} dict(s) in list for ' '{2}: [{1}]. LIST=[{3}]'.format( len(rlist), self.xpath or self.css or self.did, using, rlist)) return rlist except NoSuchElementException: return rlist except StaleElementReferenceException: return rlist
def validate():
    """Validate posted data; log and return an error payload when invalid."""
    LOG.info("VALIDATE: called")
    data = AttrDict(bottle.request.json)
    LOG.info("VALIDATE: POST Request: " + str(data))
    bottle.response.add_header('Cache-Control', 'no-cache')
    is_valid = validators[data.type](**data)
    if is_valid is not True:
        bottle.response.status = 406
        outcome = dict(message=is_valid)
        LOG.info("VALIDATE: Result: " + str(outcome))
        return outcome
    # Nothing to return when the input is valid.
    LOG.info("VALIDATE: Successful, no error.")
class DebugTask(celery.Task):
    """Celery base task that mirrors per-task metadata (log tail) into the
    result backend and installs a memory log handler around each run.
    """
    abstract = True
    # NOTE: class-level, so meta is shared by all instances of this task class.
    _meta = AttrDict()

    def AsyncResult(self, task_id):
        """Get AsyncResult instance for this kind of task.

        :param task_id: Task id to get result for.
        """
        return MyAsyncResult(task_id, backend=self.backend, task_name=self.name)

    def clear_meta(self):
        """Reset the shared meta dict before a new run."""
        self._meta.clear()

    def save_meta(self, **kwargs):
        """Merge kwargs into meta and push it to the backend under this task's id."""
        self._meta.update(**kwargs)
        self.backend.set(MEMCACHED_META_PREFIX + self._id, self._meta)

    def on_failure(self, exc, task_id, args, kwargs, einfo):
        # Eager mode bypasses the worker, so record the failure ourselves.
        if self.request.is_eager:
            self.backend.mark_as_failure(task_id, exc, einfo.traceback)

    def on_success(self, retval, task_id, args, kwargs):
        # Eager mode bypasses the worker, so record the success ourselves.
        if self.request.is_eager:
            self.backend.mark_as_done(task_id, retval)

    def __call__(self, *args, **kwargs):
        """Run the task with a temporary in-memory log handler attached."""
        self._id = self.request.id
        if not self.request.is_eager:
            self.update_state(state=celery.states.STARTED)
        # LOG.setLevel(level)
        if self.request.is_eager:
            logging.basicConfig(level=logging.INFO)
        self.clear_meta()
        handler = MyMemoryHandler(task=self, level=logging.INFO, capacity=2000)
        root_logger = logging.getLogger()
        root_logger.addHandler(handler)
        try:
            return super(DebugTask, self).__call__(*args, **kwargs)
        finally:
            # Always detach the handler, even if the task raised.
            root_logger.removeHandler(handler)
def setup(self):
    """Create an LTM virtual; on a destination clash, delete the conflicting
    virtual(s) and retry once."""
    payload = AttrDict()
    payload.name = self.name
    payload.destination = self.destination
    payload.partition = self.partition
    payload.profiles = []
    for profile_name in self.profiles:
        entry = AttrDict()
        entry.name = profile_name
        payload.profiles.append(entry)
    try:
        return self.api.post(URL_TM_LTM_VIRTUAL, payload)
    except EmapiResourceError as e:
        if 'illegally shares destination address' not in e.msg:
            raise CommandError("Unexpected error: %s" % e.msg)
        # Remove whichever existing virtual(s) occupy our destination.
        existing = self.api.get(URL_TM_LTM_VIRTUAL)
        for item in existing["items"]:
            if (item.destination.endswith(self.destination)
                    and item.partition == self.partition):
                self.api.delete(item.selfLink)
        return self.api.post(URL_TM_LTM_VIRTUAL, payload)
def setup(self):
    """Apply the ASM policy, then nudge its description (BZ488306 workaround)."""
    apply_payload = AttrDict()
    apply_payload.policyReference = AttrDict()
    apply_payload.policyReference.link = self.policy.selfLink
    ret = self.rstifc.post(URL_TM_ASM_APPLY_POLICY, apply_payload)
    AsmTask().wait_status(self.rstifc, ret, interval=2, timeout=90,
                          timeout_message="Apply policy timed out after {0}, "
                                          "last status is {1.status}")
    # TODO: put in test — track the apply response for garbage collection.
    # PATCH the policy so the log daemon picks up the right mapping
    # (workaround for BZ488306).
    patch_payload = AttrDict()
    patch_payload.description = "This is a description"
    self.rstifc.patch(self.policy.selfLink, patch_payload)
    return ret
def get_or_create_case(self, test):
    """Gets or creates a test case. Returns the case_id."""
    address = test.id
    if address in self.cases:
        return self.cases[address]
    section_id = self.get_or_create_section(test)
    params = AttrDict()
    params.title = address
    params.template_id = TEMPLATE.text
    params.type_id = TC_TYPE.automated
    params.priority_id = PRIORITY.medium
    params.custom_product_name = PRODUCT.bigip
    params.custom_description = test.meta.description
    case = self.api.add_case(section_id, params)
    # Cache under the title (== address) so later lookups hit the fast path.
    self.cases[case.title] = case.id
    return case.id
def emit(self, record):
    """Append the record to the ring buffer and persist the tail to task meta."""
    entry = AttrDict()
    entry.name = record.name
    entry.levelname = record.levelname
    # getMessage() applies %-style arguments; cap the line length.
    entry.message = record.getMessage()[:MAX_LOG_LINE]
    entry.timestamp = time.strftime('%b %d %H:%M:%S',
                                    time.localtime(record.created))
    self.buffer.append(entry)
    self.tip += 1
    self.task.save_meta(logs=list(self.buffer), tip=self.tip)
def __init__(self, *args, **kwargs):
    """Seed default properties for an APM AAA LDAP server object."""
    super(ApmAaaLdap, self).__init__(*args, **kwargs)
    for key, value in (('name', "ldap_5"),
                       ('address', "1.1.1.1"),
                       ('cleanupCache', "none"),
                       ('adminDn', "admin"),
                       ('adminEncryptedPassword', "admin"),
                       ('groupCacheTtl', 30),
                       ('isLdaps', False),
                       ('locationSpecific', True),
                       ('port', 389),
                       ('timeout', 15),
                       ('usePool', "disabled")):
        self.setdefault(key, value)
    self.setdefault("schemaAttr", AttrDict())
    # Default LDAP schema attribute mapping (AD-style names).
    schema = self.schemaAttr
    schema.groupMember = "member"
    schema.groupMemberValue = "dn"
    schema.groupMemberof = "memberOf"
    schema.groupObjectClass = "group"
    schema.userMemberof = "memberOf"
    schema.userObjectClass = "user"
def tester_icontrol_post():
    """Handles icontrol tester requests."""
    data = AttrDict(bottle.request.json)
    options = AttrDict()
    options.username = ADMIN_USERNAME
    options.password = data.password
    options.json = True
    # Queue the tester task and hand back a status link for polling.
    result = ictester.delay(address=data.address.strip(),  # @UndefinedVariable
                            method=data.method,
                            options=options,
                            params=data.arguments,
                            user_input=data)
    status_link = app.router.build('status', task_id=result.id)
    return dict(status=result.status, id=result.id, link=status_link)
def get_or_create_section(self, test):
    """Gets or creates a section. Returns the section_id.

    Walks the dotted context of ``test.id`` (``pkg.mod:case``), creating a
    nested section per component as needed.
    """
    context, _ = test.id.split(':')
    last_section = 0
    for name in context.split('.'):
        if name in self.sections:
            last_section = section_id = self.sections[name]
            continue
        params = AttrDict()
        params.name = name
        params.description = 'autogenerated'
        params.suite_id = self.suite_id
        if last_section:
            params.parent_id = last_section
        section = self.api.add_section(self.project_id, params)
        last_section = section_id = section.id
        self.sections[section.name] = section_id
    return section_id
def bvt_bigiq_post():
    """Handles requests from BIGIP teams for BIGIQ BVT.

    All the logic needed to translate the user input into what makes sense to
    us happens right here.
    """
    BVTINFO_PROJECT_PATTERN = '(\D+)?(\d+\.\d+\.\d+)-?(eng-?\w*|hf\d+|hf-\w+)?'
    CONFIG_FILE = 'config/shared/web_bvt_request_bigiq.yaml'
    # For people who don't like to set the application/json header.
    data = AttrDict(json.load(bottle.request.body))
    data._referer = bottle.request.url
    # NOTE(review): yaml.load on a file we open without closing — consider
    # yaml.safe_load + a context manager.
    our_config = AttrDict(yaml.load(open(get_harness('bigiq-tmos')).read()))
    # Prepare placeholders in our config
    our_config.update({'stages': {'main': {'setup': {'install-bigips': {'parameters': {}}}}}})
    our_config.update({'plugins': {'email': {'to': [], 'variables': {}}}})
    our_config.update({'plugins': {'bvtinfo': {'bigip': {}}}})
    plugins = our_config.plugins
    # Set BVTInfo data
    plugins.bvtinfo.project = data['project']
    plugins.bvtinfo.build = data['build']
    plugins.bvtinfo.bigip.name = 'bigiq-bvt'
    # Append submitter's email to recipient list
    if data.get('submitted_by'):
        plugins.email.to.append(data['submitted_by'])
    plugins.email.to.extend(CONFIG.web.recipients)
    # Set version and build in the install stage
    params = our_config.stages.main.setup['install-bigips'].parameters
    match = re.match(BVTINFO_PROJECT_PATTERN, data['project'])
    if match:
        params['version'] = match.group(2)
        if match.group(3):
            params['hotfix'] = match.group(3)
    else:
        # Unrecognized project string: treat it verbatim as the version.
        params['version'] = data['project']
    params['build'] = data['build']
    params['custom iso'] = data.get('custom_iso')
    params['custom hf iso'] = data.get('custom_hf_iso')
    params.product = 'bigip'
    # NOTE(review): params.hotfix is only set on a regex match — presumably
    # the AttrDict returns a default otherwise; confirm.
    if not min_version_validator(params.build, params.version, params.hotfix,
                                 params.product, min_ver=CONFIG.supported):
        # raise ValueError('Requested version not supported')
        bottle.response.status = 406
        return dict(message='Requested version not supported')
    args = []
    args[:] = NOSETESTS_ARGS
    args.append('--tc-file={VENV}/%s' % CONFIG_FILE)
    args.append('--tc=stages.enabled:1')
    # For chuckanut++
    args.append('--eval-attr=rank >= 5 and rank <= 10')
    args.append('--with-email')
    args.append('--with-bvtinfo')
    args.append('--with-irack')
    args.append('{VENV}/%s' % CONFIG.paths.tc)
    # Record the exact invocation in the email template variables.
    v = plugins.email.variables
    v.args = args
    v.project = data['project']
    v.version = params.version
    v.build = params.build
    # return dict(config=our_config, args=args)
    result = nosetests.delay(our_config, args, data)  # @UndefinedVariable
    link = app.router.build('status', task_id=result.id)
    return dict(status=result.status, id=result.id, link=link)
def __init__(self, *args, **kwargs):
    """Seed a single default device entry for the device-manager task."""
    super(DeviceManagerTask, self).__init__(*args, **kwargs)
    self.setdefault("devices", AttrDict())
    entry = AttrDict()
    entry.setdefault('deviceIp')  # IP left None for the caller to fill in
    for key, value in (('deviceUsername', 'admin'),
                       ('devicePassword', 'admin'),
                       ('automaticallyUpdateFramework', True),
                       ('rootUser', 'root'),
                       ('rootPassword', 'default')):
        entry.setdefault(key, value)
    self.devices = [entry]
def bvt_basic_post():
    """Handles requests from BIGIP teams.

    All the logic needed to translate the user input into what makes sense to
    us happens right here.
    """
    BVTINFO_PROJECT_PATTERN = '(\D+)?(\d+\.\d+\.\d+)-?(eng-?\w*|hf\d+|hf-\w+)?'
    TESTS_DEBUG = 'tests/solar/bvt/integration/filesystem/'
    CONFIG_FILE = 'config/shared/web_bvt_request.yaml'
    # For people who don't like to set the application/json header.
    data = AttrDict(json.load(bottle.request.body))
    data._referer = bottle.request.url
    # data = bottle.request.json
    # BUG: The iRack reservation-based picker is flawed. It'll always select
    # the nearest available harness, stacking all workers on just one.
    # with IrackInterface(address=CONFIG.irack.address,
    #                     timeout=30,
    #                     username=CONFIG.irack.username,
    #                     password=CONFIG.irack.apikey,
    #                     ssl=False) as irack:
    #     config_dir = os.path.dirname(CONFIG_WEB_FILE)
    #     harness_files = [os.path.join(config_dir, x) for x in CONFIG.web.harnesses]
    #     our_config = RCMD.irack.pick_best_harness(harness_files, ifc=irack)
    # NOTE(review): yaml.load on a file we open without closing — consider
    # yaml.safe_load + a context manager.
    our_config = AttrDict(yaml.load(open(get_harness('em')).read()))
    # Prepare placeholders in our config
    our_config.update({'stages': {'main': {'setup': {'install-bigips': {'parameters': {}}}}}})
    our_config.update({'plugins': {'email': {'to': [], 'variables': {}}}})
    our_config.update({'plugins': {'bvtinfo': {}}})
    plugins = our_config.plugins
    # Set BVTInfo data
    plugins.bvtinfo.project = data['project']
    plugins.bvtinfo.build = data['build']
    # Append submitter's email to recipient list
    if data.get('submitted_by'):
        plugins.email.to.append(data['submitted_by'])
    plugins.email.to.extend(CONFIG.web.recipients)
    # Set version and build in the install stage
    params = our_config.stages.main.setup['install-bigips'].parameters
    match = re.match(BVTINFO_PROJECT_PATTERN, data['project'])
    if match:
        params['version'] = match.group(2)
        if match.group(3):
            params['hotfix'] = match.group(3)
    else:
        # Unrecognized project string: treat it verbatim as the version.
        params['version'] = data['project']
    params['build'] = data['build']
    params['custom iso'] = data.get('custom_iso')
    params['custom hf iso'] = data.get('custom_hf_iso')
    params.product = 'bigip'
    if not min_version_validator(params.build, params.version, params.hotfix,
                                 params.product, min_ver=CONFIG.supported):
        # raise ValueError('Requested version not supported')
        bottle.response.status = 406
        return dict(message='Requested version not supported')
    args = []
    args[:] = NOSETESTS_ARGS
    args.append('--tc-file={VENV}/%s' % CONFIG_FILE)
    if data.get('debug'):
        # Debug run: only the explicitly requested tests (or the default
        # debug suite), no reporting plugins.
        args.append('--tc=stages.enabled:1')
        tests = [os.path.join('{VENV}', x)
                 for x in re.split('\s+', (data.get('tests') or TESTS_DEBUG).strip())]
        args.extend(tests)
    else:
        # Normal BVT run: rank-filtered suite with full reporting.
        args.append('--tc=stages.enabled:1')
        args.append('--eval-attr=rank > 0 and rank < 11')
        args.append('--with-email')
        args.append('--with-bvtinfo')
        args.append('--with-irack')
        args.append('{VENV}/%s' % CONFIG.paths.em)
    # Record the exact invocation in the email template variables.
    v = plugins.email.variables
    v.args = args
    v.project = data['project']
    v.version = params.version
    v.build = params.build
    result = nosetests.delay(our_config, args, data)  # @UndefinedVariable
    link = app.router.build('status', task_id=result.id)
    return dict(status=result.status, id=result.id, link=link)
def bvt_deviso_post(): """Handles requests from Dev team for user builds ISOs. """ # BVTINFO_PROJECT_PATTERN = '(\D+)?(\d+\.\d+\.\d+)-?(hf\d+)?' DEFAULT_SUITE = 'bvt' SUITES = {'bvt': '%s/' % CONFIG.paths.current, 'dev': '%s/cloud/external/devtest_wrapper.py' % CONFIG.paths.current, 'dev-cloud': '%s/cloud/external/restservicebus.py' % CONFIG.paths.current } CONFIG_FILE = 'config/shared/web_deviso_request.yaml' # For people who don't like to set the application/json header. data = AttrDict(json.load(bottle.request.body)) # data = bottle.request.json data._referer = bottle.request.url our_config = AttrDict(yaml.load(open(get_harness('bigiq')).read())) # Prepare placeholders in our config our_config.update({'stages': {'main': {'setup': {'install': {'parameters': {}}}}}}) our_config.update({'stages': {'main': {'setup': {'install-bigips': {'parameters': {}}}}}}) our_config.update({'plugins': {'email': {'to': [], 'variables': {}}}}) plugins = our_config.plugins # Append submitter's email to recipient list if data.get('email'): plugins.email.to.append(data['email']) plugins.email.to.extend(CONFIG.web.recipients) # Set version and build in the install stage v = None if data.get('iso'): params = our_config.stages.main.setup['install'].parameters params['custom iso'] = data['iso'] v = version_from_metadata(data['iso']) if data.get('hfiso'): params = our_config.stages.main.setup['install'].parameters params['custom hf iso'] = data['hfiso'] v = version_from_metadata(data['hfiso']) # Find the RTM ISO that goes with this HF image. if not data.get('iso'): params['custom iso'] = isofile(v.version, product=str(v.product)) args = [] args[:] = NOSETESTS_ARGS rank = Literal('rank') expr = (rank > Literal(0)) & (rank < Literal(11)) # Include all migrated tests, example: functional/standalone/security/migrated/... 
# Assumption is that all tests are rank=505 expr |= rank == Literal(505) # Only Greenflash tests have extended attributes if v is None or v >= 'bigiq 4.5': # build hamode argument if data.ha: hamode = Literal('hamode') expr2 = Or() for x in data.ha: if x != 'standalone': expr2 += [In(String(x.upper()), hamode)] if 'standalone' in data.ha: expr &= (~hamode | expr2) else: expr &= hamode & expr2 if data.ui: uimode = Literal('uimode') if data.ui == 'api': expr &= ~uimode elif data.ui == 'ui': expr &= uimode & (uimode > Literal(0)) else: raise ValueError('Unknown value {}'.format(data.ui)) if data.module: module = Literal('module') expr2 = Or() for x in data.module: expr2 += [In(String(x.upper()), module)] expr &= (module & expr2) args.append('--tc-file={VENV}/%s' % CONFIG_FILE) # Default is our BVT suite. if v: suite = os.path.join(CONFIG.suites.root, CONFIG.suites[v.version]) else: suite = SUITES[data.get('suite', DEFAULT_SUITE)] args.append('--tc=stages.enabled:1') # XXX: No quotes around the long argument value! args.append('--eval-attr={}'.format(str(expr))) args.append('--with-email') # args.append('--collect-only') args.append('--with-irack') args.append('{VENV}/%s' % suite) v = plugins.email.variables v.args = args v.iso = data.iso v.module = data.module result = nosetests.delay(our_config, args, data) # @UndefinedVariable link = app.router.build('status', task_id=result.id) return dict(status=result.status, id=result.id, link=link)
def bvt_emdeviso_post(): """Handles requests from BIGIP teams. All the logic needed to translate the user input into what makes sense to us happens right here. """ CONFIG_FILE = 'config/shared/web_emdeviso_request.yaml' # For people who don't like to set the application/json header. data = AttrDict(json.load(bottle.request.body)) data._referer = bottle.request.url our_config = AttrDict(yaml.load(open(get_harness('em')).read())) # Prepare placeholders in our config our_config.update({'stages': {'main': {'setup': {'install': {'parameters': {}}}}}}) our_config.update({'plugins': {'email': {'to': [], 'variables': {}}}}) plugins = our_config.plugins # Append submitter's email to recipient list if data.get('email'): plugins.email.to.append(data['email']) plugins.email.to.extend(CONFIG.web.recipients) # Set version and build in the install stage v = None if data.get('iso'): params = our_config.stages.main.setup['install'].parameters params['custom iso'] = data['iso'] v = version_from_metadata(data['iso']) if data.get('hfiso'): params = our_config.stages.main.setup['install'].parameters params['custom hf iso'] = data['hfiso'] v = version_from_metadata(data['hfiso']) # Find the RTM ISO that goes with this HF image. if not data.get('iso'): params['custom iso'] = isofile(v.version, product=str(v.product)) args = [] args[:] = NOSETESTS_ARGS args.append('--tc-file={VENV}/%s' % CONFIG_FILE) args.append('--tc=stages.enabled:1') args.append('--eval-attr=rank > 0 and rank < 11') args.append('--with-email') #args.append('--with-bvtinfo') args.append('--with-irack') args.append('{VENV}/%s' % CONFIG.paths.em) result = nosetests.delay(our_config, args, data) # @UndefinedVariable link = app.router.build('status', task_id=result.id) return dict(status=result.status, id=result.id, link=link)
def install_post(): """Handles install requests. """ data = AttrDict(bottle.request.json) options = AttrDict() options.admin_password = data.admin_password options.root_password = data.root_password options.product = data.product options.pversion = data.version options.pbuild = data.build or None options.phf = data.hotfix options.image = data.customiso if data.format == 'volumes': options.format_volumes = True elif data.format == 'partitions': options.format_partitions = True options.timeout = 900 if data.config == 'essential': options.essential_config = True result = install.delay(address=data.address.strip(), options=options, # @UndefinedVariable user_input=data) link = app.router.build('status', task_id=result.id) return dict(status=result.status, id=result.id, link=link)
def bvt_basic_post2():
    """Handles EM BVT requests.

    Translates an ATOM hook payload into a nosetests invocation for the EM
    suite, queues it as a celery task and returns a dict with the task's
    status, id and a status-page link.
    """
    HOOK_NAME = 'em-bvt'
    TESTS_DEBUG = 'tests/solar/bvt/integration/filesystem/'
    CONFIG_FILE = 'config/shared/web_bvt_request.yaml'

    data = AttrDict(json.load(bottle.request.body))
    data._referer = bottle.request.url

    # NOTE(review): yaml.load without an explicit Loader can execute arbitrary
    # Python tags; the harness file is trusted here, but consider yaml.safe_load.
    # Use a context manager so the file handle is closed (was leaked before).
    with open(get_harness('em')) as f:
        our_config = AttrDict(yaml.load(f.read()))

    # Prepare placeholders in our config
    our_config.update({'stages': {'main': {'setup': {'install-bigips': {'parameters': {}}}}}})
    our_config.update({'plugins': {'email': {'to': [], 'variables': {}}}})
    our_config.update({'plugins': {'atom': {'bigip': {}}, 'bvtinfo': {}}})
    plugins = our_config.plugins

    # Set ATOM data
    plugins.atom.bigip.request_id = data.content.id
    plugins.atom.bigip.name = HOOK_NAME

    # Append submitter's email to recipient list
    if data.content.requestor.email:
        plugins.email.to.append(data.content.requestor.email)
    plugins.email.to.extend(CONFIG.web.recipients)

    # Set version and build in the install stage
    params = our_config.stages.main.setup['install-bigips'].parameters
    branch = data.content.build.branch
    version = data.content.build.version
    params['version'] = branch.name
    params['build'] = version.primary
    # A non-zero level means this build is a hotfix on top of an RTM ISO.
    if int(version.level):
        params['hotfix'] = version.level
        params['custom hf iso'] = sanitize_atom_path(data.content.build.iso)
    else:
        params['custom iso'] = sanitize_atom_path(data.content.build.iso)
    params.product = 'bigip'

    # TODO: Remove this when bvtinfo goes offline
    # Set BVTInfo data
    plugins.bvtinfo.project = branch.name
    plugins.bvtinfo.build = version.old_build_number

    args = []
    args[:] = NOSETESTS_ARGS
    args.append('--tc-file={VENV}/%s' % CONFIG_FILE)
    # Both branches enable the stages, so hoist the common append.
    args.append('--tc=stages.enabled:1')
    if data.get('debug'):
        # Raw string for the regex (plain '\s+' is an invalid escape sequence).
        tests = [os.path.join('{VENV}', x)
                 for x in re.split(r'\s+', (data.get('tests') or TESTS_DEBUG).strip())]
        args.extend(tests)
    else:
        args.append('--eval-attr=rank > 0 and rank < 11')
        args.append('--with-email')
    args.append('--with-atom')
    args.append('--with-bvtinfo')
    if not min_version_validator(params.build, params.version, params.hotfix,
                                 params.product, iso=data.content.build.iso,
                                 min_ver=CONFIG.supported):
        args.append('--with-atom-no-go=The requested product/version is not supported by this test suite.')
    args.append('--with-irack')
    # args.append('--with-qkview=never')
    # args.append('{VENV}/tests/solar/bvt/')
    args.append('{VENV}/%s' % CONFIG.paths.em)

    # Expose the computed invocation in the email plugin's template variables.
    v = plugins.email.variables
    v.args = args
    v.project = data.content.build.branch.name
    v.version = data.content.build.version.version
    v.build = data.content.build.version.build

    result = nosetests.delay(our_config, args, data)  # @UndefinedVariable
    link = app.router.build('status', task_id=result.id)
    return dict(status=result.status, id=result.id, link=link)
def bvt_bigiq_post2():
    """Handles requests from BIGIP teams for BIGIQ BVT.

    All the logic needed to translate the user input into what makes sense to
    us happens right here.  Queues a BIG-IQ BVT nosetests run as a celery task
    and returns a dict with the task's status, id and a status-page link.
    """
    HOOK_NAME = 'big-iq-bvt'
    CONFIG_FILE = 'config/shared/web_bvt_request_bigiq.yaml'

    data = AttrDict(json.load(bottle.request.body))
    data._referer = bottle.request.url

    # NOTE(review): yaml.load without an explicit Loader can execute arbitrary
    # Python tags; the harness file is trusted here, but consider yaml.safe_load.
    # Use a context manager so the file handle is closed (was leaked before).
    with open(get_harness('bigiq-tmos')) as f:
        our_config = AttrDict(yaml.load(f.read()))

    # Prepare placeholders in our config
    our_config.update({'stages': {'main': {'setup': {'install-bigips': {'parameters': {}}}}}})
    our_config.update({'plugins': {'email': {'to': [], 'variables': {}}}})
    our_config.update({'plugins': {'atom': {'bigip': {}}, 'bvtinfo': {}}})
    plugins = our_config.plugins

    # Set ATOM data
    plugins.atom.bigip.request_id = data.content.id
    plugins.atom.bigip.name = HOOK_NAME

    # Append submitter's email to recipient list
    if data.content.requestor.email:
        plugins.email.to.append(data.content.requestor.email)
    plugins.email.to.extend(CONFIG.web.recipients)

    # Set version and build in the install stage
    params = our_config.stages.main.setup['install-bigips'].parameters
    branch = data.content.build.branch
    version = data.content.build.version
    params['version'] = branch.name
    params['build'] = version.primary
    # A non-zero level means this build is a hotfix on top of an RTM ISO.
    if int(version.level):
        params['hotfix'] = version.level
        params['custom hf iso'] = sanitize_atom_path(data.content.build.iso)
    else:
        params['custom iso'] = sanitize_atom_path(data.content.build.iso)
    params.product = 'bigip'

    # TODO: Remove this when bvtinfo goes offline
    # Set BVTInfo data
    plugins.bvtinfo.project = branch.name
    plugins.bvtinfo.build = version.old_build_number

    args = []
    args[:] = NOSETESTS_ARGS
    args.append('--tc-file={VENV}/%s' % CONFIG_FILE)
    args.append('--tc=stages.enabled:1')
    # For chuckanut++
    args.append('--eval-attr=rank >= 5 and rank <= 10')
    args.append('--with-email')
    args.append('--with-atom')
    args.append('--with-bvtinfo')
    if not min_version_validator(params.build, params.version, params.hotfix,
                                 params.product, iso=data.content.build.iso,
                                 min_ver=CONFIG.supported):
        args.append('--with-atom-no-go=The requested product/version is not supported by this test suite.')
    args.append('--with-irack')
    # args.append('--with-qkview=never')
    args.append('{VENV}/%s' % CONFIG.paths.tc)
    # args.append('{VENV}/tests/firestone/functional/standalone/adc/api/')

    # Expose the computed invocation in the email plugin's template variables.
    v = plugins.email.variables
    v.args = args
    v.project = data.content.build.branch.name
    v.version = data.content.build.version.version
    v.build = data.content.build.version.build

    # return dict(config=our_config, args=args)
    result = nosetests.delay(our_config, args, data)  # @UndefinedVariable
    link = app.router.build('status', task_id=result.id)
    return dict(status=result.status, id=result.id, link=link)