def create(self, r_schema):
    service_instances_object = self.read()
    service_instances_array = service_instances_object.serviceInstanceArray
    required_service_instance = ''
    # Check for the default service instance created for host-based
    # service insertion; if a service instance has not been created for
    # management-based service insertion, create a new one with a POST call
    for s in service_instances_array:
        if r_schema._serviceid == s.service.objectId:
            # check for defined r_schema._serviceid
            self.log.debug("Service Instance is %s" % s.objectId)
            required_service_instance = s.objectId
    if required_service_instance != '' and required_service_instance is not None:
        result_obj = result.Result()
        result_obj.set_status_code(200)
        result_obj.set_response_data(required_service_instance)
    else:
        # make a create call if the service instance is not there
        self.schema_class = 'service_instance_schema.ServiceInstanceSchema'
        self.log.debug("Make POST call to create service instance")
        self.response = self.request(
            'POST', self.create_endpoint,
            r_schema.get_data_without_empty_tags(self.content_type))
        result_obj = result.Result()
        self.set_result(self.response, result_obj)
    return result_obj
def __init__(self, config):
    self.path = config.get_doc_path()
    self.filename = config.get_doc_filename()
    self.result = result.Result(config)
    extensions = [
        'extra',
        'nl2br',
        'toc',
        'smarty',
        'meta',
    ]
    self.md = markdown.Markdown(extensions=extensions)
    jinja2_loader = jinja2.FileSystemLoader('./templates/')
    self.jinja2_env = jinja2.Environment(loader=jinja2_loader)
    self.jinja2_env.filters['euro'] = euro
    self.jinja2_env.filters['thsep'] = thsep
    self.jinja2_env.filters['percent'] = percent
    self.jinja2_env.filters['twodp'] = twodp
    img_path = os.path.join(self.path, 'img')
    if not os.path.exists(img_path):
        os.makedirs(img_path)
    self.result.write_coeffs_as_csv(self.path)
def create(self, r_schema):
    # Check the flag to get the service profile. If it is set, find the
    # service profile id for the given service profile name.
    # If it is not set, create a new service profile.
    if r_schema._getserviceprofileflag == 'true':
        service_profile_response = self.read()
        service_profiles_array = service_profile_response.serviceProfileArray
        required_service_profile_name = ""
        found_profile = False
        for p in service_profiles_array:
            if r_schema._serviceprofilename == p.name:
                self.log.debug("Service Profile is %s" % p.objectId)
                found_profile = True
                required_service_profile_name = p.objectId
        result_obj = result.Result()
        if not found_profile:
            self.log.error("Unable to find default service profile")
            result_obj.set_response_data(None)
            result_obj.set_status_code(404)
        else:
            result_obj.set_response_data(required_service_profile_name)
            result_obj.set_status_code(200)
        return result_obj
    else:
        self.schema_class = 'service_profile_schema.ServiceProfileSchema'
        self.log.debug("Make POST call to create service profile")
        result_obj = super(ServiceProfile, self).create(r_schema)
        return result_obj[0]
def args(self, argv):
    show_ep = None
    quality = None
    torrent_client = str()
    show_name = None
    url_RSS = 'https://nyaa.si/rss?c=1_2&q=%title%'
    try:
        opts, args = getopt.getopt(
            argv, "hc:u:s:n:q:",
            ["help", "client=", "url=", "show=", "num=", "quality="])
    except getopt.GetoptError:
        print(help)
        sys.exit(2)
    for opt, arg in opts:
        if opt == "":
            self.prompt()
        elif opt in ("-h", "--help"):
            print(help)
            sys.exit()
        elif opt in ("-c", "--client"):
            torrent_client = arg
        elif opt in ("-u", "--url"):
            url_RSS = str(arg)
        elif opt in ("-s", "--show"):
            show_name = arg
        elif opt in ("-n", "--num"):
            show_ep = int(arg)
        elif opt in ("-q", "--quality"):
            quality = arg
    if show_name is None:
        show_name = input('enter show name')
    self.results = result.Result(url_RSS, show_name, show_ep, quality)
def delete(self, schema_object=None):
    """ Overriding the base_client delete method to perform DELETE operation
    """
    (sec_layer, section_id, rule_id) = re.split('_', self.id, 2)
    if ((sec_layer == "L3" or sec_layer == "L2") and
            self.scope == UNIVERSAL_SCOPE):
        # Universal L2/L3 sections require the generation number from a GET
        # on the section as the If-Match header for the DELETE
        section_get_endpoint = "/firewall/config/sections/" + section_id
        self.response = self.request('GET', section_get_endpoint)
        payload_schema = self.response.read()
        if payload_schema is not None and payload_schema != "":
            sec_schema_obj = section_schema.SectionSchema()
            sec_schema_obj.set_data(payload_schema, self.accept_type)
        else:
            self.log.debug("GET Section failed for %s" % section_id)
            return None
        self.if_match = sec_schema_obj._tag_generationNumber
        self.log.debug("Generation Number from GET Section %s" %
                       self.if_match)
        delete_endpoint = "/firewall/config/sections/%s/rules/%s" % (
            section_id, rule_id)
    self.log.debug("delete_endpoint is %s " % delete_endpoint)
    self.log.debug("endpoint id is %s " % self.id)
    self.response = self.request('DELETE', delete_endpoint, "")
    result_obj = result.Result()
    self.set_result(self.response, result_obj)
    return result_obj
def bulk_create(template_obj, py_dict_array):
    """ Function to bulk create components

    @param py_dict_array reference to an array of python dictionaries, each
           containing the spec needed to create a schema object
    """
    time_start = datetime.now()
    pool = eventlet.GreenPool(30)
    result_array = []
    for py_dict in py_dict_array:
        # Build a fresh options dict per iteration so each spawned greenlet
        # gets its own spec instead of a shared, mutated dictionary
        options = {
            'py_dict': py_dict,
            'template_obj': template_obj,
            'result_array': result_array,
        }
        template_obj.log.debug("py_dict = %s" % py_dict)
        if 'VDNET_PYLIB_THREADS' not in os.environ:
            schema_object = template_obj.get_schema_object(py_dict)
            result_obj = result.Result()
            result_obj = template_obj.create(schema_object)
            result_array.append(result_obj)
        else:
            pool.spawn(create_with_threads, options)
    if pool.running():
        pool.waitall()
    time_end = datetime.now()
    total_time = time_end - time_start
    template_obj.log.debug("Attempted to create %s components" %
                           len(py_dict_array))
    template_obj.log.debug("Time taken to create components: %s seconds" %
                           total_time.seconds)
    return result_array
def __init__(self, parent):
    Gtk.Box.__init__(self, False, 0)
    self.parent = parent
    try:
        current_locale, encoding = locale.getdefaultlocale()
        locale_path = os.path.join(
            os.path.abspath(os.path.dirname(__file__)), 'locale')
        translate = gettext.translation(
            cn.App.application_shortname, locale_path, [current_locale])
        _ = translate.gettext
    except FileNotFoundError:
        _ = str
    chart = ch.Chart()
    self.function = fn.Function()
    algo = al.Algorithm()
    self.algo_parameters = apm.AlgorithmParameters(self.function, algo, chart)
    self.func_parameters = fpm.FunctionParameters(self.function, algo, chart)
    self.result = rsl.Result(self.function, algo)
    hpaned = Gtk.Paned()
    hpaned.set_position(800)
    hpaned.add1(chart.sw)
    vbox = Gtk.VBox()
    vbox.add(self.algo_parameters.frame)
    vbox.add(self.func_parameters.frame)
    vbox.add(self.result.frame)
    hpaned.add2(vbox)
    self.pack_start(hpaned, True, True, 0)
def create(self, schema_object): """ Client method to perform create operation @param schema_object instance of BaseSchema class @return result object """ self.response = self.request\ ('POST', self.create_endpoint, schema_object.get_data_without_empty_tags(self.content_type)) result_obj = result.Result() self.set_result(self.response, result_obj) response = result_obj.get_response() location = response.getheader("Location") self.log.debug("Location header is %s" % location) self.location_header = location #Firewall rule ID is fetched from the Location Header if location is not None: self.id = location.split('/')[-1] result_obj.set_response_data(self.id) else: pylogger.error( "Error in getting location from HTTP Response Header") return result_obj
def run(self):
    self.active = True
    # set root result entry
    d = datetime.datetime.now()
    self.results_id = '%d-%02d-%02d_%02d-%02d-%02d-%03d' % (
        d.year, d.month, d.day, d.hour, d.minute, d.second,
        d.microsecond / 1000)
    # create results directory and results file
    if self.results_name is not None:
        self.results_id += '__' + result_file_name(self.results_name)
    self.results_dir = os.path.join(self.svp_dir, RESULTS_DIR,
                                    self.results_id)
    makedirs(self.results_dir)
    self.results_file = os.path.join(self.results_dir,
                                     self.results_id + RESULTS_EXT)
    self.files_dir = os.path.join(self.svp_dir, FILES_DIR)
    if self.results_tree:
        self.results = self.results_tree
        self.active_result = self.results
        self.update_result(name=self.results_id)
        self.active_result.results_index = 0
        self.svp_file = None
        result = self.active_result.next_result()
        if result is not None:
            self.svp_file = result.file()
            self.active_result = result
    else:
        self.results = rslt.Result(name=self.results_id,
                                   type=rslt.RESULT_TYPE_RESULT)
        self.active_result = self.results
        self.update_result()
    # start
    self.run_next()
def create(self, schema_object=None, url_parameters=None):
    """ Client method to perform create operation

    @param schema_object instance of BaseSchema class
    @return result object
    """
    self.log.debug("In create base_client...")
    if self.create_as_put:
        self.log.error('This is a workaround, file a PR: '
                       'create should not be a PUT call')
        self.response = self.request('PUT', self.create_endpoint,
                                     schema_object.get_data(self.content_type),
                                     url_parameters=url_parameters)
    else:
        if schema_object is not None:
            self.response = self.request(
                'POST', self.create_endpoint,
                schema_object.get_data(self.content_type),
                url_parameters=url_parameters)
        else:
            self.response = self.request('POST', self.create_endpoint,
                                         url_parameters=url_parameters)
    result_obj = result.Result()
    self.set_result(self.response, result_obj)
    return result_obj
def getResults(self):
    """ Gets the models associated with this project
        (Models need to be loaded.)
    """
    emptyResults = []
    outputString = ""
    if self.id is not None:
        payload = {'id': self.id}
        headers = {'APIKey': self.APIKey}
        r = requests.get(config.URL + "multipleresults.json",
                         params=payload, headers=headers)
        count = 0
        if r.status_code == requests.codes.ok:
            data = r.text
            if data != config.INVALID_API_KEY:
                jsonList = json.loads(data)
                for x in jsonList:
                    res = result.Result(x.encode('ascii'), self,
                                        x.encode('ascii'))
                    emptyResults.append(res)
                    outputString += "(" + repr(count) + ") " + res.filename + " "
                    count = count + 1
            else:
                raise ValueError("'" + self.APIKey + "'" +
                                 " is not a valid API key.")
    else:
        print("This simulation has no id.")
    print(outputString)
    return emptyResults
def create(self, schema_object): """ Client method to perform create operation @param schema_object instance of BaseSchema class @return result object """ if self.create_as_put: self.log.error('This is workaround, File a PR, ' + 'create should not be a PUT call') self.response = self.request( 'PUT', self.create_endpoint, schema_object.get_data_without_empty_tags(self.content_type)) else: if schema_object is not None: self.response = self.request( 'POST', self.create_endpoint, schema_object.get_data_without_empty_tags( self.content_type)) else: self.response = self.request('POST', self.create_endpoint) result_obj = result.Result() self.set_result(self.response, result_obj) return result_obj
def createResultsObject(self, result):
    r = pod_result.Result(self.name, self.data_sets, result)
    r.parameters = self.parameters
    r.parameter_names = self.parameter_names
    r.fitted_parameters = result[0]
    r.cov_matrix = result[1]
    info_dict = result[2]
    r.num_func_evals = info_dict['nfev']
    r.residuals = info_dict['fvec']
    r.message = result[3]
    r.success = result[4] in [1, 2, 3, 4]
    if (result[0] < 0).any():
        r.success = False
    if not r.success:
        return r
    r.n = self._calcNumberOfObservations()
    r.deg_freedom_t = self._calcDegOfFreedomT(r.n)
    r.deg_freedom_e = self._calcDegOfFreedomE(r.n, len(r.fitted_parameters))
    r.ss_tot = self._calcSS_total()
    r.ss_err = self._calcSS_err(r.residuals)
    r.r2 = self._calcR2(r.ss_err, r.ss_tot)
    r.r2_adj = self._calcR2(r.ss_err / r.deg_freedom_e,
                            r.ss_tot / r.deg_freedom_t)
    r.std_dev = self._calcStdDev(r.ss_err, r.residuals)
    r.std_err = self._calcStdErr(r.cov_matrix, r.ss_err, r.n,
                                 len(r.fitted_parameters))
    return r
def get_trad_simpleV_mileageCost_noFre(self):
    total_dis_tmp = 0
    total_car_fre = 0
    for i in range(self.pro_num):
        dis = self.my_data.get_dis(x1=self.machine_x, y1=self.machine_y,
                                   x2=self.x[i], y2=self.y[i])
        now_set = self.info[i]
        total_v = 0
        for part in now_set:
            total_v += (part[1] / 1000 * part[2] / 1000 * part[3] / 1000 *
                        self.car_num * part[5] / part[4])
        total_car_fre += math.ceil(
            total_v / (self.car_x * self.car_y * self.car_z * self.car_coe))
        total_dis_tmp += dis * 2 * math.ceil(
            total_v / (self.car_x * self.car_y * self.car_z * self.car_coe))
        # print('test', i, total_v)
        # if i == 191:
        #     print(now_set)
    res = result.Result()
    res.total_dis = total_dis_tmp
    res.total_car_fre = total_car_fre
    return res
def update(self, py_dict, override_merge=False):
    """ Client method to perform update operation
    overrides the update method in base_client.py

    @param py_dict dictionary object which contains schema attributes to be
           updated
    @param override_merge
    @return status http response status
    """
    self.log.debug("update input = %s" % py_dict)
    update_object = self.get_schema_object(py_dict)
    schema_object = None
    if override_merge is False:
        schema_object = self.read()
        self.log.debug("schema_object after read:")
        schema_object.print_object()
        self.log.debug("schema_object from input:")
        update_object.print_object()
        try:
            self.merge_objects(schema_object, update_object)
        except:
            tb = traceback.format_exc()
            self.log.debug("tb %s" % tb)
    else:
        schema_object = update_object
    self.log.debug("schema object after merge:")
    schema_object.print_object()
    (sec_layer, section_id, rule_id) = re.split('_', self.id, 2)
    section_get_endpoint = "/firewall/config/sections/" + section_id
    self.response = self.request('GET', section_get_endpoint)
    payload_schema = self.response.read()
    if payload_schema is not None and payload_schema != "":
        sec_schema_obj = section_schema.SectionSchema()
        sec_schema_obj.set_data(payload_schema, self.accept_type)
    else:
        self.log.debug("GET Section failed for %s" % section_id)
        return None
    self.if_match = sec_schema_obj._tag_generationNumber
    self.log.debug("Generation Number from GET Section %s" % self.if_match)
    self.response = self.request(
        'PUT', self.read_endpoint + "/" + rule_id,
        schema_object.get_data_without_empty_tags(self.content_type))
    result_obj = result.Result()
    self.set_result(self.response, result_obj)
    if result_obj.status_code != 200:
        return result_obj
    res_data = result_obj.get_response_data()
    rule_id = re.findall(r'rule id="\d+"', res_data)[0].split(r'"')[1]
    self.log.debug("Rule Id %s successfully created for section %s" %
                   (rule_id, section_id))
    self.id = sec_layer + '_' + section_id + '_' + rule_id
    result_obj.response_data = self.id
    return result_obj
def update(self, filename):
    # Read the payload from the file and close the handle promptly
    with open(filename, 'rb') as payload_file:
        data = payload_file.read()
    self.response = self.request('PUT', self.create_endpoint, data)
    result_obj = result.Result()
    self.set_result(self.response, result_obj)
    return result_obj
def process(file_name):
    print(file_name)
    # setting
    fixation_event = None
    calibration_flag = False
    result = rt.Result()
    print('Loading data file')
    df = pd.read_excel(DATA_PATH + '/' + file_name)
    for row in df.itertuples():
        fixation = ft.Fixation(row)
        if fixation.event == 'Eye tracker Calibration end':
            print('calibration completed')
            calibration_flag = True
        if not calibration_flag:
            continue
        if fixation.is_event():
            print(fixation.event)
            if fixation.event == 'ImageStimulusStart':
                fixation_event = ft.FixationEvent(row)
            elif fixation.event == 'ImageStimulusEnd':
                if fixation_event.event_value != 'black':
                    fixation_position = fixation_event.average_position()
                    # write data on result.xlsx
                    result.add_data(id=fixation_event.get_stimules_id(),
                                    participant_name=fixation.participant_name,
                                    fixation_ave_x=fixation_position[0],
                                    fixation_ave_y=fixation_position[1])
        else:
            if fixation.is_fixation():
                required_data = True
                if MODE == 1:
                    if fixation.timestamp - fixation_event.start_time > 5000:
                        required_data = False
                elif MODE == 2:
                    if fixation.timestamp - fixation_event.start_time > 3000:
                        required_data = False
                elif MODE == 3:
                    if fixation.timestamp - fixation_event.start_time < 5000:
                        required_data = False
                elif MODE == 4:
                    if (fixation.timestamp - fixation_event.start_time > 6000 or
                            fixation.timestamp - fixation_event.start_time < 3000):
                        required_data = False
                elif MODE == 5:
                    if fixation.timestamp - fixation_event.start_time < 6000:
                        required_data = False
                elif MODE == 6:
                    if fixation.timestamp - fixation_event.start_time < 8000:
                        required_data = False
                if required_data:
                    fixation_event.add_fixation(row)
    result.save_sheet()
def create(self, schema_object):
    self.create_endpoint = self.create_endpoint + "/members/" + \
        schema_object._member_id
    self.response = self.request('POST', self.create_endpoint,
                                 schema_object.get_data(self.content_type))
    self.id = schema_object._member_id
    result_obj = result.Result()
    self.set_result(self.response, result_obj)
    result_obj.set_response_data(schema_object._member_id)
    return result_obj
def judgeSubmission():
    """ Judge a submission.

    Returns False if there is no submission to judge, True otherwise.
    """
    internal_error = False
    sub = judge_api.getSubmission()
    if not sub:
        return False
    logger.info("Judging solution %d", sub['submission']['id'])
    # Downloading files
    downloads = {
        "checker": (sub["checker_id"], "checker", WDIR),
        "solution": (sub["submission"]["solution_id"], "solution", WDIR),
        "zip": (sub["zip_id"], str(sub["zip_id"]), SDIR, ".zip", True)
    }
    logger.info("Testing submission %s, stage: %s", sub['submission']['id'],
                "PUBLIC" if sub['public_stage'] else "NONPUBLIC")
    results = result.Result(sub['submission']['id'], sub['testset_id'],
                            sub['testset_update'], sub['public_stage'])
    test_params = {
        'lang': sub['submission']['type'],
        'time_limit': sub['time_limit'],
        'memory_limit': int(sub['memory_limit']) * 1024,
        # Downloaded files
        'checker': None,
        'solution': None,
        'zip': None,
        'use_files': sub['use_files'],
        'input_file': sub['input_file'],
        'output_file': sub['output_file'],
        'tests': sub['tests']
    }
    for name, params in downloads.items():
        file_path = judge_api.downloadFile(*params)
        test_params[name] = file_path
        if not file_path:
            logger.error("File download failed with params %s", str(params))
            results.setDownloadFail(
                "Download failed for %s, check if added to task" % name)
            internal_error = True
            break
    if not internal_error:
        submission_judge = task.Task(test_params, results)
        submission_judge()
    judge_api.submitResult(results.getResult())
    return True
def update(self, py_dict, override_merge=False):
    # set endpoint
    # 'create_endpoint': 'si/serviceinstance/serviceinstance-17/runtimeinfo'
    # /si/serviceinstance/<serviceinstance-Id>/runtimeinfo/<sir-Id>/config?action=install
    endpoint = self.create_endpoint + '/' + str(self.id) + \
        '/config?action=' + str(py_dict)
    if self.update_as_post:
        self.response = self.request('POST', endpoint)
    result_obj = result.Result()
    self.set_result(self.response, result_obj)
    return result_obj
def delete(self, py_dict):
    end_point_uri = self.delete_endpoint + '/' + str(self.id) + \
        '?forceRemoval=true'
    self.response = self.request('DELETE', end_point_uri, "")
    result_obj = result.Result()
    self.set_result(self.response, result_obj)
    self.log.debug("*** Sleeping for 120 sec due to PR 1043761: it takes "
                   "time to free the ip from the pool after deleting the "
                   "controller ***")
    time.sleep(120)
    return result_obj
def create_with_threads(options):
    template_obj = options['template_obj']
    py_dict = options['py_dict']
    result_array = options['result_array']
    schema_object = template_obj.get_schema_object(py_dict)
    result_obj = result.Result()
    template_obj.log.debug("Running create call with threads...")
    result_obj = template_obj.create(schema_object)
    # No need to do mutex/semaphore for shared memory for greenlets
    # http://learn-gevent-socketio.readthedocs.org/en/latest/greenlets.html
    result_array.append(result_obj)
def delete(self, schema_object=None): """ Over riding delete method to perform DELETE operation """ self.delete_endpoint = self.delete_endpoint + '?clusters=' + str( schema_object) self.log.debug("delete_endpoint is %s " % self.delete_endpoint) self.log.debug("endpoint id is %s " % self.id) self.log.debug("schema_object to delete call is %s " % schema_object) end_point_uri = self.delete_endpoint self.response = self.request('DELETE', end_point_uri, "") result_obj = result.Result() self.set_result(self.response, result_obj) return result_obj
def create(self, schema_object):
    # Save the base endpoint
    temp_create_endpoint = self.create_endpoint
    self.create_endpoint = self.create_endpoint + "/members/" + \
        schema_object._member_id
    # This is a product design bug for using PUT to add a member to
    # applicationgroup
    self.response = self.request('PUT', self.create_endpoint,
                                 schema_object.get_data(self.content_type))
    self.id = schema_object._member_id
    result_obj = result.Result()
    self.set_result(self.response, result_obj)
    result_obj.set_response_data(schema_object._member_id)
    # Restore the base endpoint
    self.create_endpoint = temp_create_endpoint
    return result_obj
def set_controller_ssl(self, pyDict):
    schema_class = 'vxlan_controller_config_schema.VXLANControllerConfigSchema'
    module, class_name = schema_class.split(".")
    some_module = importlib.import_module(module)
    loaded_schema_class = getattr(some_module, class_name)
    # creating an instance of schema class
    schema_object = loaded_schema_class(pyDict)
    read_endpoint = "/vdn/controller/cluster"
    self.response = self.request('PUT', read_endpoint,
                                 schema_object.get_data(self.content_type))
    result_obj = result.Result()
    self.set_result(self.response, result_obj)
    return result_obj
def update(self, py_dict, override_merge=True):
    """ Update an edge interface.

    The py_dict comes from the user in the form of an interfacesSchema,
    but the PUT call uses the interface schema, so the py_dict is first
    converted into an interface py_dict. override_merge defaults to True
    here because the read schema and update schema objects should not be
    merged.
    """
    interface_pydict = py_dict['interfaces'][0]
    self.log.debug("update input = %s" % interface_pydict)
    self.log.debug("updating interface index %s of edge id %s" %
                   (str(self.id), self.edge))
    self.schema_class = 'interface_schema.InterfaceSchema'
    update_object = self.get_schema_object(interface_pydict)
    schema_object = None
    if override_merge is False:
        schema_object = self.read()
        self.log.debug("schema_object after read:")
        schema_object.print_object()
        self.log.debug("schema_object from input:")
        update_object.print_object()
        try:
            self.merge_objects(schema_object, update_object)
        except:
            tb = traceback.format_exc()
            self.log.debug("tb %s" % tb)
    else:
        schema_object = update_object
    self.log.debug("schema object after merge:")
    schema_object.print_object()
    if self.update_as_post:
        self.response = self.request(
            'POST', self.create_endpoint,
            schema_object.get_data(self.content_type))
    else:
        if self.id is None:
            self.response = self.request(
                'PUT', self.read_endpoint,
                schema_object.get_data(self.content_type))
        else:
            self.response = self.request(
                'PUT', self.read_endpoint + "/" + str(self.id),
                schema_object.get_data(self.content_type))
    result_obj = result.Result()
    self.set_result(self.response, result_obj)
    self.schema_class = 'interfaces_schema.InterfacesSchema'
    return result_obj
def get(self, url_parameters=None):
    """ Method to perform a "GET" request on endpoint and return result object
    """
    if self.id is not None:
        self.response = self.request('GET',
                                     self.read_endpoint + "/" + self.id, "",
                                     url_parameters=url_parameters)
    else:
        self.response = self.request('GET', self.read_endpoint, "",
                                     url_parameters=url_parameters)
    self.log.debug(self.response.status)
    result_object = result.Result()
    self.set_result(self.response, result_object)
    return result_object
def change_controller_password(self, password):
    pyDict = {'apipassword': password}
    schema_class = 'vxlan_controller_credential_schema.VXLANControllerCredentialSchema'
    module, class_name = schema_class.split(".")
    some_module = importlib.import_module(module)
    loaded_schema_class = getattr(some_module, class_name)
    # creating an instance of schema class
    schema_object = loaded_schema_class(pyDict)
    read_endpoint = "vdn/controller/credential"
    self.response = self.request('PUT', read_endpoint,
                                 schema_object.get_data(self.content_type))
    result_obj = result.Result()
    self.set_result(self.response, result_obj)
    return result_obj
def create(self, schema_object):
    self.response = self.request(
        'POST', self.create_endpoint,
        schema_object.get_data_without_empty_tags(self.content_type))
    result_obj = result.Result()
    self.set_result(self.response, result_obj)
    response = result_obj.get_response()
    location = response.getheader("Location")
    self.log.debug("Location header is %s" % location)
    self.location_header = location
    if location is not None:
        self.id = location.split('/')[-1]
        result_obj.set_response_data(self.id)
    return result_obj
def update(self, py_dict, override_merge=False, url_parameters=None):
    """ Client method to perform update operation

    @param py_dict dictionary object which contains schema attributes to be
           updated
    @param override_merge
    @return status http response status
    """
    self.log.debug("update input = %s" % py_dict)
    update_object = self.get_schema_object(py_dict)
    schema_object = None
    if override_merge is False:
        schema_object = self.read()
        self.log.debug("schema_object after read:")
        schema_object.print_object()
        self.log.debug("schema_object from input:")
        update_object.print_object()
        try:
            self.merge_objects(schema_object, update_object)
        except:
            tb = traceback.format_exc()
            self.log.debug("tb %s" % tb)
    else:
        schema_object = update_object
    self.log.debug("schema object after merge:")
    schema_object.print_object()
    if self.update_as_post:
        self.response = self.request('POST', self.create_endpoint,
                                     schema_object.get_data(self.content_type),
                                     url_parameters=url_parameters)
    else:
        if self.id is None:
            self.response = self.request('PUT', self.read_endpoint,
                                         schema_object.get_data(self.content_type),
                                         url_parameters=url_parameters)
        else:
            self.response = self.request(
                'PUT', self.read_endpoint + "/" + str(self.id),
                schema_object.get_data(self.content_type),
                url_parameters=url_parameters)
    result_obj = result.Result()
    self.set_result(self.response, result_obj)
    return result_obj