def update_variable_store(variable_store_title, variables):
    conn_fac = HttpRequest({'url': 'http://localhost:5516'}, release.scriptUsername, release.scriptUserPassword)
    response = conn_fac.get('/configurations', contentType='application/json')
    data = json.loads(response.getResponse())
    variable_store_ci = None
    for i in range(0, len(data)):
        if data[i]['type'] == 'vars.VariableStore' and variable_store_title == data[i]['properties']['title']:
            variable_store_ci = data[i]
            break
    if not variable_store_ci:
        print "ERROR: Unable to find variable store '%s'" % (variable_store_title)
        sys.exit(1)
    variable_store_ci['properties']['variablesJson'] = json.dumps(variables)
    response = conn_fac.put('/configurations/%s' % (variable_store_ci['id']),
                            json.dumps(variable_store_ci), contentType='application/json')
    if not response.isSuccessful():
        print "ERROR: Unable to update variable store '%s':" % (variable_store_title)
        response.errorDump()
        sys.exit(1)
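
# A minimal usage sketch for update_variable_store above; the store title and
# variable values here are hypothetical, not from the original source. The
# function exits the script itself on failure, so no return value is checked.
example_variables = {'appVersion': '1.2.3', 'targetEnv': 'staging'}  # hypothetical values
update_variable_store('Global Variables', example_variables)  # hypothetical store title
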
def processData(self, data):
    logger.debug(data)
    if self.getInputProperty("messageTemplate") != None:
        template = self.getInputProperty("messageTemplate")
        for key in template:
            data[key] = self.evaluateCycleExpression(template[key], data)
    for output in self.output:
        if self.getOutputProperty(output, "messageTemplate") != None:
            template = self.getOutputProperty(output, "messageTemplate")
            for key in template:
                data[key] = self.evaluateCycleExpression(template[key], data)
        if "outputmodule" in self.output[output]:
            outputType = "plugin"
        else:
            outputType = self.output[output]["class"]
        # TODO: deal with omitted fields
        if outputType == "plugin":
            self.outputWriteDocument(output, data, False)
        if outputType == "stdout":
            codec = self.output[output]["codec"]
            if codec == "json_lines":
                print(json.dumps(data))
            else:
                print(data)
        if outputType == "file":
            codec = self.output[output]["codec"]
            if codec == "json_lines":
                filename = self.output[output]["filename"]
                self.openfiles[filename].write(json.dumps(data).encode('UTF-8'))
                self.openfiles[filename].write("\n")
def process(self, build_file):
    """Process an individual build file and output JSON of result to stdout."""
    # Reset build_env for each build file so that the variables declared in the
    # build file or the files in includes through include_defs() don't pollute
    # the namespace for subsequent build files.
    build_env = copy.copy(self.root_build_env)
    relative_path_to_build_file = relpath(build_file, self.project_root).replace('\\', '/')
    build_env['BASE'] = relative_path_to_build_file[:self.len_suffix]
    build_env['BUILD_FILE_DIRECTORY'] = os.path.dirname(build_file)
    build_env['RULES'] = {}

    # Copy BUILD_FILE_SYMBOL_TABLE over. This is the only dict that we need
    # a separate copy of since update_lazy_functions will modify it.
    build_env['BUILD_FILE_SYMBOL_TABLE'] = copy.copy(
        self.root_build_env['BUILD_FILE_SYMBOL_TABLE'])

    # Re-apply build_env to the rules added in this file with
    # @provide_for_build.
    update_lazy_functions(build_env['LAZY_FUNCTIONS'], build_env)
    execfile(os.path.join(self.project_root, build_file),
             build_env['BUILD_FILE_SYMBOL_TABLE'])
    values = build_env['RULES'].values()
    if self.strip_none:
        # Filter out keys with a value of "None" from the final rule definition.
        values = strip_none_entries(values)
    values.append({"__includes": [build_file] + build_env['INCLUDES']})
    if self.server:
        print json.dumps(values)
    else:
        for value in values:
            print json.dumps(value)
def createIndex(self, indexName):
    if indexName not in self.runtime["indices"]:
        if self.runtime["client"].admin().indices().prepareExists(indexName).execute().actionGet().exists:
            logger.debug("Index \"%s\" already exists", indexName)
            self.runtime["indices"][indexName] = time.time()
            return False
        else:
            logger.info("Creating index %s", indexName)
            if "index_settings" in self.config:
                self.config["indexSettings"] = self.config["index_settings"]
            if "type_mapping" in self.config:
                self.config["typeMapping"] = self.config["type_mapping"]
            try:
                if "indexSettings" in self.config:
                    settingsJsonStr = json.dumps(self.config["indexSettings"])
                    logger.info("Index settings: %s", settingsJsonStr)
                    self.runtime["client"].admin().indices().prepareCreate(indexName).setSettings(settingsJsonStr).execute().actionGet()
                else:
                    self.runtime["client"].admin().indices().prepareCreate(indexName).execute().actionGet()
            except IndexAlreadyExistsException, ex:
                logger.warning(ex)
                logger.warning("Index %s already exists, this should be harmless", indexName)
            if "typeMapping" in self.config:
                mappingJsonStr = json.dumps(self.config["typeMapping"])
                logger.info("Setting mapping for %s/%s - %s", indexName, self.config["type"], mappingJsonStr)
                self.runtime["client"].admin().indices().preparePutMapping().setIndices(indexName).setType(self.config["type"]).setSource(mappingJsonStr).execute().actionGet()
            self.runtime["indices"][indexName] = time.time()
            logger.debug("Created index: \"%s\"", indexName)
            return True
def process(self, build_file):
    """Process an individual build file and output JSON of result to stdout."""
    # Reset build_env for each build file so that the variables declared in the
    # build file or the files in includes through include_defs() don't pollute
    # the namespace for subsequent build files.
    build_env = copy.copy(self.root_build_env)
    relative_path_to_build_file = relpath(build_file, self.project_root).replace('\\', '/')
    build_env['BASE'] = relative_path_to_build_file[:self.len_suffix]
    build_env['BUILD_FILE_DIRECTORY'] = os.path.dirname(build_file)
    build_env['RULES'] = {}

    # Copy BUILD_FILE_SYMBOL_TABLE over. This is the only dict that we need
    # a separate copy of since update_lazy_functions will modify it.
    build_env['BUILD_FILE_SYMBOL_TABLE'] = copy.copy(
        self.root_build_env['BUILD_FILE_SYMBOL_TABLE'])

    # Re-apply build_env to the rules added in this file with
    # @provide_for_build.
    update_lazy_functions(build_env['LAZY_FUNCTIONS'], build_env)
    execfile(os.path.join(self.project_root, build_file),
             build_env['BUILD_FILE_SYMBOL_TABLE'])
    values = build_env['RULES'].values()
    values.append({"__includes": [build_file] + build_env['INCLUDES']})
    if self.server:
        print json.dumps(values)
    else:
        for value in values:
            print json.dumps(value)
def wait_for_deploy(self, deploymentId):
    url = '/api/deployments/%s' % deploymentId
    response = self.httpRequest.get(url, headers=self.headers)
    if response.getStatus() not in HTTP_SUCCESS:
        self.throw_error(response)
    deployment_details = json.loads(response.getResponse())
    print(json.dumps(deployment_details))
    taskUrl = deployment_details["Links"]["Task"]
    time.sleep(5)
    task_details = self.get_task_details(taskUrl)
    while not task_details["IsCompleted"]:
        task_details = self.get_task_details(taskUrl)
        print(json.dumps(task_details))
        time.sleep(5)
    if task_details["FinishedSuccessfully"]:
        print("Deployment finished successfully.")
    else:
        msg = "Deployment failed, errors: [%s]" % task_details["ErrorMessage"]
        print(msg)
        sys.exit(msg)
def start_deploy(self, releaseId, environment):
    environmentId = self.getEnvironmentId(environment)
    url = '/api/deployments'
    data = {
        "ReleaseId": releaseId,
        "EnvironmentId": environmentId,
        "TenantId": None,
        "SkipActions": [],
        "QueueTime": None,
        "QueueTimeExpiry": None,
        "FormValues": {},
        "ForcePackageDownload": False,
        "UseGuidedFailure": False,
        "SpecificMachineIds": [],
        "ExcludedMachineIds": [],
        "ForcePackageRedeployment": False
    }
    print("data = %s" % data)
    response = self.httpRequest.post(url, headers=self.headers, body=json.dumps(data))
    if response.getStatus() in HTTP_SUCCESS:
        data = json.loads(response.getResponse())
        print(json.dumps(data))
        return data["Id"]
    self.throw_error(response)
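
# A hedged sketch chaining the two Octopus helpers above: start_deploy returns
# the new deployment's "Id", which wait_for_deploy then polls until the task
# completes. The client instance and both identifiers are hypothetical.
deployment_id = octopus.start_deploy('Releases-123', 'Production')  # hypothetical ids
octopus.wait_for_deploy(deployment_id)
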
def ping(self, project_name):
    # Random valid endpoint that verifies we're logged in / have a valid token.
    url = '/projects/%s' % project_name
    response = self.httpRequest.get(url, headers=self.headers)
    if response.getStatus() in HTTP_SUCCESS:
        data = json.loads(response.getResponse())
        print json.dumps(data)
    else:
        self.throw_error(response)
def LgaIdGraphWriter(id, timestamp, outlink_ids_set):
    result = dict()
    result['id'] = id
    result['timestamp'] = timestamp
    result['outlink_ids'] = [x[0] for x in outlink_ids_set]
    result_json = json.dumps(result)
    return result_json
def LgaIdMapWriter(id, url, surt_url):
    result = dict()
    result['url'] = url
    result['surt_url'] = surt_url
    result['id'] = id
    result_json = json.dumps(result)
    return result_json
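
# A minimal usage sketch for the two Lga* writers above; the id, timestamp,
# URLs, and outlink tuples are hypothetical. Each call returns one JSON record,
# suitable for a JSON-lines output.
graph_line = LgaIdGraphWriter(42, 1388534400, set([(7, 'a'), (9, 'b')]))
map_line = LgaIdMapWriter(42, 'http://example.com/', 'com,example)/')
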
def gen_score_json(f_num, fe, matching_neg_ins, matching_pos_ins,
                   num_matching_class_negatives, num_matching_class_positives,
                   feature_codes, trimmed_instances, header_map={}):
    if len(feature_codes) > 0:
        try:
            feature_num = get_num_att(trimmed_instances.attribute(f_num).name())
            feature_name = feature_codes["feature_codes"][str(feature_num)]["name"]
        except KeyError:
            print "Warning -- key %s not found in code file" % feature_num
            feature_name = trimmed_instances.attribute(f_num).name()
    else:
        feature_name = trimmed_instances.attribute(f_num).name()
        feature_num = get_num_att(trimmed_instances.attribute(f_num).name())
    master_feature_num = header_map.get(str(f_num + 1))
    score_dict = {
        "feature_num": feature_num,
        "master_feature_num": master_feature_num,
        "feature": feature_name,
        "score": '%.4f' % fe.evaluateAttribute(f_num),
        "rank": f_num,
        "missing": trimmed_instances.attributeStats(f_num).missingCount if trimmed_instances.attributeStats(f_num) else 0,
        "freq+": trimmed_instances.attributeStats(f_num).nominalCounts[1] if trimmed_instances.attributeStats(f_num).nominalCounts else 0,
        "freq-": trimmed_instances.attributeStats(f_num).nominalCounts[0] if trimmed_instances.attributeStats(f_num).nominalCounts else 0,
        "aligned_pos": num_matching_class_positives,
        "aligned_neg": num_matching_class_negatives}
    if debug:
        score_dict["pos_ins_list"] = matching_pos_ins
        score_dict["neg_ins_list"] = matching_neg_ins
    return json.dumps(score_dict)
def update_record(self, table_name, sys_id, content, xlr_task_id):
    if self.useServicenowApp:
        payload_header = self.create_payload_header(
            table_name=table_name, action="update", identifier=sys_id,
            xlr_task_id=xlr_task_id)
        payload = self.create_payload(header=payload_header, data=content)
        data = self.request(method='POST', url=SERVICE_NOW_CREATE_URL,
                            body=payload.encode('utf-8'), headers=self.headers)[0]
        if data["sys_row_error"] != "":
            raise RuntimeError(data["sys_row_error"])
        return data
    else:
        servicenow_api_url = '/api/now/table/%s/%s?%s' % (table_name, sys_id, self.sysparms)
        body = json.dumps(content)
        data = self.request(method='PUT', url=servicenow_api_url,
                            body=body.encode('utf-8'), headers=self.headers)
        if 'sys_id' in data:
            data['target_sys_id'] = data['sys_id']
        if 'number' in data:
            data['target_record_number'] = data['number']
        return data
def send_error(self, code, message=None):
    """
    Override send_error to always return JSON.
    """
    # Copied and pasted lots of this from the base class, but had to override
    # due to HTML escaping messing up the JSON format of the message.
    try:
        short, long = self.responses[code]
    except KeyError:
        short, long = '???', '???'
    if message is None:
        message = short
    explain = long
    self.log_error("code %d, message %s", code, message)
    content = json.dumps({'status': 'error',
                          'code': code,
                          'message': message,
                          'explain': explain})
    self.send_response(code, message)
    self.send_header("Content-Type", self.error_content_type)
    self.send_header('Connection', 'close')
    self.end_headers()
    if self.command != 'HEAD' and code >= 200 and code not in (204, 304):
        self.wfile.write(content)
def do_POST(self):
    if "content-length" in self.headers.dict:
        length = int(self.headers.dict["content-length"])
    else:
        logger.warn("content length required")
        self.send_error(400, "content length required for post")
        return
    if "content-type" not in self.headers.dict or self.headers.dict["content-type"] != "text/json":
        logger.warn("content type missing or non-json")
    body = self.rfile.read(length)
    try:
        logger.debug("received: [%s]" % body)
        data_in = json.loads(body)
    except:
        logger.warn("content does not parse")
        self.send_error(400, "content does not parse as valid json")
        return
    try:
        data_out = handle_request(data_in, self.server)
        reply = json.dumps(data_out)
    except TouchFormsBadRequest, e:
        self.send_error(400, str(e))
        return
def do_POST(self):
    if 'content-length' in self.headers.dict:
        length = int(self.headers.dict['content-length'])
    else:
        logger.warn('content length required')
        self.send_error(400, 'content length required for post')
        return
    if 'content-type' not in self.headers.dict or self.headers.dict['content-type'] != 'text/json':
        logger.warn('content type missing or non-json')
    body = self.rfile.read(length)
    try:
        logger.debug('received: [%s]' % body)
        data_in = json.loads(body)
    except:
        logger.warn('content does not parse')
        self.send_error(400, 'content does not parse as valid json')
        return
    try:
        data_out = handle_request(data_in, self.server)
        reply = json.dumps(data_out)
    except TouchFormsBadRequest, e:
        self.send_error(400, str(e))
        return
def sendMetrics(P_error):
    now = datetime.datetime.today()
    defect_id = issue.getKey()
    JiraCreationDate = datetime.datetime.strptime(
        dateCreationJira, '%Y-%m-%d %H:%M:%S').strftime('%Y-%m-%dT%H:%M:%S') + '+01:00'
    params = {}
    params["criticality"] = criticite
    params["date"] = now.strftime('%Y-%m-%dT%H:%M') + '+01:00'
    params["defect_id"] = defect_id
    params["defect_project"] = str(project_id)
    params["error_message"] = P_error
    params["type"] = str(provenance)
    params["status"] = 1
    params["date_created_jira"] = JiraCreationDate
    params = json.dumps(params)
    headers = {}
    headers['Content-Type'] = "application/json"
    headers['Accept'] = "application/json"
    try:
        conn = httplib.HTTPConnection('cd.pagesjaunes.fr')
        conn.request('PUT', '/dashboard-cd/api/measure/defect', params, headers)
        response = conn.getresponse()
        log.info('Sending bypass Trigger metrics: ' + response.reason)
    except:
        log.warn('Unable to send metrics')
        pass
def outputWriteDocument(self, output, data, force):
    if self.getInputProperty("transform") != None:
        matches = re.findall(self.regexTransform, self.getInputProperty("transform"))
        if matches:
            out = self.getInputProperty("transform")
            for match in matches:
                substitution = None
                (dictionary, key) = match.split(".")
                if dictionary == "$cycle":
                    substitution = self.getCycleProperty(key)
                if dictionary == "$config":
                    substitution = self.getInputProperty(key)
                if dictionary == "$data":
                    if key in data:
                        substitution = data[key]
                    else:
                        logger.warning("Found no key named \"%s\" in %s, your data was discarded", key, dictionary)
                        logger.warning(data)
                        return None
                if substitution == None:
                    logger.warning("Found no value for %s.%s, your data was discarded", dictionary, key)
                    logger.warning(data)
                    return None
                out = out.replace(str(match), str(substitution))
            data = out
    if "codec" in self.outplugin[output]["config"]:
        codec = self.outplugin[output]["config"]["codec"]
        if codec == "json_lines":
            data = json.dumps(data).encode('UTF-8')
    return self.outplugin[output]["instance"].writeDocument(data, force)
def body_handler(body):
    if resp.status_code == 200:
        favs['favorites'] = json.loads(body.to_string())
    else:
        print "Failed to fetch favorites: %s" % body.to_string()
    EventBus.send('log.event', "user.favorites.list.result")
    EventBus.send('user.favorites.list.result', json.dumps(favs))
def sendMetrics(P_error):
    global project_id
    global result
    now = datetime.datetime.today()
    JiraCreationDate = datetime.datetime.strptime(
        dateCreationJira, '%Y-%m-%d %H:%M:%S').strftime('%Y-%m-%dT%H:%M:%S') + '+01:00'
    params = {}
    params["criticality"] = criticite
    params["date"] = now.strftime('%Y-%m-%dT%H:%M') + '+01:00'
    params["defect_id"] = defect_id
    params["defect_project"] = str(project_id)
    params["error_message"] = P_error
    params["status"] = result
    params["testcase_id"] = workitem_id
    params["testrun_id"] = testrun_id
    params["date_created_jira"] = JiraCreationDate
    params["date_created_test"] = WorkitemCreationDate
    params["date_updated_test"] = WorkitemUpdatedDate
    params["type"] = provenance
    params = json.dumps(params)
    #print params
    headers = {}
    headers['Content-Type'] = "application/json"
    headers['Accept'] = "application/json"
    try:
        conn = httplib.HTTPConnection(ip_dashboard)
        conn.request('PUT', '/dashboard-cd/api/measure/defect', params, headers)
        response = conn.getresponse()
        log.info('[' + defect_id + '] Sending metrics: ' + response.reason)
    except Exception, e:
        log.warn(u'Unable to send metrics: ' + str(e))
        pass
def update_ci_to_repo(storeName, data):
    Base.info("writing ci: %s to repo" % storeName)
    global StorageTimestamp
    # Get the store.
    store = load_ci_from_repo(storeName, __ciType)
    # Set the properties on the ci to be updated.
    for k, v in data.items():
        store.setProperty(k, json.dumps(v))
    store.setProperty('modTime', time_stamp())
    # Write back to XLR.
    if get_counter_timestamp(storeName) == StorageTimestamp:
        try:
            __repositoryService.update(store)
            return True
        except com.xebialabs.deployit.jcr.RuntimeRepositoryException as e:
            Base.error('Error detected while saving %s' % storeName)
            Base.error('Error: %s' % e)
            return False
        except com.xebialabs.deployit.repository.ItemConflictException as e:
            Base.error('Error detected while saving %s' % storeName)
            Base.error('Error: %s' % e)
            return False
    else:
        Base.error('deadlock collision detected while saving %s' % storeName)
        return False
def send_error(self, code, message=None, error_type=None, human_readable_message=None):
    """
    Override send_error to always return JSON.
    """
    # Copied and pasted lots of this from the base class, but had to override
    # due to HTML escaping messing up the JSON format of the message.
    try:
        short, long = self.responses[code]
    except KeyError:
        short, long = '???', '???'
    if message is None:
        message = short
    if human_readable_message is None:
        human_readable_message = message
    explain = long
    logger.exception("Status Code: %d, Message %s" % (code, message))
    content = json.dumps({'status': 'error',
                          'error_type': error_type,
                          'code': code,
                          'message': message,
                          'human_readable_message': human_readable_message,
                          'explain': explain})
    # If this is more than one line it messes up the response content.
    message = message.split("\n")[0] if message else ""
    self.send_response(code, message.encode("ascii", "xmlcharrefreplace"))
    self.send_header("Content-Type", self.error_content_type)
    self.cross_origin_header()
    self.send_header('Connection', 'close')
    self.end_headers()
    if self.command != 'HEAD' and code >= 200 and code not in (204, 304):
        self.wfile.write(content.encode("utf-8"))
def execute(self, expr, forest):
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        sock.connect((self.HOST, self.PORT))
        f = sock.makefile()
        print str(expr)
        f.write(str(expr) + "\n")
        f.write("\n")
        f.flush()
        #print type(forest)
        print forest.toDict()
        f.write("Batch 1.0 JSON 1.0\n")
        f.write(json.dumps(forest.toDict()) + "\n")
        header = f.readline()
        received = f.readline()
        #print "Header " + str(header)
        #print "Received " + str(received)
        f.close()
    finally:
        sock.close()
    # Possibly no dictionary was received, so hold off on loading.
    if received:
        received = json.loads(received)
    else:
        received = {}
    new_forest = Forest(received)
    #print str(new_forest)
    return new_forest  # Return the forest
def main():
    parser = optparse.OptionParser()
    parser.add_option('--project_root', action='store', type='string', dest='project_root')
    parser.add_option('--include', action='append', dest='include')
    parser.add_option('--ignore_path', action='append', dest='ignore_paths')
    (options, args) = parser.parse_args()

    project_root = options.project_root
    len_suffix = -len('/' + BUILD_RULES_FILE_NAME)

    build_files = None
    if args:
        # The user has specified which build files to parse.
        build_files = args
    else:
        # Find all of the build files in the project root. Symlinks will not be
        # traversed. Search must be done top-down so that directory filtering
        # works as desired.
        ignore_paths = [posixpath.join(project_root, d) for d in options.ignore_paths or []]
        build_files = []
        for dirpath, dirnames, filenames in os.walk(project_root, topdown=True, followlinks=False):
            # Do not walk directories that contain generated/non-source files.
            # All modifications to dirnames must occur in-place.
            dirnames[:] = [d for d in dirnames if not (posixpath.join(dirpath, d) in ignore_paths)]
            if BUILD_RULES_FILE_NAME in filenames:
                build_file = os.path.join(dirpath, BUILD_RULES_FILE_NAME)
                build_files.append(build_file)

    for build_file in build_files:
        # Reset build_env for each build file so that the variables declared in
        # the build file or the files in includes through include_defs() don't
        # pollute the namespace for subsequent build files.
        build_env = {}
        relative_path_to_build_file = relpath(build_file, project_root)
        build_env['BASE'] = relative_path_to_build_file[:len_suffix]
        build_env['BUILD_FILE_DIRECTORY'] = os.path.dirname(build_file)
        build_env['PROJECT_ROOT'] = project_root
        build_env['RULES'] = {}
        build_env['BUILD_FILE_SYMBOL_TABLE'] = make_build_file_symbol_table(build_env)

        # If there are any default includes, evaluate those first to populate
        # the build_env.
        includes = options.include or []
        for include in includes:
            include_defs(include, build_env)

        execfile(os.path.join(project_root, build_file),
                 build_env['BUILD_FILE_SYMBOL_TABLE'])
        for _, value in build_env['RULES'].items():
            print json.dumps(value)
def update_ci_property(self, ci_id, ci_property, property_value):
    if self.check_CI_exist(ci_id):
        ci = self.get_ci(ci_id, 'json')
        data = json.loads(ci)
        data[ci_property] = property_value
        self.update_ci(ci_id, json.dumps(data), 'json')
    else:
        raise Exception("Did not find ci with id [%s]" % ci_id)
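
# A minimal usage sketch for update_ci_property above; the client instance,
# ci id, property name, and value are all hypothetical.
client.update_ci_property('Applications/MyApp', 'tags', ['blue', 'canary'])
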
def main(wT, wB, T_opt, B_opt, budget, max_T_heat, max_B_light):
    return json.dumps(
        run(wT, wB, T_opt, B_opt, budget, max_T_heat, max_B_light),
        sort_keys=True,
        indent=4,
        separators=(",", ": "),
        ensure_ascii=False,
    ).encode("utf8")
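
# A hedged usage sketch for main above: note it returns UTF-8 encoded bytes, so
# a caller that wants the structure back must decode and re-parse. All argument
# values here are hypothetical.
raw = main(0.5, 0.5, 20, 400, 100, 10, 16)
parsed = json.loads(raw.decode('utf8'))
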
def generate_multiplot_payload(self):
    metrics_count = min(
        self.config['max_multiplot_metrics'],
        random.randint(0, self.config['ingest_metrics_per_tenant']))
    metrics_list = [
        generate_metric_name(i, self.config) for i in range(metrics_count)
    ]
    return json.dumps(metrics_list)
def ping(self):
    url = '/api/serverstatus'
    response = self.httpRequest.get(url, headers=self.headers)
    if response.getStatus() in HTTP_SUCCESS:
        data = json.loads(response.getResponse())
        print(json.dumps(data))
    else:
        self.throw_error(response)
def cache_set(key, value):
    if key is None:
        raise KeyError
    if settings.USES_POSTGRES:
        postgres_set_session(key, value)
    else:
        with open(cache_get_file_path(key), 'w') as f:
            f.write(json.dumps(value).encode('utf8'))
def _rpc_request(self, component, service, *args):
    req = {
        'id': '%i' % self.simulator_service_id,
        'component': component,
        'service': service,
        'args': ', '.join(json.dumps(arg) for arg in args),
    }
    self.simulator_service_id += 1
    return req
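
# A minimal usage sketch for _rpc_request above: each positional argument is
# JSON-encoded on its own and joined into one comma-separated string, so
# req['args'] below would be '12, "UTC"'. The instance, component, and service
# names are hypothetical.
req = sim._rpc_request('clock', 'set_time', 12, 'UTC')
wire = json.dumps(req)  # the request dict itself is still plain data
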
def add_link(self, container_id, source_task_id, target_task_id):
    xlr_api_url = '/planning/links/%s' % container_id
    content = {"sourceId": source_task_id, "targetId": target_task_id}
    xlr_response = self.http_request.post(xlr_api_url, json.dumps(content),
                                          contentType='application/json')
    if xlr_response.isSuccessful():
        print "Added task link\n"
    else:
        print "Failed to add task link\n"
        print xlr_response.errorDump()
        sys.exit(1)
def list_curators(message):
    global fetching
    if curators is None:
        if not fetching.testandset():
            return
        consumer = Consumer(api_endpoint="https://api.twitter.com/",
                            consumer_key=config['consumer_key'],
                            consumer_secret=config['consumer_secret'],
                            oauth_token=config['oauth_token'],
                            oauth_token_secret=config['oauth_token_secret'])
        consumer.get("/1.1/lists/members.json",
                     {'slug': config['curatorslist'], 'owner_screen_name': config['account']},
                     response_handler)
    else:
        EventBus.send('log.event', "curators.list.result (Cached)")
        EventBus.send('curators.list.result', json.dumps(curators))
def update_variable_store(variable_store_title, variables):
    conn_fac = HttpRequest({'url': 'http://localhost:5516'}, release.scriptUsername, release.scriptUserPassword)
    response = conn_fac.get('/configurations', contentType='application/json')
    data = json.loads(response.getResponse())
    variable_store_ci = None
    for i in range(0, len(data)):
        if data[i]['type'] == 'vars.VariableStore' and variable_store_title == data[i]['properties']['title']:
            variable_store_ci = data[i]
            break
    if not variable_store_ci:
        print "ERROR: Unable to find variable store '%s'" % (variable_store_title)
        sys.exit(1)
    variable_store_ci['properties']['variablesJson'] = json.dumps(variables)
    response = conn_fac.put('/configurations/%s' % (variable_store_ci['id']),
                            json.dumps(variable_store_ci), contentType='application/json')
    if not response.isSuccessful():
        print "ERROR: Unable to update variable store '%s':" % (variable_store_title)
        response.errorDump()
        sys.exit(1)
def body_handler(body):
    global curators
    if resp.status_code == 200:
        data = json.loads(body.to_string())
        curators = []
        for user in data['users']:
            curators.append({'screen_name': user['screen_name'], 'id': user['id']})
    fetching.unlock()
    EventBus.send('log.event', "curators.list.result")
    EventBus.send('curators.list.result', json.dumps(curators))
def postgres_update_session_command(cursor, key, value):
    upd_sql = replace_table(
        "UPDATE %(table)s SET sess_json = ?, last_modified = ? "
        "WHERE sess_id = ?", POSTGRES_TABLE)
    upd_params = [
        json.dumps(value).encode('utf8'),
        datetime.utcnow(),
        str(key)
    ]
    cursor.execute(upd_sql, upd_params)
def update_task(self, updated_task):
    xlr_api_url = '/tasks/%s' % updated_task['id']
    content = updated_task
    xlr_response = self.http_request.put(xlr_api_url, json.dumps(content),
                                         contentType='application/json')
    if xlr_response.isSuccessful():
        print "Updated task %s\n" % updated_task['title']
    else:
        print "Failed to update task %s\n" % updated_task['title']
        print xlr_response.errorDump()
        sys.exit(1)
def postgres_insert_session_command(cursor, key, value):
    ins_sql = replace_table(
        "INSERT INTO %(table)s (sess_id, sess_json, last_modified, date_created) "
        "VALUES (?, ?, ?, ?)", POSTGRES_TABLE)
    ins_params = [
        str(key),
        json.dumps(value).encode('utf8'),
        datetime.utcnow(),
        datetime.utcnow()
    ]
    cursor.execute(ins_sql, ins_params)
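
# A hedged sketch of an upsert built from the two session commands above: try
# the UPDATE first and fall back to INSERT when no row matched. cursor.rowcount
# is standard DB-API 2.0; the helper name itself is hypothetical.
def postgres_upsert_session_command(cursor, key, value):
    postgres_update_session_command(cursor, key, value)
    if cursor.rowcount == 0:
        postgres_insert_session_command(cursor, key, value)
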
def writeDocument(self, data, force):
    bulkRequest = self.runtime["bulkRequest"]
    client = self.runtime["client"]
    if data != None:
        indexName = self.getIndexName(data)
        if "_id" in data:
            _id = data["_id"]
            del data["_id"]
            bulkRequest.add(
                client.prepareIndex(indexName, self.config["type"], _id).setSource(json.dumps(data)))
        else:
            bulkRequest.add(
                client.prepareIndex(indexName, self.config["type"]).setSource(json.dumps(data)))
        self.runtime["requestsPending"] = self.runtime["requestsPending"] + 1
    # Time to flush.
    if (self.runtime["requestsPending"] > 0) and (
            (self.runtime["requestsPending"] >= self.config["bulkActions"]) or (force == True)):
        logger.info("Flushing %d records", self.runtime["requestsPending"])
        # TODO: handle failure: org.elasticsearch.client.transport.NoNodeAvailableException
        # TODO: use JodaTime instead of jython's datetime/time
        bulkReady = False
        while not bulkReady:
            try:
                bulkResponse = bulkRequest.execute().actionGet()
                bulkReady = True
            except NoNodeAvailableException, ex:
                logger.error(ex)
                logger.warning("Bad bulk response, sleeping %d seconds before retrying, execution paused",
                               self.config["actionRetryTimeout"])
                time.sleep(self.config["actionRetryTimeout"])
        if bulkResponse.hasFailures():
            logger.warning("Failures indexing!")
            logger.warning(bulkResponse.buildFailureMessage())
        self.readyBulk()
def sendData(hook, layer):
    url = 'http://localhost:9000/geoserver/' + hook
    features = []
    for feature in layer.features():
        features.append(json.loads(writeJSON(feature)))
    req = urllib2.Request(url, json.dumps(features), {'Content-Type': 'application/json'})
    handler = urllib2.urlopen(req)
    handler.read()
    handler.close()
def add_new_task(self, new_task_title, new_task_type, container_id):
    xlr_api_url = '/tasks/%s' % container_id
    content = {"title": new_task_title, "taskType": new_task_type}
    xlr_response = self.http_request.post(xlr_api_url, json.dumps(content),
                                          contentType='application/json')
    if xlr_response.isSuccessful():
        new_task = json.loads(xlr_response.getResponse())
        print "Created %s\n" % new_task_title
    else:
        print "Failed to create %s\n" % new_task_title
        print xlr_response.errorDump()
        sys.exit(1)
    return new_task
def add_link(self, container_id, source_task_id, target_task_id):
    xlr_api_url = '/planning/links/%s' % container_id
    content = {"sourceId": source_task_id, "targetId": target_task_id}
    xlr_response = self.http_request.post(xlr_api_url, json.dumps(content),
                                          contentType='application/json')
    if xlr_response.isSuccessful():
        print "Added task link\n"
    else:
        print "Failed to add task link\n"
        print xlr_response.errorDump()
        sys.exit(1)
def start_deploy(self, webhook_id, commit_sha, message, branch="master"):
    url = "/integrations/generic/%s" % webhook_id
    body = {
        "deploy": {
            "branch": branch,
            "commit": {
                "sha": commit_sha,
                "message": message
            }
        }
    }
    print "body = %s" % json.dumps(body)
    # Example response:
    # {"deploy_ids":[],"messages":"INFO: Branch master is release branch: true\nINFO: Deploying to 0 stages\n"}
    response = self.httpRequest.post(url, headers=self.headers, body=json.dumps(body))
    if response.getStatus() in HTTP_SUCCESS:
        data = json.loads(response.getResponse())
        print json.dumps(data["deploy_ids"])
        print json.dumps(data["messages"])
        return data
    else:
        self.throw_error(response)
def process(self, build_file):
    # Reset build_env for each build file so that the variables declared in the
    # build file or the files in includes through include_defs() don't pollute
    # the namespace for subsequent build files.
    build_env = {}
    relative_path_to_build_file = relpath(build_file, self.project_root)
    build_env['BASE'] = relative_path_to_build_file[:self.len_suffix]
    build_env['BUILD_FILE_DIRECTORY'] = os.path.dirname(build_file)
    build_env['PROJECT_ROOT'] = self.project_root
    build_env['RULES'] = {}
    build_env['BUILD_FILE_SYMBOL_TABLE'] = make_build_file_symbol_table(build_env)

    # If there are any default includes, evaluate those first to populate the
    # build_env.
    for include in self.includes:
        include_defs(include, build_env)

    execfile(os.path.join(self.project_root, build_file),
             build_env['BUILD_FILE_SYMBOL_TABLE'])
    values = build_env['RULES'].values()
    if self.server:
        print json.dumps(values)
    else:
        for value in values:
            print json.dumps(value)
def set_learned_threshold_service(request):
    body = {
        "threshold": 0.5,
    }
    json_body = json.dumps(body)
    result = request.POST(WEBROOT + "/problem-plugin/set-learned-threshold", json_body)
    data = json.loads(result.getText())
    if data["success"] == False:
        grinder.logger.error("set-learned-threshold -- " + str(data["error"]))
    else:
        grinder.logger.info("set-learned-threshold -- " + str(data["success"]))
def add_new_task(self, new_task_title, new_task_type, container_id):
    xlr_api_url = '/tasks/%s' % container_id
    content = {"title": new_task_title, "taskType": new_task_type}
    xlr_response = self.http_request.post(xlr_api_url, json.dumps(content),
                                          contentType='application/json')
    if xlr_response.isSuccessful():
        new_task = json.loads(xlr_response.getResponse())
        print "Created %s\n" % new_task_title
    else:
        print "Failed to create %s\n" % new_task_title
        print xlr_response.errorDump()
        sys.exit(1)
    return new_task
def get_jobs(self):
    addParams = self.getAdditionalParams()
    saucelabs_api_url = "/rest/v1/%s/jobs?full=true%s" % (self.configUsername, addParams)
    jobs_response = self.http_request.get(saucelabs_api_url, contentType='application/json')
    if not jobs_response.isSuccessful():
        raise Exception(
            "Failed to get jobs. Server returned [%s], with content [%s] when calling [%s]" %
            (jobs_response.status, jobs_response.response, saucelabs_api_url))
    data = json.loads(jobs_response.getResponse())
    filteredJobs = self.filterList(data)
    jobs = {}
    nothingFound = "No jobs matching the search criteria were found. Search URL - " + saucelabs_api_url
    if len(filteredJobs) > 0:
        print 'Description | Start Time | Build Id | Name | Passed | Link'
        print ':---: | :---: | :---: | :---: | :---: | :---:'
    else:
        print nothingFound
        jobs["0"] = nothingFound
    counter = 0
    for job in filteredJobs:
        counter += 1
        jobName = job["name"] if job["name"] else " "
        jobBuildId = job["build"] if job["build"] else " "
        description = job["os"] + "/" + job["browser"] + "-" + job["browser_short_version"]
        jobPassed = job["passed"] if job["passed"] else " "
        jobPassedConverted = " "
        if jobPassed != " ":
            if job["passed"]:
                jobPassedConverted = "Passed"
            else:
                jobPassedConverted = "Failed"
        ts = int(job["start_time"])
        dateAndTime = datetime.utcfromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')
        link = "[View](" + self.http_connection.get("url") + "/jobs/" + job["id"] + ")"
        print '%s|%s|%s|%s|%s|%s' % (description, dateAndTime, jobBuildId, jobName, jobPassed, link)
        jobs[str(job["id"])] = jobPassedConverted
    filteredJobsStr = json.dumps(filteredJobs)
    return filteredJobsStr, jobs
def sendData(hook, layer):
    url = 'http://localhost:8080/geowebsocket/websocket/geowebsocket'
    features = []
    for feature in layer.features():
        features.append(json.loads(writeJSON(feature)))
    body = dict()
    body['event'] = hook
    body['layer'] = layer.name
    body['features'] = features
    req = urllib2.Request(url, json.dumps(body), {'Content-Type': 'application/json'})
    urllib2.urlopen(req)
def savePassage(self, testNumber, baseUrl, csrfToken, jsonArgs):
    headers = [NVPair('Accept', 'text/javascript, text/html, application/xml, text/xml, */*'),
               NVPair('Referer', '%s/oib/passage/create?fromManage=false' % baseUrl),
               NVPair('Cache-Control', 'no-cache'),
               NVPair('Content-Type', 'application/x-www-form-urlencoded; charset=UTF-8')]
    request = HTTPRequest(url=baseUrl, headers=headers)
    request = Test(testNumber, 'POST savePassage').wrap(request)
    contentJSON = json.dumps(jsonArgs)
    formData = (NVPair('ctoken', csrfToken),
                NVPair('contentJSON', contentJSON),
                NVPair('subject', 'ELA'),
                NVPair('poolId', '35'))
    result = request.POST('/oib/savePassage', formData)
    return result