def setTimeRestriction(self, timePosition, timeFrame):
    """Construct and apply the time-restriction query, including the original subset.

    Builds a subset string for the current time window and installs it on
    the layer, trying each applicable query idiom until the provider
    accepts one.

    :param timePosition: position of the current time frame
    :param timeFrame: extent of one time frame
    :raises SubstringException: if no idiom produced a subset string the
        layer's provider accepted
    """
    if not self.isEnabled():
        # Time management is off: restore the layer's original subset string.
        self.deleteTimeRestriction()
        return
    startTime = self.getStartTime(timePosition, timeFrame)
    endTime = self.getEndTime(timePosition, timeFrame)
    # SQL is preferred with OGR as fallback; date-typed columns only
    # support the OGR idiom.
    idioms_to_try = [QueryIdioms.SQL, QueryIdioms.OGR]
    if self.getDateType() in DateTypes.QDateTypes:
        idioms_to_try = [QueryIdioms.OGR]
    tried = []
    for idiom in idioms_to_try:
        subsetString = query_builder.build_query(
            startTime, endTime,
            self.fromTimeAttribute, self.toTimeAttribute,
            date_type=self.getDateType(),
            date_format=self.getTimeFormat(),
            query_idiom=idiom,
            acc=self.accumulateFeatures())
        try:
            self.setSubsetString(subsetString)
        except SubstringException:
            # Provider rejected this idiom; remember it for the error
            # message and try the next one.
            # (Not sure if trying several idioms could make the screen flash.)
            tried.append(subsetString)
            continue
        return
    raise SubstringException(
        "Could not update subset string for layer {}. Tried: {}".format(
            self.layer.name(), tried))
def filter_by_query(self, query):
    """Return the saved tracks that match *query*.

    Compiles the query once via ``query_builder.build_query`` and
    evaluates it against each saved track: operands that parse as floats
    are used as literals, all other operands are resolved per track with
    ``track.get_val``.

    :param query: query expression understood by ``query_builder``
    :return: list of tracks for which the compiled query is truthy
    """
    query_func = query_builder.build_query(query)
    # The operand names depend only on the query itself, so extract them
    # once instead of re-parsing the query for every track.
    arg_strings = query_builder.operands(query)
    filtered_tracks = []
    for track in self.saved_tracks:
        print('Processing ' + track.to_simple_json()['name'])
        # Presumably prepares the per-track values read by get_val()
        # below -- TODO confirm against the track implementation.
        track.perform_audio_analysis()
        arg_vals = []
        for el in arg_strings:
            try:
                # Numeric literal operand.
                arg_vals.append(float(el))
            except ValueError:
                # Non-numeric operand: treat it as a track attribute name.
                arg_vals.append(track.get_val(el))
        if query_func(*arg_vals):
            filtered_tracks.append(track)
    return filtered_tracks
def setTimeRestriction(self, timePosition, timeFrame):
    """Apply a time-based subset string (including the original subset).

    Computes the current time window, builds a filter query for it, and
    installs the first candidate query idiom the data provider accepts.
    Raises SubstringException listing every rejected query otherwise.
    """
    if not self.isEnabled():
        # Time management switched off: restore the layer's original subset.
        self.deleteTimeRestriction()
        return

    window_start = self.getStartTime(timePosition, timeFrame)
    window_end = self.getEndTime(timePosition, timeFrame)
    date_type = self.getDateType()

    # Determine which idioms should be tried:
    # default is SQL with OGR as fallback ...
    candidate_idioms = [
        query_builder.QueryIdioms.SQL, query_builder.QueryIdioms.OGR
    ]
    # ... date-typed columns go through OGR only ...
    if date_type in time_util.DateTypes.QDateTypes:
        candidate_idioms = [query_builder.QueryIdioms.OGR]
    # ... except on Postgres, where timestamp/date columns use the
    # optimized SQL query format.
    if self.layer.dataProvider().storageType(
    ) == POSTGRES_TYPE and date_type in time_util.DateTypes.QDateTypes:
        candidate_idioms = [query_builder.QueryIdioms.SQL]

    rejected = []
    for candidate in candidate_idioms:
        query_string = query_builder.build_query(
            window_start,
            window_end,
            self.fromTimeAttribute,
            self.toTimeAttribute,
            date_type=date_type,
            date_format=self.getTimeFormat(),
            query_idiom=candidate,
            acc=self.accumulateFeatures())
        try:
            self.setSubsetString(query_string)
        except SubstringException:
            # Provider rejected this idiom; keep it for the error
            # message and move on to the next candidate.
            # (Not sure if trying several idioms could make the screen flash.)
            rejected.append(query_string)
        else:
            return
    raise SubstringException(
        "Could not update subset string for layer {}. Tried: {}".format(
            self.layer.name(), rejected))
def setTimeRestriction(self, timePosition, timeFrame):
    """Restrict the layer to the given time window.

    Constructs the subset query (including the original subset) for the
    window and applies the first query idiom the provider accepts;
    raises SubstringException with every rejected query otherwise.
    """
    if not self.isEnabled():
        # Time filtering disabled: drop any restriction we installed.
        self.deleteTimeRestriction()
        return
    window_start = self.getStartTime(timePosition, timeFrame)
    window_end = self.getEndTime(timePosition, timeFrame)
    # Date-typed columns are only filterable via OGR; otherwise prefer
    # SQL and keep OGR as a fallback.
    if self.getDateType() in DateTypes.QDateTypes:
        candidates = [QueryIdioms.OGR]
    else:
        candidates = [QueryIdioms.SQL, QueryIdioms.OGR]
    rejected = []
    for candidate in candidates:
        query_string = query_builder.build_query(
            window_start,
            window_end,
            self.fromTimeAttribute,
            self.toTimeAttribute,
            date_type=self.getDateType(),
            date_format=self.getTimeFormat(),
            query_idiom=candidate,
            acc=self.accumulateFeatures())
        try:
            self.setSubsetString(query_string)
            return
        except SubstringException:
            # Rejected by the provider -- record it and try the next
            # idiom. (Not sure if trying several idioms could make the
            # screen flash.)
            rejected.append(query_string)
    raise SubstringException(
        "Could not update subset string for layer {}. Tried: {}".format(
            self.layer.name(), rejected))
def get_products(self, category, filter_options, range_query, aggregation_options):
    """Search Elasticsearch for products in *category*.

    Builds the search body from the given filter/range/aggregation
    options, posts it to ``<ES_URL>/<category>/_search`` and flattens the
    hits and aggregation buckets into a plain dict.

    :return: on success ``{"products", "filterList", "count", "status"}``;
        on any non-200 response ``{"status": "error", "reason": ...}``
    """
    query = query_builder.build_query(filter_options, range_query,
                                      aggregation_options)
    # NOTE(review): GET with a JSON body is what ES's _search accepts,
    # but no Content-Type header is set -- newer ES versions require
    # "application/json"; confirm against the deployed ES version.
    response = requests.get(consts.ES_URL + category + "/_search",
                            data=json.dumps(query))
    if response.status_code != 200:
        return {"status": "error", "reason": "problem fetching products"}
    body = json.loads(response.text)
    products = []
    aggregations = []
    count = 0
    if "hits" in body and "hits" in body["hits"] and len(body["hits"]["hits"]):
        count = body["hits"]["total"]
        for product in body["hits"]["hits"]:
            products.append(product['_source'])
    if "aggregations" in body:
        # BUGFIX: dict.iteritems() is Python-2-only (AttributeError on
        # Python 3); the value was unused anyway, so iterate the keys.
        for key in body["aggregations"]:
            filter_list = {key: []}
            for bucket in body["aggregations"][key]["buckets"]:
                filter_list[key].append({"key": bucket["key"],
                                         "count": bucket["doc_count"],
                                         "show": bucket["doc_count"] >= 1})
            aggregations.append(filter_list)
    return {"products": products,
            "filterList": aggregations,
            "count": count,
            "status": "success"}
def setTimeRestriction(self, timePosition, timeFrame):
    """Constructs the query, including the original subset.

    Restricts the layer to the window
    ``[timePosition + offset, timePosition + timeFrame + offset]`` by
    building a subset string and trying each applicable query idiom
    until the provider accepts one.

    :raises SubstringException: if every idiom's subset string was
        rejected by the provider
    """
    if not self.timeEnabled:
        # Time filtering disabled: restore the original subset string.
        self.deleteTimeRestriction()
        return
    startTime = timePosition + timedelta(seconds=self.offset)
    endTime = timePosition + timeFrame + timedelta(seconds=self.offset)
    # SQL preferred with OGR fallback; date-typed columns need OGR, but
    # providers known to speak SQL always get the SQL idiom.
    idioms_to_try = [QueryIdioms.SQL, QueryIdioms.OGR]
    if self.getDateType() in DateTypes.QDateTypes:
        idioms_to_try = [QueryIdioms.OGR]
    if self.layer.dataProvider().storageType() in STORAGE_TYPES_WITH_SQL:
        idioms_to_try = [QueryIdioms.SQL]
    tried = []
    for idiom in idioms_to_try:
        subsetString = query_builder.build_query(
            startTime, endTime,
            self.fromTimeAttribute, self.toTimeAttribute,
            date_type=self.getDateType(),
            date_format=self.getTimeFormat(),
            query_idiom=idiom)
        try:
            self.setSubsetString(subsetString)
        except SubstringException:
            # Rejected; remember it for diagnostics and try the next one.
            # (Not sure if trying several idioms could make the screen flash.)
            tried.append(subsetString)
            continue
        return
    # Previously raised the bare class with no message; include the layer
    # and the rejected queries so the failure is debuggable.
    raise SubstringException(
        "Could not update subset string for layer {}. Tried: {}".format(
            self.layer.name(), tried))
def processRequest(uuid, service_name, category_name, event_name, username, proxy_username, start_date, end_date, days, record_limit, show_history):
    """Run the analytics query and return ``(json_data, webstatus)``.

    Builds a SQL statement via ``build_query()``, executes it against the
    AYLT MySQL database, and renders the matching rows (optionally with
    their provenance history when ``show_history == 1``) as JSON.

    :param record_limit: "all" to fetch every row, otherwise a numeric
        string passed to ``cursor.fetchmany``
    :param show_history: 1 to also fetch and embed provenance history
    :return: tuple of (JSON payload string, HTTP status string)
    """
    # NOTE(review): the response is assembled as a Python-literal string
    # and parsed with eval() below -- any quote character in a DB value
    # breaks the parse (or worse, executes attacker-controlled text).
    # This should be rebuilt with dicts/lists and json.dumps directly.
    # NOTE(review): the history queries below use %-interpolation into
    # SQL; the values come from DB integer columns here, but they should
    # still be parameterized like the main query.
    (query_statement, query_values) = build_query(uuid, service_name, category_name, event_name, username, proxy_username, start_date, end_date, days, record_limit)
    log_info("SQL Query: ", str(query_statement))
    log_info("SQL Query Values: ", str(query_values))
    try:
        conn = MySQLdb.connect(host=AYLT_DB_HOST, user=AYLT_DB_USERNAME,
                               passwd=AYLT_DB_PASSWORD, db=AYLT_DB_NAME,
                               port=AYLT_DB_PORT)
        cursor = conn.cursor()
        cursor.execute(query_statement, tuple(query_values))
        if record_limit == "all":
            results = cursor.fetchall()
        else:
            results = cursor.fetchmany(size=int(record_limit))
        if (len(results) > 0):
            alldata = "{'result':{'status':'Success','records':["
            for row in results:
                id_ = int(row[0])
                uuid = int(row[1])
                username = str(row[2])
                proxy_username = str(row[3])
                event_data = str(row[4])
                request_ipaddress = str(row[5])
                created_date = str(row[6])
                event_name = str(row[7])
                service_name = str(row[8])
                service_link = str(row[9])
                service_ipaddress = str(row[10])
                service_group = str(row[11])
                service_type = str(row[12])
                service_version = str(row[13])
                version_status = str(row[14])
                if version_status == "A":
                    version_status = "Active"
                else:
                    version_status = "Inactive"
                category_name = str(row[15])
                service_object_id = str(row[16])
                object_name = str(row[17])
                object_desc = str(row[18])
                parent_uuid = str(row[19])
                # NOTE(review): Username/Proxy username are emitted as the
                # literal '******' in this copy -- presumably redacted;
                # confirm against the canonical source.
                alldata += "{'UUID':'" + str(uuid) + "','Username':'******','Proxy username':'******','Event data':'" + event_data + \
                    "','Request IP address':'" + request_ipaddress + \
                    "','Event name':'" + event_name + \
                    "','Service name':'" + service_name + \
                    "','Service link':'" + service_link + \
                    "','Service IP address':'" + service_ipaddress + \
                    "','Service Group':'" + service_group + \
                    "','Category name':'" + category_name + \
                    "','Service Object ID':'" + service_object_id + \
                    "','Object Name':'" + object_name + \
                    "','Object Description':'" + object_desc + \
                    "','Parent UUID':'" + parent_uuid + \
                    "','Created Date':'" + created_date + \
                    "','Service Type':'" + service_type + \
                    "','Service Version':'" + service_version + \
                    "','Version Status':'" + version_status + "'},"
                if show_history == 1:
                    # Fetch the provenance-history rows linked to this record.
                    cursor.execute(QUERY_HISTORY_PARENT_DATA % (id_))
                    histresults = cursor.fetchall()
                    ph_data = "{'History':{'status':'Success','records':["
                    if (len(histresults) > 0):
                        for hrow in histresults:
                            provenance_parent_id = hrow[0]
                            cursor.execute(QUERY_ALL_HIST_DATA % (provenance_parent_id))
                            phresults = cursor.fetchall()
                            if (len(phresults) > 0):
                                for pphrow in phresults:
                                    ph_uuid = pphrow[1]
                                    ph_username = pphrow[2]
                                    ph_proxy_username = pphrow[3]
                                    ph_event_data = pphrow[4]
                                    ph_request_ipaddress = pphrow[5]
                                    # UNUSED => ph_created_date = pphrow[6]
                                    ph_event_name = pphrow[7]
                                    ph_service_name = pphrow[8]
                                    ph_service_link = pphrow[9]
                                    ph_service_ipaddress = pphrow[10]
                                    ph_service_group = pphrow[11]
                                    ph_category_name = pphrow[12]
                                    ph_service_object_id = pphrow[13]
                                    ph_object_name = pphrow[14]
                                    ph_object_desc = pphrow[15]
                                    ph_parent_uuid = pphrow[16]
                                    # TODO - make this a string template or something
                                    ph_data += ("{'UUID':'" + ph_uuid +
                                                "','Username':'******','Proxy username':'******','Event data':'" + ph_event_data +
                                                "','Request IP address':'" + ph_request_ipaddress +
                                                "','Event name':'" + ph_event_name +
                                                "','Service name':'" + ph_service_name +
                                                "','Service link':'" + ph_service_link +
                                                "','Service IP address':'" + ph_service_ipaddress +
                                                "','Service Group':'" + ph_service_group +
                                                "','Category name':'" + ph_category_name +
                                                "','Service Object ID':'" + ph_service_object_id +
                                                "','Object Name':'" + ph_object_name +
                                                "','Object Description':'" + ph_object_desc +
                                                "','Parent UUID':'" + ph_parent_uuid +
                                                "','Service Type':'" + service_type +
                                                "','Service Version':'" + service_version +
                                                "','Version Status':'" + version_status + "'},")
                                # This is heinous and should really bother people---^
                                ph_data = ph_data.strip(",")
                                ph_data += "]}}"
                            else:
                                # BUGFIX: the original concatenated the id()
                                # builtin ("[" + id + "]"), which raises
                                # TypeError; use this record's id_ instead.
                                err_msg = "History Recorded but Provenance " + \
                                          "row doesn't exist" + " " + "[" + str(id_) + "]"
                                log_errors(err_msg)
                                ph_data = "{'Null'},"
                                # notify Support
                                ph_data = ph_data.strip(",")
                                ph_data += "]}}"
                    else:
                        ph_data = "{'No Records in History Table'}"
                        ph_data += "]}}"
                    alldata = alldata.strip(",")
                    alldata += ph_data
                    alldata += "]}}"
                    # NOTE(review): eval() of a string built from DB values;
                    # see the note at the top of this function.
                    json_data = json.dumps(eval(alldata), indent=4)
                    info_msg = "Analytics + History: Success!"
                    log_info(info_msg, json_data)
                    cursor.close()
                    webstatus = '200 OK'
                    # NOTE(review): this returns inside the row loop, so with
                    # show_history == 1 only the first record is emitted --
                    # confirm whether that is intended.
                    return (json_data, webstatus)
            alldata = alldata.strip(",")
            alldata += "]}}"
            # NOTE(review): eval() of a string built from DB values; see
            # the note at the top of this function.
            json_data = json.dumps(eval(alldata), indent=4)
            info_msg = "Analytics: Success "
            log_info(info_msg, json_data)
            cursor.close()
            webstatus = '200 OK'
            return (json_data, webstatus)
        else:
            json_data = json.dumps({'result': {'Status': 'Success',
                                               'Details': 'No records found'}}, indent=4)
            info_msg = "Analytics: Success! "
            log_info(info_msg, json_data)
            cursor.close()
            webstatus = '200 OK'
            return (json_data, webstatus)
    except Exception as exc:
        err_msg = "EXCEPTION: " + str(exc)
        log_exception(err_msg)
        webstatus = '400 Bad Request'
        json_data = json.dumps({'result': {'Status': 'Failed',
                                           'Details': 'Analytics call failed'}}, indent=4)
        return (json_data, webstatus)