def get(self):
    """Handle a GET lookup of SMS/email records.

    With no query arguments, renders the index page.  Otherwise queries
    by phone and/or email (optionally filtered by `direction`) and
    writes the combined results as one JSON object.
    """
    if not self.request.arguments:
        self.render('index.html')
        return
    # Tornado stores each argument as a list of values; take the first.
    email = self.request.arguments.get('email', [''])[0]
    phone = self.request.arguments.get('phone', [''])[0]
    direction = self.request.arguments.get('direction', [''])[0]
    # BUG FIX: `print email` was Python 2 statement syntax; use the
    # function form, valid on both 2 and 3.
    print(email)
    email_results = {}
    phone_results = {}
    if phone:
        phone_results = QueryHandler.get_sms(phone, direction)
    if email:
        email_results = QueryHandler.get_email(email, direction)
    output = {'email': email_results, 'phone': phone_results}
    self.write(output)
    self.flush()
    self.finish()
def post(self):
    """Handle a location search form submission.

    Queries violation data for the submitted location, builds a map for
    it, and re-renders the index page with the map, the per-key details
    and the total violation count.
    """
    location = request.form['location']
    query = QueryHandler(location)
    data = query.get_map_data()
    mymap = MapHandler(location, data)
    # len(data) already counts the keys; no need to materialize .keys().
    total_violations = len(data)
    return render_template('index.html', mymap=mymap.map, details=data,
                           total=total_violations, submit_flag=1)
def send_query(self):
    """Convert the current query to postfix form and dispatch it."""
    handler = QueryHandler(self.query)
    postfix_form = handler.toPostfix()
    # Remember the keyword mapping produced while parsing the query.
    self.keywordmap = handler.getKeywordMapping()
    print(postfix_form)
    self.build_request(handler, postfix_form)
def determineRequest(self):
    """Dispatch on the SOS request type and prepare template/results.

    Handles GetCapabilities, DescribeSensor and GetObservation.  Only
    the GetObservation branch returns a value (a GetObservationResponse);
    the other branches just set attributes on self.
    """
    if self.request == "GetCapabilities":
        # Gather everything the capabilities template needs, then render it.
        self.findKeywords()
        self.findInformation()
        self.findStations()
        self.template = "GetCapabilities.xml"
        self.results = self.renderCapabilities()
    elif self.request == "DescribeSensor":
        # Look up the sensor by URN; missing sensors get the exception template.
        self.sensor = models.Sensors.query.filter_by(urn = self.procedure).first()
        if self.sensor:
            self.template = "DescribeSensor.xml"
        else:
            self.template = "DescribeSensorException.xml"
    elif self.request == "GetObservation":
        self.template = "GetObservation.xml"
        try:
            # eventTime is expected in the form "<from>/<to>".
            self.fromtime = self.eventTime.split('/')[0]
            self.totime = self.eventTime.split('/')[1]
        except:  # NOTE(review): bare except also hides AttributeError etc.
            self.exception = True
            self.exceptionDetails['exceptionName'] = 'InvalidRequest'
            self.exceptionDetails['locator'] = 'eventTime'
            self.exceptionDetails['exceptionMessage'] = 'eventTime is not in the correct format'
            self.exceptionDetails['exceptionFile'] = 'GetObservationException.xml'
            self.template = 'GetObservationException.xml'
            # give dummy values in case of exception
            self.fromtime = ''
            self.totime = ''
            # raise ValueError('eventTime is not in the correct format')
        try:
            # offering is a colon-separated URN; keep the final component.
            self.URN = self.offering.split(':')[-1]
        except:  # NOTE(review): bare except — same concern as above.
            self.exception = True
            self.exceptionDetails['exceptionName'] = 'InvalidRequest'
            self.exceptionDetails['locator'] = 'offering'
            self.exceptionDetails['exceptionMessage'] = 'offering is not in the correct format'
            self.exceptionDetails['exceptionFile'] = 'GetObservationException.xml'
            self.template = 'GetObservationException.xml'
            #raise ValueError('offering is not in the correct format')
        # Run the measurement query even when an exception was flagged;
        # positional args presumably map to QueryHandler's signature —
        # TODO(review): confirm against QueryHandler's definition.
        q = QueryHandler(self.observedProperty, self.fromtime, self.totime, 'csv', self.URN, self.procedure, '', '', self.page)
        self.results = q.submitQuery()
        return GetObservationResponse(self.observedProperty, self.procedure, self.offering, self.fromtime, self.totime, self.page, self.results, self.exception, self.exceptionDetails, self.template)
def __init__(self, slug_map: Slug.ToInfoMap, default_column_slug_info: ColumnSlugInfo,
             ignored_ips: List[ipaddress.IPv4Network], uses_html: bool):
    """Only SessionInfoGenerator (above) should construct instances of this class."""
    super(SessionInfoImpl, self).__init__()
    self._uses_html = uses_html
    self._ignored_ips = ignored_ips
    self._query_handler = QueryHandler(self, slug_map, default_column_slug_info, uses_html)
    # No product-info types have been seen yet for this session's downloads.
    self._previous_product_info_type = None
def main(argv):
    """Parse command-line options and start the CSV query server.

    Options: -a/--addr bind address (default 0.0.0.0), -p/--port port
    (default 3003), -f/--folder data folder, -v debug mode, -h usage.
    Exits with status 2 on an unrecognized option.
    """
    global PATH
    port = 3003
    debug = False
    addr = '0.0.0.0'
    try:
        opts, args = getopt.getopt(argv, "hvp:f:a:", ["port=", "folder=", "addr="])
    except getopt.GetoptError:
        print('PythonServer.py -p <port> -f <folder>')
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print('PythonServer.py -p <port> -f <folder>')
            sys.exit()
        elif opt in ("-a", "--addr"):
            addr = arg
        elif opt in ("-p", "--port"):
            port = int(arg)
        elif opt in ("-f", "--folder"):
            PATH = arg
        elif opt == '-v':
            # BUG FIX: `opt in ("-v")` tested substring membership in the
            # *string* "-v" (parentheses alone don't make a tuple); use
            # a plain equality test for the single flag.
            debug = True
    PATH = os.path.normpath(PATH)
    global QH
    QH = QueryHandler(PATH, filetypes=['csv'])
    APP.run(host=addr, port=port, debug=debug)
def get(self):
    """Serve map data for the requested services near an address.

    Renders the index page when no query arguments are present;
    otherwise collects the expected arguments and writes the map
    produced by QueryHandler.get_map.
    """
    if not self.request.arguments:
        self.render('index.html')
        return
    # Each Tornado argument value is a list; take the first entry of each.
    fields = ('address', 'bed', 'food', 'medical',
              'mental-health', 'substance-abuse', 'transportation')
    data = {name: self.request.arguments[name][0] for name in fields}
    output = QueryHandler.get_map(data)
    self.write(output)
    self.flush()
    self.finish()
def get(self):
    """Serve map data for the requested services within a distance of an address.

    Renders the index page when no query arguments are present;
    otherwise collects the expected arguments (including 'distance')
    and writes the map produced by QueryHandler.get_map.
    """
    if not self.request.arguments:
        self.render('index.html')
        return
    # Each Tornado argument value is a list; take the first entry of each.
    fields = ('address', 'bed', 'food', 'medical', 'mental-health',
              'substance-abuse', 'transportation', 'distance')
    data = {name: self.request.arguments[name][0] for name in fields}
    output = QueryHandler.get_map(data)
    self.write(output)
    self.flush()
    self.finish()
def __init__(self, slug_info: Slug.ToInfoMap, ignored_ips: List[ipaddress.IPv4Network]):
    """Record slug information and the host-ignore list.

    :param slug_info: Information about the slugs we expect to see in the URL
    :param ignored_ips: A list representing hosts that we want to ignore
    """
    self._ignored_ips = ignored_ips
    self._slug_map = slug_info
    # Resolve the default column slugs once, up front.
    self._default_column_slug_info = QueryHandler.get_column_slug_info(
        self.DEFAULT_COLUMN_INFO, slug_info)
def get(self):
    """Return address lookup results for an 'address' query argument.

    Falls back to rendering the index page when no arguments are given
    or the 'address' argument is missing.
    """
    # An empty dict is already falsy, so the old extra `== {}` test was
    # redundant; fold both guards into one membership check.
    if not self.request.arguments or 'address' not in self.request.arguments:
        self.render('index.html')
        return
    address = self.request.arguments['address'][0]
    data = {'address': address}
    output = QueryHandler.get_addresses(data)
    self.write(output)
    self.flush()
    self.finish()
def get(self):
    """Handle a GET lookup of SMS/email records.

    With no query arguments, renders the index page.  Otherwise queries
    by phone and/or email (optionally filtered by `direction`) and
    writes the combined results as one JSON object.
    """
    if not self.request.arguments:
        self.render('index.html')
        return
    # Tornado stores each argument as a list of values; take the first.
    email = self.request.arguments.get('email', [''])[0]
    phone = self.request.arguments.get('phone', [''])[0]
    direction = self.request.arguments.get('direction', [''])[0]
    # BUG FIX: `print email` was Python 2 statement syntax; use the
    # function form, valid on both 2 and 3.
    print(email)
    email_results = {}
    phone_results = {}
    if phone:
        phone_results = QueryHandler.get_sms(phone, direction)
    if email:
        email_results = QueryHandler.get_email(email, direction)
    output = {'email': email_results, 'phone': phone_results}
    self.write(output)
    self.flush()
    self.finish()
def get(self):
    """Return address lookup results for an 'address' query argument.

    Falls back to rendering the index page when no arguments are given
    or the 'address' argument is missing.
    """
    # An empty dict is already falsy, so the old extra `== {}` test was
    # redundant; fold both guards into one membership check.
    if not self.request.arguments or 'address' not in self.request.arguments:
        self.render('index.html')
        return
    address = self.request.arguments['address'][0]
    data = {'address': address}
    output = QueryHandler.get_addresses(data)
    self.write(output)
    self.flush()
    self.finish()
def available_identifiers(self):
    """Build a nested availability map for every sensor of this station.

    Shape of the result:
      {"<quantity>/<sensor>": {"<Month Year>": {<filter>: {<day>: value}}}}
    where value is whatever QueryHandler.submitQuery returns, or a
    placeholder Measurements row with value "NA" when the query fails.
    """
    self.available_ids = {}
    for sensor in models.Sensors.query.all():
        self.available_ids[sensor.quantity.name + '/' + sensor.short_name]= {}
        # The availability window runs from the sensor's first to its
        # last recorded measurement.
        self.start_month = sensor.first_measurement.month
        self.start_year = sensor.first_measurement.year
        self.end_month = sensor.last_measurement.month
        self.end_year = sensor.last_measurement.year
        for year, month in self.month_year_iter(self.start_month, self.start_year, self.end_month, self.end_year):
            self.available_ids[sensor.quantity.name + '/' + sensor.short_name][calendar.month_name[month] + ' ' + str(year)] = {}
            for filter in self.filters:
                self.available_ids[sensor.quantity.name + '/' + sensor.short_name][calendar.month_name[month] + ' ' + str(year)][filter] = {}
                # One query per calendar day of the month.
                for day in range(1,calendar.monthrange(year,month)[1] +1 ):
                    # fromtime and totime in datetime format
                    fromtime = datetime(year,month,day,0,0,0)
                    totime = datetime(year,month,day,23,59,59)
                    # fromtime and totime in string format, so they can be submitted to QueryHandler
                    string_fromtime = datetime.strftime(fromtime,"%Y-%m-%d %H:%M:%S")
                    string_totime = datetime.strftime(totime,"%Y-%m-%d %H:%M:%S")
                    # format 'csv' doesn't make any difference here.
                    # If granularity ("2d") is larger than the requested
                    # span (one day), the average of the span is taken.
                    try:
                        value = QueryHandler(sensor.quantity.name, string_fromtime, string_totime, 'csv', self.station.name, sensor.urn, filter, "2d").submitQuery()
                    except:  # NOTE(review): bare except — also swallows programming errors.
                        # Query failed: roll back the DB session and record
                        # an unsaved placeholder row marked "NA".
                        db.session.rollback()
                        value = models.Measurements()
                        value.value = "NA"
                        value.station_id = self.station.id
                        value.quantity_id = sensor.quantity.id
                        value.sensor_id = sensor.id
                        value.timestamp = fromtime
                    self.available_ids[sensor.quantity.name + '/' + sensor.short_name][calendar.month_name[month] + ' ' + str(year)][filter][day] = value
    return self.available_ids
class SessionInfoImpl(SessionInfo):
    # Hosts whose log entries are silently dropped.
    _ignored_ips: List[ipaddress.IPv4Network]
    # Product-info types seen on the previous download-info call, or None.
    _previous_product_info_type: Optional[List[str]]
    _query_handler: QueryHandler

    def __init__(self, slug_map: Slug.ToInfoMap, default_column_slug_info: ColumnSlugInfo,
                 ignored_ips: List[ipaddress.IPv4Network], uses_html: bool):
        """This initialization should only be called by SessionInfoGenerator above."""
        super(SessionInfoImpl, self).__init__()
        self._ignored_ips = ignored_ips
        self._query_handler = QueryHandler(self, slug_map, default_column_slug_info, uses_html)
        self._uses_html = uses_html
        # The previous value of types when downloading a collection
        self._previous_product_info_type = None

    def parse_log_entry(self, entry: LogEntry) -> SESSION_INFO:
        """Parses a log record within the context of the current session."""
        # We ignore all sorts of log entries: non-GET, errors, bots,
        # non-OPUS paths, and ignored hosts.
        if entry.method != 'GET' or entry.status != 200:
            return [], None
        if entry.agent and "bot" in entry.agent.lower():
            return [], None
        path = entry.url.path
        if not path.startswith('/opus/__'):
            return [], None
        if any(entry.host_ip in ipNetwork for ipNetwork in self._ignored_ips):
            return [], None
        # See if the path (with the '/opus' prefix stripped) matches one
        # of our registered patterns.
        path = path[5:]
        for (pattern, method) in ForPattern.PATTERNS:
            match = re.match(pattern, path)
            if match:
                # raw_query will match a key to a list of values for that key. Opus only uses each key once
                # (values are separated by commas), so we convert the raw query to a more useful form.
                raw_query = urllib.parse.parse_qs(entry.url.query)
                query = {
                    key: value[0]
                    for key, value in raw_query.items()
                    if isinstance(value, list) and len(value) == 1
                }
                return method(self, query, match)
        return [], None

    #
    # API
    #
    @ForPattern(r'/__api/(data)\.json')
    @ForPattern(r'/__api/(images)\.(.*)')
    @ForPattern(r'/__api/(dataimages)\.json')
    @ForPattern(r'/__api/meta/(result_count)\.json')
    def _api_data(self, query: Dict[str, str], match: Match[str]) -> SESSION_INFO:
        """Delegate the API data/search call to the query handler."""
        return self._query_handler.handle_query(query, match.group(1))

    @ForPattern(r'/__api/image/med/(.*)\.json')
    def _view_metadata(self, query: Dict[str, str], match: Match[str]) -> SESSION_INFO:
        """Record a metadata view for the OPUS id in the URL."""
        metadata = match.group(1)
        return [f'View Metadata: {metadata}'], self.__create_opus_url(metadata)

    @ForPattern(r'/__api/data\.csv')
    def _download_results_csv(self, query: Dict[str, str], match: Match[str]) -> SESSION_INFO:
        """Record a CSV download of the full search results."""
        self.performed_download()
        return ["Download Search Results CSV"], None

    @ForPattern(r'/__api/(download)/(.*)\.zip')
    @ForPattern(r'/__api/(metadata)/(.*)\.csv')
    def _download_one_zip(self, query: Dict[str, str], match: Match[str]) -> SESSION_INFO:
        """Record a single-OPUSID download (zip or metadata csv)."""
        self.performed_download()
        call_type, opus_id = match.groups()
        text = 'Download Single OPUSID' if call_type == 'download' else 'Download CSV for OPUSID'
        if self._uses_html:
            return [format_html('{}: {}', text, opus_id)], self.__create_opus_url(opus_id)
        else:
            return [f'{text}: { opus_id }'], None

    #
    # Collections
    #
    @ForPattern(r'/__collections/reset.html')
    def _reset_selections(self, query: Dict[str, str], match: Match[str]) -> SESSION_INFO:
        """Record a reset of the user's selections."""
        return ['Reset Selections'], None

    @ForPattern(r'/__collections(/default)?/(add|remove)\.json')
    def _change_selections(self, query: Dict[str, str], match: Match[str]) -> SESSION_INFO:
        """Record adding/removing one OPUS id to/from the selections."""
        opus_id = query.get('opus_id')
        selection = match.group(2).title()
        if self._uses_html and opus_id:
            return [
                format_html('Selections {}: {}', selection.title(), opus_id)
            ], self.__create_opus_url(opus_id)
        else:
            return [
                f'Selections {selection.title() + ":":<7} {opus_id or "???"}'
            ], None

    @ForPattern(r'/__collections(/default)?/view(|\.json)')
    def collections_view(self, query: Dict[str, str], match: Match[str]) -> SESSION_INFO:
        """Record a view of the current selections."""
        return ['View Selections'], None

    @ForPattern(r'/__collections/default/addrange.json')
    def _add_range_selections(self, query: Dict[str, str], match: Match[str]) -> SESSION_INFO:
        """Record adding a range of observations to the selections."""
        query_range = query.get('range', '???').replace(',', ', ')
        return [f'Selections Add Range: {query_range}'], None

    @ForPattern(r'/__collections(/default)?/download.zip')
    @ForPattern(r'/__collections(/default)?/download.json')
    @ForPattern(r'/__collections/download/default.zip')
    def _create_zip_file(self, query: Dict[str, str], match: Match[str]) -> SESSION_INFO:
        """Record creation of a zip file of the selected product types."""
        self.performed_download()
        types = query.get('types')
        if types is None:
            output = '???'
        else:
            output = self.quote_and_join_list(types.split(','))
        return [f'Create Zip File: {output}'], None

    @ForPattern(r'/__collections/download/info(|\.json)')
    def _download_product_types(self, query: Dict[str, str], match: Match[str]) -> SESSION_INFO:
        """Record the product types requested, diffing against the previous call."""
        self.performed_download()
        ptypes_field = query.get('types', None)
        new_ptypes = ptypes_field.split(',') if ptypes_field else []
        old_ptypes = self._previous_product_info_type
        self._previous_product_info_type = new_ptypes
        if old_ptypes is None:
            # First call in this session: list everything requested.
            joined_new_ptypes = self.quote_and_join_list(new_ptypes)
            plural = '' if len(new_ptypes) == 1 else 's'
            return [f'Download Product Type{plural}: {joined_new_ptypes}'], None
        result = []

        def show(verb: str, items: List[str]) -> None:
            # Append one "Add/Remove Product Type(s): ..." line if items is non-empty.
            if items:
                plural = 's' if len(items) > 1 else ''
                joined_items = self.quote_and_join_list(items)
                result.append(
                    f'{verb.title()} Product Type{plural}: {joined_items}')

        show('add', [ptype for ptype in new_ptypes if ptype not in old_ptypes])
        show('remove', [ptype for ptype in old_ptypes if ptype not in new_ptypes])
        if not result:
            result.append('Product Types are unchanged')
        return result, None

    @ForPattern(r'/__collections/data.csv')
    def _download_selections_csv(self, query: Dict[str, str], match: Match[str]) -> SESSION_INFO:
        """Record a CSV download of the current selections."""
        self.performed_download()
        return ["Download Selections CSV"], None

    #
    # FORMS
    #
    @ForPattern(r'/__forms/column_chooser\.html')
    def _column_chooser(self, query: Dict[str, str], match: Match[str]) -> SESSION_INFO:
        """Record opening the column chooser form."""
        return ['Column Chooser'], None

    #
    # INIT DETAIL
    #
    @ForPattern(r'/__initdetail/(.*)\.html')
    def _initialize_detail(self, query: Dict[str, str], match: Match[str]) -> SESSION_INFO:
        """Record viewing the detail page for one OPUS id."""
        opus_id = match.group(1)
        if self._uses_html:
            return [format_html('View Detail: {}', opus_id)], self.__create_opus_url(opus_id)
        else:
            return [f'View Detail: { opus_id }'], None

    #
    # Various utilities
    #
    def __create_opus_url(self, opus_id: str) -> str:
        """Build the OPUS detail-view URL for an OPUS id."""
        return format_html('/opus/#/view=detail&detail={0}', opus_id)
from QueryHandler import QueryHandler #query_handler = QueryHandler('10.42.0.251', 9050) query_handler = QueryHandler('192.168.129.196', 9050) #query_handler = QueryHandler('localhost', 9999) def get_genres(): response = query_handler.send_request("GETGENRES", None) return response def get_regions(): response = query_handler.send_request("GETTOWNS", None) return response def get_all_books(login=''): response = query_handler.send_request("GETBOOKALL", {'Login': login}) return response def get_books_by_genre(genre_id, login=''): response = query_handler.send_request("GETBOOK", { "GenreId": genre_id, "Login": login }) return response def get_users_book(user_id):
def main(args):
    ''' Main function, performs a query to zimon and ships the results to BDS.'''
    # Collector connection defaults.
    collector="127.0.0.1"
    collectorPort=9084
    # Logstash connection defaults.
    logstash="127.0.0.1"
    logstashPort=10522
    # Query options.
    bucketSize=60
    numBuckets=10
    # NOTE(review): the stray space before the comma in this metric list
    # is preserved as-is — confirm the collector tolerates it.
    metrics="cpu_system,cpu_user,mem_active,gpfs_ns_bytes_read,gpfs_ns_bytes_written,gpfs_ns_tot_queue_wait_rd ,gpfs_ns_tot_queue_wait_wr"
    try:
        opts, optargs = getopt.getopt(args[1:], SHORT_OPTS, LONG_OPTS)
    except getopt.GetoptError as err:
        print("Invalid option detected: %s", err)
        print(HELP_STR)
        return 1
    # NOTE(review): `o in ("--collector=")` is a *substring* test against a
    # plain string, not tuple membership; it happens to work because getopt
    # returns the option name without '=', but equality would be safer.
    for o,a in opts:
        if o in ("-h", "--help"):
            print(HELP_STR)
            return 1
        elif o in ("--collector="):
            collector = a
        elif o in ("--collector-port="):
            collectorPort = a
        elif o in ("--logstash="):
            logstash = a
        elif o in ("--logstash-port="):
            logstashPort = a
        elif o in ("--metrics="):
            metrics = a
        elif o in ("--bucket-size="):
            bucketSize = a
        elif o in ("--num-buckets="):
            numBuckets = a
    # Build the query handler.
    qh = QueryHandler(collector, collectorPort, logger)
    # Construct the query.
    query=Query()
    query.normalize_rates = False
    query.setBucketSize(bucketSize)
    query.setTime(num_buckets=numBuckets)
    query.addMetric(metrics)
    # Run the query, clean the data.
    qr = qh.runQuery(query)
    if qr is None:
        print("QueryResult had no data.")
        return 3
    qr.remove_rows_with_no_data()
    sensorData={}
    rowNum=0
    # Pre-construct the per-host map from the result's column metadata.
    for columnInfo in qr.columnInfos:
        hostName=columnInfo.keys[0][0]
        sensorData[hostName]=[]
    # Iterate over the rows received, one record per host per row.
    for row in qr.rows:
        colNum=0
        # Initialize the objects for this row.
        for host in sensorData:
            sensorData[host].append({
                "source" : host,
                "type" : "counters-gpfs",
                "timestamp": row.tstamp,
                "data": {}})
        # Parse the column info on this pass.
        for columnInfo in qr.columnInfos:
            sensorName=columnInfo.name
            hostName=columnInfo.keys[0][0]
            sensorData[hostName][rowNum]["data"][sensorName] = row.values[colNum]
            # Convert None values to 0 so downstream consumers get numbers.
            if row.values[colNum] is None:
                sensorData[hostName][rowNum]["data"][sensorName] = 0
            colNum+=1
        rowNum+=1
    # Payload to be written to the big data store: newline-delimited JSON.
    payload=""
    for host in sensorData:
        for row in sensorData[host]:
            payload+=json.dumps(row) + '\n'
    # Write the generated payload to the socket.
    # NOTE(review): Python 2 except syntax — this module is py2-only.
    try:
        logstash_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        logstash_socket.connect((logstash, logstashPort))
        logstash_socket.send(payload)
        logstash_socket.close()
    except socket.error, msg:
        print("Socket had error: %s", msg)
        return 2
from http.server import BaseHTTPRequestHandler
from QueryHandler import QueryHandler

# Single handler instance shared by every request served by this module.
query_handler = QueryHandler()

class OERHTTPHandler(BaseHTTPRequestHandler):
    """HTTP front-end that delegates GET/POST requests to query_handler.

    Responses come back as (headers/content-type info, body-bytes) pairs;
    a None body signals failure.  The __send_*_response helpers are
    presumably defined further down this file — not visible here.
    """

    def __init__(self, request, client_address, server):
        BaseHTTPRequestHandler.__init__(self, request, client_address, server)

    def do_GET(self):
        """Serve a GET request; 404 when the handler returns no body."""
        response = query_handler.handle_get_request(self.path)
        if response[1] is None:
            self.__send_404_response()
            self.wfile.write(bytes("404", "UTF-8"))
        else:
            self.__send_200_response(response[0])
            self.wfile.write(response[1])

    def do_POST(self):
        """Serve a POST request; 500 when the handler returns no body."""
        content_length = int(self.headers['Content-Length'])
        # Body is read in full and decoded as UTF-8 before delegation.
        post_data = str(self.rfile.read(content_length), "UTF-8")
        response = query_handler.handle_post_request(self.path, post_data)
        if response[1] is None:
            self.__send_500_response()
            self.wfile.write(bytes("500", "UTF-8"))
        else:
            self.__send_200_response(response[0])
            self.wfile.write(response[1])