def test_init_db(self):
    """Verify the seeded Airports/States join data for three known IATA codes."""
    expected = {
        'DTW': ("Michigan", "Detroit Metropolitan Wayne County Airport", "DTW"),
        'SEA': ("Washington", "Seattle Tacoma International Airport", "SEA"),
        'JFK': ("New-York", "John F Kennedy International Airport", "JFK"),
    }
    for iata, row in expected.items():
        found = data.query(
            "SELECT s.name, a.name, a.IATA FROM Airports AS a "
            "JOIN States AS s ON a.state_id=s.Id WHERE a.IATA='{}'".format(iata))
        self.assertEqual(found, [row])
def query(request):
    # Django view (Python 2): run a DDoS-traffic search driven by a JSON
    # payload posted under the 'request' key.
    # NOTE(review): no return statement is visible after the try/except, so
    # the computed `response` appears to be dropped -- confirm against the
    # full file (this may be a truncated view).
    if not auth.check_login(request):
        # Unauthenticated callers get a JSON-typed error body.
        return HttpResponse("error: access denied", content_type="application/json")
    #request_data = request.REQUEST.get('request')
    request_data = request.POST['request']
    print request_data
    response = "error"
    if None != request_data:
        try:
            # The decoded JSON document rebinds the `request` name from here on,
            # shadowing the Django request object.
            request = json.loads(request_data)
            return_count = vornone(request, 'return_count')
            return_count = False if None == return_count else return_count
            # Forward every recognised filter field to the data layer;
            # vornone() presumably maps a missing key to None -- TODO confirm.
            response = json.dumps(data.query(vornone(request, 'input_src'),
                                             vornone(request, 'input_sport'),
                                             vornone(request, 'input_dst'),
                                             vornone(request, 'input_dport'),
                                             vornone(request, 'tech_syn_flood'),
                                             vornone(request, 'tech_ack_flood'),
                                             vornone(request, 'tech_udp_flood'),
                                             vornone(request, 'tech_icmp_flood'),
                                             vornone(request, 'tech_connection_flood'),
                                             vornone(request, 'tech_stream_flood'),
                                             vornone(request, 'tech_content_drop'),
                                             vornone(request, 'tech_udp_dns_flood'),
                                             vornone(request, 'flag'),
                                             vornone(request, 'start_time'),
                                             vornone(request, 'end_time'),
                                             vornone(request, 'page'),
                                             return_count))
        except Exception , e:
            # NOTE(review): the caught exception `e` is discarded and replaced
            # by a generic message -- intentional opacity, or lost diagnostics?
            response = "error:" + "search error"
async def get_positions() -> dict:
    """Return a mapping of position name -> 0 for every row in `positions`."""
    await asyncio.sleep(0)  # yield control to the event loop
    cursor = query("SELECT * FROM positions")
    return {record[1]: 0 for record in cursor.fetchall()}
async def get_skills():
    """Return a mapping of skill name -> 0 for every row in `skills`."""
    await asyncio.sleep(0)  # yield control to the event loop
    cursor = query("SELECT * FROM skills")
    return {record[1]: 0 for record in cursor.fetchall()}
async def get_skills_info():
    """Fetch all skills as [{'id': ..., 'name': ...}], ordered by name."""
    await asyncio.sleep(0)  # yield control to the event loop
    cursor = query('SELECT id, name FROM skills ORDER BY name')
    return [{'id': record[0], 'name': record[1]} for record in cursor.fetchall()]
def get_user(username: str) -> User:
    """Look up a user row in `aqa` by username.

    Raises IndexError when no row matches (original behavior, preserved).
    """
    # SECURITY FIX: the original interpolated `username` straight into the
    # SQL, allowing SQL injection.  Escaping embedded single quotes is a
    # minimal, driver-agnostic guard; switching to a parameterized query
    # (query() accepts a `parameters=` keyword elsewhere in this file)
    # would be the better long-term fix.
    safe_username = username.replace("'", "''")
    cur = query(
        f"select username, password from aqa where username like '{safe_username}'")
    users = [User(username=row[0], password=row[1]) for row in cur.fetchall()]
    return users[0]
async def get_ways() -> dict:
    """Return a mapping of way name -> 0 for every row in `ways`."""
    await asyncio.sleep(0)  # yield control to the event loop
    cursor = query("SELECT * FROM ways")
    return {record[1]: 0 for record in cursor.fetchall()}
def __init__(self, pokemon):
    """Build a Pokemon wrapper from an API record.

    Args:
        pokemon: dict with at least 'name' and a 'descriptions' list whose
            entries carry a 'resource_uri'.
    """
    self.name = pokemon['name']
    try:
        # The last description entry is assumed to be the most recent.
        uri = pokemon['descriptions'][-1]['resource_uri']
        self.data = data.query(uri)['description']
    except Exception:
        # FIX: the original bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt; catching Exception keeps the best-effort
        # fallback while letting those propagate.
        self.data = "Description of Pokémon goes here. API is down! :("
def save_vacancies(values: list, vac_skills: dict):
    """Replace today's vacancy rows with the freshly collected ones."""
    date_collected = time.strftime('%Y-%m-%d')
    # Drop anything already stored for today before re-inserting.
    query(f"Delete FROM vacancies WHERE date_collected = '{date_collected}'")
    rows = []
    for result in values:
        if not result:
            continue
        # First skills entry whose link occurs in the vacancy URL (raises
        # IndexError when there is none, exactly as the original did).
        skills = [entry['skills'] for entry in vac_skills
                  if entry['link'] in result['vacancy_link']][0]
        rows.append((result['vacancy_title'], result['vacancy_link'],
                     result['company_title'], result['company_link'],
                     result['city_title'], date_collected, skills))
    insert_query = "Insert into vacancies (vacancy, url, company, company_url, city, date_collected, skills) " \
                   "values (%s, %s, %s, %s, %s, %s, %s);"
    query(insert_query, parameters=rows, many=True)
async def get_statistics_by_skill(skill_id: int) -> List[Statistic]:
    """Return the statistics history for one skill, newest first."""
    await asyncio.sleep(0)  # yield control to the event loop
    cursor = query(f"SELECT skill_id, skill_count, skill_percent, date_collected from statistics "
                   f"WHERE skill_id = {skill_id} ORDER BY date_collected DESC")
    return [
        Statistic(count=row[1], percent=row[2],
                  date_collected=str(row[3]), skill_id=row[0])
        for row in cursor.fetchall()
    ]
async def get_ways_statistics_by_date(date_collected: date) -> List[WayStatistic]:
    """Per-way counts for one collection date, highest count first."""
    await asyncio.sleep(0)  # yield control to the event loop
    cursor = query(f"SELECT w.name, ws.count FROM ways_statistics as ws JOIN ways as w on ws.ways_id = w.id "
                   f"WHERE date_collected = '{date_collected}' ORDER BY ws.count DESC;")
    return [WayStatistic(title=row[0], count=row[1]) for row in cursor.fetchall()]
async def get_vacancies_statistics_by_date(date_collected: date) -> List[VacanciesStatistic]:
    """All vacancies collected on a given date, as VacanciesStatistic records."""
    await asyncio.sleep(0)  # yield control to the event loop
    cursor = query(f"SELECT * FROM vacancies WHERE date_collected = '{date_collected}';")
    return [
        VacanciesStatistic(vacancy_title=row[1], vacancy_link=row[4],
                           company_title=row[2], company_link=row[6],
                           city_title=row[3])
        for row in cursor.fetchall()
    ]
async def get_categories() -> List[Category]:
    """Load every row of `categories` as Category objects."""
    await asyncio.sleep(0)  # yield control to the event loop
    cursor = query("Select * from categories")
    return [Category(name=row[1], category_id=row[0], icon=row[3])
            for row in cursor.fetchall()]
def save_ways(values: dict):
    """Persist today's per-way counts, replacing any earlier run for today."""
    # Map way name -> id for the foreign key.
    cursor = query("SELECT * FROM ways")
    name_to_id = {row[1]: row[0] for row in cursor.fetchall()}
    date_collected = time.strftime('%Y-%m-%d')
    # Remove anything already written for today.
    query(f"Delete FROM ways_statistics WHERE date_collected = '{date_collected}'")
    rows = [(name_to_id.get(name), values.get(name), date_collected)
            for name in values]
    insert_query = "Insert into ways_statistics (ways_id, count, date_collected) values (%s, %s, %s);"
    query(insert_query, parameters=rows, many=True)
async def get_statistics_by_date(date_collected: date) -> List[Statistic]:
    """Skill statistics for one collection date, highest count first."""
    await asyncio.sleep(0)  # yield control to the event loop
    cursor = query(
        f"SELECT sk.name, st.skill_percent, st.skill_count, st.date_collected, sk.id FROM statistics as st "
        f"JOIN skills as sk on st.skill_id = sk.id WHERE date_collected = '{date_collected}'"
        f" ORDER BY st.skill_count DESC;")
    return [
        Statistic(title=row[0], percent=row[1], count=row[2],
                  date_collected=str(row[3]), stat_id=row[4])
        for row in cursor.fetchall()
    ]
def save_statistics(percent: dict, count: dict):
    """Persist today's per-skill counts and percentages, replacing today's run."""
    # Map skill name -> id for the foreign key.
    cursor = query("SELECT * FROM skills")
    skill_ids = {row[1]: row[0] for row in cursor.fetchall()}
    date_collected = time.strftime('%Y-%m-%d')
    # Remove anything already written for today.
    query(f"Delete FROM statistics WHERE date_collected = '{date_collected}'")
    rows = [(skill_ids.get(name), count[name], percent.get(name), date_collected)
            for name in percent]
    insert_query = "Insert into statistics (skill_id, skill_count, skill_percent, date_collected) " \
                   "values (%s, %s, %s, %s);"
    query(insert_query, parameters=rows, many=True)
def get_task_state(self) -> str:
    """Poll the tasks table until a state row exists for this task_key,
    then return the first state found.

    FIX: the original retried via unbounded recursion
    (`return self.get_task_state()`), which exhausts the interpreter stack
    (RecursionError) when the row never appears; retry with a loop instead.
    NOTE(review): self.task_key is interpolated into the SQL unescaped --
    injectable if it can carry untrusted text; confirm upstream.
    """
    while True:
        cur = query(
            f"SELECT task_state FROM tasks WHERE task_key like '{self.task_key}';"
        )
        rows = cur.fetchall()
        if rows:
            return rows[0][0]
def search_flight(airport1, airport2):
    """Flask view: show flights between two airports (IATA codes from the URL).

    FIX: the IATA codes were interpolated into SQL unescaped (injection
    risk for a URL-supplied value); single quotes are now escaped.  Also
    reuses the already-read month/order form values instead of reading
    request.form twice.
    """
    def _airport_name(iata):
        # Escape embedded quotes -- minimal injection guard; a parameterized
        # query in the data layer would be preferable.
        safe = iata.replace("'", "''")
        return data.query(
            "SELECT name FROM Airports WHERE IATA='{}'".format(safe))[0][0]

    name1 = _airport_name(airport1)
    name2 = _airport_name(airport2)
    month = ''
    order = ''
    res = []
    no_result = False
    if request.method == 'POST':
        month = request.form['month']
        order = request.form['order']
        res = data.get_data(airport1, airport2, month, order)
        if not res:
            no_result = True
    #airports = data.get_airports(state.replace(u'\xa0', u' '))
    return render_template('search_flight.html', airport1=airport1, airport2=airport2,
                           name1=name1, name2=name2, month=month, order=order,
                           data=res, no_result=no_result)
def query(cls, *, inputs: Callable, inputs_args: Dict, on_always: Callable,
          on_always_args: Dict = None, client: GatewayClients = GatewayClients.GRPC,
          num_clients: int = 1, request_size: int = 100,
          execution_time: int = 10, top_k: int = 10):
    """Thin pass-through to data.query with keyword-only configuration.

    FIX: the original used a mutable default argument (`on_always_args:
    Dict = {}`), which is shared across calls and silently corrupted if any
    callee mutates it; a None sentinel is normalized to a fresh dict per
    call, so callers see identical behavior.
    """
    if on_always_args is None:
        on_always_args = {}
    data.query(inputs=inputs, inputs_args=inputs_args,
               on_always=on_always, on_always_args=on_always_args,
               client=client, execution_time=execution_time,
               num_clients=num_clients, request_size=request_size,
               top_k=top_k)
async def get_positions_statistics_by_date(
        date_collected: date) -> List[PositionStatistic]:
    """Per-position counts for one collection date, highest count first."""
    await asyncio.sleep(0)  # yield control to the event loop
    cursor = query(
        f"SELECT w.name, ps.count FROM positions_statistics as ps "
        f"JOIN positions as w on ps.position_id = w.id WHERE date_collected = '{date_collected}' "
        f"ORDER BY ps.count DESC;")
    return [PositionStatistic(title=row[0], count=row[1])
            for row in cursor.fetchall()]
def politie_map():
    """Return [latitude, longitude, name] triples for every police station
    that has both coordinates set."""
    rows = query(
        "SELECT latitude, longitude, naam "
        "FROM police_stations",
        "sqlite:///data/Opendata.db")
    return [[row.latitude, row.longitude, row.naam]
            for row in rows
            if row.latitude is not None and row.longitude is not None]
def get_vacancies_by_skill(date_collected: str, skill):
    """Collect distinct vacancies mentioning `skill` on a given date.

    'JS' and 'javascript' are treated as one skill: both spellings are
    queried and the results merged.

    FIX: the two branches duplicated the identical query and row-shaping
    code; factored into a local helper.  NOTE(review): `skill` and
    `date_collected` are interpolated into the LIKE patterns unescaped --
    confirm they are trusted before exposing this to user input.
    """
    def _fetch(term):
        # Skills are stored as a '|'-separated list; match the term at the
        # start, middle, end, or as the only entry.
        cur = query(
            f"SELECT distinct vacancy, url, company, city FROM vacancies WHERE date_collected = '{date_collected}' "
            f"and (skills like '%|{term}|%' OR skills like '{term}|%' "
            f"OR skills like '%|{term}' OR skills like '{term}')")
        return [{'vacancy': row[0], 'url': row[1], 'company': row[2], 'city': row[3]}
                for row in cur.fetchall()]

    vac = {'skill': skill}
    vacancies = []
    if skill in ('JS', 'javascript'):
        for js_skill in ('JS', 'javascript'):
            vacancies.extend(_fetch(js_skill))
    else:
        vacancies.extend(_fetch(skill))
    vac['vacancies'] = vacancies
    return vac
def save_positions(values: dict):
    """Persist today's per-position counts, replacing any earlier run for today."""
    # Map position name -> id for the foreign key.
    cursor = query("SELECT * FROM positions")
    name_to_id = {row[1]: row[0] for row in cursor.fetchall()}
    date_collected = time.strftime('%Y-%m-%d')
    # Remove anything already written for today.
    query(
        f"Delete FROM positions_statistics WHERE date_collected = '{date_collected}'"
    )
    rows = [(name_to_id.get(name), values.get(name), date_collected)
            for name in values]
    insert_query = "Insert into positions_statistics (position_id, count, date_collected) values (%s, %s, %s);"
    query(insert_query, parameters=rows, many=True)
def create_topic(self):
    # Validate this topic (self.name / self.link) against existing topics
    # and the target URL, then insert it; returns an error message string on
    # failure and None on success.
    # Async gathering of data
    # NOTE(review): a fresh event loop is created per call and never closed
    # -- confirm whether that leak is acceptable here.
    ioloop = asyncio.new_event_loop()
    asyncio.set_event_loop(ioloop)
    async_values = ioloop.run_until_complete(
        asyncio.gather(*[get_topics()]))
    topics = async_values[0]
    # Reject duplicates by case-insensitive name OR link match.
    for topic in topics:
        if self.name.lower() == topic.name.lower() or self.link.lower(
        ) == topic.link.lower():
            return f'Topic already added with title "{topic.name}"'
    try:
        # dou.ua blocks plain requests, so mimic a browser for those links.
        if 'dou' in self.link:
            headers = {
                'User-Agent':
                'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_3) AppleWebKit/537.36 '
                '(KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36 OPR/50.0.2762.67',
                'Referer': 'https://dou.ua',
                'X-Requested-With': 'XMLHttpRequest'
            }
            response = requests.get(self.link, headers=headers)
        else:
            response = requests.get(self.link)
    except Exception as exception:
        return f'Bad url - {exception.args[0]}'
    # Accept any 2xx status, or a link explicitly whitelisted.
    if str(response.status_code)[0] == '2' or check_link_in_whitelist(
            self.link):
        # NOTE(review): values are format()-ed into the INSERT -- reformat_text()
        # presumably sanitizes them, but this is injectable if it does not; verify.
        query(
            'Insert into topics (category_id, title, link, added_date) values ({},"{}","{}","{}")'
            .format(self.category_id, reformat_text(self.name),
                    reformat_text(self.link), date.today()))
    else:
        # Record the broken link and report it back to the caller.
        message = f'Link "{self.link}" is broken | Code = {response.status_code}'
        query(
            f"Insert into error_log (text, date) values ('{message}', '{date.today()}')"
        )
        return message
def search_topics(search_request: str) -> List[Topic]:
    """Find topics whose title starts with, or contains a word starting
    with, the search text; ordered by category then title."""
    cursor = query(f"""select t.title, t.link, c.title, c.id from topics as t
        join categories as c on t.category_id = c.id
        where t.title like '% {search_request}%'
        or t.title like '{search_request}%'
        or t.title like '[{search_request}%'
        order by c.id, t.title""")
    return [
        Topic(name=row[0], link=row[1], category_id=row[3], category_name=row[2])
        for row in cursor.fetchall()
    ]
async def get_topics() -> List[Topic]:
    """All topics joined with their category name, newest first within
    each category."""
    await asyncio.sleep(0)  # yield control to the event loop
    cursor = query(
        """Select t.id, t.category_id, t.title, t.link, t.added_date, c.title
        from topics as t JOIN categories as c on t.category_id = c.id
        order by category_id, id DESC""")
    return [
        Topic(topic_id=row[0], category_id=row[1], name=row[2], link=row[3],
              date_added=row[4], category_name=row[5],
              is_new=Topic.check_if_topic_is_new(row[4]))
        for row in cursor.fetchall()
    ]
def device_data(device_id, device_type):
    """Return device readings as JSON, optionally limited to a time window
    given via range_min/range_max GET parameters (milliseconds)."""
    # 0/0 means "no range restriction".
    range_min = 0
    range_max = 0
    args = request.args
    if 'range_min' in args and 'range_max' in args:
        # The client sends milliseconds; the data layer expects seconds.
        range_min = int(args['range_min']) / 1000
        range_max = int(args['range_max']) / 1000
    results = data.query(device_id, device_type, range_min, range_max)
    return json.dumps(results)
def device_data(device_id, device_type):
    """Return device readings as JSON; defaults to the last
    settings['range_seconds'] seconds unless range_min/range_max GET
    parameters (milliseconds) override the window."""
    # Default window: the configured number of seconds ending now.
    range_max = int(time.time())
    range_min = range_max - settings['range_seconds']
    args = request.args
    if 'range_min' in args and 'range_max' in args:
        # The client sends milliseconds; the data layer expects seconds.
        range_min = int(args['range_min']) / 1000
        range_max = int(args['range_max']) / 1000
    results = data.query(device_id, device_type, range_min, range_max)
    return json.dumps(results)
def query(request):
    # Django view (Python 2): person lookup (name / sid / idcard) driven by a
    # JSON blob under the 'request' parameter.
    # NOTE(review): no return statement is visible after the try/except, so
    # the computed `response` appears to be dropped -- confirm against the
    # full file (this may be a truncated view).
    if not auth.check_login(request):
        # Unauthenticated callers get a JSON-typed error body.
        return HttpResponse("error: access denied", content_type = "application/json")
    request_data = request.REQUEST.get('request')
    response = "error"
    if None != request_data:
        try:
            print request_data
            # The decoded JSON document rebinds the `request` name from here on,
            # shadowing the Django request object.
            request = json.loads(request_data)
            return_count = vornone(request, 'return_count')
            return_count = False if None == return_count else return_count
            # vornone() presumably maps a missing key to None -- TODO confirm.
            response = json.dumps(data.query(vornone(request, 'name'),
                                             vornone(request, 'sid'),
                                             vornone(request, 'idcard'),
                                             vornone(request, 'page'),
                                             return_count))
        except Exception, e:
            response = "error:" + str(e)
            print e
def plot1():
    """Monthly street-robbery counts for the first plot, as a list of
    {'date': 'maandnaam-jaar', 'value': count} dicts.

    FIX: the original local variable `dict` shadowed the builtin; renamed
    to `entry`.
    """
    # Query to obtain the required information for the first plot
    plot1sql = query(
        "SELECT strftime('%Y', begindatum) AS jaar, strftime('%m', begindatum) AS maand, maandnaam, count(*) AS count_1 "
        "FROM(SELECT begindatum, maandnaam"
        " FROM Straatroven)"
        "GROUP BY jaar, maand",
        "sqlite:///data/Opendata.db"
    )
    list_dict = []
    for i in plot1sql:
        # Shape each SQL row into the plot's expected dict form.
        entry = {'date': i.maandnaam + "-" + i.jaar, 'value': i.count_1}
        list_dict.append(entry)
    # Return the list for further use in the application
    return list_dict
def map():
    """Return {'data': [{'lat': ..., 'lng': ...}, ...]} for every robbery
    row that has both coordinates.

    FIX: the original local variable `dict` shadowed the builtin; renamed.
    (The function name itself also shadows builtin map() but is part of the
    public interface and left unchanged.)
    """
    latlong = []
    mapsql = query(
        "SELECT latitude, longitude "
        "FROM Straatroven",
        "sqlite:///data/Opendata.db")
    for i in mapsql:
        # Skip the row if either coordinate is missing.
        if (i.latitude is None) or (i.longitude is None):
            continue
        latlong.append({'lat': i.latitude, 'lng': i.longitude})
    # Wrap the list under the 'data' key the front-end expects.
    retDict = {'data': latlong}
    return retDict
def device_data(device_id, device_type):
    """Return device readings as JSON (logging a preview of the first ten
    results); defaults to the last settings['range_seconds'] seconds unless
    range_min/range_max GET parameters (milliseconds) override the window."""
    # Default window: the configured number of seconds ending now.
    range_max = int(time.time())
    range_min = range_max - settings['range_seconds']
    args = request.args
    if 'range_min' in args and 'range_max' in args:
        # The client sends milliseconds; the data layer expects seconds.
        range_min = int(args['range_min']) / 1000
        range_max = int(args['range_max']) / 1000
    results = data.query(device_id, device_type, range_min, range_max)
    # /data/device/1/temperature: [[1460737810000, 21.19], [1460738101000, 21.5], ...
    logger.info("/data/device/{}/{}: {}".format(device_id, device_type,
                                                json.dumps(results[:10])))
    return json.dumps(results)
def plot2():
    """Robbery counts bucketed by distance to the nearest police station,
    in 50m bins up to 3000m, as {'date': 'lo-hi', 'value': count} dicts.

    FIX: the original local variable `dict` shadowed the builtin; renamed
    to `entry`.
    """
    list_dict = []
    dLast = 0
    d = 50
    while d <= 3000:
        # One query per (dLast, d] distance bin.
        plot2sql = query(
            "SELECT count(*) AS count_1 "
            "FROM Straatroven "
            "WHERE distance_pol > {0} AND distance_pol < {1}".format(dLast, d),
            "sqlite:///data/Opendata.db"
        )
        for i in plot2sql:
            print(d, i.count_1)  # debug output retained (original behavior)
            entry = {'date': str(dLast) + "-" + str(d), 'value': i.count_1}
            list_dict.append(entry)
        dLast = d
        d = d + 50
    return list_dict
def get_firmwares(self, fg):
    # Fetch the firmware row for node base `fg`, log and print its fields,
    # and return (width, firmware, fans, chassis, block_qty, syscfg).
    # NOTE(review): Python 2 code (print statements).  `fg` is concatenated
    # into the SQL unescaped -- injectable if it can carry untrusted text.
    print "-" * 50
    print "Getting list of Firmwares for: " + self.stn
    print "-" * 50
    firm = data.query("select * from firmwares where node_base = '" + fg + "'")
    if len(firm) == 0:
        # NOTE(review): in this branch the locals used by the return below
        # are never bound, so the function would raise UnboundLocalError --
        # confirm whether an early return/raise was intended here.
        print 'Error, Firmware tables empty'
    else:
        # Unpack the firmware row by fixed column positions.
        width = firm[0][0]
        firmware = firm[0][1]
        fans = firm[0][2]
        chassis = firm[0][11]
        block_qty = firm[0][9]
        syscfg = firm[0][3]
        logging.info("-" * 50)
        logging.info('Rack Width: %s\n' % width + 'Firmware: %s\n' % firmware + 'Fans:%s\n' % fans + 'Chassis:%s\n' % chassis + 'Blocks: %s\n' % block_qty + "Sysconfig: %s\n" % syscfg)
        logging.info("-" * 50)
        print "-" * 50
        print 'Rack Width: %s\n' % width + 'Firmware: %s\n' % firmware + 'Fans:%s\n' % fans + 'Chassis:%s\n' % chassis + 'Blocks: %s\n' % block_qty
        print "-" * 50
    return width, firmware, fans, chassis, block_qty, syscfg