def working():
    """Poll `update` forever: fetch, parse, then sleep `interval` seconds.

    The first fetch goes through parse.parse_data(); every subsequent
    fetch in the loop goes through parse.parse_new_data().
    """
    # Initial fetch uses the full-parse entry point.
    payload = update.get()
    parse.parse_data(data=payload)
    logger.info("WAITING...")
    time.sleep(interval)

    # All later fetches only parse the delta.
    while True:
        payload = update.get()
        parse.parse_new_data(data=payload)
        logger.info("WAITING...")
        time.sleep(interval)
def __init__(self, key, city, state):
    """Fetch a 10-day forecast for city/state and index it by day of month.

    Populates self.rjecnik: {day_of_month: [conditions, high C, high F,
    low C, low F, precipitation mm, avg wind kph, wind dir, avg humidity,
    weekday name]}. Left empty when the API reports an error or the
    response payload is missing expected keys.
    """
    self.key = key
    self.city = city
    self.state = state
    # parse_data is the project's API wrapper (defined elsewhere).
    self.data = parse_data(self.key, "forecast10day", self.city, self.state)
    self.rjecnik = {}
    if 'error' in self.data["response"]:
        # Python 3 print() — the file already relies on Python 3 (f-strings).
        print("Error! Check your API key or city/state name")
    else:
        try:
            for day in self.data['forecast']['simpleforecast']['forecastday']:
                if 'day' in day['date']:
                    x = day['date']['day']
                    # One literal instead of nine sequential append() calls;
                    # element order is unchanged.
                    self.rjecnik[x] = [
                        day['conditions'],
                        day['high']['celsius'],
                        day['high']['fahrenheit'],
                        day['low']['celsius'],
                        day['low']['fahrenheit'],
                        day['qpf_allday']['mm'],
                        day['avewind']['kph'],
                        day['avewind']['dir'],
                        day['avehumidity'],
                        day['date']['weekday'],
                    ]
        except (ValueError, KeyError, TypeError) as e:
            print(type(e))
            print("Check is data for that city available")
def __init__(self, key, city, state):
    """Fetch current weather conditions for city/state.

    Populates self.rjecnik with human-readable keys (location, weather,
    local time, temperatures, humidity, wind, pressure, precipitation).
    Left empty when the API reports an error or the payload lacks the
    expected keys.
    """
    self.key = key
    self.city = city
    self.state = state
    # parse_data is the project's API wrapper (defined elsewhere).
    self.data = parse_data(self.key, "conditions", self.city, self.state)
    self.rjecnik = {}
    if 'error' in self.data["response"]:
        # Python 3 print() — the file already relies on Python 3 (f-strings).
        print("Error! Check your API key or city/state name")
    else:
        try:
            # Hoist the repeated sub-dict lookup.
            obs = self.data["current_observation"]
            self.rjecnik['location'] = obs["display_location"]["full"]
            self.rjecnik['weather'] = obs["weather"]
            self.rjecnik['local time'] = obs["local_time_rfc822"]
            self.rjecnik['temperature(C)'] = obs["temp_c"]
            self.rjecnik['temperature(F)'] = obs["temp_f"]
            self.rjecnik['humidity'] = obs["relative_humidity"]
            self.rjecnik['wind direction'] = obs["wind_dir"]
            self.rjecnik['wind speed(kph)'] = obs["wind_kph"]
            self.rjecnik['pressure'] = obs["pressure_mb"]
            self.rjecnik['precipitation(mm)'] = obs["precip_today_metric"]
        except (ValueError, KeyError, TypeError) as e:
            print(type(e))
            print("Check is data for that city available")
def __init__(self, key, city, state):
    """Fetch sunrise/sunset times for city/state.

    Populates self.rjecnik with 'sunrise' and 'sunset' as "HH:MM" strings
    (hour and minute arrive as separate string fields from the API).
    Left empty when the API reports an error or the payload lacks the
    expected keys.
    """
    self.key = key
    self.city = city
    self.state = state
    # parse_data is the project's API wrapper (defined elsewhere).
    self.data = parse_data(self.key, "astronomy", self.city, self.state)
    self.rjecnik = {}
    if 'error' in self.data["response"]:
        # Python 3 print() — the file already relies on Python 3 (f-strings).
        print("Error! Check your API key or city/state name")
    else:
        try:
            phase = self.data["sun_phase"]
            sunset = phase["sunset"]["hour"] + ':' + phase["sunset"]["minute"]
            sunrise = phase["sunrise"]["hour"] + ':' + phase["sunrise"]["minute"]
            self.rjecnik['sunrise'] = sunrise
            self.rjecnik['sunset'] = sunset
        except (ValueError, KeyError, TypeError) as e:
            print(type(e))
            print("Check is data for that city available")
def fifo_scheduling(fruitline_spider_variable):
    """FIFO crawl scheduler: feed URLs from the spider's static list.

    Seeds the global URL queue with the first URL, then, while the
    spider's exit condition holds, drains fetched HTML off the content
    queue, parses it, and enqueues the next URL from the list (if any).
    """
    url_list = fruitline_spider_variable.get_url_list()
    # Python 3: iterators expose next() via the builtin, not a .next() method.
    url = next(url_list)
    url_model = UrlModel(url)
    fruitline_spider_variable.global_url_queue.put(url_model)
    while exit_condition(fruitline_spider_variable):
        if fruitline_spider_variable.html_content_queue.qsize() > 0:
            html_content = fruitline_spider_variable.html_content_queue.get()
            parse_data(html_content.html, fruitline_spider_variable)
            try:
                url = next(url_list)
                url_model = UrlModel(url)
                fruitline_spider_variable.global_url_queue.put(url_model)
            except StopIteration:
                # Python 3 syntax ("except X, e" is a SyntaxError); the
                # exception was never used — list exhaustion is expected.
                pass
def main():
    """Entry point: load config, then parse every unprocessed mission log.

    For each Mission_Log record not yet marked processed, builds the
    report filename, parses the file, and persists the processed flag.
    """
    rec: Mission_Log
    print('Running main module')
    general_init()
    conf = load_cfg()
    logger.debug(f'Loaded the following configuration:\n{conf}')
    if conf['settings']['load_objects_n_score']:
        objects_n_score_path = os.path.join(
            full_path, conf['settings']['objects_n_score_path'])
        load_objects_n_score(objects_n_score_path)
    # full_path is a module-level global set elsewhere in this file.
    log_dir_path = os.path.join(full_path, conf['settings']['log_path'])
    # Process log files matching the configured pattern.
    files = get_files_lst(log_dir_path, conf['settings']['log_ptrn'])
    for rec in files:
        if not rec.is_processed:
            check_mission(rec.name)
            # f-string instead of str() + concatenation, consistent with the
            # rest of this function; produces the identical filename.
            file_path = f'missionReport({rec.name})[{rec.miss_log_id}].txt'
            work_fl = os.path.join(log_dir_path, file_path)
            logger.debug(f'processing file: [{work_fl}]')
            parse_data(work_fl)
            rec.is_processed = True
            rec.save()
    print("\nThat's all folks")
def depth_first_scheduling(fruitline_spider_variable):
    """Depth-limited crawl scheduler.

    Seeds the queue with the spider's start URL (depth -1), then, while
    the exit condition holds, parses fetched pages and enqueues every
    extracted link whose depth stays within the spider's limit; links
    beyond the limit increment the refuse counter instead.
    """
    seed = UrlModel(fruitline_spider_variable.start_url, "", -1)
    fruitline_spider_variable.global_url_queue.put(seed)

    while exit_condition(fruitline_spider_variable):
        if fruitline_spider_variable.html_content_queue.qsize() <= 0:
            continue

        page = fruitline_spider_variable.html_content_queue.get()
        extracted = select_url(page.url, page.html, fruitline_spider_variable)
        parse_data(page.html, fruitline_spider_variable)

        for entry in extracted:
            candidate = UrlModel(entry['url'], page.url, page.depth,
                                 entry['method'])
            if candidate.depth <= fruitline_spider_variable.depth:
                fruitline_spider_variable.global_url_queue.put(candidate)
            else:
                # Too deep: count the refusal rather than enqueueing.
                fruitline_spider_variable.refuse_count += 1
from request_data import get_data
from parse import parse_data

if __name__ == "__main__":
    # Fetch pages 0..9 and accumulate parsed items into `goods`.
    goods = []
    for page_index in range(10):
        page_body = get_data(page_index)
        parse_data(page_body, goods)
def eval_content(self, content):
    """Recursively evaluate every variable and function inside `content`.

    `content` may be any data structure (dict, list, tuple, number,
    string, ...); evaluation is delegated to parse.parse_data with this
    object's test-variable and function mappings.
    """
    variables = self.test_variables_mapping
    functions = self.FUNCTIONS_MAPPING
    return parse.parse_data(content, variables, functions)