def start_download(place='london'):
    """Fetch one year of daily data (2019-01-01 through 2020-01-01) for *place*.

    Looks up the coordinates of *place*, then downloads one CSV per day into
    the place's data directory, skipping days whose file already exists.
    Prints the server's reply and aborts the whole run on any non-200 response.
    """
    latlong = utils.read_latlong()
    # Unknown place: report the valid choices and bail out.
    if place not in latlong:
        print(f'{place} not in {latlong.keys()}')
        return
    data_dir = utils.setup_dir(place)
    key = utils.read_key()
    lat, long = latlong[place]
    days = pd.date_range(pd.to_datetime('2019 01 01'),
                         pd.to_datetime('2020 01 01'))
    for day in tqdm(days):
        day_str = day.strftime('%Y%m%d')
        day_path = f'{data_dir}/{day_str}.csv'
        # Already downloaded on a previous run — skip.
        if os.path.exists(day_path):
            continue
        resp = requests.get(utils.construct_url(key, lat, long, day))
        if resp.status_code != 200:
            # Surface the error body so the failure is diagnosable.
            print(resp.text)
            return
        utils.response_to_df(resp).to_csv(day_path, index=False)
def get_measurements_list(start_ts, stop_ts, mt_num):
    """Return the list of public IPv4 traceroute measurements from RIPE Atlas.

    Pages through the /measurements/ endpoint 500 results at a time: the
    first page is fetched synchronously to learn the total count, and the
    remaining pages are fetched concurrently with *mt_num* worker threads.

    start_ts / stop_ts -- timestamp bounds on the measurement time window.
    mt_num -- number of worker threads for the page downloads.
    """
    page_size = 500
    measurements_url = utils.url_join(
        [utils.ripeatlas_base_url, "/measurements/"])
    params = {}
    params["format"] = "json"
    params["page_size"] = str(page_size)
    params["is_public"] = "true"
    params["type"] = "traceroute"
    params["af"] = "4"
    params["start_time__gte"] = start_ts
    # BUG FIX: the API uses Django-style double-underscore filters
    # ("stop_time__lte", matching "start_time__gte" above); the old
    # single-underscore key "stop_time_lte" was silently ignored.
    params["stop_time__lte"] = stop_ts
    # Resource list shared by the worker threads.
    resources = ['']
    result_list = []
    # First page, fetched synchronously to learn the total result count.
    url = utils.construct_url(measurements_url, params)
    page = download_ripe_atlas_list_worker(url)
    page_num = int(math.ceil(float(page["count"]) / page_size))
    result_list.extend(page["results"])
    # Slot i receives page i's results; unfetched slots stay "" (which
    # extends to nothing below). Indices 0 and 1 are intentionally unused.
    temp_list = ["" for i in range(page_num + 1)]
    # Build one (url, output-list, page-index) task per remaining page.
    # BUG FIX: restored the full page range; a leftover debug limit
    # (range(2, 3)) only ever scheduled page 2 for download.
    argv = []
    for i in range(2, page_num + 1):
        params["page"] = str(i)
        url = utils.construct_url(measurements_url, params)
        argv.append((url, temp_list, i))
    # Fetch the remaining pages with a thread pool.
    multi_thread.run_with_multi_thread(download_ripe_atlas_list_wrapper,
                                       argv, resources, mt_num)
    for i in range(2, page_num + 1):
        result_list.extend(temp_list[i])
    return result_list
def get_document(self, primitive=False):
    """Return (status, headers, document) for the addressed cart resource.

    Document ids starting with "current" (e.g. '/cart/current<userUrlHash>'
    or '/cart/current<userUrlHash>/cartItems') are synthesized on the fly
    as an RDF Cart owned by the current user; every other id is delegated
    to the superclass.
    """
    if not self.document_id.startswith("current"):
        return super(Domain_Logic, self).get_document()
    # Build a minimal Cart document whose subject is the request URL.
    subject = utils.construct_url(self.request_hostname, self.tenant,
                                  self.namespace, self.document_id)
    cart = rdf_json.RDF_JSON_Document({
        subject: {
            RDF + "type": rdf_json.URI("http://setupshop.me/ns#Cart"),
            CE + 'user': rdf_json.URI(self.user)
            }
        }, subject)
    self.complete_result_document(cart)
    return 200, [], cart
def do_GET(self):
    """Serve a currency-conversion GET request.

    Always replies HTTP 200 with a JSON body; parsing failures and
    upstream exchange-API failures are reported inside the JSON report
    ("response code"/"error" fields) rather than as HTTP error statuses.
    Favicon probes get headers only, with no body.
    """
    self.send_response(200)
    self.send_header('Content-type', 'application/json')
    self.end_headers()
    path_part_URI = self.path
    print(path_part_URI)
    if path_part_URI == "/favicon.ico":
        # Browser favicon probe: log it and send no body.
        print("Отработан запрос к серверу favicon")
        print()
    else:
        try:
            from_currency, to_currency, ante = parse_string(path_part_URI)
        # BUG FIX: narrowed a bare "except:" that also swallowed
        # SystemExit/KeyboardInterrupt; any parse_string failure is
        # still reported as a URI parsing error.
        except Exception:
            print()
            print("Ошибка парсинга Query-компонента URI")
            report = {"response code": "0", "error": "URI parsing error"}
        else:
            absolute_URL = construct_url(BASE_URL, APIKEY, from_currency,
                                         to_currency)
            print()
            print(f"Приложение запрашивает ресурс: \n{absolute_URL}")
            rate = get_rate(absolute_URL, from_currency, to_currency)
            if isinstance(rate, float):
                total = ante * rate
                report = {
                    "response code": "200",
                    "currency exchange": f"{from_currency} to {to_currency}",
                    "requested value": f"{ante}",
                    "rate": f"{rate}",
                    "exchange result": f"{total:.2f}",
                    "error": ""
                }
            else:
                # get_rate signalled an error as a (code, text) pair.
                code = rate[0]
                text = rate[1]
                report = {"response code": f"{code}", "error": f"{text}"}
        finally:
            # Runs for both the error and the success path: serialize the
            # report and send it to the client.
            print()
            print("Результат запроса ресурса:")
            response = json.dumps(report, ensure_ascii=False).encode("utf-8")
            print(response.decode())
            print()
            self.wfile.write(response)
def execute_action(self, body):
    """Kick off an asynchronous product import when *body* carries an
    uploaded Excel/CSV file; otherwise delegate to the superclass.

    Expects *body* to be a form whose 'file_to_import' field is a
    file-upload item (has .file and .headers) — TODO confirm against
    the framework's form parsing. On success returns
    (202, [], 'Product import started ...') immediately; the actual
    import runs on a background thread.
    """
    #TODO: check if post location is valid and body is a form
    form = body
    if 'file_to_import' in form:
        item = form["file_to_import"]
        # Only act on real file uploads with an Excel or CSV content type.
        if item.file and (item.headers['Content-type'] == 'application/vnd.ms-excel' or item.headers['Content-type'] == 'text/csv'):
            status, headers, document = self.recursive_get_document()
            if status == 200:
                # Imports are only allowed against a BackOffice document.
                if document.getValue(RDF+'type') == URI(SUS+'BackOffice'):
                    store = document.getValue(SUS+'store')
                    # POST targets for the imported categories and products.
                    cat_categories_post_url = str(document.getValue(SUS+'categories', None, store))
                    cat_products_post_url = str(utils.construct_url(self.request_hostname, self.tenant, self.namespace, 'products'))
                    # Prefix imported item ids with this document's id.
                    id_prefix = self.document_id + '-'
                    # Run the import in the background and answer 202 now.
                    thread = Thread(target = threaded_import_products, args = (item, cat_categories_post_url, cat_products_post_url, id_prefix, self.user))
                    thread.start()
                    return (202, [], 'Product import started ...')
    # Anything that is not a valid file import falls through to the default.
    return super(Domain_Logic, self).execute_action(body)
def default_resource_group(self):
    """Return the default resource group for this request.

    The 'cart' namespace groups resources under the namespace-level URL;
    every other namespace uses the superclass default.
    """
    if self.namespace != 'cart':
        return super(Domain_Logic, self).default_resource_group()
    return utils.construct_url(self.request_hostname, self.tenant,
                               self.namespace)