def request_data(url, retry=0, auto_proxy=False, delay=0, **kwargs):
    """Fetch *url* with an HTTP GET and return the response body text.

    :param url: target URL
    :param retry: number of retries (0 disables the retry adapter)
    :param auto_proxy: fetch a proxy IP from the pool and route through it
    :param delay: seconds to sleep before issuing the request
    :param kwargs: extra keyword arguments passed to requests.get
    :return: response text on HTTP 200, otherwise None
    """
    if delay:
        time.sleep(delay)
    sess = None
    if retry:
        sess = requests.Session()
        adapter = HTTPAdapter(max_retries=retry)
        sess.mount('http://', adapter)
        sess.mount('https://', adapter)
        method = functools.partial(sess.request, method='get')
    else:
        method = requests.get
    if auto_proxy:
        kwargs.update({'proxies': {'http': 'http://{}'.format(get_proxy())}})
    try:
        res = method(url=url, headers=get_header(), **kwargs)
        if res.status_code == 200:
            logging.debug("Request Data - {0} - {1}".format(
                res.status_code, url))
            return res.text
        logging.info("Request Data - {0} - {1}".format(res.status_code, url))
    except requests.exceptions.RequestException as e:
        logging.error("Request ERROR: {0}, url: {1}".format(e, url))
    finally:
        # The Session was previously leaked on every retry-enabled call;
        # close it to release pooled connections.
        if sess is not None:
            sess.close()
def reqDistance(self):
    """Command the sensor (I2C opcode 0x51) to start a distance measurement.

    Returns 30 as a fallback reading when the I2C write fails.
    """
    try:
        self.__write_i2c(0x51)
    except IOError:
        message = "IO error. Sensor: {} request distance problem"
        log.error(message.format(self.position))
        return 30
def compare_osm_wikidata(file: str) -> str:
    """Tag OSM GeoJSON features with nearby Wikidata items, in place.

    For every feature, any Wikidata item whose great-circle distance to the
    feature's coordinates is under 10 meters is written into the feature
    properties as ``wid`` (item key) and ``wn`` (item name). The file is
    rewritten in place.

    :param file: path to the GeoJSON file to read and overwrite
    :return: the input path on success, None when the JSON is invalid
    """
    file_path = Path(file)
    with open(file_path, "r", encoding="utf-8") as json_file:
        try:
            data = json.load(json_file)
            wikidata_items = get_wikidata()
            for feature in data["features"]:
                osm_coords = feature["geometry"]["coordinates"]
                prop = feature["properties"]
                for key, value in wikidata_items.items():
                    wikidata_item = value["coordinates"]
                    # NOTE(review): GeoJSON stores (lon, lat) while geopy
                    # expects (lat, lon); this assumes get_wikidata() uses
                    # the same ordering as the features — confirm.
                    distance = round(
                        geopy.distance.great_circle(
                            osm_coords, wikidata_item).meters, 2)
                    if distance < 10:
                        # A later match overwrites an earlier one, so each
                        # feature keeps at most one Wikidata reference.
                        keys = [key]
                        names = [value["name"]]
                        prop["wid"] = keys
                        prop["wn"] = names
        except ValueError as e:
            logging.error(f"Value Error: {e}")
            return None
    with open(file_path, "w", encoding="utf-8") as json_file:
        json.dump(data, json_file)
    print(file)
    return file
def filter_osm_geojson(file: str, tags: bool = True, coords: bool = True) -> str:
    """Compact an OSM GeoJSON file in place.

    Rounds point coordinates and folds selected tag values into short
    single-letter property flags (``n``, ``s``, ``r``, ``b``, ``o`` …).

    :param file: path to the GeoJSON file to read and overwrite
    :param tags: fold tag values into flag properties
    :param coords: normalize feature point coordinates
    :return: the input path on success, None when the JSON is invalid
    """
    file_path = Path(file)
    with open(file_path, "r", encoding="utf-8") as json_file:
        try:
            data = json.load(json_file)
            for feature in data["features"]:
                if coords:
                    feature["geometry"]["coordinates"] = format_coordinates(
                        feature["geometry"]["coordinates"][0],
                        feature["geometry"]["coordinates"][1],
                    )
                if tags:
                    prop = feature["properties"]
                    # Format other values
                    for key, value in list(prop["tags"].items()):
                        if key == "name":
                            if (validate_input(value, pow_filter_values,
                                               ["kościoła"])
                                    or value.isupper()):
                                prop["n"] = 1
                            if validate_input(value, pow_filter_short_values,
                                              ["kościoła"]):
                                prop["s"] = 1
                        if key == "religion":
                            # Unknown religion values used to raise KeyError
                            # and abort the whole file; fall back to 'other'
                            # (consistent with the newer filter variant).
                            try:
                                prop["r"] = religion_mapping[value]
                            except KeyError:
                                prop["r"] = 'other'
                        if key == "building" and value == "yes":
                            prop["b"] = 1
                        if (key in ["opening_hours", "service_times"]
                                and validate_input(value,
                                                   hours_filter_values)):
                            prop["o"] = 1
                    # Flag entirely-missing tags once per feature.
                    if "religion" not in prop["tags"]:
                        prop["r"] = 1
                    if "denomination" not in prop["tags"]:
                        prop["d"] = 1
                    if "diocese" not in prop["tags"]:
                        prop["i"] = 1
                    if "deanery" not in prop["tags"]:
                        prop["e"] = 1
        except ValueError as e:
            logging.error(f"Value Error: {e}")
            return None
    with open(file_path, "w", encoding="utf-8") as json_file:
        json.dump(data, json_file)
    return file
def geojson_to_mongodb(import_file: str, target_db: str, target_col: str, osm=True):
    """Load a GeoJSON FeatureCollection into a MongoDB collection.

    Drops any existing collection of the same name, creates a 2dsphere index
    on geometry (plus OSM text indexes when *osm* is True), then bulk-inserts
    all features unordered.

    Based on: https://github.com/rtbigdata/geojson-mongo-import.py | MIT License

    :param import_file: path to the GeoJSON file
    :param target_db: target database name
    :param target_col: target collection name
    :param osm: also create text indexes on OSM-specific fields
    :return: None (progress and errors are logged)
    """
    client = MongoClient(uri)
    db = client[target_db]
    collection = db[target_col]
    import_file_path = Path(import_file)
    if not import_file_path.is_file():
        logging.error(f"Finish: Import file {import_file} does not exist.")
        return None
    with open(import_file_path, "r") as f:
        logging.info(f"Start: Opening GeoJSON file {import_file}.")
        try:
            geojson_file = json.loads(f.read())
        except ValueError as e:
            logging.error(f"JSON file {import_file} is invalid. Reason: {e}")
            return None
    if target_col in db.list_collection_names():
        logging.info(f"Start: Dropping existing collection {target_col}.")
        collection.drop()
    # create 2dsphere index and text indexes
    collection.create_index([("geometry", GEOSPHERE)])
    if osm:
        collection.create_index([("properties.type", TEXT),
                                 ("properties.id", TEXT)])
    features = geojson_file["features"]
    logging.info("Finish: Features loaded to object.")
    try:
        logging.info(f"Start: Loading features to collection {target_col}.")
        # insert_many(ordered=False) replaces the legacy
        # initialize_unordered_bulk_op()/execute() API, deprecated in
        # PyMongo 3.5 and removed in PyMongo 4.
        result = collection.insert_many(features, ordered=False)
        logging.info(
            f'Finish: Number of Features successully inserted: {len(result.inserted_ids)} '
        )
    except BulkWriteError as bwe:
        n_inserted = bwe.details["nInserted"]
        err_msg = bwe.details["writeErrors"]
        logging.info("Errors encountered inserting features")
        logging.info(f"Number of Features successully inserted: {n_inserted} ")
        logging.info("The following errors were found:")
        for item in err_msg:
            print(f'Index of feature: {item["index"]}')
            print(f'Error code: {item["code"]}')
            logging.info(
                f'Message(truncated due to data length): {item["errmsg"][0:120]}'
            )
def run(self):
    """Initialise PWM on the GPIO manager and the ultrasonic sensor, then
    enter the detect/listen loop. Falls back to re-initialising the pins
    and logging on any failure."""
    try:
        GpioMgmt().init_pwm()
        self.ultrasonic.init_pwm()
        self.detect_to_turn()
        self.listen()
    except Exception as exc:
        GpioMgmt().init_pin()
        logging.error(str(exc))
def ReadSmembers(self, name):
    """Return the members of the Redis set *name* (key encoded as UTF-8).

    :param name: set key as str
    :return: set of members on success, "" on failure (best-effort)
    """
    status = ""
    try:
        name_data = name.encode("utf-8")
        status = self.db.smembers(name_data)
    except Exception as e:
        logging.error("smembers_data_error: %s" % e)
    # Return moved out of `finally`: a return there silently swallowed any
    # exception raised on the way out (including KeyboardInterrupt).
    return status
def run(self):
    """Start the smartcar in auto-pilot mode driven by the distance sensors."""
    try:
        logging.debug('Start smartcar in mode auto_pilot with sensors ...')
        GpioMgmt().init_pwm()
        self.ultrasonic.init_pwm()
        self._run()
    except Exception as exc:
        # Restore pins to a safe state before reporting the failure.
        GpioMgmt().init_pin()
        logging.error(str(exc))
def get_proxy():
    """Pick a validated proxy from the pool, expiring dead entries.

    Re-reads the pool after each expiry; logs an error and returns None
    when no working proxy remains.
    """
    pool = ProxyPool.get_proxy_pool()
    while pool:
        candidate = random.choice(pool)
        if ProxyPool.is_valid_proxy(candidate):
            return candidate
        # Drop the dead proxy, then fetch the (possibly smaller) pool again.
        ProxyPool.expire(candidate)
        pool = ProxyPool.get_proxy_pool()
    logging.error("@get_proxy Error: no available proxy.")
def acs_stats_multi(self):
    """Render the standard chart set for every CSV in self.list_of_csv.

    Each CSV name (minus '.csv') is used as the server name in chart titles
    and output PNG filenames. Produces four charts per server: port 80 vs
    8080, 8181 vs 8182, 443 vs 8443, and total vs used RAM. Logs an error
    when self.list_of_csv is not a list; any exception is logged with its
    traceback.
    """
    try:
        if isinstance(self.list_of_csv, list):
            logging.info(
                f'{__class__.__name__ } [Processing CSVs: \n [{self.list_of_csv}]'
            )
            for single_csv in self.list_of_csv:
                logging.info(
                    f'{__class__.__name__ } [Start - Processing report - {single_csv}]'
                )
                server = str(single_csv).replace('.csv', '')
                # Port 80 vs 8080 socket counts over time.
                acs_sockets1 = [
                    'acs_port_8080', 'acs_port_80', 'timestampt'
                ]
                acss1 = Analysis(single_csv, acs_sockets1,
                                 f'{server.upper()} port 80 vs 8080',
                                 f'{server}_8080_vs_80.png')
                acss1.multi_kpi_vs_time('8080', '80')
                # Port 8181 vs 8182.
                acs_sockets2 = [
                    'acs_port_8181', 'acs_port_8182', 'timestampt'
                ]
                acss2 = Analysis(single_csv, acs_sockets2,
                                 f'{server.upper()} port 8181 vs 8182',
                                 f'{server}_8181_vs_8182.png')
                acss2.multi_kpi_vs_time('8181', '8182')
                # Port 443 vs 8443. NOTE(review): re-binds acss2 rather
                # than using a fresh name (acss3) — harmless but confusing.
                acs_sockets3 = [
                    'acs_port_443', 'acs_port_8443', 'timestampt'
                ]
                acss2 = Analysis(single_csv, acs_sockets3,
                                 f'{server.upper()} port 443 vs 8443',
                                 f'{server}_443_vs_8443.png')
                acss2.multi_kpi_vs_time('443', '8443')
                # Total vs used RAM over time.
                ram = ['total_ram', 'used_ram', 'timestampt']
                used_ram = Analysis(single_csv, ram,
                                    f'{server.upper()} Used RAM',
                                    f'{server}_used_ram.png')
                used_ram.multi_kpi_vs_time('total', 'used')
                logging.info(
                    f'{__class__.__name__ } [Finish - Processing report - {single_csv}]'
                )
        else:
            logging.error(
                f'{__class__.__name__ } [{self.list_of_csv} is not type of list!'
            )
    except Exception as e:
        logging.exception(f'{__class__.__name__ } [Exception: {e}',
                          exc_info=1)
def filter_osm_geojson(file: str) -> str:
    """Compact an OSM GeoJSON file in place, folding selected tag values
    into short single-letter property flags (``n``, ``s``, ``r``, ``b``,
    ``o`` …).

    :param file: path to the GeoJSON file to read and overwrite
    :return: the input path on success, None when the JSON is invalid
    """
    file_path = Path(file)
    with open(file_path, "r", encoding="utf-8") as json_file:
        try:
            data = json.load(json_file)
            for feature in data["features"]:
                prop = feature["properties"]
                # Format other values
                for key, value in list(prop["tags"].items()):
                    if key == "name":
                        if (validate_input(
                                value,
                                pow_filter_values,
                                ("kościoła", "Kościoła", "Apostolat",
                                 "świetlica"),
                        ) or value.isupper()):
                            prop["n"] = 1
                        # BUG FIX: ("kościoła") was a plain parenthesized
                        # string, not a 1-tuple — every other call site
                        # passes a sequence of words.
                        if validate_input(value, pow_filter_short_values,
                                          ("kościoła",)):
                            prop["s"] = 1
                    if key == "religion":
                        try:
                            prop["r"] = religion_mapping[value]
                        except KeyError:
                            prop["r"] = 'other'
                    if key == "building" and value == "yes":
                        prop["b"] = 1
                    if (key in ["opening_hours", "service_times"]
                            and validate_input(value, hours_filter_values)):
                        prop["o"] = 1
                # Flag entirely-missing tags once per feature.
                if "religion" not in prop["tags"]:
                    prop["r"] = 1
                if "denomination" not in prop["tags"]:
                    prop["d"] = 1
                if "diocese" not in prop["tags"]:
                    prop["i"] = 1
                if "deanery" not in prop["tags"]:
                    prop["e"] = 1
        except ValueError as e:
            logging.error(f"Value Error: {e}")
            return None
    with open(file_path, "w", encoding="utf-8") as json_file:
        json.dump(data, json_file)
    return file
def ReqUid(self):
    """Fetch UID data from the signed API endpoint and hand it to ParseData.

    Builds the URL from the signed query string, GETs it with a
    (connect=5s, read=15s) timeout, decodes the JSON payload, and delegates
    parsing. Errors are logged, not raised.
    """
    (sn, queryStr) = self.GetSn()
    url = self.url + queryStr + "&sn={}".format(sn)
    try:
        response = requests.get(url, timeout=(5, 15))
        data = json.loads(response.content.decode('utf-8'))
        # Removed leftover debug print("2222").
        self.ParseData(data)
    except Exception as e:
        logging.error('Solve_error:%s' % e)
def run(self):
    """Start the smartcar in auto-pilot mode driven by the camera."""
    try:
        logging.debug('Start smartcar in mode auto_pilot with camera ...')
        GpioMgmt().init_pwm()
        # self.ultrasonic.init_pwm()
        self._run()
    except KeyboardInterrupt as interrupt:
        # Ctrl-C: release GPIO cleanly, then propagate the interrupt.
        GpioMgmt().release()
        logging.info("[+] Exiting")
        raise interrupt
    except Exception as exc:
        GpioMgmt().init_pin()
        logging.error(str(exc))
def notify(self, message):
    """Send *message* (prefixed with the device name) through the Telegram bot.

    Flow: build the bot from the configured token, verify credentials with
    getMe(), discover the chat id from the bot's most recent update, then
    send. Every failure is logged and aborts the notification without
    raising.
    """
    try:
        bot = telegram.Bot(token=settings.TELEGRAM_TOKEN)
    except telegram.error.InvalidToken:
        logging.error(
            "Invalid Token. Check your Telegram token configuration.")
        return
    try:
        # getMe() round-trips to the API, proving the credentials work.
        logging.debug(bot.getMe())
    except telegram.error.Unauthorized:
        logging.error("Unauthorized. Check your Telegram credentials.")
        return
    # Chat id is taken from the latest message someone sent to the bot.
    bot_updates = bot.getUpdates()
    if not bot_updates or not bot_updates[-1].message.chat_id:
        logging.error(
            "We need your telegram chat id. Please, send any message to your bot."
        )
        return
    try:
        bot.sendMessage(chat_id=bot_updates[-1].message.chat_id,
                        text=self.device_name + message)
    except telegram.TelegramError:
        logging.error(
            "An error raised sending the Telegram message. " +
            "Please, send a new message to your bot and try again. " +
            "This way we check if the chat_id is not updated.")
def _add_basics(self) -> "tuple[Element, Element]":
    """Apply the common element attributes to this renderer's XML template.

    Loads the template, locates the root element by self.root_element_name,
    sets the enabled/testname/element_type attributes, and copies
    self.comments into the 'TestPlan.comments' stringProp when present.

    :return: (element_root, xml_tree)
    """
    logging.info(f'{type(self).__name__} | Render started')
    xml_tree: Optional[Element] = self.get_template()
    element_root = xml_tree.find(self.root_element_name)
    element_root.set('enabled', str(self.is_enabled).lower())
    element_root.set('testname', self.name)
    element_root.set('element_type', str(type(self).__name__))
    elem_list = element_root.findall('stringProp')
    for element in elem_list:
        try:
            if element.attrib['name'] == 'TestPlan.comments':
                element.text = self.comments
                break
        except KeyError:
            # Only a missing 'name' attribute is expected here; the old
            # blanket `except Exception` hid unrelated failures.
            logging.error('Unable to add comment')
    # The original `-> (Element, Element)` annotation built a tuple object,
    # not a type; quoted tuple[...] expresses the intent correctly.
    return element_root, xml_tree
def readDistanceResponse(self):
    """Read the two-byte range result from the sensor and cache it.

    Registers 2 and 3 hold the high and low bytes of the measurement.
    Returns the distance, or 30 as a fallback on I2C read failure.
    """
    try:
        high_byte = self.__read_i2c(2)
        low_byte = self.__read_i2c(3)
        distance = int((high_byte << 8) + low_byte)
        self.lastReading = distance
        return distance
    except IOError:
        log.error("IO error. Sensor: {} read distance problem".format(
            self.position))
        return 30
def update_data(self, queryName, xmin, ymin, xmax, ymax):
    """Mark a grid-cell row as processed (status = 2) in BaiDuMapClassfyLatLng.

    :param queryName: category name of the cell
    :param xmin, ymin, xmax, ymax: cell bounding-box coordinates
    """
    # Parameterized query: the old .format() interpolation produced invalid
    # SQL for string values (queryName was unquoted) and was open to
    # injection.
    sql = ("UPDATE BaiDuMapClassfyLatLng SET status = 2 WHERE queryName = %s "
           "and LdLat=%s and LdLng=%s and RuLat=%s and Rulng=%s")
    try:
        conn, cur = self.connect_db()
    except Exception as e:
        # The old message had two %s placeholders but one value, which
        # raised TypeError inside the error handler itself.
        logging.info('error_connect%s:%s' % (e, sql))
        return
    try:
        cur.execute(sql, (queryName, xmin, ymin, xmax, ymax))
        conn.commit()
        print("修改成功")
    except Exception as e:
        logging.error("select_data_error: %s" % e)
        conn.rollback()
    finally:
        self.free_close_db(cur, conn)
def __init__(self, debug=False, address=0x60, bus=1):
    """Store compass configuration and open the SMBus unless running as a dummy.

    :param debug: enable debug behaviour
    :param address: I2C device address of the compass module
    :param bus: SMBus bus number to open
    """
    self.address = address
    self.bus = None
    self.debug = debug
    self.sensorReadings = []
    self.ma = moving_average(1)
    # Guard clause: in dummy mode there is no hardware bus to open.
    if dummy:
        return
    log.debug('Setting up SMBus')
    try:
        self.bus = smbus.SMBus(bus)
    except IOError:
        log.error(
            "IO error. Please check if compass module is connected correctly."
        )
def insert_lng_lat(self, item):
    """Insert one grid-cell row into BaiDuMapClassfyLatLng.

    :param item: sequence of at least 10 values in column order
                 (queryName, LdLat, LdLng, RuLat, RuLng, status, createTime,
                 secondId, detail_url, rs_1)
    """
    try:
        print("item", item)
        conn, cur = self.connect_db()
        time.sleep(0.05)
    except Exception as e:
        logging.info('error_connect%s:%s' % (e, str(item)))
        return
    try:
        sql = ("INSERT INTO BaiDuMapClassfyLatLng(queryName, LdLat, LdLng, "
               "RuLat, RuLng, status, createTime, secondId, detail_url,rs_1) "
               "VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s,%s)")
        # Equivalent to listing item[0]..item[9] explicitly.
        cur.execute(sql, tuple(item[:10]))
        conn.commit()
        print("插入成功!")
    except Exception as e:
        # Log label fixed: this inserts lat/lng cells, not proxies.
        logging.error('insert_lng_lat_error%s:%s' % (e, str(item)))
        conn.rollback()
    finally:
        self.free_close_db(cur, conn)
def selectData(self, firstName):
    """Fetch unprocessed (status = 1) grid rows whose queryName contains *firstName*.

    :param firstName: substring to match against queryName
    :return: fetched rows, an empty set when the query fails, or None when
             the connection cannot be established
    """
    rows = set()
    try:
        conn, cur = self.connect_db()
    except Exception as e:
        logging.info('error_connect%s:%s' % (e, firstName))
        return
    try:
        # Parameterized LIKE: the old "'%%%%%s%%%%' % firstName" string
        # interpolation was injection-prone and unreadable.
        # NOTE(review): column spelled 'Rulng' here as in update_data —
        # confirm against the actual schema.
        sql = ("SELECT queryName, LdLat, LdLng, RuLat, Rulng "
               "FROM BaiDuMapClassfyLatLng "
               "WHERE queryName LIKE %s and status = 1;")
        cur.execute(sql, ('%{}%'.format(firstName),))
        conn.commit()
        rows = cur.fetchall()
    except Exception as e:
        logging.error("select_data_error: %s" % e)
        conn.rollback()
    finally:
        self.free_close_db(cur, conn)
    return rows
def getCount(self, seconid):
    """Return the count(*) result rows for POIs with the given second-class id.

    :param seconid: secondClassfyId value to count
    :return: cursor.fetchall() result, 1 when the query fails, or None when
             the connection cannot be established
    """
    count = 1
    try:
        conn, cur = self.connect_db()
    except Exception as e:
        # Fixed: the old format string had two %s placeholders but one
        # value, raising TypeError inside the handler.
        logging.info('error_connect%s:%s' % (e, seconid))
        return
    try:
        sql = "SELECT count(*) FROM BaiDuMapPoi WHERE secondClassfyId=%s;"
        cur.execute(sql, (seconid,))
        conn.commit()
        count = cur.fetchall()
        # Removed misleading copy-pasted print("插入成功!") — this is a
        # SELECT, not an insert.
    except Exception as e:
        logging.error('get_count_error%s:%s' % (e, seconid))
        conn.rollback()
    finally:
        self.free_close_db(cur, conn)
    return count
def insert(self, items):
    """Insert POI rows into BaiDuMapPoi, one connection per row (best effort).

    Rows with a falsy lat (item[9]) are skipped. A failed connection aborts
    the whole batch; a failed insert rolls back that row only.

    :param items: iterable of sequences with at least 34 values in column order
    """
    for item in items:
        try:
            conn, cur = self.connect_db()
            time.sleep(0.05)
        except Exception as e:
            logging.info('error_connect%s:%s' % (e, str(item)))
            return
        try:
            if item[9]:
                sql = ("INSERT INTO BaiDuMapPoi(firstClassfyID, "
                       "firstClassfyName, secondClassfyId, secondClassfyName, "
                       "address, area, province, city, detail, lat, lng, "
                       "name, tel, uid, street_id, detailUrl, baiduTag, "
                       "baiduType, price, showHours, overallRating, "
                       "testeRating, environmentRating, serviceRating, "
                       "facilityRating, hygieneRating, technologyRating, "
                       "imageNum, groupNum, discountNum, commentNum, "
                       "favoriteNum, checkinNum, createTime) VALUES ("
                       + ", ".join(["%s"] * 34) + ");")
                # Equivalent to listing item[0]..item[33] explicitly.
                cur.execute(sql, tuple(item[:34]))
                conn.commit()
                print("插入成功!")
        except Exception as e:
            # Log label fixed: this inserts POIs, not proxies.
            logging.error('insert_poi_error%s:%s' % (e, str(item)))
            conn.rollback()
        finally:
            self.free_close_db(cur, conn)
def simplify_geojson(file: str, simplify_ratio=0.3):
    """Simplify every feature geometry in a GeoJSON file, in place.

    :param file: path to the GeoJSON file to read and overwrite
    :param simplify_ratio: ratio passed to the geometry simplifier
    :return: the input path on success, None when the JSON is invalid
    """
    path = Path(file)
    with open(path, "r", encoding="utf-8") as handle:
        try:
            data = json.load(handle)
            for feature in data["features"]:
                original_geometry = feature["geometry"]
                simplified = vw.simplify_geometry(original_geometry,
                                                  ratio=simplify_ratio)
                feature["geometry"] = simplified
        except ValueError as e:
            logging.error(f"Value Error: {e}")
            return None
    with open(path, "w", encoding="utf-8") as handle:
        json.dump(data, handle)
    return file
def listen(self, *args, **kwargs):
    """Control the bot from keyboard events until the listener exits.

    Define the clientSocket in the module before use. GPIO and the
    headlight are released on interrupt or error; the exception is then
    re-raised so the caller can shut down.
    """
    try:
        with Listener(on_press=self._on_press,
                      on_release=self._on_release) as listener:
            listener.join()
    except KeyboardInterrupt:
        GpioMgmt().release()
        self.headlight.off()
        logging.info("[+] Exiting")
        # clientSocket.close()
        # Bare raise: the old `raise e` referenced an unbound name here
        # (KeyboardInterrupt was caught without `as e`), causing NameError.
        raise
    except Exception as e:
        GpioMgmt().release()
        self.headlight.off()
        logging.error(str(e))
        # clientSocket.close()
        raise e
def simplify_geojson_geometry(file: str, fp: int = 5):
    """Round every feature's point coordinates to *fp* decimal places, in place.

    :param file: path to the GeoJSON file to read and overwrite
    :param fp: number of decimal places passed to format_coordinates
    :return: the input path on success, None when the JSON is invalid
    """
    path = Path(file)
    with open(path, "r", encoding="utf-8") as handle:
        try:
            data = json.load(handle)
            for feature in data["features"]:
                coords = feature["geometry"]["coordinates"]
                feature["geometry"]["coordinates"] = format_coordinates(
                    coords[0], coords[1], fp)
        except ValueError as e:
            logging.error(f"Value Error: {e}")
            return None
    with open(path, "w", encoding="utf-8") as handle:
        json.dump(data, handle)
    return file
def notify(self, message):
    """Send *message* (prefixed with the device name) to the configured chat.

    Unlike the update-scanning variant, the chat id comes from
    settings.TELEGRAM_CHAT_ID. Validates the token, verifies credentials
    via getMe(), then sends; every failure is logged and aborts without
    raising. (Removed a large block of commented-out chat-id discovery
    code left over from development.)
    """
    try:
        bot = telegram.Bot(token=settings.TELEGRAM_TOKEN)
    except telegram.error.InvalidToken:
        logging.error("Invalid Token. Check your Telegram token configuration.")
        return
    try:
        logging.debug(bot.getMe())
    except telegram.error.Unauthorized:
        logging.error("Unauthorized. Check your Telegram credentials.")
        return
    try:
        bot.sendMessage(chat_id=settings.TELEGRAM_CHAT_ID,
                        text=self.device_name + message)
    except telegram.TelegramError:
        logging.error("An error raised sending the Telegram message. " +
                      "Please, send a new message to your bot and try again. " +
                      "This way we check if the chat_id is not updated.")
sensorLeft = Sensor(0x73, "left") sensors = [sensorLeft, sensorCenter, sensorRight] extra = RobotHelpers() log.info("Initialized main objects") if robot.checkStatus(): canRun = True else: canRun = False except Exception as error: canRun = False log.error("Could not create main objects.!!! Error: %s" % error) if canRun: while not startSwitch(): pass if sideSwitch() == "Orange": sideSelected = "Orange" else: sideSelected = "Green" if sideSelected == "Orange": log.info("Setting up init positions for 'Orange' site")
def error_404(*args):
    """Log the details of a 404: URL, request headers, and JSON body (if any)."""
    logging.error('404')
    logging.error(request.url)
    logging.error(str(dict(request.headers)))
    # get_json(silent=True) returns None instead of failing when the body
    # is missing or is not valid JSON, unlike the request.json property.
    logging.error(log_object(request.get_json(silent=True)))
def CutChina(self, rect):
    """Recursively subdivide the bounding box *rect* until each cell holds
    fewer than 400 POIs, persisting leaf cells to MySQL.

    :param rect: "xmin,ymin,xmax,ymax" coordinate string
    """
    print(self.queryList)
    if rect not in queryFilterUrl:
        if rect in avg_lat_lng:
            filterUrl.SetAdd(self.queryList, rect)
        # Remember the rect being processed so the quota handler below can
        # remove it again.
        global sign_rect
        sign_rect = rect
        xl_yl = rect.split(r',')
        xmin = Decimal(xl_yl[0])
        ymin = Decimal(xl_yl[1])
        xmax = Decimal(xl_yl[2])
        ymax = Decimal(xl_yl[3])
        bounds = str(xmin) + "," + str(ymin) + "," + str(xmax) + "," + str(ymax)
        (sn, queryStr) = self.GetSn(bounds)
        url = self.url + queryStr + "&sn={}".format(sn)
        data = self.DownHtml(url=url)
        try:
            jsonData = json.loads(data)
            # The API message text means "daily quota exceeded, access
            # restricted" — undo the SetAdd and block until released.
            if jsonData.get('message') == "天配额超限,限制访问":
                print("天配额超限,限制访问")
                filterUrl.removeData(self.queryList, sign_rect)
                # Block the whole program once the daily quota is exceeded.
                pool.wait()
        except Exception as e:
            logging.error("json_loads_error: %s" % url)
            jsonData = {}
        if jsonData.get('total'):
            count = int(jsonData["total"])
            print(count)
            if count < 400:
                # Leaf cell: fewer than 400 POIs — persist it.
                item = []
                status = 1
                createTime = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
                item.append(self.queryList)
                item.append(xmin)
                item.append(ymin)
                item.append(xmax)
                item.append(ymax)
                item.append(status)
                item.append(createTime)
                item.append(self.firstClassfyId)
                item.append(url)
                item.append("港澳台")
                MysqlPool().insert_lng_lat(item)
            else:
                print("555")
                # Too dense: split into four quadrants and recurse.
                middleX = (xmin + xmax) / 2
                middleY = (ymin + ymax) / 2
                rect1 = str(xmin) + "," + str(ymin) + "," + str(middleX) + "," + str(middleY)
                rect2 = str(middleX) + "," + str(ymin) + "," + str(xmax) + "," + str(middleY)
                rect3 = str(xmin) + "," + str(middleY) + "," + str(middleX) + "," + str(ymax)
                rect4 = str(middleX) + "," + str(middleY) + "," + str(xmax) + "," + str(ymax)
                # Recursive calls, throttled between requests.
                time.sleep(0.1)
                self.CutChina(rect1)
                time.sleep(0.1)
                self.CutChina(rect=rect2)
                time.sleep(0.1)
                self.CutChina(rect=rect3)
                time.sleep(0.1)
                self.CutChina(rect=rect4)
    else:
        # Rect already seen — skip duplicate work.
        print("去除重复!")
try: robot = Driving() robot.changeAcc(1) if robot.checkStatus(): canRun = True log.info("Initialized main objects") else: canRun = False except Exception as error: canRun = False log.error("Could not create main objects.!!! Error: %s" % error) if canRun: try: start_time = time.time() robot.driveRobot(distance=50, speed=20, sensors=[]) robot.turnRobot(degrees=180, speed=20, direction=right) robot.driveRobot(distance=50, speed=20, sensors=[]) robot.turnRobot(degrees=180, speed=20, direction=left)