def __init__(self): self.name = 'hatch' # Neural target self.results = get_args() self.realsense = RealSense(constants.REALSENSE_SN) self.camera_provider = self.realsense time.sleep(1) # Let realsense run a bit before setting exposure self.realsense.set_exposure(10) if self.results.camera == 'pi': camera_provider = PICamera() logging.info('Using PI Camera provider') elif self.results.camera == 'realsense': logging.info('Using RealSense camera provider') camera_provider = self.realsense elif self.results.camera == 'cv': camera_provider = CVCamera(self.results.port) else: logging.error('Invalid camera provider, this shouldn\'t happen') sys.exit(1) self.display = Display(provider=camera_provider) if self.results.local: # self.tape_hsv_handler = Trackbars('2019_tape') self.cargo_hsv_handler = Trackbars('cargo_simple') self.tape_hsv_handler = FileHSV('2019_tape') else: self.tape_hsv_handler = FileHSV('2019_tape') self.cargo_hsv_handler = FileHSV('cargo_simple') if self.results.web: self.web = Web(self) self.web.start_thread() # Run web server if self.results.networktables: self.nt = nt_handler.NT('2019') self.stop = False
def book_comment(user, book_id, title, comment, proxy=None, headless=True):
    """Log in (preferring cached cookies) and post *comment* on book *book_id*.

    Any failure is logged with a traceback rather than propagated.
    """
    try:
        user = User(user)
        logger.info(
            '{} ver: {} author: {} worker: {} start book comment'.format(
                __title__, __version__, __author__, user.nickname))
        with Web(proxy=proxy, headless=headless) as chrome:
            logged_in = False
            if user.cookies:
                # Cookie login is best-effort; fall back to a full login.
                try:
                    LoginWithCookies(chrome, user).run()
                    logged_in = True
                except Exception:
                    logged_in = False
            if not logged_in:
                Login(chrome, user).run()
            task_conf = deepcopy(FUNCTION_BOOK_COMMENT_CONFIG)
            task_conf['book_id'] = book_id
            task_conf['title'] = title
            task_conf['comment'] = comment
            BookComment(chrome, user, config=task_conf).run()
    except Exception:
        logger.error('{}|{} -->'.format('Work', 'FAILED REASON'), exc_info=True)
def __init__(self):
    """Build all handlers (camera, display, HSV, web, NT) from CLI arguments."""
    self.results = get_args()
    self.name = self.results.target
    # Unknown target: leave the object uninitialized.
    if not utils.is_target(self.name):
        return
    choice = self.results.camera
    if choice == 'pi':
        camera_provider = PICamera()
        logging.info('Using PI Camera provider')
    elif choice == 'realsense':
        logging.info('Using RealSense camera provider')
        camera_provider = RealSense()
    elif choice == 'cv':
        camera_provider = CVCamera(self.results.port)
    else:
        # get_args should have restricted choices; exit defensively.
        logging.error('Invalid camera provider, this shouldn\'t happen')
        sys.exit(1)
    self.display = Display(provider=camera_provider)
    # Trackbars allow live HSV tuning; FileHSV loads saved values.
    handler_cls = Trackbars if self.results.local else FileHSV
    self.hsv_handler = handler_cls(self.name)
    if self.results.web:
        self.web = Web(self)
        self.web.start_thread()  # Run web server
    if self.results.networktables:
        self.nt = nt_handler.NT(self.name)
    self.stop = False
def register_user(user_type='worker'):
    """Register a new account of the given type.

    :param user_type: one of 'worker' or 'marketer'
    :raises Exception: when *user_type* is not a recognized type
    """
    if user_type not in ('worker', 'marketer'):
        raise Exception('user_type Error')
    with Web(headless=False, proxy=get_proxy()) as chrome:
        Register(chrome, user_type=user_type).run()
        # Give the site time to finish processing the registration.
        time.sleep(8)
def root_web(self):
    """Return the root web, from loaded properties when already available."""
    if not self.is_property_available('RootWeb'):
        # Not loaded yet: return a client object addressed by resource path.
        return Web(
            self.context,
            ResourcePathEntry(self.context, self.resource_path, "RootWeb"))
    return self.properties['RootWeb']
def add(self, web_creation_information):
    """Queue creation of a child web described by *web_creation_information*.

    Returns the new Web client object (executed on the next context flush).
    """
    from web import Web  # local import, presumably avoids a circular import
    web_creation_information._include_metadata = self.include_metadata
    new_web = Web(self.context)
    creation_qry = ClientQuery(
        self.url + "/add", ActionType.PostMethod,
        web_creation_information.payload)
    self.context.add_query(creation_qry, new_web)
    self.add_child(new_web)
    return new_web
def __init__(self):
    # -- UI / text-entry state --
    self.text_items = []
    # Characters cyclable in the on-device text entry.
    self.chars = "AaBbCcDdEeFfGgHhIiJjKkLlMmNnOoPpQqRrSsTtUuVvWwXxYyZz0123456789~!@#$%^&*()_+-=[]\\{}E9|;':\",./<>?"
    self.char_index = 0
    self.text_entry = ""
    # -- tape / transport state --
    self.current_tick = 0
    self.ticks = 0
    self.reason_for_waiting = None
    self.tape = None
    self.side = None
    self.do_monitoring = False
    self.thread = None
    self.command_thread = None
    self.process = None
    self.mid_line = ""
    self.bottom_line = ""
    self.show_track_listing = False
    self.filepath = None
    self.name = None
    self.nice_name = None
    self.recording = False
    self.killed_process = False
    self.partial_ticks = None
    self.conservative_mode = True
    self.ignore_next = False
    self.lock = False
    # -- GPIO buttons and "message available" input line --
    self.b1 = Button('GPIO21')
    self.b2 = Button('GPIO20')
    self.b3 = Button('GPIO16')
    self.b4 = Button('GPIO12')
    self.io1 = DigitalInputDevice('GPIO17')
    self.io1.when_deactivated = self.message_available
    # -- 128x32 SSD1306 OLED display --
    self.display = Adafruit_SSD1306.SSD1306_128_32(rst=None)
    self.display.begin()
    self.display.clear()
    self.display.display()
    f = "/home/pi/Code/python/pytape/dos.ttf"
    self.normal_font = ImageFont.truetype(f, 8)
    self.big_font = ImageFont.truetype(f, 16)
    self.width = self.display.width
    self.height = self.display.height
    # Off-screen 1-bit image buffer that the UI draws into.
    self.image = Image.new('1', (self.width, self.height))
    self.draw = ImageDraw.Draw(self.image)
    self.tc = TapeControl()
    self.w = Web(owner=self)
    # self.start_command_monitoring()
    print "Go!"  # Python 2 print statement
def __init__(self, version):
    """Build the downloader GUI window.

    :param version: version string appended to the window title
    """
    self.core = Core(self.log)
    self.web = Web(self.log)
    master = Tk()
    Frame.__init__(self, master)
    master.title('AV Downloader ' + version)
    root_path_config = config.read_config(
        'config.ini', 'Paths', 'root_path')
    # BUG FIX: the original tested `root_path_config is ''`, an identity
    # comparison against a string literal — unreliable (interning-dependent)
    # and a SyntaxWarning on modern CPython.  Use truthiness instead, which
    # also covers a missing/None config value.
    self.root_path = (os.path.join(os.path.expanduser("~"), "downloads")
                      if not root_path_config else root_path_config)
    # Single worker so UI tasks run sequentially off the main thread.
    self.executor_ui = futures.ThreadPoolExecutor(1)
    self.window = master
    self.pack()
    self.createWidgets()
def __init__(self):
    """
    Create all initial handlers based on parameters from get_args.

    camera_provider : CVCamera or PICamera or RealSense - the type of camera
    to be used by self.display
    """
    self.results = get_args()
    self.name = self.results.target
    # Check if requested target exists; bail out early if not.
    if not utils.is_target(self.name):
        return
    # Set the camera provider
    if self.results.camera == 'pi':
        camera_provider = PICamera()
        logging.info('Using PI Camera provider')
    elif self.results.camera == 'realsense':
        logging.info('Using RealSense camera provider')
        camera_provider = RealSense()
    elif self.results.camera == 'cv':
        camera_provider = CVCamera(self.results.port)
    else:
        # get_args should have restricted the choices; exit defensively.
        logging.error('Invalid camera provider, this shouldn\'t happen')
        sys.exit(1)
    # Create the display
    self.display = Display(provider=camera_provider)
    # Local mode: live HSV tuning via trackbars; otherwise saved HSV values.
    if self.results.local:
        self.hsv_handler = Trackbars(self.name)
    else:
        self.hsv_handler = FileHSV(self.name)
    # Create the web server
    if self.results.web:
        self.web = Web(self)
        self.web.start_thread()
    # Create the networktables server
    if self.results.networktables:
        self.nt = nt_handler.NT(self.name)
    self.logger = Logger(self)
    self.stop = False
def daily_work(headless=WEBDRIVER_HEADLESS):
    """Run the daily routine: log in, then collect, recommend and read books.

    Failures are logged with a traceback instead of being raised.
    """
    try:
        user = User(get_sequence_user())
        logger.info('{} ver: {} author: {} worker: {} START DAILY WORK'.format(
            __title__, __version__, __author__, user.nickname))
        with Web(headless=headless) as chrome:
            user_recomment_num = api_user_info(user, 'recomment')
            # Cookie login only when no recommendations are pending and the
            # cookies were refreshed by a full login earlier today.
            cookies_ok = (not user_recomment_num
                          and user.last_work_time
                          and user.cookies
                          and is_today(user.last_work_time))
            login_cls = LoginWithCookies if cookies_ok else Login
            login_cls(chrome, user).run()
            # Collect
            if user.is_collect == '0':
                CollectBook(chrome, user).run()
            # Recommend
            if user_recomment_num:
                RecommendBook(chrome, user).run()
            # Read
            ReadBook(chrome, user).run()
    except Exception:
        logger.error('{}|{} -->'.format('Work', 'FAILED REASON'), exc_info=True)
def main():
    """Generate word clouds from tweets, Tokyo trends and the blog."""
    tweet_client = Tweet()
    tweets = tweet_client.get_tweets()
    exclude_list = generate_exclude_list()
    WC(tweets, exclude_list).generate_word_cloud("word_cloud_tweet.png")
    print("makarov: ")
    print(run_makarov(tweets))
    WC(tweet_client.get_trends_tokyo(), []).generate_word_cloud("trend_tokyo.png")
    blog = Web(BASE_URL, WEB_EXCLUDE_LIST)
    blog_cloud = WC(blog.get_text_by_base_url(), exclude_list)
    blog_cloud.generate_word_cloud("word_cloud_blog.png", alpha=False, mask="rect")
    blog_cloud.generate_word_cloud("word_cloud_blog_large.png", alpha=True, mask="rect_large")
    conv_webp("word_cloud_blog.png")
def __init__(self, topics, W2V, collect):
    """Hold a Web processor over *topics*/*W2V* plus the *collect* flag."""
    self.processer = Web(topics, W2V)
    self.collect = collect
def main():
    """Ensure the model file exists, then launch the web application."""
    model_file_check()
    app = Web()
    app.run()
# Copyright (c) 2020 ruundii. All rights reserved. import asyncio import sys from signal import SIGINT import asyncio_glib from dasbus.connection import SystemMessageBus from adapter import BluetoothAdapter from bluetooth_devices import * from hid_devices import * from web import Web if __name__ == "__main__": asyncio.set_event_loop_policy(asyncio_glib.GLibEventLoopPolicy()) loop = asyncio.get_event_loop() loop.add_signal_handler(SIGINT, sys.exit) bus = SystemMessageBus() bluetooth_devices = BluetoothDeviceRegistry(bus, loop) hid_devices = HIDDeviceRegistry(loop) hid_devices.set_bluetooth_devices(bluetooth_devices) bluetooth_devices.set_hid_devices(hid_devices) adapter = BluetoothAdapter(bus, loop, bluetooth_devices, hid_devices) web = Web(loop, adapter, bluetooth_devices, hid_devices) loop.run_forever() #print(proxy)
def main():
    """Entry point: start the web application (blocking)."""
    server = Web()
    server.run()
# Hint:
# 1. While your solution must handle the case for Web(size=123, degree=5) in
#    the test script, you may want to use different size and degree settings
#    for faster tests and for better test coverage.
import time
from crawler import WebCrawler
from web import Web

WEB_SIZE = 1000
WEB_DEGREE = 10

web = Web(size=WEB_SIZE, degree=WEB_DEGREE)
crawler = WebCrawler()

started_at = time.time()
found_urls = crawler.crawl(web)
finished_at = time.time()

print("Time took to crawl the URLs: ", finished_at - started_at)
print("Number of URLs found: ", len(found_urls))
# Every page of the generated web must have been reached.
assert len(found_urls) == WEB_SIZE
from udpServer import UdpServer
from main_thread import Main
from multiprocessing import Queue
import queue
from web import Web
import time

if __name__ == '__main__':
    # queue1 is a cross-process queue fed by the UDP server; queue2 and
    # carsQueue are in-process queues shared with the web layer.
    queue1 = Queue(maxsize=1000)
    queue2 = queue.Queue(maxsize=1000)
    carsQueue = queue.Queue(maxsize=2)

    def onChange(data):
        # Push the latest car data for the web layer to pick up.
        carsQueue.put(data)

    web = Web([queue2, carsQueue])
    time.sleep(1)  # give the web component a moment before starting it
    web.start()
    main = Main(onChange)
    # NOTE(review): .start() presumably returns None, so udpServer does not
    # hold the server object — confirm whether it is ever used.
    udpServer = UdpServer(queue1).start()
    time.sleep(3)
    print('服务启动成功')
    # Dispatch loop: drain both queues and hand each message to Main.
    while True:
        while not queue1.empty():
            main.response(queue1.get())
        while not queue2.empty():
            main.response(queue2.get())
        time.sleep(0.1)
def run(self):
    """The daemon's main loop: start the web server on this thread."""
    self.running = True
    server = Web()
    server.ioloop()  # blocks; runs in this thread
import sys
import time

from config.constants import WEB_SIZE, WEB_DEGREE
from crawler import WebCrawler
from web import Web

if __name__ == '__main__':
    # Optional CLI override: <prog> <size> <degree>
    if len(sys.argv) == 3:
        WEB_SIZE = int(sys.argv[1])
        WEB_DEGREE = int(sys.argv[2])

    web = Web(size=WEB_SIZE, degree=WEB_DEGREE)
    crawler = WebCrawler()

    t0 = time.time()
    crawled = crawler.crawl(web)
    t1 = time.time()

    print("Time took to crawl the URLs: ", t1 - t0)
    print("Number of URLs found: ", len(crawled))
    # Every page of the generated web must have been reached.
    assert len(crawled) == WEB_SIZE
def main():
    """Wire the auth database and CLI options into the web server and start it."""
    auth_database = AuthDatabase()
    arg_parser = argparse.ArgumentParser(description="aiohttp server example")
    for opt in ("--path", "--port"):
        arg_parser.add_argument(opt)
    Web(auth_database, arg_parser.parse_args()).start()
from web import Web from queuer import Queuer from spider import Spider API_ENDPOINT = "http://localhost:10000/update" SEED_URL = "https://www.npr.org/sections/politics" QUEUE_FILE = os.path.join("data", "queue.json") DATA_FILE = os.path.join("data", "database.json") DATABASE = [] TALLY = 0 MAX_VALUE = 10 if __name__ == '__main__': try: print("[INITIALIZING]") web = Web(API_ENDPOINT) Q = Queuer() try: Q.load_state_from_json(QUEUE_FILE) print("[QUEUE] using QUEUE_FILE") except: Q._add([SEED_URL]) print("[QUEUE] using SEED_URL") print("[ENTERING LOOP]") while TALLY < MAX_VALUE: url = Q._next() # raises Error on failure spider = Spider(url) spider.lay() egg = spider.egg() status_code = web.POST(egg) DATABASE.append(egg)
def __init__(self, tag_search):
    """Hold Twitter and Web clients plus the tag being searched."""
    self.tw = Twitter()
    self.web = Web()
    self.tag_search = tag_search
if __name__ == '__main__':
    # Logger instance
    logname = "金蝶消息提醒"
    log = Logger(logname)
    logger = log.getlogger()
    # Config-parser instance
    conf = configparser.ConfigParser()
    path = 'leave.conf'
    assert os.path.exists(path), "{file}不存在".format(file=path)
    # Windows config files may carry a UTF-8 BOM, hence utf-8-sig there.
    if platform.system() == 'Windows':
        conf.read(path, encoding="utf-8-sig")
    else:
        conf.read(path)
    # Database instance
    stone = stoneobject()
    # # Server instance
    # server = ServerQuery(conf=conf, stone=stone, logger=logger)
    # # Query
    # string = server.run(str('0201705003'))
    # Parse
    # list = json.loads(string)
    # print(list)
    proxy_query = ProxyQuery(conf=conf, stone=stone, logger=logger)
    web = Web(proxy_handle=proxy_query, logger=logger,
              port=int(conf.get('web', 'port')))
    web.get_web_install()
    pass
def add_web(self, thickness, division_count):
    """Attach a web spanning this rib.

    :param thickness: thickness of the web
    :param division_count: number of stiffeners + 1
    """
    self.web = Web(self.y_left, self.y_right, division_count, thickness)
def __init__(self):
    # Setup UI: a black canvas on the left, a parameter panel on the right.
    self.main = tk.Tk()
    self.cnv = tk.Canvas(self.main, width=res, height=res, background="black")
    self.cnv.bind("<Button-1>", self.Click)
    self.cnv.pack(side="left")
    self.pnl = tk.Frame(self.main)
    self.pnl.pack(side="right")
    # Each parameter is a Label + Entry pair pre-filled with a default.
    # 'a' is a throwaway handle reused for every label.
    a = tk.Label(self.pnl, text="Detail")
    a.pack()
    self.db = tk.Entry(self.pnl)
    self.db.pack()
    self.db.insert(0, "10")
    a = tk.Label(self.pnl, text="Anchors")
    a.pack()
    self.ab = tk.Entry(self.pnl)
    self.ab.pack()
    self.ab.insert(0, "5")
    a = tk.Label(self.pnl, text="Anchor Deviancy")
    a.pack()
    self.devb = tk.Entry(self.pnl)
    self.devb.pack()
    self.devb.insert(0, "0.2")
    a = tk.Label(self.pnl, text="Radial Strings")
    a.pack()
    self.rb = tk.Entry(self.pnl)
    self.rb.pack()
    self.rb.insert(0, "5")
    a = tk.Label(self.pnl, text="Rings")
    a.pack()
    self.rib = tk.Entry(self.pnl)
    self.rib.pack()
    self.rib.insert(0, "40")
    a = tk.Label(self.pnl, text="Hang")
    a.pack()
    self.drb = tk.Entry(self.pnl)
    self.drb.pack()
    self.drb.insert(0, "0.7")
    a = tk.Label(self.pnl, text="AA")
    a.pack()
    self.aa = tk.Entry(self.pnl)
    self.aa.insert(0, "2")
    self.aa.pack()
    # Action buttons.
    self.gen = tk.Button(self.pnl, command=self.Generate, text="Generate")
    self.gen.pack()
    self.lgen = tk.Button(self.pnl, command=self.LineGen, text="Regenerate Lines")
    self.lgen.pack()
    self.clr = tk.Button(self.pnl, command=self.Clear, text="Clear")
    self.clr.pack()
    self.rnd = tk.Button(self.pnl, command=self.Render, text="Render")
    self.rnd.pack()
    a = tk.Label(self.pnl, text="Filename")
    a.pack()
    self.name = tk.Entry(self.pnl)
    self.name.pack()
    self.name.insert(0, "web")
    self.sve = tk.Button(self.pnl, command=self.Save, text="Save")
    self.sve.pack()
    # setup web generator and draw an initial web from the default entries
    self.Eng = Web(res, TkRender(self.cnv))
    self.Eng.Generate(int(self.ab.get()), float(self.devb.get()),
                      int(self.rb.get()), int(self.rib.get()),
                      float(self.drb.get()), int(self.db.get()))
    self.Eng.Render(rules)
from web import Web

if __name__ == '__main__':
    # Launch the web application.
    Web().start()
backoff until we have the proper get() response or we exceed the number of retires. The reason we implement a separate function for this is because we want to increase the robustness of our crawler, since we cannot control the web_object.get() command. :param url: URL on which we do a web_object.get() :return: list(list_of_linked_url) or raises exception if we have fail for more than the allocated retries. """ try: # We use the below if we are SURE that the web_object.get() is not a blocking call. For the purposes of this # use case, this is sufficient. crawled_url = self._web_object.get(url) self._logger.debug("Successful GET on URL: %s" % str(url)) return crawled_url except Exception as e: # We raise the exception if web_object.get() fails. We can improve the Type of Exceptions as and when they # happen. For now I have gone with a general broad Exception class. self._logger.error("Unable to do a GET on URL: %s. Error: %s" % (url, str(e))) raise Exception if __name__ == '__main__': web = Web(size=100, degree=1) crawler = WebCrawler() urls = crawler.crawl(web) # urls1 = crawler.crawl(web) print(urls) print(len(urls))
def main():
    """Test Function: build one stiffener on a sample web and print a row."""
    sample_web = Web(625, 1000, 3, 2.03)
    stiffener = Stiffener(2.29, 22, 19.0, sample_web)
    make_stiffener_header()
    stiffener.make_row(289)
def run(self):
    """Main worker loop: wait for the 'ws_start' signal, then repeatedly
    refresh missions/units and dispatch units to active missions."""
    # Block until a 'ws_start' item arrives on the control queue.
    while True:
        try:
            item = self.queue.get(False)
            if (item['type'] == 'ws_start'):
                break
        except queue.Empty:
            time.sleep(1)
            continue
    self.web = Web(self.base_url, self.cookies)
    self.refresh_units()
    self.web.back_alarm_units(self.units)
    self.refresh_missions()
    self.update_data()
    self.console.update(self.units, self.missions)
    loop_reset = 0
    while True:
        try:
            # Roughly every 25 iterations do a full refresh of everything.
            if (loop_reset > 25):
                loop_reset = 0
                logging.info('-- Big loop --')
                self.refresh_missions()
                self.refresh_units()
                self.update_data()
            loop_reset = loop_reset + 1
            reserved_units = {}
            units_already_sent = []
            # Swap out the pending update list so new updates can accumulate
            # while this batch is processed.
            mission_updates = self.mission_updates
            self.mission_updates = []
            for update in mission_updates:
                if str(update['id']) not in self.missions:
                    if ('type' in update and update['type'] == 'delete'):
                        logging.info('Just tried to delete a mission which we never had? {}'.format(update['id']))
                        continue
                    # 2091 is a hard-coded user id — presumably this account;
                    # missions owned by anyone else are ignored. TODO confirm.
                    if (update['user_id'] != 2091):
                        logging.info('mission of id {} is now owned by 2091 - {}'.format(update['id'], update['user_id']))
                        continue
                    self.missions[str(update['id'])] = Mission(update)
                elif 'type' in update and update['type'] == 'delete':
                    del self.missions[str(update['id'])]
                    self.mission_ids.remove(str(update['id']))
                else:
                    self.missions[str(update['id'])].update(update)
            self.update_data()
            # Run the process queue (not related to mission)
            available_units_at_start = self.get_available_units([])
            # for key, mission in self.missions.items():
            for mission_id in self.mission_ids:
                mission = self.missions[mission_id]
                if (not self.web.mission_active(mission.id)):
                    # Mission is gone server-side: queue its deletion locally.
                    self.queue.put({
                        'type': 'mission_delete',
                        'data': {'id': mission.id, 'type': 'delete'}
                    })
                    mission.clear_units()
                    logging.info("Mission ended: {}, {}".format(mission.id, mission.name))
                    continue
                self.console.update(self.units, self.missions)
                available_units = self.get_available_units(units_already_sent, available_units_at_start)
                required_units = mission.get_required_units(self.web)
                # logging.info('Required units: {}'.format(required_units))
                sending_ids = []
                # NOTE(review): 'type' shadows the builtin throughout this loop.
                for type, quantity in required_units.items():
                    # Does available units check each time
                    type_sending_ids = []
                    if (type not in reserved_units):
                        type_sending_ids = self.get_closest_units(mission.latitude, mission.longitude, type, quantity, available_units)
                        sending_ids = sending_ids + type_sending_ids
                        units_already_sent = units_already_sent + type_sending_ids
                    if (len(type_sending_ids) < quantity):
                        # Not enough units of this type: reserve the shortfall
                        # so later missions don't steal them.
                        if (type not in reserved_units):
                            reserved_units[type] = 0
                        reserved_units[type] = reserved_units[type] + (quantity - len(type_sending_ids))
                if (len(sending_ids) > 0):
                    logging.info('Dispatching units to {} - {} : {}'.format(mission.id, mission.name, sending_ids))
                    self.web.dispatch(mission.id, sending_ids)
                    time.sleep(1)
            if (len(reserved_units) > 0):
                logging.info("Required to complete stack: {}".format(reserved_units))
            time.sleep(1)
        except Exception as instance:
            logging.exception(instance)
            time.sleep(5)
            # Assume the session died: re-login and rebuild state.
            self.get_login_cookies()
            self.web = Web(self.base_url, self.cookies)
            self.refresh_missions()
            self.update_data()
            logging.info('Needed to relogin')