Example No. 1
def getTweetDetailsWithId(id):
    try:
        headers = {"Authorization": "Bearer {}".format(BEARER_TOKEN)}
        inputs = {
            'ids': [id],
            'tweet.fields': 'author_id,public_metrics,entities',
            'expansions': 'entities.mentions.username'
        }
        response = requests.get("https://api.twitter.com/2/tweets",
                                headers=headers,
                                params=inputs)
        if response.status_code == 429:
            # Rate limited: sleep 15 minutes, then retry.
            time.sleep(60 * 15)
            return getTweetDetailsWithId(id)
        elif response.status_code != 200:
            logging.warning("Cannot get tweets (HTTP {}): {}".format(
                response.status_code, response.text))
            return []

        data_root = response.json()
        if 'data' not in data_root:
            return []
        data_list = data_root['data']
        return data_list

    except Exception as e:
        logging.warning(e)
        return []
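Note: Python's standard time module only provides sleep(); there is no time.wait(). The bare wait(seconds) calls in the examples below presumably rely on a project-level helper. A minimal sketch of that assumption (this alias is not part of any example's original source):

from time import sleep as wait  # assumed helper behind the bare wait(...) calls

wait(1)  # pause for one second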
Example No. 2
    def search(self):
        urls = []
        for page in range(0, self.pages):
            url = UrlGenerator(self.query, self.num, (self.start + (10 * page)), self.recent, self.site).web_url
            urls.append(url)

        for url in urls:
            if self.sleep:
                wait(1)
            html = requests.get(url, headers=self.headers).text
            soup = BeautifulSoup(html, "html.parser")
            self.big_soup.body.append(soup.body)

        results = self.scrape_search_result(self.big_soup)
        related_queries = self.scrape_related_queries(self.big_soup)

        raw_total_results = self.big_soup.find("div", attrs={"class": "sd"}).string
        total_results = int(raw_total_results.replace("About ", "").replace(" results", "").replace(",", ""))

        data = collections.OrderedDict()
        data["source"] = "google"
        data["expected_num"] = self.num * self.pages
        data["received_num"] = len(results)
        data["first_page_url"] = urls[0]
        data["related_queries"] = related_queries
        data["total_results"] = total_results
        data["results"] = results

        return data
Example No. 3
def test_ap_ht40_scan(dev, apdev):
    """HT40 co-ex scan"""
    params = { "ssid": "test-ht40",
               "channel": "5",
               "ht_capab": "[HT40-]"}
    hapd = hostapd.add_ap(apdev[0]['ifname'], params)

    state = hapd.get_status_field("state")
    if state != "HT_SCAN":
        time.sleep(0.1)
        state = hapd.get_status_field("state")
        if state != "HT_SCAN":
            raise Exception("Unexpected interface state - expected HT_SCAN")

    ev = hapd.wait_event(["AP-ENABLED"], timeout=10)
    if not ev:
        raise Exception("AP setup timed out")

    state = hapd.get_status_field("state")
    if state != "ENABLED":
        raise Exception("Unexpected interface state - expected ENABLED")

    freq = hapd.get_status_field("freq")
    if freq != "2432":
        raise Exception("Unexpected frequency")
    pri = hapd.get_status_field("channel")
    if pri != "5":
        raise Exception("Unexpected primary channel")
    sec = hapd.get_status_field("secondary_channel")
    if sec != "-1":
        raise Exception("Unexpected secondary channel")

    dev[0].connect("test-ht40", key_mgmt="NONE", scan_freq=freq)
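For reference, the values asserted above follow from the 2.4 GHz channel plan, where the centre frequency of channel n is 2407 + 5*n MHz, and "[HT40-]" places the secondary 20 MHz channel below the primary. A quick check:

def channel_to_freq_24ghz(channel):
    """Centre frequency in MHz for 2.4 GHz channels 1-13."""
    return 2407 + 5 * channel

assert channel_to_freq_24ghz(5) == 2432  # the "freq" value the test expects
# "[HT40-]" -> secondary channel below the primary, reported as "-1"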
Example No. 4
    def change_profile_description(self, description):
        profile = self.driver.find_element_by_xpath(
            '//*[@id="side"]/header/div[1]/div/img')
        profile.click()

        wait(0.6)

        edit = self.driver.find_element_by_xpath(
            '//*[@id="app"]/div/div/div[2]/div[1]/span/div/div/div/div[4]/div[2]/div[1]/span[2]/div'
        )
        edit.click()

        wait(0.6)

        text = self.driver.find_element_by_xpath(
            '//*[@id="app"]/div/div/div[2]/div[1]/span/div/div/div/div[4]/div[2]/div[1]/div/div[2]'
        )
        text.send_keys(Keys.CONTROL, 'a', Keys.BACKSPACE)
        text.send_keys(description)
        text.send_keys(Keys.ENTER)

        returning = self.driver.find_element_by_xpath(
            '//*[@id="app"]/div/div/div[2]/div[1]/span/div/div/header/div/div[1]/button/span'
        )
        returning.click()
Example No. 5
def savannah2(character, weapon, items, money):
    print("CHAPTER 1, PART 2: TO POTTERE TOWN")
    wait(3)
    cont = input("Press enter to continue: ")
    del cont
    ui.restartPage()
    print_text_file("StoryTexts/SavannahTexts/savannah3.txt")
    wait(10)
    dialogue_options = [
        "Yeah, sure, I'll try it!",
        "I don't know man, it seems a little shady",
        "I'm good on stats now, maybe another time!"
    ]
    while True:
        for counter, option in enumerate(dialogue_options, start=1):
            print(f"({counter}) {option}")
        chose = input("Enter the number of your dialogue choice: ")
        while not chose.isdigit() or not 1 <= int(chose) <= 3:
            print("\nINVALID INPUT")
            chose = input("Enter the number of your dialogue choice: ")
        chose = dialogue_options[int(chose) - 1]
        ui.restartPage()
        print(chose)
        if chose == "Yeah, sure, I'll try it!":
            pass
        elif chose == "I don't know man, it seems a little shady":
            pass
        elif chose == "I'm good on stats now, maybe another time!":
            pass
Example No. 6
def game1():
    #---Variable Setup---
    global my_weapons
    global my_items
    global money
    money = 0
    starter_weapon = Shop.all_weapons[0]
    my_weapons = [starter_weapon]
    my_items = Shop.Items
    #---Start Screen---
    print("WELCOME TO MIGHTY MAGIC ADVENTURES!")
    wait(2)
    cont = input("Press enter to continue: ")
    del cont
    ui.restartPage()
    #---Character Setup---
    protagonist = Character.set_up_character()
    ui.restartPage()
    print(protagonist)
    wait(3)
    cont = input("Press enter to continue: ")
    del cont
    ui.restartPage()
    #---Tutorial/Introduction---
    intro(protagonist, my_weapons[0], my_items, money)
    #---Savannah Levels---
    savannah1(protagonist, my_weapons[0], my_items, money)
    savannah2(protagonist, my_weapons[0], my_items, money)
Example No. 7
def pan(servo_chan, servo_pl_limits):
    for i in range(servo_pl_limits[0], servo_pl_limits[1]+1, 16):
        pwm.set_pwm(servo_chan, 0, i)
        time.sleep(0.005)
    for i in range(servo_pl_limits[1], servo_pl_limits[0]-1, -16):
        pwm.set_pwm(servo_chan, 0, i)
        time.sleep(0.005)
Example No. 8
	def send_requests(self, request_urls):
		'''Send the HTTP request.'''
		responses = []
		for request_url in request_urls:
			response = requests.get(request_url)
			responses.append(response)
			time.sleep(self.rate_limit)
		return responses
Example No. 9
def getLinks(domains, file):
   if urlset:
      dyndns = 0   # just a dynamically resolving ;)
      domains = url
   else:
      ## use n'th from random domains
      dyndns = int(random.randint(0,4))
      print("Setting " + str(dyndns) + ". domain from list")
      print(domains[dyndns])

   ##Add pageflip to start randomly at bottom

   req  = requests.get(domains[dyndns], headers=headers)
   ## Dont Loop that stuff
   soup = BeautifulSoup(req.content, 'lxml')

#ONLINE ^
#--- From here on: experimental
#OFFLINE v  .get might still connect

   ## using  >>  "a, href" or "td, text"
   ## target packagenames  in AUR Repo
   for lines in soup.find_all('a', href=True):

       waiting = (0.421*(random.randint(1,7)))
       print("Waiting "+str(waiting)+" sec")
       time.sleep(waiting)


       file.write(lines.get('href')+"\n")
Example No. 10
def loader():
    proxyBar = Bar("Loading Proxies", max=100, suffix="%(percent)d%%")
    for num in range(101):
        wait(0.005)
        proxyBar.next()
    proxyBar.finish()
    print("\n")
Example No. 11
def set_motor(io_type, port, settings):
    try:
        if io_type == 'large motor':
            i = ev3.LargeMotor(port)
        elif io_type == 'medium motor':
            i = ev3.MediumMotor(port)
        power = int(settings['power'])
        if settings['motor_mode'] == 'run forever':
            i.run_forever(duty_cycle_sp=power)
            time.wait(
                1
            )  # this will cause server 500 errors when you call run forever because time.wait doesn't exist
            # BUT this must be here because it allows run forever to work as it sets off to find something it can't; otherwise the motor just twitches
            # I believe this creates a new thread which is why the EV3 can still process new input; it just keeps the motor running
        elif settings['motor_mode'] == 'run timed':
            time_picked = settings['time']
            i.run_timed(time_sp=time_picked, duty_cycle_sp=power
                        )  # might also need the time.wait fix; didn't test
        elif settings['motor_mode'] == 'stop':
            stop_type = settings['stop_type']
            i.stop()
        elif settings[
                'motor_mode'] == 'reset':  # should reset motor encoders, aka I believe changes the position to 0, stops motors
            i.reset()
        elif settings['motor_mode'] == 'switch':
            i.duty_cycle_sp = i.duty_cycle_sp * -1
        return "successful set"
    except ValueError:
        return "Not found"
Example No. 12
    def search(self):
        urls = []
        for page in range(0, self.pages):
            url = UrlGenerator(self.query, self.num, (self.start + (10*page)), self.recent, self.site).web_url
            urls.append(url)

        for url in urls:
            if self.sleep:
                wait(1)
            html = requests.get(url, headers=self.headers).text
            soup = BeautifulSoup(html, 'html.parser')
            self.big_soup.body.append(soup.body)

        results = self.scrape_search_result(self.big_soup)
        related_queries = self.scrape_related_queries(self.big_soup)

        raw_total_results = self.big_soup.find('div', attrs={'class': 'sd'}).string
        total_results = int(raw_total_results.replace('About ', '').replace(' results', '').replace(',', ''))

        data = dict()
        data['source'] = 'google'
        data['expected_num'] = self.num * self.pages
        data['received_num'] = len(results)
        data['first_page_url'] = urls[0]
        data['related_queries'] = related_queries
        data['total_results'] = total_results
        data['results'] = results

        return data
Example No. 13
def set_motor(io_type, port, settings):
    try:
        if io_type == 'large motor':
            i = ev3.LargeMotor(port)
        elif io_type == 'medium motor':
            i = ev3.MediumMotor(port)
        power = int(settings['power'])
        if settings['motor_mode'] == 'run forever':
            i.run_forever(duty_cycle_sp=power)
            time.wait(1)  # this will cause server 500 errors when you call run forever because time.wait doesn't exist
            # BUT this must be here because it allows run forever to work as it sets off to find something it can't; otherwise the motor just twitches
            # I believe this creates a new thread which is why the EV3 can still process new input; it just keeps the motor running
        elif settings['motor_mode'] == 'run timed':
            time_picked = settings['time']
            i.run_timed(time_sp=time_picked, duty_cycle_sp=power)  # might also need the time.wait fix; didn't test
        elif settings['motor_mode'] == 'stop':
            stop_type = settings['stop_type']
            i.stop()
        elif settings['motor_mode'] == 'reset':  # should reset motor encoders, aka I believe changes the position to 0, stops motors
            i.reset()
        elif settings['motor_mode'] == 'switch':
            i.duty_cycle_sp = i.duty_cycle_sp * -1
        return "successful set"
    except ValueError:
        return "Not found"
Example No. 14
    def search_news(query, num=10, start=0,sleep=True, recent=None, country_code=None, proxies=None):
        if sleep:
            wait(1)
        url = generate_news_url(query, str(num), str(start), country_code, recent)
        soup = BeautifulSoup(requests.get(url, proxies=proxies).text, "html.parser")
        results = Google.scrape_news_result(soup)

        raw_total_results = soup.find('div', attrs = {'class' : 'sd'}).string
        total_results = 0
        for i in raw_total_results:
            try:
                temp = int(i)
                total_results = total_results * 10 + temp
            except:
                continue

        temp = {'results' : results,
                'url' : url,
                'num' : num,
                'start' : start,
                'search_engine' : 'google',
                'total_results' : total_results,
                'country_code': country_code,
        }
        return temp
Example No. 15
 def __init__(self):
     self.month = ['JAN', 'FEB', 'MAR', 'APR', 'MAY', 'JUN', 'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC']
     self.mnth_dic = dict((v, k) for k, v in enumerate(self.month))
     self._ = webdriver.Chrome('C:\\tools\\chromedriver.exe')
     self._.get('https://www.makemytrip.com/')
     self._.maximize_window()
     wait(10)
Example No. 16
	def runFileScriptInSubprocess(self,cmd,logfilepath):
		# Running another FileScript as a subprocess
		apDisplay.printMsg('running FileScript:')
		apDisplay.printMsg('------------------------------------------------')
		apDisplay.printMsg(cmd)
		# stderr=subprocess.PIPE only works with shell=True with python 2.4.
		# works on python 2.6.  Use shell=True now but shell=True does not
		# work with path changed by appionwrapper.  It behaves as if the wrapper
		# is not used
		proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
		stdout_value = proc.communicate()[0]
		while proc.returncode is None:
			time.sleep(60)
			stdout_value = proc.communicate()[0]
		try:
			logdir = os.path.dirname(logfilepath)
			apParam.createDirectory(logdir)
			file = open(logfilepath,'w')
		except:
			apDisplay.printError('Log file can not be created, process did not run.')
		file.write(stdout_value)
		file.close()
		if proc.returncode > 0:
			pieces = cmd.split(' ')
			apDisplay.printWarning('FileScript %s had an error. Please check its log file: \n%s' % (pieces[0].upper(),logfilepath))
		else:
			apDisplay.printMsg('FileScript ran successfully')
		apDisplay.printMsg('------------------------------------------------')
		return proc.returncode
Example No. 17
    def __init__(self, link_uri):
        """
        Initializes the control class and executes all needed functions.
        """

        # Connect Crazyflie
        self.Crazyflie = Crazyflie()
        self.Connected = False
        self.Connect(link_uri)

        while not self.Connected:
            wait(0.1)
            pass

        # Start Program
        self.t0 = 0#getTime()
        self.Running = True

        # Initialize
        self.SetInitialState()      
        self.InitializeReferenceCS()
        if Plotting:
            self.InitializePlotting()
        if GUI:
            self.InitializeGUI()
        if Animation:
            self.InitializeAnimation()

        # Run Main Loops
        Thread(target=self.MainLoop).start()
        if GUI:
            Thread(target=self.GUILoop).start()
        if Animation:
            Thread(target=self.AnimationLoop).start()
Example No. 18
    def search(query, num=10, start=0, sleep=True, recent=None):
        results = []
        _start = start # Remembers the initial value of start for later use
        _url = None
        related_queries = None

        while len(results) < num:
            if sleep: # Prevents loading too many pages too soon
                wait(1)
            url = generate_url(query, str(start), recent)
            if _url is None:
                _url = url # Remembers the first url that is generated
            soup = BeautifulSoup(requests.get(url).text, "html.parser")
            new_results = Bing.scrape_search_result(soup)
            if not new_results:  # no more results; avoid looping forever
                break
            results += new_results
            start += len(new_results)

            if related_queries is None:
                related_queries = scrape_related(soup)

        results = results[:num]

        temp = {'results' : results,
                'url' : _url,
                'num' : num,
                'start' : _start,
                'search_engine' : 'bing',
                'related_queries' : related_queries,
        }
        return temp
Example No. 19
def single_acquisition_example(name, n_events, trigger, trigger_channel):
    """ Acquire a set of triggerred single acquisitions for two channels."""
    tek_scope = scopes.Tektronix2000(scope_connections.VisaUSB())
    # First setup the scope, lock the front panel
    tek_scope.lock()
    tek_scope.set_active_channel(1)
    tek_scope.set_active_channel(2)
    tek_scope.set_single_acquisition() # Single signal acquisition mode
    tek_scope.set_edge_trigger(trigger, trigger_channel, True) # Falling edge trigger
    tek_scope.set_data_mode(49500, 50500)
    tek_scope.lock() # Re acquires the preamble
    # Now create a HDF5 file and save the meta information
    file_name = name + "_" + str(datetime.date.today())
    results = utils.HDF5File(file_name, 2)
    results.add_meta_data("trigger", trigger)
    results.add_meta_data("trigger_channel", trigger_channel)
    results.add_meta_data("ch1_timeform", tek_scope.get_timeform(1))
    results.add_meta_data("ch2_timeform", tek_scope.get_timeform(2))
    results.add_meta_dict(tek_scope.get_preamble(1), "ch1_")
    results.add_meta_dict(tek_scope.get_preamble(2), "ch2_")

    last_save_time = time.time()
    print "Starting data taking at time", time.strftime("%Y-%m-%d %H:%M:%S")
    for event in range(0, n_events):
        tek_scope.acquire()
        try:
            results.add_data(tek_scope.get_waveform(1), 1)
            results.add_data(tek_scope.get_waveform(2), 2)
        except visa_exceptions.VisaIOError, e:
            # Catch the more specific VISA error first; the generic Exception
            # handler would otherwise shadow it.
            print "Serious death"
            time.sleep(1)
        except Exception, e:
            print "Scope died, acquisition lost."
            print e
Example No. 20
def store(sqlDb, alnScore):
    """store the results in sqlite"""
    # pdb.set_trace()
    if alnScore:
        c = sqlite3.connect(sqlDb)
        for aln in alnScore:
            row = alnScore[aln]
            row += (len(alnScore),)
            print row
            print "\n\n"
            try:
                c.execute(
                    "insert into blast (seq, match_cnt, e_value, perfect, types, positions, matches) values (?,?,?,?,?,?,?)",
                    row,
                )
            except:
                # rollback the pending transaction
                c.rollback()
                # wait for the dbase lock
                time.sleep(0.2)
                c.execute(
                    "insert into blast (seq, match_cnt, e_value, perfect, types, positions) values (?,?,?,?,?,?)", row
                )
            c.commit()
        c.close()
    else:
        pass
Example No. 21
    def search_news(query,
                    num=10,
                    start=0,
                    sleep=True,
                    recent=None,
                    country_code=None):
        results = []
        _start = start  # Remembers the initial value of start for later use
        _url = None
        while len(results) < num:
            if sleep:  # Prevents loading too many pages too soon
                wait(1)
            url = generate_news_url(query, str(start), recent, country_code)
            if _url is None:
                _url = url  # Remembers the first url that is generated
            soup = BeautifulSoup(requests.get(url).text, "html.parser")
            new_results = Bing.scrape_news_result(soup)
            if not new_results:  # no more results; avoid looping forever
                break
            results += new_results
            start += len(new_results)
        results = results[:num]

        temp = {
            'results': results,
            'url': _url,
            'num': num,
            'start': _start,
            'search_engine': 'bing',
            'country_code': country_code,
        }
        return temp
Example No. 22
File: SONA.py Project: mjrand/SONA
    def save_s2p_data(self, file_name):
        print("Attempting to save data as " + str(file_name) + ".s2p")

        # Tells VNA to change current directory to "[USBDISK]:\".
        self.session.write('MMEM:CDIR "[USBDISK]:\"')
        time.sleep(1)

        # Tells VNA to save current traces to "file_name.s2p" on USBDISK.
        # "*OPC?" Tells VNA to send confirmation when finished.
        self.session.write('MMEM:STOR:SNP \"' + str(file_name) + '.s2p\";*OPC?')

        save_success = '0'
        save_loop_count = 0

        while save_success == '0':

            # VNA has 10 attempts to save data.
            if save_loop_count == 10:
                print("Saving data failed. Program will now exit. Check VNA.")
                sys.exit()

            try:
                # Attempts to read confirmation from VNA.
                # Throws error if confirmation has not been sent.
                save_success = self.session.read()
                print("Data successfully saved as: " + str(file_name) + ".s2p on USB!\n")
                time.sleep(1)
                return

            except:
                print("Waiting on data to save... (" + str(10 - save_loop_count) + " attempts remaining)")

            save_loop_count += 1
            time.sleep(5)
Example No. 23
 def turn(self, speed, seconds=None):
     self.send_cmd(left_flag, -speed)
     self.send_cmd(right_flag, speed)
     if seconds != None:
         wait(seconds)
         self.halt()
     return
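For contrast with turn(), driving straight under the same assumed send_cmd/left_flag/right_flag interface sends the same signed speed to both sides; opposite signs, as above, spin the robot in place. A hypothetical companion sketch:

def forward(self, speed, seconds=None):
    # Same sign on both sides -> drive straight (hypothetical, mirrors turn()).
    self.send_cmd(left_flag, speed)
    self.send_cmd(right_flag, speed)
    if seconds is not None:
        wait(seconds)
        self.halt()
    return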
Example No. 24
    def search(query, num=10, start=0, sleep=True, recent=None, country_code=None):
        if sleep:
            wait(1)
        url = generate_url(query, str(num), str(start), recent, country_code)
        soup = BeautifulSoup(requests.get(url).text, "html.parser")
        results = Google.scrape_search_result(soup)
        related_queries = Google.scrape_related(soup)

        raw_total_results = soup.find('div', attrs = {'class' : 'sd'}).string
        total_results = 0
        if raw_total_results is not None:
            for i in raw_total_results:
                try:
                    temp = int(i)
                    total_results = total_results * 10 + temp
                except:
                    continue

        temp = {'results' : results,
                'url' : url,
                'expected_num' : num,
                'received_num' : len(results),
                'start' : start,
                'search_engine': 'google',
                'related_queries' : related_queries,
                'total_results' : total_results,
                'country_code': country_code,
        }
        return temp
Example No. 25
def searchAndRepostBot():
    r = praw.Reddit(user_agent = AGENT)
    print("Logging in to Reddit...")
    try:
        r.login(USERNAME, PASSWORD)
    except:
        print("LOGIN FAILED")
        sys.exit()

    for SOURCE in SOURCES:
        subreddit = r.get_subreddit(SOURCE)
        repository = r.get_subreddit(REPOSITORY)
        print("Visiting Subreddit...(" + SOURCE + ")")
    
        submissions = subreddit.get_hot(limit=25)
        repositorySubmissions = subreddit.get_hot(limit=25)
    
        print("Parsing posts...")
    
        for submission in submissions:
            try:
                sbody = submission.selftext.lower()
                stitle = submission.title.lower()
    
                if any(key.lower() in sbody or key.lower() in stitle for key in KEYWORDS):
                    print("Result found: ")
                    print(submission.url)
                    print("Posting...")
                    r.submit(repository, "[X-Post " + SOURCE + "] " + submission.title, submission.url)
                    time.sleep(2)
            except AttributeError:
                    pass
    print("DONE")
Example No. 26
    def article(self, pageid=None, title=None):
        """ 
            Returns a specific article from Wikipedia, 
            given its pageid or its title.
            Downloads it if necessary
        """
        if pageid is None and title is None:
            raise Exception('Pageid and title can\'t be None at the same time')

        if pageid is None:
            d = self.db.articles.find_one({'title': title})

            if d is not None:
                return d # found it
        else:
            d = self.db.articles.find_one({'_id': pageid})

            if d is not None:
                return d # found it
            
        try:
            if not(pageid is None):
                page = wikipedia.page(pageid=pageid)
            else:
                page = wikipedia.page(title=title)

        except (
            wikipedia.exceptions.DisambiguationError,
            wikipedia.exceptions.PageError,
            wikipedia.exceptions.WikipediaException,
            requests.exceptions.RequestException,
            ValueError # error decoding JSON response
        ):
            return

        time.sleep(0.5)  # brief pause between Wikipedia requests

        # Even if we didn't find pageid or title, it still could be in the DB
        # since the title could have changed
        try:
            d = {
                '_id': int(page.pageid),
                'title': page.title,
                'content': page.content
            }
        except KeyboardInterrupt: # filter KeyboardInterrupt from here
            raise
        except Exception:
            return # can't add this entry

        self.db.articles.update_one(
            {'_id': d['_id']},
            {'$set': d},
            upsert=True
        )

        return d
Example No. 27
    def search(query, num=10, start=0, sleep=True, recent=None,domain=".com"):
        if sleep:
            wait(1)
        url = generate_url(query, str(num), str(start), recent, domain)
        soup = BeautifulSoup(requests.get(url).text, "html.parser")
        results = Google.scrape_search_result(soup)
        related_queries = Google.scrape_related(soup)

        raw_total_results = soup.find('div', attrs = {'class' : 'sd'}).string
        total_results = 0
        if raw_total_results is not None:
            for i in raw_total_results:
                try:
                    temp = int(i)
                    total_results = total_results * 10 + temp
                except:
                    continue

        temp = {'results' : results,
                'url' : url,
                'expected_num' : num,
                'received_num' : len(results),
                'start' : start,
                'search_engine': 'google' + domain,
                'related_queries' : related_queries,
                'total_results' : total_results,
        }
        return temp
Example No. 28
    def search(query, num=10, start=0, sleep=True, recent=None):
        if sleep:
            wait(1)
        url = generate_url(query, str(num), str(start), recent)
        soup = BeautifulSoup(requests.get(url).text, "html.parser")
        results = Google.scrape_search_result(soup)
        related_queries = Google.scrape_related(soup)

        raw_total_results = soup.find('div', attrs = {'class' : 'sd'}).string
        total_results = 0
        for i in raw_total_results:
            try:
                temp = int(i)
                total_results = total_results * 10 + temp
            except:
                continue

        temp = {'results' : results,
                'url' : url,
                'num' : num,
                'start' : start,
                'search_engine': 'google',
                'related_queries' : related_queries,
                'total_results' : total_results,
        }
        return temp
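The digit-accumulation loop above simply rebuilds the integer from the digits of the "About 1,234,000 results" string in order; an equivalent standalone check, for illustration only:

import re

raw = "About 1,234,000 results"  # example string, not scraped here
total_results = int(re.sub(r"\D", "", raw) or 0)
assert total_results == 1234000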
Example No. 29
    def start(self, previous=None):
        """Will create the Kubernetes pod and hence start the action.

        Parameters
        ----------
        previous: dict
            Data from previous Action.

        Returns
        -------
        dict
            Data from previous Action appended with data from this Action.
        """
        target_dir = previous['rundir']
        if self.input_file_names is None:
            self.input_file_names = [previous['encoder_filename']]
        if self.output_file_name is None:
            self.output_file_name = previous['decoder_filename']
        file_names = [(os.path.join(target_dir, input_file_name), str(uuid.uuid4()))
                      for input_file_name in self.input_file_names]
        self.config_names = file_names
        dep = copy.deepcopy(self.body)
        dep['metadata']['name'] = str(uuid.uuid4())
        self.create_config_maps(self.config_names)
        self.create_volumes(self.config_names, dep)
        self.core_v1.create_namespaced_pod(body=dep, namespace="default")
        self._started = True
        self.result = previous
        while not self.finished():
            time.sleep(5)
        self.finalise()
        return previous
Example No. 30
def initLoop():
    # TODO IRC

    while doLoop == True:
        for plugin in pluginList:
            getattr(pluginList[plugin], "tick")()
        time.sleep(1 / 30)  # roughly 30 ticks per second
Example No. 31
def cozmo_program(robot: cozmo.robot.Robot):
    time.sleep(2)

    robot.play_anim_trigger(cozmo.anim.Triggers.CodeLabStaring).wait_for_completed()  # angry stare
    time.sleep(5)
    robot.play_anim_trigger(cozmo.anim.Triggers.CodeLabStaring).wait_for_completed()
    time.sleep(5)
Example No. 32
 def flashPattern(self, pattern):
     for flash in pattern:
         print flash
         LED = self.LED[flash]
         LED.turnOn()
         wait(0.3)
         LED.turnOff()
         wait(0.2)
Example No. 33
def run(self):
    while True:
        if self.num != self.end:
            self.num += 1
            self.q.put(self.num)
            time.sleep(5)
        else:
            break
Example No. 34
def vertir():
    try:
        GPIO.output(6, GPIO.LOW)  # time.sleep(3600) -------------- real value
        time.sleep(350)
        GPIO.output(6, GPIO.HIGH)  # time.sleep(3600) -------------- real value
        tkMessageBox.showinfo("Message", "Poured successfully")
    except KeyboardInterrupt:
        print "quit"
Example No. 35
 def run(self):
     while True:
         if self.num != end:
             self.num += 1
             print "Outputting: ", str(self.num)
             time.sleep(5)
         else:
             break
Example No. 36
 def Kill(self):
     print ">>>Killing Drone<<<"
     self.Crazyflie.commander.send_setpoint(0, 0, 0, 0)
     # Make sure that the last packet leaves before the link is closed
     # since the message queue is not flushed before closing
     self.Running = False
     wait(0.1)
     self.Crazyflie.close_link()
Example No. 37
    def start_app(self):
        # do something when the service starts
        print "Starting app..."
        # @TODO: insert whatever the app should do to start
        while self.running:
            self.readText()
            time.sleep(2)

        self.logger.info("Started!")
Example No. 38
 def scroll_top(self, high):
     """
     向下滚动浏览器滚动条
     :param high: 滚动条距离顶部的距离
     """
     js = 'document.body.scrollTop=' + str(high)
     self.driver.execute_script(js)
     wait(2)
     log.info('Scrolled the browser scroll bar down by ' + str(high))
Example No. 39
def pan_GPIO(servo, servo_pwm, servo_dc_limits):
    GPIO_set(servo_pwm, 0)
    for i in range(servo_dc_limits[0], servo_dc_limits[1] + 1, 1):
        servo.ChangeDutyCycle(i)
        time.sleep(0.01)
    for i in range(servo_dc_limits[1], servo_dc_limits[0] - 1, -1):
        servo.ChangeDutyCycle(i)
        time.sleep(0.01)
    GPIO_clear(servo)
Example No. 40
 def scroll_page(self):
     """
     浏览器上下滚动条向下滚动一页
     :return:
     """
     js = 'return(document.body.scrollTop=document.body.scrollTop+document.documentElement.clientHeight-5);'
     self.driver.execute_script(js)
     wait(2)
     log.info('Scrolled down one page')
Example No. 41
def Speak(Text):
    tts = gTTS(text=Text, lang='en')
    tts.save("/Sounds/Voice.mp3")
    pygame.mixer.init()
    pygame.mixer.music.load("/Sounds/Voice.mp3")
    pygame.mixer.music.play()
    while pygame.mixer.music.get_busy() == True:
        continue
    wait(0.1)
Example No. 42
	def on_error(self, status_code):
		if status_code == 420:
			self.retryCount += 1
			if (self.retryCount > self.retryMax):
				return False
			else:
				time.sleep(self.retryTime)
				self.retryTime *= 2
				return True
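The handler above applies exponential backoff when Twitter returns HTTP 420 (rate limiting): each retry doubles retryTime. Assuming an initial retryTime of 60 seconds and a retryMax of 5 (neither value is shown in the example), the successive waits would be:

retry_time = 60  # assumed starting value, not part of the original
delays = []
for _ in range(5):
    delays.append(retry_time)
    retry_time *= 2
print(delays)  # [60, 120, 240, 480, 960]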
Example No. 43
def get_market_history(priceMarket, ticks):
    saved = 0
    interval = 1
    nohistory = 0
    count = 0
    historystat = haasomeClient.marketDataApi.get_history_from_market(
        priceMarket, interval, ticks)
    if historystat.errorCode.value == 100:
        print(historystat.errorCode.name, historystat.errorMessage,
              priceMarket.primaryCurrency, priceMarket.secondaryCurrency)
        print(len(historystat.result))
        if len(historystat.result) > 0:
            marketdata = historystat.result
            filename = str(EnumPriceSource(
                priceMarket.priceSource).name) + '_' + str(
                    priceMarket.primaryCurrency) + '_' + str(
                        priceMarket.secondaryCurrency) + '_' + str(
                            len(marketdata)) + '_' + str(interval) + '.csv'
            currentfile = Path(str(filename))
            currentfile.touch()
            print(filename, 'has been saved!')
            with open(filename, 'w', newline='') as csvfile:
                fieldnames = [
                    'timeStamp', 'unixTimeStamp', 'open', 'highValue',
                    'lowValue', 'close', 'volume', 'currentBuyValue',
                    'currentSellValue'
                ]
                csvwriter = csv.DictWriter(csvfile, fieldnames=fieldnames)
                csvwriter.writeheader()
                for tick in marketdata:
                    csvwriter.writerow({
                        'timeStamp':
                        str(tick.timeStamp),
                        'unixTimeStamp':
                        str(tick.unixTimeStamp),
                        'open':
                        float(tick.open),
                        'highValue':
                        float(tick.highValue),
                        'lowValue':
                        float(tick.lowValue),
                        'close':
                        float(tick.close),
                        'volume':
                        float(tick.volume),
                        'currentBuyValue':
                        str(tick.currentBuyValue),
                        'currentSellValue':
                        float(tick.currentSellValue)
                    })
                    saved += 1
                    print(saved, 'ticks has been saved')
    else:
        print(historystat.errorCode)
        historystat = haasomeClient.marketDataApi.get_history_from_market(
            priceMarket, interval, ticks)
        time.sleep(1)  # brief pause before retrying (delay assumed; original passed no value)
Example No. 44
def urlscan_data():
    search_url = "example.com"
    wait(5)
    info("Requested url - {}".format(search_url))
    uid = submit(search_url)
    info("Submited search with id {}".format(uid), pre=True)
    r = results(uid, wait_time=120)
    newest = search_newest(search_url)
    yield search_url, uid, r, newest
Example No. 45
        def back(page=2):

            if page > 0:
                wait(0.5)
                click_previous()
                page -=1
                test_page_and_buttons(page)

            return page
Example No. 46
        def forward(page=0):

            if page < 2:
                wait(0.5)
                click_next()
                page +=1
                test_page_and_buttons(page)

            return page
Example No. 47
    def article(self, pageid=None, title=None):
        """ 
            Returns a specific article from Wikipedia, 
            given its pageid or its title.
            Downloads it if necessary
        """
        if pageid is None and title is None:
            raise Exception('Pageid and title can\'t be None at the same time')

        if pageid is None:
            d = self.db.articles.find_one({'title': title})

            if d is not None:
                return d  # found it
        else:
            d = self.db.articles.find_one({'_id': pageid})

            if d is not None:
                return d  # found it

        try:
            if not (pageid is None):
                page = wikipedia.page(pageid=pageid)
            else:
                page = wikipedia.page(title=title)

        except (
                wikipedia.exceptions.DisambiguationError,
                wikipedia.exceptions.PageError,
                wikipedia.exceptions.WikipediaException,
                requests.exceptions.RequestException,
                ValueError  # error decoding JSON response
        ):
            return

        time.sleep(0.5)  # brief pause between Wikipedia requests

        # Even if we didn't find pageid or title, it still could be in the DB
        # since the title could have changed
        try:
            d = {
                '_id': int(page.pageid),
                'title': page.title,
                'content': page.content
            }
        except KeyboardInterrupt:  # filter KeyboardInterrupt from here
            raise
        except Exception:
            return  # can't add this entry

        self.db.articles.update_one({'_id': d['_id']}, {'$set': d},
                                    upsert=True)

        return d
Example No. 48
 def on_error(self, status_code):
     if status_code == 420:
         self.retryCount += 1
         if (self.retryCount > self.retryMax):
             return False
         else:
             time.sleep(self.retryTime)
             self.retryTime *= 2
             return True
Example No. 49
 def wireCond(self):
     
     self.headers = ['Gate(V)', 'x-Value', 'y-Value',  'x-Value-2', 'y-Value-2', 'Temperature (K)', 'Conductance(2e^2/h)']
     self.emit(SIGNAL("list(PyQt_PyObject)"), self.headers)
     
     stri = self.list2tabdel(self.headers)
     self.data_file.write(stri)
     
     stepTime = 0.5
     max_gate = -2
     stepsize = 0.005
     windowlower = -1.5
     windowupper = -2.0
     windowstep = 0.005
     gateVoltage = 0.0
     
     while gateVoltage > max_gate:
         if self.stop == True:
             break
         
         self.gate.set_voltage(gateVoltage)
         self.readCondData(gateVoltage)
     
         if (gateVoltage <= windowlower and gateVoltage >= windowupper):
             gateVoltage = gateVoltage - windowstep
         else:
             gateVoltage = gateVoltage - stepsize  
             
         time.sleep(stepTime)
     
     while gateVoltage < 0:
         if self.stop == True:
             break
         
         self.gate.set_voltage(gateVoltage)
         self.readCondData(gateVoltage)
     
         if (gateVoltage <= windowlower and gateVoltage >= windowupper):
             gateVoltage = gateVoltage + windowstep
         else:
             gateVoltage = gateVoltage + stepsize  
             
         time.sleep(stepTime)
         
     # Loop to slowly reduce gate
     
     if self.stop == True:        
         while gateVoltage < 0:
             gateVoltage += 0.001
             self.gate.set_voltage(gateVoltage)
             # 0.1 delay corresponds to 1:40 per volt (assuming 0.001 step)
             time.sleep(0.2)
             
     self.gate.set_voltage(0)
     
     self.data_file.close()
Example No. 50
 def alarm(self, time_point, identifier):
     trigger = True
     while trigger:
         if datetime.now() > time_point:
             print('ALARM: %s || %s' % (identifier, datetime.now()))
             trigger = False
             return identifier
         else:
             print('%s ||| %s' % (identifier, datetime.now()))
         wait(30)
Example No. 51
	def runSubProcess(self,cmd):
		print cmd	
		proc = subprocess.Popen(cmd, shell=True, stdout=None)
		stdout_value = proc.communicate()[0]
		while proc.returncode is None:
			time.sleep(60)
			stdout_value = proc.communicate()[0]
			print stdout_value
		if proc.returncode != 0:
			print "EXIT WITH ERROR"
			sys.exit(1)
Example No. 52
 def _close_device_comms(self):
     """
     Close ethernet device comms and log with status file.
     """
     if self.device_sock:
         # -self.device_sock.shutdown(socket.SHUT_RDWR)
         self.device_sock.close()
         self.device_sock = None
         time.sleep(1)
         self.statusfile.write("_close_device_comms: device connection closed.\n")
         self.statusfile.flush()
Example No. 53
 def drawTree(self):
     #this will create a screen and draw a tree
     pygame.init()
     
     screen = pygame.display.set_mode((1050, 650))
     #screen.fill((255,255,255))
     
     self.drawSelf(screen, 1)
     pygame.display.update()
     pygame.image.save(screen, "BTNShot.png")
     time.wait(5000)
     pygame.display.quit()
Example No. 54
    def unhighlight(self):
        """ Creates thread to wait 0.2 seconds before removing any highlights from the text """

        # Hold highlight
        
        wait(0.2)

        # Remove labels

        self.text.tag_delete("code")

        return
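The docstring above mentions running this on a thread so the 0.2 s hold does not block the editor; a minimal sketch of how such a call might be dispatched (the surrounding class is not shown, so this is illustrative only):

import threading

def unhighlight_async(editor):
    # Run unhighlight on a daemon thread so the UI thread is not blocked.
    threading.Thread(target=editor.unhighlight, daemon=True).start()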
Example No. 55
def getval(parent_conn, SIs, nodes):
	try:
		while True:
			toprint = parent_conn.recv()
			if toprint==None:
				time.sleep(2)
			elif toprint.get('msg')=='END_OF_Q':
				print('Reached end of queue')
				time.sleep(5)
			else:
				#SIs = {'components':[], 'active':{}, 'standby':{}, 'cpu_usage':0}
				if toprint.get('component_info') != None:
					if toprint.get('from') in nodes:
						nodes.remove(toprint.get('from'))
					component_info = toprint.get('component_info')
					for component in component_info:
						SI = re.findall(r'(?<=safSi=)(.+)(?=,)', component_info[component]['CSI'])[0]
						if SI not in SIs:
							SIs[SI] = {'active': [], 'standby': [], 'zombie':[], 'cpu_usage':0.0}
						else:
							if component_info[component]['HAState']=='Active' and component not in SIs[SI]['active']:
								SIs[SI]['active'].append(component)
							if component_info[component]['HAState']=='Standby' and component not in SIs[SI]['standby']:
								SIs[SI]['standby'].append(component)
							elif component not in SIs[SI]['active'] and component not in SIs[SI]['standby']:
								SIs[SI]['zombie'].append(component)
							if component_info.get(component).get('cpu_usage') is None:
								if component in SIs[SI]['active']:
									SIs[SI]['active'].remove(component)
								elif component in SIs[SI]['standby']:
									SIs[SI]['standby'].remove(component)
									
							SIs[SI]['cpu_usage'] += float(component_info[component]['cpu_usage'])
					if len(nodes)==0:
						for SI in SIs:
							print('SI name : ' + SI)
							print('Total CPU usage : ' + str(SIs[SI]['cpu_usage']))
							print('Active components : ')
							print('\t'+str(SIs[SI]['active']))
							print('Standby components : ') 
							print('\t'+str(SIs[SI]['standby']))
							'''
							print('Zombie components : ')
							print('\t'+str(SIs[SI]['zombie']))
							'''
						print('\n\n\n\n\n\n\n\n\n\n\n\n')
						SIs = {}
						nodes = ['node1','node2']
	
	except KeyboardInterrupt:
		print("\n'KeyboardInterrupt' received. Stopping server-reader:%r" %(multiprocessing.current_process().name))
	except:
		raise
Example No. 56
def test_dt():

	allowable_error = 0.01

	test_times = [ 1,0.5,0.33,2 ]

	tclock = Clock()

	for time_wait in test_times:
		tclock.update()
		time.sleep(time_wait)
		tclock.update()
		assert math.fabs(tclock.dt - time_wait) < allowable_error, "clock DT test failed at %r seconds" % time_wait
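The Clock class is not shown; a minimal sketch of the interface the test assumes, where update() records the time elapsed since the previous update() in dt (illustrative only):

import time

class Clock:
    """Minimal stand-in for the Clock interface assumed by test_dt."""
    def __init__(self):
        self._last = time.monotonic()
        self.dt = 0.0

    def update(self):
        now = time.monotonic()
        self.dt = now - self._last
        self._last = now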
Example No. 57
 def _run(self):
     while True:
         for i in range(2):
             t = ThreadGetHttpSqs()
             #t.setDaemon(True)
             t.start()
         
         for i in range(2):
             b = ThreadInsertDB()
             #t.setDaemon(True)
             b.start()
         
         time.sleep(1)  # pause between batches (duration assumed; original time.wait() had no argument)
Example No. 58
 def read(self):
     chunk = self._take_a_bite()
     # Start from a copy with an empty entry list so the first chunk is not
     # extended with itself below.
     feed_buffer = dict(chunk)
     feed_buffer['entries'] = []
     while chunk['entries']:
         if self.fetch_limit != 0 and self.cursor_at >= self.fetch_limit:
             break
         feed_buffer['entries'].extend(chunk['entries'])
         logging.info("Waiting for %d seconds..." % self.WAIT_BETWEEN_BITES)
         wait(self.WAIT_BETWEEN_BITES)
         logging.info("Fetching data...")
         chunk = self._take_a_bite()
         logging.info("%s entries collected." % self.cursor_at)
     return dumps(feed_buffer)