def test_expired_confirmation_token(self):
    """A confirmation token must be rejected once its lifetime has elapsed."""
    user = User(password="******")
    db.session.add(user)
    db.session.commit()
    token = user.generate_confirmation_token(1)  # token valid for 1 second
    time.sleep(2)  # wait past the expiry window
    self.assertFalse(user.confirm(token))
def start_automatic(self):
    """Run the closed-loop control cycle while the controller stays in AUTO mode.

    Each iteration: read the sensor, run the value through the PID
    controller, map the PID output to a PWM duty cycle, drive the LEDs
    and append one CSV-style line to ``self.output_file``.
    """
    start_time_ref = time.time()  # reference for the elapsed-time column
    while self.mode == AUTO:
        try:
            # start_time = time.time()
            # wait_time = 1.0
            # while wait_time > 0:
            #     wait_time = CONST_TIME + start_time - time.time()
            #     time.sleep(0.1)
            measured_value = int(self.sensor_reader.handle_reading(self.sensor_reader.readline())[1])
            pid_value = self.pid.update(measured_value)
            pwm_value = LEDModel.get_pwm(pid_value)
            # CSV row: elapsed seconds, raw reading, PID output, PWM duty
            output = "{0}, {1}, {2}, {3}".format(time.time() - start_time_ref, measured_value, pid_value, pwm_value)
            print output
            self.led.update_all(pwm_value)
            try:
                self.output_file.write(output + "\n")
                # self.database.insert_into_control_table(measured_value, pid_value)
            except:
                # Logging is best-effort; a failed write must not stop control.
                pass
        except Exception as e:
            print e
            # Exception probably caused by a bad sensor read; pause briefly
            # so the reader has time to produce a fresh value.
            time.sleep(0.1)
            pass
def main(): # Domain Update URL update_dns_url = domain_update_url + domain_update_php + "?" + domain_update_key # Place hold for IP Address old_ip = "" while True: try: # print 'Checking current IP...' get_ip = urllib.urlopen(check_ip_host).read() my_ip = re.findall(r"\d{1,3}\.\d{1,3}\.\d{1,3}.\d{1,3}", get_ip) # print 'Current IP Address is:', str(my_ip[0]) if str(my_ip[0]) != old_ip: # print 'IP changed to...', str(my_ip[0]), ': Processing update.' update_dns = urllib.urlopen(update_dns_url).read() print update_dns print "IP Updated @", str(datetime.now()), ":", str(my_ip[0]) except: # Catch exception for whatever reason print "IPUpdater ERROR!" # Update place hold IP Address and Sleep # print 'Sleeping for', sleep_time, 'mins' old_ip = str(my_ip[0]) time.sleep(sleep_time * 60)
def get_image_code(self,image_base64_str):
    """Recognize a captcha image (base64 string) via the Chaojiying service.

    Makes up to ``times`` (from config SYS/times) recognition attempts,
    2 s apart; returns the recognized text, or "" when the input is empty
    or all attempts fail.  Any unexpected error is logged and None is
    returned implicitly.
    """
    self.logger.mccWriteLog("开始识别验证码")
    try:
        # Try up to `times` recognitions; return "" if every attempt fails.
        chaojiying = Chaojiying_Client(self.accounts.get("username"),md5(self.accounts.get("password")).hexdigest(),self.accounts.get("soft_id"))
        times = self.cfReader.readConfig("SYS","times")
        result = ""
        if image_base64_str is not None and len(image_base64_str)>0:
            start_time = datetime.now()
            # Strip the configured prefix/extra characters from the base64 payload
            replace_str = image_base64_str.replace(self.cfReader.readConfig("IMAGEREG","reg"), "")
            self.logger.mccWriteLog("验证码替换字符串规则{}".format(self.cfReader.readConfig("IMAGEREG","reg")))
            self.logger.mccWriteLog("验证码识别前为{}".format(replace_str))
            content = base64.b64decode(replace_str)
            for i in range(int(times)):
                self.logger.mccWriteLog("开始识别验证码{}次".format(str(i+1)))
                result_dict = chaojiying.PostPic(content, self.cfReader.readConfig("IMAGETYPE","code"))
                self.logger.mccWriteLog("识别验证码结果{}".format(str(result_dict)))
                # Chaojiying reports success via err_str == 'OK'
                tag = result_dict.get("err_str") == u'OK'
                if tag:
                    result = result_dict.get('pic_str')
                    end_time= datetime.now()
                    diff = (end_time - start_time).seconds
                    self.logger.mccWriteLog("{}次识别验证码成功:{}\n耗时{}秒.".format(str(i),str(result),str(diff)))
                    break
                else:
                    self.logger.mccError("{}次识别验证码失败:{}".format(str(i+1), str(result)))
                    # brief pause before the next attempt
                    time.sleep(2)
        return result
    except Exception,e:
        self.logger.mccError("验证码识别错误:"+str(e))
def request_address_info(address):
    """Geocode *address* (restricted to CA, US) via the Google Geocoding API.

    Retries up to 3 extra times (pausing 1 s on OVER_QUERY_LIMIT) and
    returns a dict with 'formatted_address' and 'location' lat/lng; the
    fields stay None when geocoding fails.
    """
    res_address_info = {'formatted_address': None, 'location': {'lat': None, 'lng': None}}
    GEOCODE_BASE_URL = 'https://maps.googleapis.com/maps/api/geocode/json'
    RESTRICTION = '&components=administrative_area:CA|country:US'
    # NOTE(review): hard-coded API key checked into source — consider moving
    # it to configuration / environment.
    KEY = 'AIzaSyAv11A9OptZ5TX-Pqr3egpbddHQzQ_yULU'
    # crude URL encoding: every non-word character becomes '+'
    encode_string = "address=" + re.sub(r'\W', '+', address)
    url = GEOCODE_BASE_URL + '?' + encode_string + RESTRICTION + '&key=' + KEY
    result = simplejson.load(urllib2.urlopen(url))
    attempt = 0
    while result['status'] != 'OK' and attempt < 3:
        result = simplejson.load(urllib2.urlopen(url))
        attempt += 1
        if result['status'] == 'OVER_QUERY_LIMIT':
            # back off briefly when rate limited
            time.sleep(1)
            continue
    if result['status'] == 'OK':
        res_address_info['formatted_address'] = result['results'][0]['formatted_address']
        res_address_info['location']['lat'] = format(result['results'][0]['geometry']['location']['lat'])
        res_address_info['location']['lng'] = format(result['results'][0]['geometry']['location']['lng'])
        return res_address_info
    else:
        print_error("unexpected formatted address %s" % result)
        return res_address_info
def loop(cls, gtfs_source="gtfs.zip", debug=False, peroid=30, gtfs_check_peroid=1800, target="gtfs-rt.pb", for_humans=False):
    """Continuously regenerate the GTFS-Realtime file every `peroid` seconds.

    Checks for a fresh static GTFS every `gtfs_check_peroid` seconds and
    reloads it when available.  The "\\033[1A\\033[K" prefixes rewrite the
    previous terminal line for a live status display.  Note the 'peroid'
    spelling is part of the public signature and must stay.
    """
    self = cls(gtfs_source, debug)
    rt_update = datetime.min
    # NOTE(review): gtfs_update is never refreshed after a reload, so the
    # availability check fires on every later iteration — confirm intended.
    gtfs_update = datetime.today()
    try:
        while True:
            rt_update = datetime.today()
            if (datetime.today() - gtfs_update).total_seconds() > gtfs_check_peroid:
                print("\033[1A\033[K" + "Checking if new GTFS is available")
                if self.gtfs.new_gtfs_available():
                    print("\033[1A\033[K" + "Attepmpting to load GTFS")
                    self.gtfs.get_gtfs()
                    self.gtfs.load_gtfs()
            print("\033[1A\033[K" + "Creating GTFS-Realtime")
            self.create(target, for_humans)
            # Sleep by `peroid` seconds minus what it took to create GTFS-RT file
            sleep_time = peroid - (datetime.today() - rt_update).total_seconds()
            if sleep_time < 0:
                # creation overran the period: fall back to a short fixed nap
                sleep_time = 15
            print("\033[1A\033[K" + "Sleeping until " + (datetime.today() + timedelta(seconds=sleep_time)).strftime("%H:%M:%S"),
                  end="\n\n"
            )
            time.sleep(sleep_time)
    finally:
        # always release the GTFS file handles, even on Ctrl-C
        self.gtfs.gtfs.close()
        self.gtfs.arch.close()
def get_metric(metric, instance_id, start, stop, region='us-west-2', backoff=30):
    """Return the CloudWatch metric statistics for a particular instance.

    metric: str — "namespace/metric_name" (split on the last '/')
    instance_id: str — EC2 instance ID
    start, stop: ISO-format UTC timestamps bounding the query window
    region: str — AWS region
    backoff: int — seconds to wait before retrying after a server error
    :return: result of get_metric_statistics (5-minute averages)
    """
    cw = boto.ec2.cloudwatch.connect_to_region(region)
    namespace, metric_name = metric.rsplit('/', 1)
    logging.info('Start: {}\tStop: {}'.format(start, stop))
    try:
        metric_object = cw.get_metric_statistics(namespace=namespace,
                                                 metric_name=metric_name,
                                                 dimensions={'InstanceId': instance_id},
                                                 start_time=start,
                                                 end_time=stop,
                                                 period=300,
                                                 statistics=['Average'])
    except BotoServerError:
        logging.info('Failed to get metric due to BotoServerError, retrying in {} seconds'.format(backoff))
        time.sleep(backoff)
        # Return the retry's result and keep the caller's region.  The
        # original discarded the recursive result (leaving metric_object
        # None) and then executed a bare `raise` outside any except clause,
        # which itself raises RuntimeError.
        return get_metric(metric, instance_id, start, stop,
                          region=region, backoff=backoff + 10)
    return metric_object
def main():
    """Poll the public IP forever and push a dynamic-DNS update on change.

    Every ``sleep_time`` minutes, fetch the external address from
    ``check_ip_host``; when it differs from the last address we pushed,
    hit the DNS update URL.
    """
    # Domain Update URL
    update_dns_url = domain_update_url + domain_update_php + "?" + domain_update_key
    # Last IP address successfully pushed to the DNS service
    old_ip = ''
    while True:
        try:
            get_ip = requests.get(check_ip_host)
            # Final dot is now escaped; the original bare '.' matched any char.
            my_ip = re.findall(r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}", get_ip.text)
            if str(my_ip[0]) != old_ip:
                update_dns = requests.get(update_dns_url)
                print(update_dns.text)
                print('IP Updated @', str(datetime.now()), ':', str(my_ip[0]))
            # Record the address only after we actually obtained one.  The
            # original assigned old_ip outside the try, raising NameError if
            # the very first request failed.
            old_ip = str(my_ip[0])
        except Exception:
            # Deliberate best-effort: keep the updater alive no matter what.
            print('IPUpdater ERROR!')
        time.sleep(sleep_time * 60)
def getLinkedInInfo(name, url):
    """Scrape organisations and date ranges from a LinkedIn profile page.

    Opens *url* in Firefox, pairs each "date-range" element with the
    matching "item-subtitle", dumps the pairs to
    ``PATH/<name>_linkedin.json`` and returns name + repr of the pairs.
    """
    driver = webdriver.Firefox(capabilities=firefox_capabilities)
    if DEBUG:
        print driver
    driver.get(url)
    time.sleep(2)  # let the profile page render
    title = driver.title  # NOTE(review): assigned but never used
    orgs_worklife = driver.find_elements_by_class_name("item-subtitle")
    dateranges = driver.find_elements_by_class_name("date-range")
    orgsAndCompanies = []
    for i in range(len(dateranges)):
        try:
            daterange = dateranges[i]
            # strip non-ASCII characters from the scraped text
            one_daterange = daterange.text.encode('ascii', 'ignore').decode('ascii')
            # print i
            one_org = orgs_worklife[i].text.encode('ascii', 'ignore').decode('ascii')
            print one_org
            orgsAndCompanies.append((one_org, one_daterange))
        except IndexError as e:
            # fewer subtitles than date ranges: stop pairing
            break
    driver.quit()
    time.sleep(2)  # give the browser time to shut down
    f = open(PATH+'{}_linkedin.json'.format(name.encode("utf-8")), 'w')
    json.dump(orgsAndCompanies, f)
    f.close()
    return name + str(orgsAndCompanies)
def _call_api(self, url, **data):
    """
    Call the weewar API with authentication (if specified).

    POSTs *data* when given, otherwise GETs.  Successive calls are
    throttled to at most one per ``self.throttle``; HTTP error codes are
    mapped onto the module's exception types and the XML payload is
    returned parsed via ``objectify``.
    """
    headers = {
        'Content-Type': 'application/xml',
        'Accept': 'application/xml',
        'User-Agent': 'python-weewar/%s' % __version__,
    }
    if data:
        req = requests.post(
            self.HOST + url, data,
            auth=(self.username, self.key),
            headers=headers)
    else:
        req = requests.get(self.HOST + url, headers=headers)
    # Be nice and wait for some time
    # before submitting the next request
    wait = datetime.now() - self.last_call
    if wait < self.throttle:
        # Sleep only the *remaining* part of the throttle window.
        # The original passed the timedelta itself to time.sleep(),
        # which raises TypeError — sleep() wants float seconds.
        time.sleep((self.throttle - wait).total_seconds())
    self.last_call = datetime.now()
    if req.status_code == 401:
        raise AuthenticationError
    elif req.status_code == 404:
        raise NotFound
    elif req.status_code == 500:
        raise ServerError
    content = req.content
    parsed = objectify.fromstring(content)
    return parsed
def test_delta(self):
    """Smoke test: sleep ~1 s, print the measured wall-clock delta and sys.path."""
    import time
    before = time.time()
    time.sleep(1)
    after = time.time()
    print "////" + "%.2f seconds" % (after - before)
    print sys.path
def sendHeartbeat(self):
    """Send a 'Heartbeat' message to the currently selected monitor every 2 s, forever."""
    while True:
        time.sleep(2)  # heartbeat interval
        monitor_addr = self.monitors[getMonitor()]
        host, port = monitor_addr.partition(":")[::2]
        mon = Monitor(host, int(port), 'False')
        mon.senddata('Heartbeat')
def block_until_complete(self, delay=15):
    """Poll every *delay* seconds until the job stops running, logging the cumulative wait."""
    assert isinstance(delay, int)
    polls = 0
    while self.is_running():
        log.info("Waited %is for %s #%s to complete"
                 % (delay * polls, self.job.id(), self.id()))
        time.sleep(delay)
        polls += 1
def click_logout_button(self):
    '''Log the current user out via the header drop-down menu.'''
    header = self.browser.find_element(*self.HEADER_DIV_LOC)
    header.find_element(*self.LOGIN_P_LOC).click()
    time.sleep(1)  # allow the drop-down to render
    header.find_element(*self.LOGOUT_LINK_LOC).click()
def test_ping(self):
    """ping() must advance the user's last_seen timestamp."""
    user = User(password="******")
    db.session.add(user)
    db.session.commit()
    time.sleep(2)  # ensure a measurable gap after creation
    previous = user.last_seen
    user.ping()
    self.assertTrue(user.last_seen > previous)
def track_weight(browser, weight):
    """Open the 'me' page and record *weight*; return 1 on success, 0 otherwise."""
    page = MainPage(browser)
    page.click_me_link()
    time.sleep(2)  # wait for the page transition
    return 1 if page.update_weight(weight) else 0
def CallWrapper_Exotel(id, role, type, demo_phone=None):
    """Send a pending-services SMS and place an Exotel voice call for a health worker.

    id: ANM (CareProvider) or ASHA (CareGiver) primary key
    role: 'ANM' selects CareProvider, anything else selects CareGiver
    type: service type, forwarded to services_processor and the callback
    demo_phone: optional override of the worker's phone number
    Returns the ExotelCallStatus row, None on call failure, or an error
    string when the worker does not exist.
    """
    timezone = 'Asia/Kolkata'
    tz = pytz.timezone(timezone)
    today = utcnow_aware().replace(tzinfo=tz)
    try:
        if role == 'ANM':
            anm = CareProvider.objects.get(id=int(id))
            phone = anm.phone
            benefs = Beneficiary.objects.filter(careprovider=anm)
            count, string = services_processor(benefs=benefs, type=type, mode=1)
        else:
            asha = CareGiver.objects.get(id=int(id))
            phone = asha.phone
            benefs = Beneficiary.objects.filter(caregiver=asha)
            count, string = services_processor(benefs=benefs, type=type, mode=1)
    except:
        print 'd'
        return 'ANM/ASHA does not exist'
    subcenter = benefs[0].subcenter if benefs else None
    string = unicode(string)
    # Keep the SMS body short; truncate and append "etc." in Hindi.
    if len(string) > 20:
        string = string[0:20] + u"आदि "
    sms_text = u"आपके क्षेत्र में शेष सर्विसेज- \n"+string+u"प्रेषक,\n मुख्य चिकित्साधिकारी, झाँसी"
    if demo_phone:
        phone = demo_phone
    # Send the SMS (best-effort: failures are silently ignored).
    sms_text_hexlified = toHex(sms_text)
    #print sms_text_hexlified
    try:
        SendSMSUnicode(recNum=phone, msgtxt=sms_text_hexlified, senderId='CMOJHS')
    except:
        pass
    # Place the Exotel call; CustomField round-trips id/role/type to the callback.
    custom_field = str(id)+"_"+role+"_"+type
    callback_url = "http://niramayh.com/subcenter/exotel/update/"
    import time
    time.sleep(.33)  # brief pause between SMS and the call request
    response = connect_customer_to_app(customer_no=phone, callerid="01130017630", CustomField=custom_field, callback_url=callback_url)
    result = response.text
    j_result = json.loads(result)
    try:
        call = j_result.get('Call')
        sid = call.get('Sid') if call else None
        status = call.get('Status') if call else None
        if not sid:
            return None
    except:
        return None
    # Record (or fetch) the call status row keyed by the Exotel SID.
    try:
        marker = ExotelCallStatus.objects.get(sid=sid)
    except ObjectDoesNotExist:
        marker = ExotelCallStatus.objects.create(sid=sid, status=status, uid = int(id),\
            role=role, mode=type, subcenter = subcenter, date_initiated = today.date(), dt_updated = utcnow_aware())
    return marker
def follow(thefile):
    """Tail *thefile* like `tail -f`: yield each new line as it is appended.

    Seeks to end-of-file first, then polls every 0.1 s; the generator
    terminates when ``universe.killed`` becomes true while no data is
    pending.
    """
    thefile.seek(0, 2)  # jump to the end of the file
    while True:
        line = thefile.readline().decode('utf-8')
        if line:
            yield line
        else:
            time.sleep(0.1)  # brief pause before polling again
            if universe.killed:
                break
def garage_main():
    """Daemon loop: watch the garage-door GPIO pins and send notifications.

    Every 10 s it asks gpiod for the closed/open sensor pins, classifies
    the door state, and notifies when the door has been open for over an
    hour or in an invalid/undetermined state for over a minute.
    `notified` encodes what was last reported: 0 none, 1 open, 2 invalid.
    """
    syslog.syslog("garaged: starting garage_main")
    closedtime = datetime.now()   # last time the door was seen closed
    validtime = closedtime        # last time a sensible state was seen
    notified = 0
    while 1:
        time.sleep(10)
        # Open socket to communicate with gpiod, and determine door state
        # TODO: handle collisions with garage door control web app
        s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        sockfile = "/var/run/gpiod.sock"
        s.connect(sockfile)
        doorclosed = gpio(s,"input 24")  # closed-position reed switch
        dooropen = gpio(s,"input 26")    # open-position reed switch
        s.close()
        # Interpret door state: both switches active is physically impossible
        if doorclosed == "true":
            if dooropen == "true":
                state = "invalid"
            else:
                state = "closed"
                closedtime = datetime.now()
        else:
            if dooropen == "true":
                state = "open"
            else:
                # door is between the two switches (moving, or sensor fault)
                state = "undetermined"
        # Generate notifications as appropriate
        if state == "closed":
            closedtime = datetime.now()
            validtime = closedtime
            if notified > 0:
                # door recovered: announce the closure once
                notify(state, closedtime)
            notified = 0
        elif state == "open":
            validtime = datetime.now()
            if datetime.now()-closedtime > timedelta(hours=1) and notified != 1:
                notify(state, closedtime)
                notified = 1
        else:
            # invalid/undetermined for more than a minute: likely sensor fault
            if datetime.now()-validtime > timedelta(seconds=60) and notified != 2:
                notify(state, validtime)
                notified = 2
def get_url():
    """Click the registration link in the login dialog and return the resulting URL.

    Returns None when the link is missing or the click fails.
    NOTE(review): the signature takes no arguments yet the body reads
    ``self`` — this only works as a nested function closing over an
    enclosing method's ``self``; confirm at the definition site.
    """
    try:
        a_obj = self.driver.find_element_by_xpath('//body/div[@id="dialog_out_weldeal"]/div[@class="diginfo"]/div[@class="weloutdialog"]/div[@id="ppLogin"]/form[@name="loginform"]/ul/li[@class="reg"]/a')
        a_obj.click()
        time.sleep(2)  # wait for the navigation triggered by the click
        url = self.driver.current_url
    except:
        return None
    return url
def code_time():
    """Run the bot's missed-chat update loop once per second until 13:00.

    NOTE(review): ``time.sleep(1.0)`` and the call ``time(13, 00)`` cannot
    both refer to the same name — presumably the module imports
    ``datetime.time`` alongside the ``time`` module under a different
    binding; verify the imports at the top of the file.
    """
    bot = CodeTimeBot()
    is_code_time = True
    while is_code_time:
        bot.update_missed_chats()
        time.sleep(1.0)  # one polling tick per second
        now = datetime.now()
        if now.time() > time(13, 00):
            is_code_time = False
def conn(self):
    """Create the Redis client handle, retrying every 10 s until it succeeds.

    Stores the client on ``self.redis_conn``; never raises — any failure
    is printed and retried forever.
    """
    conn_retry = 0
    while True:
        try:
            self.redis_conn = redis.StrictRedis(host=self.host, port=self.port, db=self.db)
            break
        except:
            conn_retry += 1
            print "Connection to Redis %s:%s failed for %sth times, retry in 10 seconds" % (
                self.host, self.port, conn_retry
            )
            time.sleep(10)
def back_and_wait(browser):
    """Navigate the browser back, then pause a random 1-4 s to mimic a human."""
    browser.back()
    pause = random.uniform(1, 4)
    print_log("Wait time: " + str(pause) + " seconds")
    time.sleep(pause)
def click_and_wait(htmlObj, browser):
    """Hover-click *htmlObj*, then pause a random 1-4 s to mimic a human."""
    hover_and_click(htmlObj, browser)
    pause = random.uniform(1, 4)
    print_log("Wait time: " + str(pause) + " seconds")
    time.sleep(pause)
def act_pin(server):
    """Handle a 'pin' command received from *server* (Python 2).

    Reads one message; if it does not start with "pin", returns 0.
    Otherwise extracts the two-character pin id and echoes "pin <nb>"
    to the server forever, printing each reply (never returns).
    """
    data = server.recv(1024)
    if data[:3] != "pin":
        return 0
    nb = data[4:6]  # two-character pin identifier after "pin "
    print "Test cmd pin sur le client:"+nb
    while 1:
        server.send("pin "+nb+"\n")
        data = server.recv(1024)
        print data[:-1]  # strip the trailing newline
        time.sleep(0.01)
def login(browser, emailAddress, password):
    """Log in through the main page; return 1 on success, 0 on failure."""
    page = MainPage(browser)
    page.click_login_link()
    time.sleep(2)  # wait for the login form to appear
    page.set_login_infor(emailAddress, password)
    result = page.click_login_button()
    time.sleep(2)  # wait for the post-login redirect
    return 1 if result else 0
def run(self):
    """Replay change events from Redis into the work queue, day by day.

    For each position between the saved current position and the day's
    stop marker ("s.<date>"), look up the event "v.<date>.<position>" on
    every configured Redis instance (retrying up to 10 times), and enqueue
    it when its table/command pair is in the configured filter lists.
    When a day is exhausted and the next day's marker exists, advance.
    """
    #if self.redis_conn[0].exists("s.%s" % (redis_latest_date)) == False:#date on 248 too fast
    #    redis_latest_date = (datetime.strptime(redis_latest_date,"%Y%m%d") + timedelta(days = -1)).strftime("%Y%m%d")
    #redis_latest_position = self.redis_conn[0].get( "s.%s" % (redis_latest_date) )
    #run from current to latest
    while True:
        start = self.redis_current_position
        #print "s.%s" % (self.redis_current_date)
        # "s.<date>" holds the last event position written for that day
        stop = int( self.redis_conn[0].get("s.%s" % (self.redis_current_date)) )
        print "run at date %s from %s to %s" % (self.redis_current_date, start, stop)
        for i in range(start, stop + 1):
            pass  # leftover no-op from an earlier edit
            got_it = False
            try_count = 0
            while got_it == False:
                # the event may live on any of the Redis instances
                for redis_instance_number in range(len(self.redis_conn)):
                    if self.redis_conn[redis_instance_number].exists( "v.%s.%s" % (self.redis_current_date, i) ):
                        got_it = True
                        json_string = self.redis_conn[redis_instance_number].get( "v.%s.%s" % (self.redis_current_date, i) )
                        json_result = json.loads(json_string)
                        table_name = str(json_result['table'])
                        # forward only events for configured tables/commands
                        if table_name in self.table_list:
                            table_index = self.table_list.index(table_name)
                            command = str(json_result['command'])
                            if command in self.table_operation_list[table_index]:
                                self.queue.put(json_result)
                                #if command in ['insert','update']:
                                #    print 'new row for table: %s command %s' % (table_name, command)
                                #    for i in range(len(json_result['newrow'])):
                                #        print i, json_result['newrow'][str(i)]
                                #if command in ['update', 'delete']:
                                #    print 'old row for table: %s command %s' % (table_name, command)
                                #    for i in range(len(json_result['oldrow'])):
                                #        print i, json_result['oldrow'][str(i)]
                if got_it == False:
                    print "NOT_FOUND, try again v.%s.%s" % (self.redis_current_date, i)
                    try_count += 1
                    time.sleep(0.1)
                    if try_count > 10:
                        # NOTE(review): this string is built and discarded —
                        # a `print` statement appears to be missing here.
                        "Failed to find v.%s.%s" % (self.redis_current_date, i)
                        break
        self.redis_current_position = stop + 1
        #record current position
        self.record_position()
        #test loop condition
        #if self.redis_current_date < redis_latest_date:
        if start >= stop:
            next_day = (datetime.strptime(self.redis_current_date,"%Y%m%d") + timedelta(days = 1)).strftime("%Y%m%d")
            # advance to the next day only once its marker exists
            if self.redis_conn[0].exists("s.%s" % (next_day)) == True:
                print "goto next day because start = %s and stop = %s" % (start, stop)
                self.redis_current_date = (datetime.strptime(self.redis_current_date,"%Y%m%d") + timedelta(days = 1)).strftime("%Y%m%d")
                self.redis_current_position = 1
def playGamble(browser):
    """Pick a random gamble option, submit it, wait for the result, and continue."""
    choice = randint(1, 9)
    option = exists_by_id(browser, 'Gamble' + str(choice), ignoreNone=False, waitToFind=True)
    hover_and_click(option, browser)
    submit_button = exists_by_id(browser, 'SubmitBtn', ignoreNone=False, waitToFind=True)
    hover_and_click(submit_button, browser)
    time.sleep(7)  # wait for the gamble result animation
    continue_button = exists_by_id(browser, 'Continue', ignoreNone=False, waitToFind=True)
    hover_and_click(continue_button, browser)
def select_meal_to_add_food(self, mealType):
    '''Select the meal type to add food: breakfast, lunch, dinner...'''
    MEAL_INDEX = {"breakfast": 1, "lunch": 2, "dinner": 3, "snack": 4, "exercise": 5}
    container = self.browser.find_element(*self.ADD_MEAL_DIV_LOC)
    container.find_element(*self.DROPDOWN_ICON_LOC).click()
    time.sleep(1)  # let the drop-down open
    idx = str(MEAL_INDEX[mealType])
    container.find_element_by_xpath('.//li[@data-mealtime="' + idx + '"]').click()
    time.sleep(2)  # wait for the selection to take effect
def exists_by_css_selector(parentObj, css_selector, ignoreNone = False, waitToFind = False, triesNum = 0):
    """Find a child of *parentObj* by CSS selector, with optional retry.

    When the lookup fails and waitToFind is set, retries up to 10 times at
    1 s intervals.  Once retries are exhausted: returns None if ignoreNone
    is truthy, otherwise terminates the process via sys.exit().
    """
    try:
        return parentObj.find_element_by_css_selector(css_selector)
    except:
        print_log("Element by CSS selector = '" + css_selector + "' not found.")
        # Single retry path; the original duplicated this check in both the
        # ignoreNone and non-ignoreNone branches.
        if waitToFind and triesNum < 10:
            time.sleep(1)
            return exists_by_css_selector(parentObj, css_selector, ignoreNone, waitToFind, triesNum + 1)
        if ignoreNone:
            return None
        sys.exit()
def backup_data(self):
    """Back up the schedule and statistics every 10 minutes, updating the UI labels.

    The original implementation tail-recursed into itself after every
    sleep, growing the Python call stack until RecursionError after about
    a thousand cycles; this version loops instead, with identical
    per-cycle behavior.
    """
    while True:
        self.get_time_table()
        now = datetime.datetime.now()
        print("Updating schedule:" + str(now))
        self.__schedule_backup.create_schedule(strings.f_backup_schedule, now)
        self.__schedule_backup.update_schedule(
            SaveStuff.read(strings.f_config)['room'], strings.f_backup_schedule)
        if self.__schedule_backup.check_connection(
                SaveStuff.read(strings.f_config)['room']) is not False:
            self.backup_time_lbl.setText("Last successful back up made at: " +
                                         now.strftime("%d %b") + " " +
                                         str(now.hour) + ":" + str(now.minute))
            self.connection_status_label.setText("Connection status: online")
        else:
            self.connection_status_label.setText("Connection status: offline")
        self.__stat_screen.write_interactions_into_json()
        self.__stat_screen.write_passing_into_json()
        # tries to back up from api every 10 minutes and displays the last
        # successful "pull" and status
        time.sleep(600)
def exists_by_css_selector(parentObj, css_selector, ignoreNone=False, waitToFind=False, triesNum=0):
    """Find a child of *parentObj* by CSS selector, with optional retry.

    When the lookup fails and waitToFind is set, retries up to 10 times at
    1 s intervals.  Once retries are exhausted: returns None if ignoreNone
    is truthy, otherwise terminates the process via sys.exit().
    """
    try:
        return parentObj.find_element_by_css_selector(css_selector)
    except:
        print_log("Element by CSS selector = '" + css_selector + "' not found.")
        # Single retry path; the original duplicated this check in both the
        # ignoreNone and non-ignoreNone branches.
        if waitToFind and triesNum < 10:
            time.sleep(1)
            return exists_by_css_selector(parentObj, css_selector, ignoreNone, waitToFind, triesNum + 1)
        if ignoreNone:
            return None
        sys.exit()
def create_distance_matrix( request ):
    """Build the n×n driving-duration matrix between all patient addresses.

    Queries the Google Distance Matrix API for every ordered address pair,
    pickles the resulting matrix to 'general_matrix' and redirects to the
    patient list.  NOTE(review): n² requests at 5 s apiece — this is very
    slow for more than a handful of patients; addresses are interpolated
    into the URL without percent-encoding (spaces/accents may break the
    request) — confirm against the API's requirements.
    """
    # origins is a list of strings 'adresse + code postal + ville'
    patients = Patient.objects.all()
    all_addresses = []
    for patient in patients:
        all_addresses.append("{} {} {}".format(patient.address, patient.postcode, patient.city))
    print(all_addresses)
    distance_matrix = []
    n = len(all_addresses)
    for i in range(n):
        row = []
        for j in range(n):
            time.sleep(5)  # not exceed the API query limit
            address1 = all_addresses[i]
            address2 = all_addresses[j]
            url = 'http://maps.googleapis.com/maps/api/distancematrix/json?' \
                  'origins={0}&' \
                  'destinations={1}&' \
                  'mode=driving&' \
                  'language=en-EN&' \
                  'sensor=false'.format(address1, address2)
            resp = requests.get(url)
            resp_json = resp.json()
            pprint.pprint(resp_json)
            # flatten the API's rows/elements into a single duration value list
            for element in resp_json["rows"]:
                for value in element['elements']:
                    row.append(value["duration"]['value'])
        distance_matrix.append(row)
    with open('general_matrix', 'wb') as matrix:
        pickler_matrix = pickle.Pickler(matrix)
        pickler_matrix.dump(distance_matrix)
    return redirect('/patient/list/?matrix_generated=true')
def download_reports(self):
    """Download the policy report for ``self.client_id`` as JSON.

    200: write the payload to files/downloadReports<id>.json.
    202 (still generating): retry via recursion up to 20 times, 30 s apart.
    Other statuses after retries are exhausted terminate the process.
    Returns {"status": <http status>, "filename": <output path>}.
    """
    client_id = self.client_id
    base_url = self.base_url
    un = self.un
    up = self.up
    OutFileName = "files/downloadReports{}.json".format(client_id)
    print("download reports with c_Id", client_id)
    header = {"content-type": "application/json"}
    # NOTE(review): the embedded spaces in this query string
    # ("download_report/ ?", "report_sections =policy") look accidental —
    # confirm against the API before changing, since they are runtime data.
    url = base_url + "download_report/ ?client_id={}&download_format=json&report_sections =policy".format(
        client_id)
    response = requests.get(url, auth=HTTPBasicAuth(un, up), headers=header, verify=True)
    print("DR status = ", response.status_code)
    if response.status_code == 200:
        writeLog("download report successfully downloaded as {}".format(
            OutFileName))
        with open(OutFileName, "wb") as code:
            code.write(response.content)
            code.close()  # redundant: the with-block already closes the file
        print("download report successfully done", response.status_code,
              response.content)
    elif response.status_code == 202:
        if self.retryCounter < 20 and response.status_code == 202:
            self.retryCounter = self.retryCounter + 1
            print("retry in 30 sec", self.retryCounter)
            writeLog("download report retry in 30 sec for the {}".format(
                self.retryCounter))
            time.sleep(30)
            # NOTE(review): recursive retry invoked via the class; its return
            # value is discarded, so DR_Response below still reports 202.
            polwizApi.download_reports(self)
        else:
            print("download OCR data failled with status", response.status_code,
                  response.content, un, up)
            writeLog("download report failed with status {}".format(
                response.status_code))
            exit()
    DR_Response = {"status": response.status_code, "filename": OutFileName}
    return DR_Response
async def otz(message: types.Message, state: FSMContext):
    """Scan the library worksheet and ask recent readers to leave a review.

    For every book entry whose reader id matches this chat and whose
    reservation field equals "не забронировано", send a review prompt
    with the review keyboard, pausing 10 s between prompts.
    """
    import asyncio  # local import: the pause must not block the event loop
    print("-------")
    for sd in worksheet_poisk:
        print(worksheet_poisk)
        nazvsnie_biblioteki = sd
        spisok_knig = worksheet_poisk[nazvsnie_biblioteki]
        for nekniga in spisok_knig:
            print("---------", nekniga)
            cv = nekniga['книга']
            print("-----------------", cv)
            aid = nekniga['айди']
            print(aid)
            user_id = message.chat.id
            bron = nekniga['бронь']
            ne_bron = "не забронировано"
            print(aid, bron)
            # Matching reader with no active reservation: ask for a review.
            if aid == user_id and bron == ne_bron:
                print(aid)
                await message.answer(
                    f"вы недавно прочитали книгу '{cv}', не хотите ли оставить отзыв?",
                    reply_markup=kb.keyboard_net_and_otz)
                # The original used time.sleep(10), which froze the whole
                # bot's event loop; awaiting asyncio.sleep keeps it responsive.
                await asyncio.sleep(10)
def get_tweets():
    """Search Twitter for each configured query and append results to a CSV.

    Creates the CSV with a header on first use; on any API failure,
    assumes rate limiting (Twitter error 88) and sleeps 15 minutes.
    """
    for query in queries:
        print("Gathering tweets for " + query)
        # append if the file already exists, otherwise start a new one
        append_write = 'a' if os.path.exists(filename) else 'w'
        try:
            tweets = api.search(q=query, tweet_mode='extended')
            with open(filename, append_write, newline='', encoding='utf-8') as c:
                writer = csv.writer(c)
                if append_write == 'w':  # new file
                    writer.writerow(["text", "id"])  # append column title
                count = 0  # number of tweets found for this query
                for tweet in tweets:
                    # ensure string is on one line, for proper compilation
                    tweetText = tweet.full_text.replace('\n', ' ').replace('\r', '')
                    writer.writerow([tweetText, tweet.id])
                    # Parentheses around the conditional are the fix: without
                    # them the ternary applied to the WHOLE concatenation, so
                    # a tweet without a place printed only "Undefined place".
                    print("feature: " + query + ", " + tweet.full_text + " | " +
                          (tweet.place.name if tweet.place else "Undefined place \n"))
                    count = count + 1
            print("Found " + str(count) + " tweets containing /'" + query + "'/")
        except:
            # Assumed rate limit (Twitter error code 88) — wait 15 minutes.
            print("Too many requests, waiting 15 minutes..")
            time.sleep(60 * 15)  # wait 15 minutes
def get_content(url, data=None):
    """Fetch *url* with a browser-like header and return the response text.

    Retries forever on transient network errors, sleeping a random
    interval whose range grows with the severity of the failure.
    `data` is accepted for signature compatibility but unused.
    """
    header = {
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
        'Accept-Encoding': 'gzip, deflate, sdch',
        'Accept-Language': 'zh-CN,zh;q=0.8',
        'Connection': 'keep-alive',
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.235'
    }
    # randomized timeout makes the scraper look less mechanical
    timeout = random.choice(range(80, 180))
    while True:
        try:
            rep = requests.get(url, headers=header, timeout=timeout)
            rep.encoding = 'utf-8'
            break
        # Escalating random backoff per failure class:
        except socket.timeout as e:
            print('3:', e)
            time.sleep(random.choice(range(8, 15)))
        except socket.error as e:
            print('4:', e)
            time.sleep(random.choice(range(20, 60)))
        except http.client.BadStatusLine as e:
            print('5:', e)
            time.sleep(random.choice(range(30, 80)))
        except http.client.IncompleteRead as e:
            print('6:', e)
            time.sleep(random.choice(range(5, 15)))
    return rep.text
def uploadOnVk(jpg_path, token, uid):
    """Upload a JPEG to the user's VK wall.

    Returns the attachment id string "photo<uid>_<id>", or "null" when the
    upload server or save step does not return the expected structure.
    """
    vk = api.API(token)
    resp = vk.photos.getWallUploadServer(user_id=uid)
    time.sleep(2)  # give VK a moment before using the upload URL
    try:
        link = resp.get('response').get('upload_url')
    except AttributeError:
        # unexpected response shape: no upload server available
        return "null"
    # `with` closes the handle deterministically — the original opened the
    # file inside the multipart dict and leaked it.
    with open(jpg_path, 'rb') as photo_file:
        response = requests.post(link, files={'file': photo_file})
    obj = json.loads(response.text)
    resp1 = vk.photos.saveWallPhoto(photo=obj["photo"],
                                    server=obj["server"],
                                    hash=obj["hash"])
    try:
        entity = "photo" + str(uid) + "_" + str(resp1.get('response')[0].get('id'))
    except TypeError:
        # save step did not return the expected list payload
        return "null"
    return entity
def aktivieren(self, order):
    """Activate the bot: announce the delivery, ask whether the user is home,
    route the first answer to confirm/deny, then react to messages forever.

    `order` is currently unused — presumably reserved for the delivery
    being announced; confirm with the caller.
    """
    self.gehe_online()
    print("Bot ist aktiviert")
    id = ID  # NOTE(review): shadows the builtin `id`; value comes from module-global ID
    self.sende_nachricht(
        "Das Warten hat ein Ende! Dein Paket wird in den nächsten 30min bei dir sein.", id)
    time.sleep(1)  # brief pause between the two announcement messages
    self.sende_nachricht(
        "Bist du zu Hause, um die Lieferung in Empfang zu nehmen?", id)
    antwort = 0
    # busy-wait until the first reply arrives
    while not antwort:
        nachrichten = self.hole_updates()
        if len(nachrichten) > 0:
            antwort = nachrichten[0]
    if antwort.inhalt in JA:
        self.confirm(antwort)
    else:
        self.deny(antwort)
    nachrichten = self.hole_updates()
    # main reaction loop: poll for updates once per second
    while (True):
        time.sleep(1)
        nachrichten = self.hole_updates()
        for nachricht in nachrichten:
            self.reagiere(nachricht)
    # NOTE(review): unreachable — the loop above never exits.
    print("Bot wird deaktiviert")
def thread_function():
    """Background scheduler thread: register the school-bell notifications
    and run the schedule forever.

    Sends "3 minutes before lesson start" and "5 minutes before lesson
    end" alerts to all subscribers at the fixed times below.
    """
    print('запустился поток (thread). ')
    # По запланированному времени вызываем ф-цию, в которой проверим статус
    # подписчиков и разошлем им уведомления.
    # schedule.every(5).seconds.do(send_alarms_to_all_subscribers,'5 секунд прошло')
    # One (time, message) table replaces the 14 copy-pasted registration lines.
    alarms = [
        ("08:27", "3 минуты до начала 1 урока"),
        ("09:27", "3 минуты до начала 2 урока"),
        ("10:27", "3 минуты до начала 3 урока"),
        ("11:27", "3 минуты до начала 4 урока"),
        ("12:32", "3 минуты до начала 5 урока"),
        ("13:37", "3 минуты до начала 6 урока"),
        ("14:42", "3 минуты до начала 7 урока"),
        ("09:10", "5 минут до конца 1 урока"),
        ("10:10", "5 минут до конца 2 урока"),
        ("11:10", "5 минут до конца 3 урока"),
        ("12:10", "5 минут до конца 4 урока"),
        ("13:15", "5 минут до конца 5 урока"),
        ("14:20", "5 минут до конца 6 урока"),
        ("15:25", "5 минут до конца 7 урока"),
    ]
    for alarm_time, text in alarms:
        schedule.every().day.at(alarm_time).do(send_alarms_to_all_subscribers, text)
    while True:
        schedule.run_pending()
        time.sleep(1)
    # NOTE(review): unreachable (loop never exits); `message` is also
    # undefined in this thread's scope.
    print('конец потока', message.from_user.id)
def main():
    """Download daily temperature data per station for 2010-2018 and write a CSV.

    Retries each day's request on ConnectionError (Wunderground rate
    limiting), waiting `backoff_time` seconds between attempts.
    """
    # Generate a list of all of the dates we want data for
    start_date = "2010-07-30"
    end_date = "2018-07-30"
    start = parser.parse(start_date)
    end = parser.parse(end_date)
    dates = list(rrule.rrule(rrule.DAILY, dtstart=start, until=end))
    # Create a list of stations here to download data for
    stations = ["KCASANFR49"]
    # Set a backoff time in seconds if a request fails
    backoff_time = 10
    data = {}
    # Gather data for each station in turn and save to CSV.
    for station in stations:
        print("Working on {}".format(station))
        data[station] = []
        for date in dates:
            # Print periodic status update messages
            if date.day % 10 == 0:
                print("Working on date: {} for station {}".format(
                    date, station))
            done = False
            while done == False:
                try:
                    weather_data = get_temperature_data(
                        station, date.day, date.month, date.year)
                    done = True
                except ConnectionError:
                    # May get rate limited by Wunderground.com, backoff if so.
                    print("Got connection error on {}".format(date))
                    print("Will retry in {} seconds".format(backoff_time))
                    # Sleep the advertised backoff; the original slept a
                    # hard-coded 10 s regardless of backoff_time.
                    time.sleep(backoff_time)
            # Add each processed date to the overall data
            data[station].append(weather_data)
    # Finally combine all of the individual days and output to CSV for analysis.
    # NOTE(review): the format() call has no placeholder, so every station
    # writes the same file name — confirm the intended output path.
    pd.concat(data[station]).to_csv("temp_data_raw.csv".format(station))
def get_torrent_stats(df_row):
    """Count torrents (and their seed/leech activity) posted before the film's release.

    Searches The Pirate Bay for the row's title+year, parses each result's
    upload date, and adds 'num_torrents' / 'torrent_activity' columns to
    the row.  Returns the row unchanged when the page could not be fetched.
    """
    title = remove_punctuation(df_row.title)
    title = title.replace(' ', '%20')
    url = 'https://thepiratebay.se/search/' + str(title) + '%20' + str(
        df_row.year) + '/0/4/0'
    print(df_row.title)
    soup = get_soup(url)
    time.sleep(0.5)  # be polite between requests
    if soup == 0:
        return df_row
    activity = 0
    num_torrents = 0
    # matches "MM-DD HH:MM" (this year) or "MM-DD YYYY"; hoisted out of the loop
    reg = re.compile('[0-9]+-[0-9]+ [0-9]+\:[0-9]+|[0-9]+-[0-9]+ [0-9]+')
    for row in soup.find_all('tr')[1:]:
        tds = row.find_all('td')
        #activity += int(tds[2].get_text()) + int(tds[3].get_text())
        text = tds[1].get_text().split('\n')[4].replace('\xa0', ' ')
        try:
            date = re.findall(reg, text)[0]
        except:
            date = datetime.today()
        try:
            date = datetime.strptime(date, '%m-%d %Y')
        except:
            try:
                # str() was missing in the original: `date + <int year>`
                # raised TypeError, so this branch could never succeed.
                date = datetime.strptime(date + str(datetime.today().year),
                                         '%m-%d %H:%M%Y')
            except:
                date = datetime.today()
        if date <= df_row.release_date:
            num_torrents += 1
            activity += int(tds[2].get_text()) + int(tds[3].get_text())
    df_row['num_torrents'] = num_torrents
    df_row['torrent_activity'] = activity
    return df_row
def run(self):
    """Worker loop: drain the command queue, then poll for updates (Python 2).

    Runs until ``self.shouldContinue`` goes false.  Each pass processes all
    queued commands (a full-update request interrupts the drain), performs a
    full or periodic update when due, and sleeps 100 ms.
    """
    self.logMethod("starting run loop: debugging: %s" % ("True" if self.threadDebug else "False"))
    try:
        while self.shouldContinue:
            queueHasItems = True
            while self.commandQueue and self.shouldContinue and queueHasItems and not self.fullUpdateNow:
                try:
                    commandDict = self.commandQueue.get_nowait()
                    self.logMethod("processing command: %s" % str(commandDict))
                    self._processCommand(commandDict)
                    self.commandQueue.task_done()
                except:
                    # queue.Empty (or any processing error) ends the drain pass
                    queueHasItems = False
            # poll when the interval elapsed, or immediately on request
            if ((int(time.time()) - self.lastPoll) >= kPollInterval) or self.fullUpdateNow:
                self._update(fullUpdate=self.fullUpdateNow)
                self.lastPoll = int(time.time())
            time.sleep(.1)
    except Exception, e:
        self.logMethod("some exception in the run loop occurred:\n%s" % str(e))
def Guru(path, start, stop, isBleed, isAdult, isColor, isSize):
    """Drive the KDP paperback-details form for workbook rows start..stop.

    Opens Chrome with a saved profile, loads the spreadsheet at *path*,
    fills the details form once per row via firstPage(), and opens a
    fresh tab on the details page for each following book.
    """
    page1 = "https://kdp.amazon.com/en_US/title-setup/paperback/new/details?ref_=kdp_BS_D_cr_ti"
    options = webdriver.ChromeOptions()
    # reuse a logged-in Chrome profile so no KDP login is needed
    options.add_argument(
        "user-data-dir=C:\\Users\\OnceLearner\\ChromeProfiles\\Profile 1")
    driver = webdriver.Chrome(options=options)
    driver.get(page1)
    # (removed unused locals from the original: `wait = WebDriverWait(...)`
    # and `a = start` were never read)
    workbook = openpyxl.load_workbook(path, data_only=True)
    sheet = workbook.active
    while start <= stop:
        time.sleep(2)  # let the form finish loading
        firstPage(start, sheet, driver, isBleed, isAdult, isColor, isSize)
        start += 1
        time.sleep(1)
        # open a fresh details tab for the next book and switch to it
        driver.execute_script("window.open('{}')".format(page1))
        nb_tabs = driver.window_handles
        driver.switch_to.window(driver.window_handles[len(nb_tabs) - 1])
def load3():
    """Route returning one page of posts (infinite-scroll endpoint).

    Reads the offset from query-string parameter ``c`` and returns the
    slice ``postsDB2[c : c + quantity]`` as JSON; an empty JSON object is
    returned once the offset reaches ``posts`` (end of data).
    """
    time.sleep(0.2)  # Used to simulate delay
    # BUG FIX: previously `res` was only assigned inside `if request.args`,
    # so a request without a query string raised UnboundLocalError at the
    # final `return`.  Default the counter to 0 instead.
    counter = int(request.args.get("c", 0))  # offset sent in the query string
    if counter == 0:
        print(f"Returning posts 0 to {quantity}")
        # Slice 0 -> quantity from the db
        res = make_response(jsonify(postsDB2[0: quantity]), 200)
    elif counter == posts:
        print("No more posts")
        res = make_response(jsonify({}), 200)
    else:
        print(f"Returning posts {counter} to {counter + quantity}")
        # Slice counter -> counter + quantity from the db
        res = make_response(jsonify(postsDB2[counter: counter + quantity]), 200)
    return res
def modo_temporizador_especifico():
    """Interactive countdown timer.

    Asks whether the duration is given in minutes (A) or seconds (B),
    reads the value, then counts down one line per second and announces
    completion.  Prompts and output remain in Spanish (user-facing).
    """
    # Typo fix in the prompt: "timepo" -> "tiempo".
    print("Quieres introducir el tiempo en A) Minutos B) Segundos")
    election = input()
    mins = 0
    secs = 0
    if election in ("a", "A"):
        print("Indica el número de minutos que quieres esperar")
        mins = int(input())
    if election in ("b", "B"):
        print("Indica el número de segundos que quieres esperar")
        secs = int(input())
    # Derive the missing unit from the one the user entered.
    if int(mins) == 0:
        mins = int(secs) / 60
    if int(secs) == 0:
        secs = int(mins) * 60
    secs = int(secs)
    # BUG FIX: the original loop ran until secs - a == -1, printing
    # "quedan 0 segundos" and sleeping secs + 1 times — one second too many.
    for elapsed in range(secs):
        print("quedan", secs - elapsed, "segundos")
        time.sleep(1.0)
    print("El temporizador ha terminado")
def find_or_create(cls, session, user: User, key: str, value: str = None):
    # Return the user's preference for `key`, creating it when missing.
    # If two workers race to create the same preference, the commit fails;
    # we then roll back and retry the lookup, expecting the other worker's
    # row to appear.
    try:
        return cls._find(user, key)
    except NoResultFound:
        try:
            new_pref = cls(user, key, value)
            session.add(new_pref)
            session.commit()
            zeeguu_core.log(
                "Created new preference since original was missing")
            return new_pref
        except:
            # Commit failed — presumably an integrity error from a
            # concurrent insert (bare except; TODO confirm and narrow).
            # Retry the lookup up to 10 times, 0.3 s apart.
            for _ in range(10):
                try:
                    session.rollback()
                    pref = cls._find(user, key)
                    zeeguu_core.log(
                        "Successfully avoided race condition. Nice! ")
                    return pref
                except sqlalchemy.orm.exc.NoResultFound:
                    time.sleep(0.3)
                    continue
                # NOTE(review): unreachable — the try returns and the except
                # continues; kept byte-identical to the original.
                break
            # Falls through returning None if all retries fail — callers
            # should be prepared for that (TODO confirm intended).
def getImageUrls(comic_url):
    '''
    Fetch dynamically generated comic image URLs using Selenium + PhantomJS.

    Loads the comic page headlessly, collects the src attribute of each
    image under #mainView > #comicContain, scrolling after each one so the
    lazy-loaded images render, and returns the list of URLs.
    '''
    urls = []
    dcap = dict(DesiredCapabilities.PHANTOMJS)
    # Spoof an old IE user agent for the target site.
    dcap["phantomjs.page.settings.userAgent"] = ("Mozilla/4.0 (compatible; MSIE 5.5; windows NT)")
    browser = webdriver.PhantomJS(executable_path=r"C:\Program Files\phantomjs-2.1.1-windows\bin\phantomjs", desired_capabilities=dcap)
    browser.get(comic_url)
    imgs = browser.find_elements_by_xpath("//div[@id='mainView']/ul[@id='comicContain']//img")
    # NOTE(review): range stops at len(imgs) - 1, excluding the last image —
    # presumably also an ad; confirm against the page layout.
    for i in range(0, len(imgs) - 1):
        if i == 1:
            # skip the advertisement image
            continue
        urls.append(imgs[i].get_attribute("src"))
        # Scroll down one viewport-worth per image so the next one loads.
        js = 'window.scrollTo( 800 ,' + str((i + 1) * 1280) + ')'
        browser.execute_script(js)
        time.sleep(randint(2, 4))  # random delay to look less bot-like
    browser.quit()
    return urls
def newarrested(message):
    """Send every new 'arrested' car listing from the spreadsheet to the chat.

    Reads listings via the Google Sheets service, reports the total count,
    then posts one message per listing (3.1 s apart to respect rate limits).
    Longer rows include model/year fields; shorter rows get a compact message.
    """
    serv = google_auth()
    ssl._create_default_https_context = ssl._create_unverified_context
    listings = read_new_cars(serv, ARRESTED_SPREADSHEET_ID)
    bot.send_message(message.chat.id, 'Всего объявлений: ' + str(len(listings)))
    for listing in listings:
        if len(listing) > 11:
            try:
                # Full message including model and production year.
                full_text = ('Объявление: ' + listing[0] +
                             '\nНазвание: ' + listing[1] +
                             '\nЦена: ' + listing[2] +
                             '\nМодель: ' + listing[11] +
                             '\nВыпущена: ' + listing[12] +
                             '\nСсылка: ' + listing[6])
                bot.send_message(message.chat.id, full_text)
            except:
                # Row too short for the year field — send without it.
                fallback_text = ('Объявление: ' + listing[0] +
                                 '\nНазвание: ' + listing[1] +
                                 '\nЦена: ' + listing[2] +
                                 '\nМодель: ' + listing[11] +
                                 '\nСсылка: ' + listing[6])
                bot.send_message(message.chat.id, fallback_text)
        else:
            short_text = ('Объявление: ' + listing[0] +
                          '\nНазвание: ' + listing[1] +
                          '\nЦена: ' + listing[2] +
                          '\nСсылка: ' + listing[6])
            bot.send_message(message.chat.id, short_text)
        time.sleep(3.1)
    bot.send_message(message.chat.id, 'Это все объявления на данный момент')
def process_waypoint(self, point):
    # Steer the player toward `point` until within 12 units of it,
    # nudging the mouse each tick; raises if the player stops moving.
    move_direction = point.pos - self.player_pos
    self.last_pos = self.player_pos
    # Stuck detector: 50 consecutive ticks (~5 s at 0.1 s/tick) without
    # a position change aborts the waypoint.
    pos_counter = 50
    while move_direction.l2() > 12:
        # Refresh self.player_pos from the game state.
        self.init_local_pos()
        if self.player_pos == self.last_pos:
            pos_counter -= 1
        else:
            pos_counter = 50  # moved — reset the stuck counter
        if pos_counter == 0:
            raise Exception('Stuck!')
        self.last_pos = self.player_pos
        move_direction = point.pos - self.player_pos
        # 120 presumably scales the direction to a mouse offset — TODO confirm.
        mouse_pos = self.get_mouse_rel_pos(move_direction, 120)
        mousemove(mouse_pos)
        time.sleep(0.1)
def restart_all(self):
    """Restarts everything after Save.

    Waits for worker threads to finish, pauses the UI timers, disables the
    processing controls, disconnects from IBKR (blocking until the socket
    is actually closed), then rebuilds the worker from the new settings and
    reconnects.
    """
    self.threadpool.waitForDone()
    self.update_console("UI paused- for restart")
    self.uiTimer.stop()
    self.workerTimer.stop()
    self.update_console("Configuration changed - restarting everything")
    self.chbxProcess.setEnabled(False)
    self.chbxProcess.setChecked(False)
    self.btnSettings.setEnabled(False)
    self.ibkrworker.app.disconnect()
    # Block until IBKR confirms the disconnect before rebuilding the worker.
    while self.ibkrworker.app.isConnected():
        print("waiting for disconnect")
        time.sleep(1)
    self.ibkrworker = None
    self.ibkrworker = IBKRWorker(self.settings)
    self.connect_to_ibkr()
    # NOTE: removed the dead trailing assignment `i = 4` from the original.
def add_new_wheels_in_chunks(connection, wheels, total_wheels):
    """
    Method used to add Wheel Pros Wheels

    :param connection: Shopify connection passed to ShopifyToolsWheels
    :param wheels: wheels added, grouped into sections (list of lists)
    :param total_wheels: total wheels to be added (progress-bar length)
    :return: Nothing
    """
    bar = pyprind.ProgBar(total_wheels, monitor=True, update_interval=.1)
    total_added = 1
    # This is for each of the sections
    sections_done = 1
    for i in range(len(wheels)):
        total_in_section_done = 1
        # BUG FIX: the original used `for j in range(...)` and did `j -= 1`
        # inside the except blocks to retry a failed item — but mutating the
        # loop variable of a `for` has no effect, so failed wheels were
        # silently skipped.  A `while` loop makes the retry real: `j` only
        # advances on success.
        j = 0
        while j < len(wheels[i]):
            try:
                time.sleep(0.25)  # throttle to stay under the API rate limit
                w = wheels[i][j]
                ShopifyToolsWheels.add_new_wheel(connection, w)
                bar_graph_string = "Section: " + str(sections_done)
                bar_graph_string += " - Index: " + str(total_added)
                bar.update(item_id=bar_graph_string)
                total_added += 1
                total_in_section_done += 1
                j += 1  # advance only after a successful add
            except pyactiveresource.connection.Error:
                # Connectivity failure — wait and retry the same wheel.
                print("Internet is out, restarting server in 5 seconds")
                time.sleep(10)
            except TimeoutError:
                total_added -= 1
                print("Timeout error has occured, restarting server in 5 seconds")
                time.sleep(10)
            except HTTPError:
                total_added -= 1
                print("HTTP error has occured, restarting server in 5 seconds")
                time.sleep(10)
        sections_done += 1
    print(bar)
def wake_up_user(user):
    # Fire a wake-up alarm for `user` during their alarm window
    # [start, start + threshold), polling until the door opens.
    # NOTE(review): `door_status`, `client`, `TOPIC_ALARM` and `userid` are
    # module-level globals not visible here — semantics of door_status
    # (True presumably means the door is open) should be confirmed.
    while user.start <= calendar.timegm(
            time.gmtime()) < user.start + user.threshold:
        if door_status:
            time.sleep(10)
            client.publish(TOPIC_ALARM + userid, payload="Wake up!")
            time.sleep(180)  # 3 min to move his lazy ass to the toilet
            break
        time.sleep(5)  # door still closed — poll again in 5 s
def main():
    """Provision spot instances in the cheapest AZ, wait for them to join
    the ECS cluster, deploy the scan containers in batches of 10, watch for
    scan completion, then terminate the spots."""
    cheapest_zone = checkprice()
    # turn up instances in the lowest price zone
    instanceturnup(zone=cheapest_zone)
    requested = int(sys.argv[1])
    # loop checking to see when the spot instances are active
    while spotinstancestatuscheck() != requested:
        spots_up = spotinstancestatuscheck()
        spots_remaining = requested - spots_up
        print('Not all of your instances are up.')
        print(str(spots_up) + ' are up. ' + str(spots_remaining) + ' spots to go.')
        time.sleep(15)
    if spotinstancestatuscheck() == requested:
        # loop that checking your ECS cluster size
        while clustercheck() != requested:
            registered = clustercheck()
            remaining = requested - registered
            print('Not all of your instances are registered with ECS yet.')
            print(str(registered) + ' are attached to the cluster. ' + str(remaining) + ' instaces to go.')
            time.sleep(15)
        print('All instances are now registered with ECS. Deploying containers.')
        # Job requests are capped at 10 at a time, so deploy in batches of
        # ten plus one final partial batch.
        full_batches = requested // 10
        leftover = requested - (full_batches * 10)
        if requested < 11:
            deploycontainers(requested)
        else:
            for _ in range(full_batches):
                deploycontainers(10)
            if leftover > 0:
                deploycontainers(leftover)
        # Watches scan result folder to understand when scans are complete
        dnmapserverwatch()
        # terminates spots once scans are done
        terminatespots()
def resumable_upload(self, request, resource, method):
    """Drive a resumable media upload to completion, retrying transient errors.

    Calls ``request.next_chunk()`` until a response arrives.  Retriable HTTP
    status codes and retriable exceptions are retried up to 3 times with
    exponential backoff; anything else is re-raised.

    :param request: a googleapiclient request supporting ``next_chunk()``.
    :param resource: unused here; kept for interface compatibility.
    :param method: API method name; 'insert' responses must contain an 'id'.
    :return: the completed response dict.
    """
    response = None
    error = None
    retry = 0
    while response is None:
        try:
            print("Uploading file...")
            status, response = request.next_chunk()
            if response is not None:
                # BUG FIX: the original `elif method != 'insert' or 'id' not
                # in response` was the exact negation of the `if`, so the
                # failure `exit()` branch was unreachable and an insert
                # response without an 'id' was wrongly returned as success.
                if method == 'insert':
                    if 'id' in response:
                        return response
                    exit(
                        "The upload failed with an unexpected response: %s"
                        % response)
                else:
                    return response
        except HttpError as e:
            if e.resp.status in RETRIABLE_STATUS_CODES:
                error = "A retriable HTTP error %d occurred:\n%s" % (
                    e.resp.status, e.content)
            else:
                raise
        except RETRIABLE_EXCEPTIONS as e:
            error = "A retriable error occurred: %s" % e
        if error is not None:
            print(error)
            retry += 1
            if retry > 3:
                exit("No longer attempting to retry.")
            max_sleep = 2**retry
            sleep_seconds = random.random() * max_sleep
            print("Sleeping %f seconds and then retrying..." % sleep_seconds)
            time.sleep(sleep_seconds)
            # BUG FIX: reset the flag, otherwise every later successful
            # chunk still counted as a retry and the upload aborted early.
            error = None
def resumable_upload(self, insert_request):
    """Drive a resumable banner upload to completion and return its URL.

    Calls ``insert_request.next_chunk()`` until the final response arrives,
    retrying retriable HTTP errors/exceptions up to 3 times with
    exponential backoff.

    :param insert_request: googleapiclient request supporting ``next_chunk()``.
    :return: the uploaded banner's URL (``response['url']``).
    """
    response = None
    error = None
    retry = 0
    while response is None:
        try:
            print("Uploading file...")
            status, response = insert_request.next_chunk()
            # BUG FIX: next_chunk() returns (status, None) while the upload
            # is still in progress; the original did `'url' in response`
            # unconditionally, raising TypeError on every intermediate chunk.
            if response is not None:
                if 'url' in response:
                    print("Banner was successfully uploaded to '%s'." %
                          (response['url']))
                else:
                    exit("The upload failed with an unexpected response: %s"
                         % response)
        except HttpError as e:
            if e.resp.status in RETRIABLE_STATUS_CODES:
                error = "A retriable HTTP error %d occurred:\n%s" % (
                    e.resp.status, e.content)
            else:
                raise
        except RETRIABLE_EXCEPTIONS as e:
            error = "A retriable error occurred: %s" % e
        if error is not None:
            print(error)
            retry += 1
            if retry > 3:
                exit("No longer attempting to retry.")
            max_sleep = 2**retry
            sleep_seconds = random.random() * max_sleep
            print("Sleeping %f seconds and then retrying..." % sleep_seconds)
            time.sleep(sleep_seconds)
            # BUG FIX: reset so later successful chunks don't keep retrying.
            error = None
    return response['url']
def crawling_goobne():
    """Scrape every Goobne store (name, address, sido, gu) by paging through
    the store-search page with Selenium and parsing each rendered page with
    BeautifulSoup, then print the collected tuples."""
    results = []
    url = 'http://www.goobne.co.kr/store/search_store.jsp'
    # Load the first page.
    wd = webdriver.Chrome('D:\cafe24\chromedriver_win32\chromedriver.exe')
    wd.get(url)
    time.sleep(5)  # wait for the initial page to render
    for page in count(start=1):
        # Run the site's own pagination JavaScript to load page `page`.
        script = 'store.getList(%d)' % page
        wd.execute_script(script)
        print(f'{datetime.now()}:success for request [{url}]')
        time.sleep(3)  # give the JS time to re-render the table
        # Grab the HTML after the JavaScript has rendered the results.
        html = wd.page_source
        # parsing with bs4
        bs = BeautifulSoup(html, 'html.parser')
        tag_tbody = bs.find('tbody', attrs={"id": "store_list"})
        tags_tr = tag_tbody.findAll('tr')
        # detect last page: past the end, the first row carries a class
        # (presumably a "no results" row — TODO confirm)
        if tags_tr[0].get('class') is None:
            break
        for tag_tr in tags_tr:
            strings = list(tag_tr.strings)
            name = strings[1]
            address = strings[6]
            # First two address tokens are the sido/gu administrative units.
            sidogu = address.split()[:2]
            results.append((name, address) + tuple(sidogu))
    wd.quit()
    for t in results:
        print(t)
def say_text(self):
    # Play all pending text items in order.  File-backed items are batched
    # into consecutive vlc MediaLists; non-file items interleave as-is and
    # are spoken via their own say_text().
    instance = vlc.Instance()
    current_media_list = None  # open batch of file-backed media, if any
    media_lists = []  # ordered playlist: vlc.MediaList batches and raw pendings
    player = instance.media_list_player_new()
    media_player = player.get_media_player()
    media_player.audio_set_volume(100)
    for pending in self.pending_text:
        if pending.use_file:
            #TODO: handle file position ?
            # Start a new batch if none is open.
            if current_media_list is None:
                current_media_list = instance.media_list_new()
            data_infos = pending.say_text()
            for data_info in data_infos:
                media = instance.media_new(
                    os.path.join(pending.tts_path, data_info["source"]))
                media.get_mrl()
                current_media_list.add_media(media)
        else:
            # Non-file item: close the open batch first to keep ordering.
            if current_media_list is not None:
                media_lists.append(current_media_list)
                current_media_list = None
            media_lists.append(pending)
    # Flush the final open batch.
    if current_media_list is not None:
        media_lists.append(current_media_list)
        current_media_list = None
    for media in media_lists:
        if type(media) is vlc.MediaList:
            player.set_media_list(media)
            player.next()
            state = player.get_state()
            # Busy-wait until playback finishes; 6 presumably corresponds to
            # vlc.State.Ended — TODO confirm against the vlc bindings.
            while state.value != 6:
                state = player.get_state()
                time.sleep(0.1)
        else:
            media.say_text()
    self.pending_text.clear()
def handle_vcode(self, driver):
    """
    This method handles verification codes.

    Screenshots the page, crops the login form and then the vcode image
    within it, shows it to the operator, reads the code from stdin, and
    archives the cropped image under vcodes/<code>.png.

    :param self:
    :param driver: selenium WebDriver on the login page.
    :return: the vcode string typed by the operator, or None if the
        screenshot could not be taken.
    """
    # Get login_form and its on-screen bounding box.
    loginform = driver.find_element_by_id('login_form')
    formwidth = loginform.size['width']
    formheight = loginform.size['height']
    X = int(loginform.location_once_scrolled_into_view['x'])
    Y = int(loginform.location_once_scrolled_into_view['y'])
    left = X
    top = Y
    right = X + formwidth
    bottom = Y + formheight
    # Remove temp image captures in temp dir
    if os.path.exists('temp\\screenshot.png'):
        os.remove('temp\\screenshot.png')
    if os.path.exists('temp\\screenshot_form.png'):
        os.remove('temp\\screenshot_form.png')
    time.sleep(5)
    # BUG FIX: `vcode` was only assigned on the success path, so a failed
    # screenshot raised UnboundLocalError at the return.  Default it to None.
    vcode = None
    # Time to capture current screenshots
    if driver.get_screenshot_as_file('temp\\screenshot.png'):
        img = Image.open('temp\\screenshot.png')
        imgform = img.crop((left, top, right, bottom))
        imgform.save('temp\\screenshot_form.png')
        # vcode position relative to form (hard-coded crop box)
        imgvcode = imgform.crop((84, 203, 148, 222))
        imgvcode.show()
        vcode = input("vcode is:")
        print(vcode)
        imgvcode.save('vcodes\\' + str(vcode) + '.png')
    else:
        self.logger("Screenshot failed!")
    return vcode
def IdeasAbstractExtractor(query):
    """Search ideas.repec.org for `query` and return the first paper's
    abstract concatenated with the Econpapers abstract; falls back to
    Econpapers alone when Ideas has no usable result.

    :param query: search keywords, already URL-safe.
    :return: abstract text string.
    """
    notRetrievedPage = True
    # NOTE(review): this retries forever on network failure with no cap —
    # consider bounding the retries.
    while notRetrievedPage:
        try:
            browserIdeas.get(
                'https://ideas.repec.org/cgi-bin/htsearch?cmd=Search%21&ul=&q=' + query)
            notRetrievedPage = False
        except:
            time.sleep(10)  # wait before retrying the page load
    try:
        # First search hit -> paper page -> abstract paragraph.
        paperLinkTag = browserIdeas.find_element_by_xpath(
            '//*[@id="content-block"]/dl[1]/dt/a')
        paperLinkTag.click()
        abstractTag = browserIdeas.find_element_by_xpath(
            '//*[@id="abstract-body"]/p')
        return abstractTag.text + " " + abstractExtractor(query)
    except Exception as e:
        # BUG FIX: `except Exception, e` is Python-2-only syntax; `as e`
        # works on Python 2.6+ and Python 3.  print() form is 2/3 compatible.
        print("There is no keyword for this publication on Ideas. The error message is: " + str(
            e) + " I'm going to search Econpapers.repec.org")
        return abstractExtractor(query)