def write(self, data, remote_path, mode='w+b', overwrite=False, log_level=0):
    """ Writes ``data`` to ``remote_path`` by staging it in a local temporary
    file that is then uploaded over SFTP.

    Depending on ``overwrite``, the remote location is either deleted first
    or an exception is raised if it already exists.
    """
    # Will raise an exception or delete the remote location, depending on what is needed
    self._overwrite_if_needed(remote_path, overwrite, log_level)

    # A temporary file to write data to ..
    with NamedTemporaryFile(mode, suffix='zato-sftp-write.txt') as local_path:

        # .. wrap the file in a separate thread so as not to block the event loop.
        thread_file = FileObjectThread(local_path)
        thread_file.write(data)

        # Push buffered data out to the temporary file before uploading it,
        # otherwise the remote copy could be truncated or empty.
        thread_file.flush()

        try:
            # Data written out, we can now upload it to the remote location
            self.upload(local_path.name, remote_path, False, overwrite, log_level, False)
        finally:
            # Now we can close the file too
            thread_file.close()
def download_image(url, fname, local_path):
    # Download *url* and store it under local_path + fname, creating any
    # missing directories on the way. Returns True on success, False on
    # error (failures are also recorded in the global ``failed_imgs`` list).
    # NOTE(review): returns None (not False) when the response code is not 200.
    try:
        # Browser-like headers so image hosts do not reject the request.
        hdr = {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11',
               'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
               'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3',
               'Accept-Encoding': 'none',
               'Accept-Language': 'en-US,en;q=0.8',
               'Connection': 'keep-alive'}
        req = urllib2.Request(url, headers=hdr)
        r = urllib2.urlopen(req, timeout=10)
        ctype = r.info().getheader('Content-Type')  # NOTE(review): fetched but never used
        if r.code == 200:
            img_path = '/'.join(fname.split('/')[:-1])  # remove fname.jpg from path
            img_path = local_path + img_path
            fname = local_path + fname
            if not os.path.exists(img_path):
                print "CREATED DIRECTORY ::: " + img_path
                # Strip non-ASCII characters from the directory name before creating it.
                os.makedirs(img_path.decode('utf-8').encode('ascii', 'ignore'), 0755);
                print "path created"
            # success
            with open(fname, 'wb') as fd:
                # Wrap the file in a gevent thread-pool file object so the
                # blocking write does not stall the event loop.
                f = FileObjectThread(fd, "wb")
                f.write(r.read())
                f.close()
            return True
    except:
        # Deliberate best-effort: any failure is recorded and reported,
        # never propagated to the caller.
        global failed_imgs
        failed_imgs.append((url, fname))
        print "Error: {}".format(url)
        print traceback.format_exc()
        return False
def write(self, data, remote_path, mode='w+b', overwrite=False, log_level=0, encoding='utf8'):
    """ Stores ``data`` under ``remote_path`` by staging it in a local
    temporary file that is subsequently uploaded to the SFTP server.
    """
    # Either deletes the remote location up front or raises, depending on the overwrite flag
    self._overwrite_if_needed(remote_path, overwrite, log_level)

    # Only bytes may be written out - encode text input first
    if not isinstance(data, bytes):
        data = data.encode(encoding)

    # Stage the payload in a temporary file ..
    with NamedTemporaryFile(mode, suffix='zato-sftp-write.txt') as tmp_file:

        # .. through a thread-backed file object, so the event loop is never blocked ..
        wrapper = FileObjectThread(tmp_file, mode=mode)
        wrapper.write(data)
        wrapper.flush()

        try:
            # .. and push the staged file out to its remote destination.
            self.upload(tmp_file.name, remote_path, False, overwrite, log_level, False)
        except Exception:
            logger.warn('Exception in SFTP write method `%s`', format_exc())
        finally:
            # The thread-backed wrapper is always released
            wrapper.close()
def _write_snapshot(self, data: bytes) -> None:
    """Persist *data* to ``self.file_path`` through a thread-backed file
    object so the blocking write does not stall the event loop.

    Close errors are swallowed on purpose (best-effort cleanup), but the
    write error itself still propagates to the caller.
    """
    logger.info('Writing snapshot')

    # Open the raw file separately so it cannot leak if wrapping it
    # in FileObjectThread raises before ownership is transferred.
    raw = open(self.file_path, 'wb')
    try:
        f = FileObjectThread(raw)
    except BaseException:
        raw.close()
        raise

    try:
        f.write(data)
    finally:
        try:
            f.close()
        # Was a bare ``except:`` - narrowed so SystemExit/KeyboardInterrupt
        # raised during close are no longer silently trapped.
        except Exception:
            pass
def limiter(self):
    ''' Kill the script if user supplied limit of successful proxy attempts (-s argument) is reached '''
    if self.print_counter >= int(self.show_num):
        # NOTE(review): bare attribute access - gevent.killall is referenced
        # but never called, so this line is a no-op; confirm intent.
        gevent.killall
        # Dump the collected proxies (module-level r_list/prox_list) to disk.
        f_raw = open('proxies2.txt', "wb")
        # Thread-backed file object so writing does not block the event loop.
        f = FileObjectThread(f_raw, 'wb')
        for item in r_list:
            if item not in prox_list:
                prox_list.append(item.encode('utf-8'))
        for prox in prox_list:
            f.write("%s\n" % prox)
        f.close()
        sys.exit()
def limiter(self):
    ''' Kill the script if user supplied limit of successful proxy attempts (-s argument) is reached '''
    if self.print_counter >= int(self.show_num):
        # NOTE(review): bare attribute access - gevent.killall is referenced
        # but never called, so this line is a no-op; confirm intent.
        gevent.killall
        # Dump the collected proxies (module-level r_list/prox_list) to disk.
        f_raw = open('proxies2.txt', "wb")
        # Thread-backed file object so writing does not block the event loop.
        f = FileObjectThread(f_raw, 'wb')
        for item in r_list:
            if item not in prox_list:
                prox_list.append(item.encode('utf-8'))
        for prox in prox_list:
            f.write("%s\n" % prox)
        f.close()
        sys.exit()
def run(self):
    ''' Gets raw high anonymity (L1) proxy data then calls make_proxy_list()
    Currently parses data from gatherproxy.com and letushide.com '''
    if not self.quiet:
        print '[*] Your accurate external IP: %s' % self.externalip
    # Scrape each source site for candidate proxies.
    letushide_list = self.letushide_req()
    if not self.quiet:
        print '[*] letushide.com: %s proxies' % str(len(letushide_list))
    # Has a login now :(
    gatherproxy_list = self.gatherproxy_req()
    if not self.quiet:
        print '[*] gatherproxy.com: %s proxies' % str(len(gatherproxy_list))
    checkerproxy_list = self.checkerproxy_req()
    if not self.quiet:
        print '[*] checkerproxy.net: %s proxies' % str(len(checkerproxy_list))
    self.proxy_list.append(letushide_list)
    self.proxy_list.append(gatherproxy_list)
    self.proxy_list.append(checkerproxy_list)
    # Flatten list of lists (1 master list containing 1 list of ips per proxy website)
    self.proxy_list = [ips for proxy_site in self.proxy_list for ips in proxy_site]
    self.proxy_list = list(set(self.proxy_list))  # Remove duplicates
    if not self.quiet:
        print '[*] %d unique high anonymity proxies found' % len(self.proxy_list)
        print '[*] Testing proxy speeds ...'
        print ''
        print ' Proxy | CC | Domain | Time/Errors'
    self.proxy_checker()
    # Dump working proxies (module-level r_list/prox_list) to proxies.txt,
    # using a thread-backed file object, then terminate the script.
    f_raw = open('proxies.txt', "wb")
    f = FileObjectThread(f_raw, 'wb')
    for item in r_list:
        if item not in prox_list:
            prox_list.append(item.encode('utf-8'))
    for prox in prox_list:
        f.write("%s\n" % prox)
    f.close()
    sys.exit()
def handle(self, url):
    # Probe *url* with a short, non-redirecting GET and append it to
    # 200.txt when it answers with HTTP 200. Errors are only printed.
    try:
        url = url.strip()
        response = requests.get(url, timeout=5, allow_redirects=False)
        fp = codecs.open('200.txt', 'a+', 'utf-8')
        # Thread-backed, locked file object - this method may run on
        # multiple greenlets concurrently.
        f = FileObjectThread(fp, lock=True)
        if response.status_code == 200:
            print url + ' ---->success'
            f.write(url + '\n')
        else:
            print url, response.status_code
        f.close()
        # NOTE(review): if f.write raises, fp is never closed - the
        # except below swallows the error without cleanup.
    except Exception, e:
        print 'error:', url
def run(self):
    ''' Gets raw high anonymity (L1) proxy data then calls make_proxy_list()
    Currently parses data from gatherproxy.com and letushide.com '''
    if not self.quiet:
        print '[*] Your accurate external IP: %s' % self.externalip
    # Scrape each source site for candidate proxies.
    letushide_list = self.letushide_req()
    if not self.quiet:
        print '[*] letushide.com: %s proxies' % str(len(letushide_list))
    # Has a login now :(
    gatherproxy_list = self.gatherproxy_req()
    if not self.quiet:
        print '[*] gatherproxy.com: %s proxies' % str(len(gatherproxy_list))
    checkerproxy_list = self.checkerproxy_req()
    if not self.quiet:
        print '[*] checkerproxy.net: %s proxies' % str(len(checkerproxy_list))
    self.proxy_list.append(letushide_list)
    self.proxy_list.append(gatherproxy_list)
    self.proxy_list.append(checkerproxy_list)
    # Flatten list of lists (1 master list containing 1 list of ips per proxy website)
    self.proxy_list = [ips for proxy_site in self.proxy_list for ips in proxy_site]
    self.proxy_list = list(set(self.proxy_list))  # Remove duplicates
    if not self.quiet:
        print '[*] %d unique high anonymity proxies found' % len(self.proxy_list)
        print '[*] Testing proxy speeds ...'
        print ''
        print ' Proxy | CC | Domain | Time/Errors'
    self.proxy_checker()
    # Dump working proxies (module-level r_list/prox_list) to proxies.txt,
    # using a thread-backed file object, then terminate the script.
    f_raw = open('proxies.txt', "wb")
    f = FileObjectThread(f_raw, 'wb')
    for item in r_list:
        if item not in prox_list:
            prox_list.append(item.encode('utf-8'))
    for prox in prox_list:
        f.write("%s\n" % prox)
    f.close()
    sys.exit()
# NOTE(review): the two returns below are the tail of a method whose
# definition lies above this excerpt.
        return False
        return True

    def limiter(self):
        ''' Kill the script if user supplied limit of successful proxy attempts (-s argument) is reached '''
        if self.print_counter >= int(self.show_num):
            # NOTE(review): bare attribute access - gevent.killall is
            # referenced but never called, so this line is a no-op.
            gevent.killall
            # Dump collected proxies (module-level r_list/prox_list) to disk.
            f_raw = open('proxies2.txt', "wb")
            f = FileObjectThread(f_raw, 'wb')
            for item in r_list:
                if item not in prox_list:
                    prox_list.append(item.encode('utf-8'))
            for prox in prox_list:
                f.write("%s\n" % prox)
            f.close()
            sys.exit()

# Script entry point: run the proxy finder, then write the results out.
P = find_http_proxy(parse_args())
P.run()

# NOTE(review): this block duplicates limiter()'s dump logic - confirm it is
# ever reached if run() terminates the process itself.
f_raw = open('proxies2.txt', "wb")
f = FileObjectThread(f_raw, 'wb')
for item in r_list:
    if item not in prox_list:
        prox_list.append(item.encode('utf-8'))
for prox in prox_list:
    f.write("%s\n" % prox)
f.close()
sys.exit()
# NOTE(review): the two returns below are the tail of a method whose
# definition lies above this excerpt.
        return False
        return True

    def limiter(self):
        ''' Kill the script if user supplied limit of successful proxy attempts (-s argument) is reached '''
        if self.print_counter >= int(self.show_num):
            # NOTE(review): bare attribute access - gevent.killall is
            # referenced but never called, so this line is a no-op.
            gevent.killall
            # Dump collected proxies (module-level r_list/prox_list) to disk.
            f_raw = open('proxies2.txt', "wb")
            f = FileObjectThread(f_raw, 'wb')
            for item in r_list:
                if item not in prox_list:
                    prox_list.append(item.encode('utf-8'))
            for prox in prox_list:
                f.write("%s\n" % prox)
            f.close()
            sys.exit()

# Script entry point: run the proxy finder, then write the results out.
P = find_http_proxy(parse_args())
P.run()

# NOTE(review): this block duplicates limiter()'s dump logic - confirm it is
# ever reached if run() terminates the process itself.
f_raw = open('proxies2.txt', "wb")
f = FileObjectThread(f_raw, 'wb')
for item in r_list:
    if item not in prox_list:
        prox_list.append(item.encode('utf-8'))
for prox in prox_list:
    f.write("%s\n" % prox)
f.close()
sys.exit()
class STDOUT(OutputModule):
    '''
    Prints event data to STDOUT.

    Prints incoming events to STDOUT. When <complete> is True, the complete
    event including headers is printed to STDOUT. You can optionally define
    the colors used.

    Parameters::

        - background_color(str)("RESET")
           |  The background color.
           |  Valid values: BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE, RESET

        - colorize(bool)(False)
           |  When True all STDOUT output is wrapped in between ANSI color
           |  escape sequences defined by `foreground_color`, `background_color`,
           |  `color_style`.

        - color_style(str)("NORMAL")
           |  The coloring style to use
           |  Valid values: DIM, NORMAL, BRIGHT

        - counter(bool)(False)
           |  Puts an incremental number for each event in front
           |  of each event.

        - foreground_color(str)("WHITE")
           |  The foreground color.
           |  Valid values: BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE

        - native_events(bool)(False)
           |  If True, outgoing events are native events.

        - parallel_streams(int)(1)
           |  The number of outgoing parallel data streams.

        - payload(str)(None)
           |  The string to submit.
           |  If defined takes precedence over `selection`.

        - pid(bool)(False)
           |  Includes the pid of the process producing the output.

        - prefix(str)("")*
           |  Puts the prefix in front of each printed event.

        - selection(str)(None)
           |  The event key to submit.
           |  If ``None`` the complete event is selected.

    Queues::

        - inbox
           |  Incoming events.
    '''

    def __init__(self, actor_config, selection=None, payload=None, native_events=False,
                 parallel_streams=1, counter=False, prefix="", pid=False,
                 colorize=False, foreground_color="WHITE", background_color="RESET", color_style="NORMAL"):
        OutputModule.__init__(self, actor_config)
        # NOTE(review): the keyword parameters are not stored here -
        # presumably the base class exposes them via self.kwargs; confirm.
        self.__validateInput(foreground_color, background_color, color_style)
        self.pool.createQueue("inbox")
        self.registerConsumer(self.consume, "inbox")
        # Pattern used to strip ANSI escape sequences from non-colorized output.
        self.ansi_escape = re.compile(r'\x1B\[[0-?]*[ -/]*[@-~]')

    def preHook(self):
        # Formatter applying selection/counter/pid options to each event.
        self.format = Format(
            self.kwargs.selection,
            self.kwargs.counter,
            self.kwargs.pid
        )
        if self.kwargs.colorize:
            # Initialize colorama so ANSI sequences work cross-platform.
            init(autoreset=True)
            self.getString = self.__stringColor
        else:
            self.getString = self.__stringNoColor
        # Thread-backed stdout wrapper so writes do not block the event loop.
        self.f = FileObjectThread(sys.stdout)

    def consume(self, event):
        # Render the event (optionally colorized) and write it to stdout.
        data = self.encode(
            self.getDataToSubmit(
                event
            )
        )
        output = self.getString(
            getattr(Fore, event.kwargs.foreground_color),
            getattr(Back, event.kwargs.background_color),
            getattr(Style, event.kwargs.color_style),
            event.kwargs.prefix,
            self.format.do(data)
        )
        self.f.write(output)

    # def postHook(self):
    #     self.f.close()

    def __validateInput(self, f, b, s):
        # Fail fast on color/style values colorama does not define.
        if f not in ["BLACK", "RED", "GREEN", "YELLOW", "BLUE", "MAGENTA", "CYAN", "WHITE"]:
            raise Exception("Foreground value is not correct.")
        if b not in ["BLACK", "RED", "GREEN", "YELLOW", "BLUE", "MAGENTA", "CYAN", "WHITE", "RESET"]:
            raise Exception("Background value is not correct.")
        if s not in ["DIM", "NORMAL", "BRIGHT"]:
            raise Exception("Style value is not correct.")

    def __stringColor(self, f, b, s, p, d):
        # Wrap the formatted event in foreground/background/style sequences.
        return "%s%s%s%s%s\n" % (
            f,
            b,
            s,
            p,
            self.format.do(d)
        )

    def __stringNoColor(self, f, b, s, p, d):
        # Strip any ANSI sequences already present in the event data.
        d = self.ansi_escape.sub('', str(d))
        return "%s%s\n" % (
            p,
            self.format.do(d)
        )
except Exception,e: pass elif entrez_element == 'empty' and gene_element == 'empty': try: uniprot_element = self.driver.find_element_by_xpath('//*[@id="_summaries"]/div[1]/ul/li/p').text.strip() except Exception, e: pass else: try: uniprot_element = self.driver.find_element_by_xpath('//*[@id="_summaries"]/div[2]/ul/li/p').text.strip() except Exception, e: pass fp = codecs.open('result1.txt', 'a+', 'utf-8') f = FileObjectThread(fp) f.write(gene+'##'+entrez_element+'##'+gene_element+'##'+uniprot_element+'##'+new_gene+'\n') f.close() def get_localization(self, gene, new_gene): compartment = 'empty' confidence = 'empty' goid = 'empty' goterm = 'empty' try: compartment = self.driver.find_element_by_xpath('//*[@id="compartmentsTable"]/tbody/tr[1]/td[1]').text.strip() confidence = self.driver.find_element_by_xpath('//*[@id="compartmentsTable"]/tbody/tr[1]/td[2]').text.strip() except Exception, e: pass