Example #1
    def write(self,
              data,
              remote_path,
              mode='w+b',
              overwrite=False,
              log_level=0):

        # Will raise an exception or delete the remote location, depending on what is needed
        self._overwrite_if_needed(remote_path, overwrite, log_level)

        # A temporary file to write data to ..
        with NamedTemporaryFile(mode,
                                suffix='zato-sftp-write.txt') as local_path:

            # .. wrap the file in a separate thread so as not to block the event loop.
            thread_file = FileObjectThread(local_path)
            thread_file.write(data)

            try:
                # Data written out, we can now upload it to the remote location
                self.upload(local_path.name, remote_path, False, overwrite,
                            log_level, False)
            finally:
                # Now we can close the file too
                thread_file.close()
Example #2
File: sftp.py  Project: danlg/zato
    def write(self,
              data,
              remote_path,
              mode='w+b',
              overwrite=False,
              log_level=0,
              encoding='utf8'):

        # Will raise an exception or delete the remote location, depending on what is needed
        self._overwrite_if_needed(remote_path, overwrite, log_level)

        # Data to be written must always be bytes
        data = data if isinstance(data, bytes) else data.encode(encoding)

        # A temporary file to write data to ..
        with NamedTemporaryFile(mode,
                                suffix='zato-sftp-write.txt') as local_path:

            # .. wrap the file in a separate thread so as not to block the event loop.
            thread_file = FileObjectThread(local_path, mode=mode)
            thread_file.write(data)
            thread_file.flush()

            try:
                # Data written out, we can now upload it to the remote location
                self.upload(local_path.name, remote_path, False, overwrite,
                            log_level, False)

            except Exception:
                logger.warn('Exception in SFTP write method `%s`',
                            format_exc())

            finally:
                # Now we can close the file too
                thread_file.close()
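
None of the snippets on this page include their imports. The sketch below is a minimal, self-contained restatement of the write pattern from Examples #1 and #2, assuming only gevent and the standard library; the function name write_via_threadpool and the utf8 default are illustrative, not zato's API.

from tempfile import NamedTemporaryFile
from gevent.fileobject import FileObjectThread

def write_via_threadpool(data, suffix='zato-sftp-write.txt', encoding='utf8'):
    # Data handed to a binary file must be bytes
    if not isinstance(data, bytes):
        data = data.encode(encoding)

    with NamedTemporaryFile('w+b', suffix=suffix) as local_file:
        # The blocking write runs in gevent's thread pool, so other greenlets keep running
        thread_file = FileObjectThread(local_file)
        try:
            thread_file.write(data)
            thread_file.flush()
        finally:
            # Closing the wrapper also closes (and deletes) the temporary file
            thread_file.close()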
Example #3
def download_image(url, fname, local_path):

    try:
        hdr = {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11',
               'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
               'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3',
               'Accept-Encoding': 'none',
               'Accept-Language': 'en-US,en;q=0.8',
               'Connection': 'keep-alive'}
        req = urllib2.Request(url, headers=hdr)
        r = urllib2.urlopen(req, timeout=10)
        ctype = r.info().getheader('Content-Type')

        if r.code == 200:
            img_path = '/'.join(fname.split('/')[:-1])  # remove fname.jpg from path
            img_path = local_path + img_path
            fname = local_path + fname
            if not os.path.exists(img_path):
                print "CREATED DIRECTORY ::: " + img_path
                os.makedirs(img_path.decode('utf-8').encode('ascii', 'ignore'), 0755)
                print "path created"
            # success
            with open(fname, 'wb') as fd:
                f = FileObjectThread(fd, "wb")
                f.write(r.read())
                f.close()
                return True
    except:
        global failed_imgs
        failed_imgs.append((url, fname))
        print "Error: {}".format(url)
        print traceback.format_exc()
        return False
Example #4
def save_file(dst, stream, buffer_size=16384):
    from shutil import copyfileobj

    _dst = open(dst, 'wb')
    f = FileObjectThread(_dst, 'wb')
    try:
        # Copy through the FileObjectThread wrapper so the blocking writes run in the thread pool
        copyfileobj(stream, f, buffer_size)
    finally:
        f.close()
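
A hypothetical call site for save_file above; the file names are placeholders and any readable binary file-like object works as stream.

with open('upload.bin', 'rb') as stream:
    save_file('/tmp/upload-copy.bin', stream)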
Example #5
    def _write_snapshot(self, data: bytes):
        logger.info('Writing snapshot')
        f = FileObjectThread(open(self.file_path, 'wb'))

        try:
            f.write(data)
        finally:
            try:
                f.close()
            except:
                pass
Example #6
    def read(self, remote_path, mode='r+b', log_level=0):

        # Download the file to a temporary location ..
        with NamedTemporaryFile(mode,
                                suffix='zato-sftp-read.txt') as local_path:
            self.download_file(remote_path, local_path.name)

            # .. and read it in using a separate thread so as not to block the event loop.
            thread_file = FileObjectThread(local_path)
            data = thread_file.read()
            thread_file.close()

            return data
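
What FileObjectThread buys in this read helper is that the event loop is not blocked: the blocking read() runs in gevent's thread pool while other greenlets keep being scheduled. A small self-contained illustration of that, with made-up names that are not part of the example above.

import gevent
from gevent.fileobject import FileObjectThread

def read_in_threadpool(path):
    thread_file = FileObjectThread(open(path, 'rb'))
    try:
        return thread_file.read()
    finally:
        thread_file.close()

# The ticker greenlet keeps running while the read happens in the thread pool
ticker = gevent.spawn(lambda: [gevent.sleep(0.01) for _ in range(10)])
data = read_in_threadpool(__file__)
ticker.join()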
Example #7
    def limiter(self):
        ''' Kill the script if user supplied limit of successful proxy attempts (-s argument) is reached '''
        if self.print_counter >= int(self.show_num):
            gevent.killall  # note: referenced but not called, so this line is a no-op

            f_raw = open('proxies2.txt', "wb" )
            f = FileObjectThread(f_raw, 'wb')
            for item in r_list:
                if item not in prox_list:
                    prox_list.append(item.encode('utf-8'))
            for prox in prox_list:
                f.write("%s\n" % prox)
            f.close()
            sys.exit()
Example #8
    def run(self):
        ''' Gets raw high anonymity (L1) proxy data then calls make_proxy_list()
        Currently parses data from gatherproxy.com and letushide.com '''

        if not self.quiet:
            print '[*] Your accurate external IP: %s' % self.externalip

        letushide_list = self.letushide_req()
        if not self.quiet:
            print '[*] letushide.com: %s proxies' % str(len(letushide_list))

        # Has a login now :(
        gatherproxy_list = self.gatherproxy_req()
        if not self.quiet:
            print '[*] gatherproxy.com: %s proxies' % str(
                len(gatherproxy_list))

        checkerproxy_list = self.checkerproxy_req()
        if not self.quiet:
            print '[*] checkerproxy.net: %s proxies' % str(
                len(checkerproxy_list))

        self.proxy_list.append(letushide_list)
        self.proxy_list.append(gatherproxy_list)
        self.proxy_list.append(checkerproxy_list)

        # Flatten list of lists (1 master list containing 1 list of ips per proxy website)
        self.proxy_list = [
            ips for proxy_site in self.proxy_list for ips in proxy_site
        ]
        self.proxy_list = list(set(self.proxy_list))  # Remove duplicates

        if not self.quiet:
            print '[*] %d unique high anonymity proxies found' % len(
                self.proxy_list)
            print '[*] Testing proxy speeds ...'
            print ''
            print '      Proxy           | CC  |       Domain          | Time/Errors'

        self.proxy_checker()
        f_raw = open('proxies.txt', "wb")
        f = FileObjectThread(f_raw, 'wb')
        for item in r_list:
            if item not in prox_list:
                prox_list.append(item.encode('utf-8'))
        for prox in prox_list:
            f.write("%s\n" % prox)
        f.close()
        sys.exit()
Example #9
    def handle(self, url):

        try:
            url = url.strip()
            response = requests.get(url, timeout=5, allow_redirects=False)

            fp = codecs.open('200.txt', 'a+', 'utf-8')
            f = FileObjectThread(fp, lock=True)
            if response.status_code == 200:
                print url + '  ---->success'
                f.write(url + '\n')
            else:
                print url, response.status_code
            f.close()

        except Exception, e:
            print 'error:', url
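
FileObjectThread also accepts a lock argument (the handle method above passes lock=True), which serializes operations on a wrapper; that matters when a single wrapper is shared by several greenlets. A rough, self-contained sketch of that sharing pattern, with a file name and URLs that are illustrative rather than taken from the example.

import gevent
from gevent.fileobject import FileObjectThread

# One shared wrapper; lock=True serializes the write calls coming from different greenlets
log = FileObjectThread(open('200.txt', 'a'), lock=True)

def record(url):
    log.write(url + '\n')

urls = ['http://example.com/page/%d' % i for i in range(5)]
gevent.joinall([gevent.spawn(record, u) for u in urls])
log.close()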
Example #10
    def from_snapshot(cls, file_path: str) -> 'Storage':
        logger.info('Restoring from snapshot: file_path=%s', file_path)

        try:
            f = FileObjectThread(open(file_path, 'rb'))

            data = None
            try:
                data = f.read()
            finally:
                try:
                    f.close()
                except:
                    pass

            # The snapshot payload is the 11-byte b'dredispy:1:' header followed by a pickled Storage
            assert data.startswith(b'dredispy:1:')
            storage = pickle.loads(data[11:])
        except FileNotFoundError:
            logger.info('Snapshot not found, creating empty storage')
            storage = Storage()

        storage.file_path = file_path
        return storage
Example #11
                return False
        return True

    def limiter(self):
        ''' Kill the script if user supplied limit of successful proxy attempts (-s argument) is reached '''
        if self.print_counter >= int(self.show_num):
            gevent.killall  # note: referenced but not called, so this line is a no-op

            f_raw = open('proxies2.txt', "wb" )
            f = FileObjectThread(f_raw, 'wb')
            for item in r_list:
                if item not in prox_list:
                    prox_list.append(item.encode('utf-8'))
            for prox in prox_list:
                f.write("%s\n" % prox)
            f.close()
            sys.exit()


P = find_http_proxy(parse_args())
P.run()
f_raw = open('proxies2.txt', "wb" )
f = FileObjectThread(f_raw, 'wb')
for item in r_list:
    if item not in prox_list:
        prox_list.append(item.encode('utf-8'))
for prox in prox_list:
    f.write("%s\n" % prox)
f.close()
sys.exit()