Example no. 1
    def write(self,
              data,
              remote_path,
              mode='w+b',
              overwrite=False,
              log_level=0):

        # Will raise an exception or delete the remote location, depending on what is needed
        self._overwrite_if_needed(remote_path, overwrite, log_level)

        # A temporary file to write data to ..
        with NamedTemporaryFile(mode,
                                suffix='zato-sftp-write.txt') as local_path:

            # .. wrap the file in a separate thread so as not to block the event loop.
            thread_file = FileObjectThread(local_path)
            thread_file.write(data)
            thread_file.flush()  # make sure the data is on disk before the upload below

            try:
                # Data written out, we can now upload it to the remote location
                self.upload(local_path.name, remote_path, False, overwrite,
                            log_level, False)
            finally:
                # Now we can close the file too
                thread_file.close()
Example no. 2
  def new_connection(self, params, for_process):
    """Returns (file): A new configured stream.

    The returned object implements (minimally) `write` and `close`.

    Creates a new LogDog stream with the specified parameters.

    Args:
      params (StreamParams): The parameters to use with the new connection.
      for_process (bool): If this connection will be attached to a standard
        handle on a subprocess.

    Raises:
      ValueError if the stream name has already been used, or if the parameters
      are not valid.
    """
    self._register_new_stream(params.name)
    params_json = params.to_json()

    fobj = self._connect_raw()
    fobj.write(BUTLER_MAGIC)
    varint.write_uvarint(fobj, len(params_json))
    fobj.write(params_json)

    if not for_process:
      fobj = FileObjectThread(fobj)

    return fobj
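The docstring and body above describe the stream handshake: the raw connection receives a magic prefix, a varint-encoded length, and then the JSON-encoded stream parameters, before the file object is optionally wrapped in FileObjectThread. Below is a minimal, self-contained sketch of that framing, assuming the length is a standard base-128 (protobuf-style) varint; the magic value and helper names are placeholders, not the real BUTLER_MAGIC or LogDog API:

import io
import json

def encode_uvarint(value):
    # Standard base-128 varint (protobuf-style); assumed to match what
    # varint.write_uvarint emits in the example above.
    out = bytearray()
    while True:
        byte = value & 0x7f
        value >>= 7
        if value:
            out.append(byte | 0x80)
        else:
            out.append(byte)
            return bytes(out)

def frame_stream_header(magic, params):
    # Hypothetical helper mirroring the header layout written by
    # new_connection(): magic bytes, varint length of the JSON parameters,
    # then the JSON itself.
    params_json = json.dumps(params).encode('utf-8')
    buf = io.BytesIO()
    buf.write(magic)
    buf.write(encode_uvarint(len(params_json)))
    buf.write(params_json)
    return buf.getvalue()

# b'MAGIC' is only a placeholder for the real BUTLER_MAGIC constant.
header = frame_stream_header(b'MAGIC', {'name': 'steps'})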
Example no. 3
def download_image(url, fname, local_path):

    try:
        hdr = {
            'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11',
            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
            'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3',
            'Accept-Encoding': 'none',
            'Accept-Language': 'en-US,en;q=0.8',
            'Connection': 'keep-alive'}
        req = urllib2.Request(url, headers=hdr)
        r = urllib2.urlopen(req, timeout=10)
        ctype = r.info().getheader('Content-Type')

        if r.code == 200:
            img_path = '/'.join(fname.split('/')[:-1])  # remove fname.jpg from path
            img_path = local_path + img_path
            fname = local_path + fname
            if not os.path.exists(img_path):
                print "CREATED DIRECTORY ::: " + img_path
                os.makedirs(img_path.decode('utf-8').encode('ascii', 'ignore'), 0755)
                print "path created"
            # success
            with open(fname, 'wb') as fd:
                f = FileObjectThread(fd, "wb")
                f.write(r.read())
                f.close()
                return True
    except:
        global failed_imgs
        failed_imgs.append((url, fname))
        print "Error: {}".format(url)
        print traceback.format_exc()
        return False
Example no. 4
File: sftp.py Project: danlg/zato
    def write(self,
              data,
              remote_path,
              mode='w+b',
              overwrite=False,
              log_level=0,
              encoding='utf8'):

        # Will raise an exception or delete the remote location, depending on what is needed
        self._overwrite_if_needed(remote_path, overwrite, log_level)

        # Data to be written must always be bytes
        data = data if isinstance(data, bytes) else data.encode(encoding)

        # A temporary file to write data to ..
        with NamedTemporaryFile(mode,
                                suffix='zato-sftp-write.txt') as local_path:

            # .. wrap the file in a separate thread so as not to block the event loop.
            thread_file = FileObjectThread(local_path, mode=mode)
            thread_file.write(data)
            thread_file.flush()

            try:
                # Data written out, we can now upload it to the remote location
                self.upload(local_path.name, remote_path, False, overwrite,
                            log_level, False)

            except Exception:
                logger.warn('Exception in SFTP write method `%s`',
                            format_exc())

            finally:
                # Now we can close the file too
                thread_file.close()
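The write() variant above stages the payload in a temporary file wrapped in FileObjectThread, flushes it, and only then uploads from the temporary file's name. A generic, self-contained sketch of the same staging pattern (the function name and the upload callable are hypothetical; requires gevent):

from tempfile import NamedTemporaryFile

from gevent.fileobject import FileObjectThread

def stage_and_send(data, upload, encoding='utf8'):
    # Mirror of the pattern above: encode to bytes, write them to a named
    # temporary file through FileObjectThread so the event loop is not
    # blocked, flush, then hand the file name to the upload callable while
    # the temporary file still exists.
    data = data if isinstance(data, bytes) else data.encode(encoding)
    with NamedTemporaryFile('w+b') as local_file:
        thread_file = FileObjectThread(local_file, mode='w+b')
        try:
            thread_file.write(data)
            thread_file.flush()
            upload(local_file.name)
        finally:
            thread_file.close()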
Example no. 5
def monitor_tasks(task_log):
    print("SAVING LOG")
    f_raw = open('task_log.json', 'w')
    with FileObjectThread(f_raw, 'w') as handle:
        task_log = _serialise_task_log(task_log)
        json.dump(task_log, handle)

    f_raw.close()
Example no. 6
def save_file(dst, stream, buffer_size=16384):
    from shutil import copyfileobj

    _dst = open(dst, 'wb')
    f = FileObjectThread(_dst, 'wb')
    try:
        copyfileobj(stream, f, buffer_size)  # copy through the FileObjectThread wrapper, not the raw file
    finally:
        f.close()
Example no. 7
    def _default_monitor(self, task_log):
        # Save the state to ensure we can recover in case of a crash
        f_raw = open(self.log_file, 'w')
        with FileObjectThread(f_raw, 'w') as handle:
            dill.dump(self.task_log, handle)

        f_raw.close()

        gevent.sleep(60)
Example no. 8
def file_stream(path, chunk_size=512 * 1024):
    with open(path, 'rb') as src:
        wrapper = FileObjectThread(src, 'rb')

        while True:
            data = wrapper.read(chunk_size)
            if not data:
                return

            yield data
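file_stream() reads a file in fixed-size chunks through FileObjectThread, so each blocking read runs on a worker thread instead of stalling the event loop. A hypothetical consumer built on the same pattern, hashing a file chunk by chunk (requires gevent):

import hashlib

from gevent.fileobject import FileObjectThread

def file_sha256(path, chunk_size=512 * 1024):
    # Same pattern as file_stream(): the blocking reads are handed off to
    # FileObjectThread's worker thread, so other greenlets keep running.
    digest = hashlib.sha256()
    with open(path, 'rb') as src:
        wrapper = FileObjectThread(src, 'rb')
        while True:
            data = wrapper.read(chunk_size)
            if not data:
                break
            digest.update(data)
    return digest.hexdigest()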
Example no. 9
    def _write_snapshot(self, data: bytes):
        logger.info('Writing snapshot')
        f = FileObjectThread(open(self.file_path, 'wb'))

        try:
            f.write(data)
        finally:
            try:
                f.close()
            except:
                pass
Example no. 10
    def readFile(self, filename):
        if filename.endswith("ready") and self.loop():
            with open(filename, "rb") as output_file:
                self.logging.debug("Reading file %s" % filename)
                f = FileObjectThread(output_file)
                while self.loop():
                    try:
                        event = pickle.load(f)
                        self.submit(event, self.pool.queue.outbox)
                    except EOFError:
                        break
            remove(filename)
Example no. 11
    def read(self, remote_path, mode='r+b', log_level=0):

        # Download the file to a temporary location ..
        with NamedTemporaryFile(mode,
                                suffix='zato-sftp-read.txt') as local_path:
            self.download_file(remote_path, local_path.name)

            # .. and read it in using a separate thread so as not to block the event loop.
            thread_file = FileObjectThread(local_path)
            data = thread_file.read()
            thread_file.close()

            return data
Example no. 12
    def limiter(self):
        ''' Kill the script if the user-supplied limit of successful proxy attempts (-s argument) is reached '''
        if self.print_counter >= int(self.show_num):
            gevent.killall

            f_raw = open('proxies2.txt', "wb")
            f = FileObjectThread(f_raw, 'wb')
            for item in r_list:
                if item not in prox_list:
                    prox_list.append(item.encode('utf-8'))
            for prox in prox_list:
                f.write("%s\n" % prox)
            f.close()
            sys.exit()
Example no. 13
    def run(self):
        ''' Gets raw high anonymity (L1) proxy data then calls make_proxy_list()
        Currently parses data from gatherproxy.com and letushide.com '''

        if not self.quiet:
            print '[*] Your accurate external IP: %s' % self.externalip

        letushide_list = self.letushide_req()
        if not self.quiet:
            print '[*] letushide.com: %s proxies' % str(len(letushide_list))

        # Has a login now :(
        gatherproxy_list = self.gatherproxy_req()
        if not self.quiet:
            print '[*] gatherproxy.com: %s proxies' % str(
                len(gatherproxy_list))

        checkerproxy_list = self.checkerproxy_req()
        if not self.quiet:
            print '[*] checkerproxy.net: %s proxies' % str(
                len(checkerproxy_list))

        self.proxy_list.append(letushide_list)
        self.proxy_list.append(gatherproxy_list)
        self.proxy_list.append(checkerproxy_list)

        # Flatten list of lists (1 master list containing 1 list of ips per proxy website)
        self.proxy_list = [
            ips for proxy_site in self.proxy_list for ips in proxy_site
        ]
        self.proxy_list = list(set(self.proxy_list))  # Remove duplicates

        if not self.quiet:
            print '[*] %d unique high anonymity proxies found' % len(
                self.proxy_list)
            print '[*] Testing proxy speeds ...'
            print ''
            print '      Proxy           | CC  |       Domain          | Time/Errors'

        self.proxy_checker()
        f_raw = open('proxies.txt', "wb")
        f = FileObjectThread(f_raw, 'wb')
        for item in r_list:
            if item not in prox_list:
                prox_list.append(item.encode('utf-8'))
        for prox in prox_list:
            f.write("%s\n" % prox)
        f.close()
        sys.exit()
Example no. 14
    def flushDisk(self):

        self.__flush_lock.clear()
        if self.pool.queue.disk.size() > 0:

            i = str(uuid4())
            filename = "%s/%s.%s.writing" % (self.directory, self.name, i)
            self.logging.debug("Flushing %s messages to %s." % (self.pool.queue.disk.size(), filename))

            try:
                with open(filename, "wb") as output_file:
                    f = FileObjectThread(output_file)
                    for event in self.pool.queue.disk.dump():
                        pickle.dump(event, f)
            except Exception:
                os.rename(filename, "%s/%s.%s.failed" % (self.directory, self.name, i))
            else:
                os.rename(filename, "%s/%s.%s.ready" % (self.directory, self.name, i))
        self.__flush_lock.set()
Example no. 15
    def from_snapshot(cls, file_path: str) -> 'Storage':
        logger.info('Restoring from snapshot: file_path=%s', file_path)

        try:
            f = FileObjectThread(open(file_path, 'rb'))

            data = None
            try:
                data = f.read()
            finally:
                try:
                    f.close()
                except:
                    pass

            assert data.startswith(b'dredispy:1:')
            storage = pickle.loads(data[11:])
        except FileNotFoundError:
            logger.info('Snapshot not found, creating empty storage')
            storage = Storage()

        storage.file_path = file_path
        return storage
Example no. 16
def _patch_sys_std(name):
    from gevent.fileobject import FileObjectThread
    orig = getattr(sys, name)
    if not isinstance(orig, FileObjectThread):
        patch_item(sys, name, FileObjectThread(orig))
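The helper above swaps one of the sys standard streams for a FileObjectThread wrapper (patch_item here is presumably gevent.monkey's patch_item), so blocking reads and writes on that stream run on a worker thread. A rough standalone equivalent without gevent.monkey's bookkeeping; the function name is hypothetical:

import sys

from gevent.fileobject import FileObjectThread

def wrap_std_stream(name):
    # Replace e.g. sys.stderr with a thread-backed wrapper unless it is
    # already wrapped; blocking I/O on the stream then runs on a worker thread.
    orig = getattr(sys, name)
    if not isinstance(orig, FileObjectThread):
        setattr(sys, name, FileObjectThread(orig))

wrap_std_stream('stderr')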
Example no. 17
                return False
        return True

    def limiter(self):
        ''' Kill the script if the user-supplied limit of successful proxy attempts (-s argument) is reached '''
        if self.print_counter >= int(self.show_num):
            gevent.killall

            f_raw = open('proxies2.txt', "wb")
            f = FileObjectThread(f_raw, 'wb')
            for item in r_list:
                if item not in prox_list:
                    prox_list.append(item.encode('utf-8'))
            for prox in prox_list:
                f.write("%s\n" % prox)
            f.close()
            sys.exit()


P = find_http_proxy(parse_args())
P.run()
f_raw = open('proxies2.txt', "wb")
f = FileObjectThread(f_raw, 'wb')
for item in r_list:
    if item not in prox_list:
        prox_list.append(item.encode('utf-8'))
for prox in prox_list:
    f.write("%s\n" % prox)
f.close()
sys.exit()
Example no. 18
    def open(cls, path, mode='r+b'):
        fd = super(GeventFileWrapper, cls).open(path, mode)

        from gevent.fileobject import FileObjectThread
        return FileObjectThread(fd, mode)