Example #1
0
def save_file(dst, stream, buffer_size=16384):
    """Copy ``stream`` into the file at ``dst`` without blocking the event loop.

    The destination file is wrapped in a gevent ``FileObjectThread`` so the
    blocking writes run on a native thread pool instead of stalling greenlets.

    :param dst: path of the destination file (opened in binary write mode).
    :param stream: readable file-like object supplying the data.
    :param buffer_size: chunk size, in bytes, used while copying.
    """
    from shutil import copyfileobj

    _dst = open(dst, 'wb')
    f = FileObjectThread(_dst, 'wb')
    try:
        # Write through the threaded wrapper. The original code copied
        # straight into the raw `_dst` object, which bypassed the thread
        # pool and blocked the event loop -- defeating the wrapper's purpose.
        copyfileobj(stream, f, buffer_size)
    finally:
        # Closing the wrapper also closes the underlying file object.
        f.close()
Example #2
0
def file_stream(path, chunk_size=512 * 1024):
    """Yield successive chunks (at most ``chunk_size`` bytes) from ``path``.

    The open file is wrapped in a ``FileObjectThread`` so the blocking
    ``read`` calls run on a thread pool rather than blocking greenlets.
    """
    with open(path, 'rb') as src:
        threaded = FileObjectThread(src, 'rb')
        chunk = threaded.read(chunk_size)
        # An empty read signals end-of-file and ends the generator.
        while chunk:
            yield chunk
            chunk = threaded.read(chunk_size)
Example #3
0
    def read(self, remote_path, mode='r+b', log_level=0):
        """Download ``remote_path`` to a temporary file and return its contents.

        The temporary file is read through a ``FileObjectThread`` so the
        blocking read runs in a separate thread instead of the event loop.
        """
        with NamedTemporaryFile(mode, suffix='zato-sftp-read.txt') as local_path:

            # Fetch the remote file into the temporary location first ..
            self.download_file(remote_path, local_path.name)

            # .. then read it back through the threaded wrapper.
            wrapper = FileObjectThread(local_path)
            data = wrapper.read()
            wrapper.close()

            return data
Example #4
0
    def write(self,
              data,
              remote_path,
              mode='w+b',
              overwrite=False,
              log_level=0,
              encoding='utf8'):
        """Write ``data`` to ``remote_path`` over SFTP.

        ``data`` may be ``bytes`` or ``str``; text is encoded with
        ``encoding`` first. The payload is staged in a local temporary file,
        written through a ``FileObjectThread`` so the event loop is not
        blocked, and then uploaded.

        :param data: payload to write (``bytes`` or ``str``).
        :param remote_path: destination path on the SFTP server.
        :param mode: mode used for the local temporary file.
        :param overwrite: whether an existing remote file may be replaced.
        :param log_level: log verbosity forwarded to helper calls.
        :param encoding: encoding used when ``data`` is ``str``.
        """
        # Will raise an exception or delete the remote location, depending
        # on what is needed.
        self._overwrite_if_needed(remote_path, overwrite, log_level)

        # Data to be written must be always bytes.
        data = data if isinstance(data, bytes) else data.encode(encoding)

        # A temporary file to write data to ..
        with NamedTemporaryFile(mode,
                                suffix='zato-sftp-write.txt') as local_path:

            # .. wrap the file in separate thread so as not to block the event loop.
            thread_file = FileObjectThread(local_path, mode=mode)
            thread_file.write(data)
            # Flush so the bytes are on disk before the upload reads the file.
            thread_file.flush()

            try:
                # Data written out, we can now upload it to the remote location
                self.upload(local_path.name, remote_path, False, overwrite,
                            log_level, False)

            except Exception:
                # `warn` is a deprecated alias of `warning` -- use the
                # supported spelling.
                logger.warning('Exception in SFTP write method `%s`',
                               format_exc())

            finally:
                # Now we can close the file too
                thread_file.close()
    def limiter(self):
        ''' Kill the script if user supplied limit of successful proxy attempts (-s argument) is reached '''
        # Act only once the count of printed (successful) proxies reaches the
        # user-supplied -s limit.
        if self.print_counter >= int(self.show_num):
            # NOTE(review): `gevent.killall` is referenced but never called --
            # this line is a no-op attribute access. Killing greenlets would
            # require gevent.killall(<greenlets>); confirm intent.
            gevent.killall

            # Dump the de-duplicated proxy list to disk through a threaded
            # file object. r_list / prox_list are module-level globals.
            f_raw = open('proxies2.txt', "wb" )
            f = FileObjectThread(f_raw, 'wb')
            for item in r_list:
                if item not in prox_list:
                    prox_list.append(item.encode('utf-8'))
            for prox in prox_list:
                # Writing str to a binary-mode file: valid on Python 2 only.
                f.write("%s\n" % prox)
            f.close()
            sys.exit()
    def _run(self):
        # UML communicates with us through IP over SLIP on stdin.
        # fd 0 is opened unbuffered so reads return raw SLIP bytes without
        # stdio buffering in the way; the gevent FileObject keeps the reads
        # cooperative.
        self.stream = FileObject(open(0, 'rb', buffering=0), bufsize=0)

        # Limitation of requiring stream to be buffered... Please duck-type
        # HACK: deleting SlipStream.__init__ bypasses its constructor (and
        # any checks it performs) so our FileObject can be duck-typed in as
        # the underlying stream.
        del sliplib.SlipStream.__init__
        self.stream = sliplib.SlipStream(self.stream)

        while True:
            msg = self.stream.recv_msg()
            if not msg:
                # Empty message: the peer closed the stream.
                break

            # The IP version lives in the top nibble of the first byte.
            ip_version = msg[0] >> 4
            if ip_version == 4:
                from pypacker.layer3.ip import IP
                packet = IP(msg)
            elif ip_version == 6:
                from pypacker.layer3.ip6 import IP6
                packet = IP6(msg)
                # TODO: IP6 support
                continue
            else:
                # IP packet with unsupported version :(
                continue

            # Dump the raw packet to stderr for debugging, then hand it off.
            print(packet.bin(), file=__import__('sys').stderr, flush=True)
            IPhandler(self.stream, packet).start()
Example #7
0
  def new_connection(self, params, for_process):
    """Returns (file): A new configured stream.

    The returned object implements (minimally) `write` and `close`.

    Creates a new LogDog stream with the specified parameters.

    Args:
      params (StreamParams): The parameters to use with the new connection.
      for_process (bool): If this connection will be attached to a standard
        handle on a subprocess.

    Raises:
      ValueError if the stream name has already been used, or if the parameters
      are not valid.
    """
    # Reserve the stream name first; duplicates are rejected here.
    self._register_new_stream(params.name)
    serialized = params.to_json()

    # Handshake: magic marker, varint-encoded length, then the JSON params.
    conn = self._connect_raw()
    conn.write(BUTLER_MAGIC)
    varint.write_uvarint(conn, len(serialized))
    conn.write(serialized)

    # Subprocess handles must stay raw; otherwise wrap the stream so writes
    # happen off the event loop.
    return conn if for_process else FileObjectThread(conn)
    def handle(self, url):
        # Probe `url` with a GET and append it to 200.txt when it answers 200.

        try:
            url = url.strip()
            # No redirects: a 30x response is reported as-is, not followed.
            response = requests.get(url, timeout=5, allow_redirects=False)

            # Append-mode UTF-8 file, written through a threaded file object.
            fp = codecs.open('200.txt', 'a+', 'utf-8')
            f = FileObjectThread(fp, lock=True)
            if response.status_code == 200:
                print url + '  ---->success'
                f.write(url + '\n')
            else:
                print url, response.status_code
            f.close()

        except Exception, e:
            # NOTE(review): the exception is swallowed -- only the URL is
            # reported and `e` is never used.
            print 'error:', url
def monitor_tasks(task_log):
    """Serialise ``task_log`` and write it to ``task_log.json``.

    The file is written through a gevent ``FileObjectThread`` so the
    blocking JSON dump does not stall the event loop.
    """
    print("SAVING LOG")
    f_raw = open('task_log.json', 'w')
    with FileObjectThread(f_raw, 'w') as handle:
        task_log = _serialise_task_log(task_log)
        json.dump(task_log, handle)
    # No explicit f_raw.close() needed: FileObjectThread closes the wrapped
    # file when the `with` block exits, so the old trailing close() was a
    # redundant double close.
Example #10
0
    def _default_monitor(self, task_log):
        """Snapshot ``self.task_log`` to ``self.log_file``, then sleep.

        The state is saved so a crashed run can be recovered. The blocking
        dump goes through a ``FileObjectThread`` to keep the event loop free.
        """
        # NOTE(review): dill writes bytes, yet the file is opened in text
        # mode 'w' -- on Python 3 this would fail; confirm target version.
        f_raw = open(self.log_file, 'w')
        with FileObjectThread(f_raw, 'w') as handle:
            dill.dump(self.task_log, handle)
        # FileObjectThread closes f_raw when the `with` block exits; the old
        # trailing f_raw.close() was a redundant double close.

        # Pace the monitoring loop.
        gevent.sleep(60)
Example #11
0
    def run(self):
        ''' Gets raw high anonymity (L1) proxy data then calls make_proxy_list()
        Currently parses data from gatherproxy.com and letushide.com '''

        if not self.quiet:
            print '[*] Your accurate external IP: %s' % self.externalip

        # Scrape each proxy source site in turn.
        letushide_list = self.letushide_req()
        if not self.quiet:
            print '[*] letushide.com: %s proxies' % str(len(letushide_list))

         # Has a login now :(
        gatherproxy_list = self.gatherproxy_req()
        if not self.quiet:
            print '[*] gatherproxy.com: %s proxies' % str(len(gatherproxy_list))

        checkerproxy_list = self.checkerproxy_req()
        if not self.quiet:
            print '[*] checkerproxy.net: %s proxies' % str(len(checkerproxy_list))

        self.proxy_list.append(letushide_list)
        self.proxy_list.append(gatherproxy_list)
        self.proxy_list.append(checkerproxy_list)

        # Flatten list of lists (1 master list containing 1 list of ips per proxy website)
        self.proxy_list = [ips for proxy_site in self.proxy_list for ips in proxy_site]
        self.proxy_list = list(set(self.proxy_list)) # Remove duplicates

        if not self.quiet:
            print '[*] %d unique high anonymity proxies found' % len(self.proxy_list)
            print '[*] Testing proxy speeds ...'
            print ''
            print '      Proxy           | CC  |       Domain          | Time/Errors'

        self.proxy_checker()
        # Persist the surviving proxies via a threaded file object.
        # r_list / prox_list are module-level globals filled elsewhere.
        f_raw = open('proxies.txt', "wb" )
        f = FileObjectThread(f_raw, 'wb')
        for item in r_list:
            if item not in prox_list:
                prox_list.append(item.encode('utf-8'))
        for prox in prox_list:
            # Writing str to a binary-mode file: valid on Python 2 only.
            f.write("%s\n" % prox)
        f.close()
        sys.exit()
Example #12
0
 def readFile(self, filename):
     """Replay pickled events from ``filename`` into the outbox queue.

     Only files whose name ends in ``ready`` (produced by flushDisk) are
     processed; the file is deleted once it has been fully replayed.
     """
     if filename.endswith("ready") and self.loop():
         # Pickle data is binary: open in "rb" to match the "wb" mode the
         # spool file was written with (text mode "r" would corrupt/fail).
         with open(filename, "rb") as output_file:
             self.logging.debug("Reading file %s" % filename)
             # Threaded wrapper keeps the blocking reads off the event loop.
             f = FileObjectThread(output_file)
             while self.loop():
                 try:
                     event = pickle.load(f)
                     self.submit(event, self.pool.queue.outbox)
                 except EOFError:
                     # End of the spool file: all events replayed.
                     break
         remove(filename)
def download_image(url, fname, local_path):
    # Fetch `url` and save it under local_path + fname, creating directories
    # as needed. Returns True on success, falls through / returns False
    # otherwise. Python 2 code (urllib2, print statements, 0755 literal).

    try:
        # Browser-like headers so image hosts don't reject the request.
        hdr = {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11',
       'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
       'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3',
       'Accept-Encoding': 'none',
       'Accept-Language': 'en-US,en;q=0.8',
       'Connection': 'keep-alive'}
        req = urllib2.Request(url, headers=hdr)
        r = urllib2.urlopen(req, timeout=10)
        # NOTE(review): ctype is assigned but never used.
        ctype = r.info().getheader('Content-Type')

        if r.code == 200:
            img_path = '/'.join(fname.split('/')[:-1])  # remove fname.jpg from path
            img_path = local_path + img_path
            fname = local_path + fname
            if not os.path.exists(img_path):
                print "CREATED DIRECTORY ::: " + img_path
                os.makedirs(img_path.decode('utf-8').encode('ascii', 'ignore'), 0755);
                print "path created"
            # success
            # Write through a threaded file object so the greenlet is not
            # blocked by disk I/O.
            with open(fname, 'wb') as fd:
                f = FileObjectThread(fd, "wb")
                f.write(r.read())
                f.close()
                return True
    except:
        # NOTE(review): bare except swallows everything (including
        # KeyboardInterrupt); the failure is recorded in the global list
        # and printed, then False is returned.
        global failed_imgs
        failed_imgs.append((url, fname))
        print "Error: {}".format(url)
        print traceback.format_exc()
        return False
Example #14
0
    def write(self,
              data,
              remote_path,
              mode='w+b',
              overwrite=False,
              log_level=0):
        """Write ``data`` to ``remote_path`` over SFTP.

        The payload is staged in a local temporary file, written through a
        ``FileObjectThread`` so the event loop is not blocked, and uploaded.

        :param data: payload to write.
        :param remote_path: destination path on the SFTP server.
        :param mode: mode used for the local temporary file.
        :param overwrite: whether an existing remote file may be replaced.
        :param log_level: log verbosity forwarded to helper calls.
        """
        # Will raise an exception or delete the remote location, depending
        # on what is needed.
        self._overwrite_if_needed(remote_path, overwrite, log_level)

        # A temporary file to write data to ..
        with NamedTemporaryFile(mode,
                                suffix='zato-sftp-write.txt') as local_path:

            # .. wrap the file in separate thread so as not to block the event loop.
            thread_file = FileObjectThread(local_path)
            thread_file.write(data)
            # Flush before uploading -- without it, buffered bytes may not
            # be on disk yet when self.upload reads local_path.name (the
            # sibling implementation of this method flushes here too).
            thread_file.flush()

            try:
                # Data written out, we can now upload it to the remote location
                self.upload(local_path.name, remote_path, False, overwrite,
                            log_level, False)
            finally:
                # Now we can close the file too
                thread_file.close()
Example #15
0
    def from_snapshot(cls, file_path: str) -> 'Storage':
        """Restore a ``Storage`` instance from the snapshot at ``file_path``.

        Falls back to an empty ``Storage`` when no snapshot file exists.
        The restored (or fresh) instance remembers ``file_path`` for later
        snapshot writes.
        """
        logger.info('Restoring from snapshot: file_path=%s', file_path)

        try:
            # Open the snapshot that was actually requested -- the original
            # code hard-coded './tmp/data' and silently ignored file_path.
            f = FileObjectThread(open(file_path, 'rb'))

            data = None
            try:
                data = f.read()
            finally:
                # Best-effort close: never mask the original error.
                try:
                    f.close()
                except:
                    pass

            # Snapshots are versioned with a magic prefix; strip it before
            # unpickling.
            assert data.startswith(b'dredispy:1:')
            storage = pickle.loads(data[11:])
        except FileNotFoundError:
            logger.info('Snapshot not found, creating empty storage')
            storage = Storage()

        storage.file_path = file_path
        return storage
Example #16
0
    def preHook(self):
        """Prepare the formatter, pick the coloring mode and wrap stdout."""
        self.format = Format(
            self.kwargs.selection, self.kwargs.counter, self.kwargs.pid
        )

        # Choose the string builder once, based on the colorize switch;
        # colorama is initialised only when coloring is requested.
        if self.kwargs.colorize:
            init(autoreset=True)
            self.getString = self.__stringColor
        else:
            self.getString = self.__stringNoColor

        # Wrap stdout so writes happen off the event loop.
        self.f = FileObjectThread(sys.stdout)
Example #17
0
    def _write_snapshot(self, data: bytes):
        """Persist ``data`` to ``self.file_path`` through a threaded file."""
        logger.info('Writing snapshot')
        snapshot_file = FileObjectThread(open(self.file_path, 'wb'))
        try:
            snapshot_file.write(data)
        finally:
            # Best-effort close: a close() failure must never mask the
            # original write error.
            try:
                snapshot_file.close()
            except:
                pass
Example #18
0
    def flushDisk(self):
        # Drain the in-memory disk queue to a ".writing" spool file, then
        # rename it to ".ready" on success or ".failed" on error so a reader
        # (readFile) can pick up only complete files.

        # Block other users of the flush lock while a flush is in progress.
        self.__flush_lock.clear()
        if self.pool.queue.disk.size() > 0:

            # Unique per-flush filename: <dir>/<name>.<uuid>.writing
            i = str(uuid4())
            filename = "%s/%s.%s.writing" % (self.directory, self.name, i)
            self.logging.debug("Flusing %s messages to %s." % (self.pool.queue.disk.size(), filename))

            try:
                with open(r"%s/%s.%s.writing" % (self.directory, self.name, i), "wb") as output_file:
                    # NOTE(review): events are pickled through the threaded
                    # wrapper `f`, but the underlying file is closed by the
                    # `with` block without an explicit f.flush()/f.close();
                    # confirm no buffered data can be lost here.
                    f = FileObjectThread(output_file)
                    for event in self.pool.queue.disk.dump():
                        pickle.dump(event, f)
            except Exception as err:
                # `err` is unused -- the failure is signalled via the
                # .failed rename instead of being logged.
                os.rename("%s/%s.%s.writing" % (self.directory, self.name, i), "%s/%s.%s.failed" % (self.directory, self.name, i))
            else:
                os.rename("%s/%s.%s.writing" % (self.directory, self.name, i), "%s/%s.%s.ready" % (self.directory, self.name, i))
        self.__flush_lock.set()
Example #19
0
    def limiter(self):
        ''' Kill the script if user supplied limit of successful proxy attempts (-s argument) is reached '''
        # Act only once the count of printed (successful) proxies reaches the
        # user-supplied -s limit.
        if self.print_counter >= int(self.show_num):
            # NOTE(review): `gevent.killall` is referenced but never called --
            # this line is a no-op attribute access. Killing greenlets would
            # require gevent.killall(<greenlets>); confirm intent.
            gevent.killall

            # Dump the de-duplicated proxy list to disk through a threaded
            # file object. r_list / prox_list are module-level globals.
            f_raw = open('proxies2.txt', "wb")
            f = FileObjectThread(f_raw, 'wb')
            for item in r_list:
                if item not in prox_list:
                    prox_list.append(item.encode('utf-8'))
            for prox in prox_list:
                # Writing str to a binary-mode file: valid on Python 2 only.
                f.write("%s\n" % prox)
            f.close()
            sys.exit()
Example #20
0
    def run(self):
        ''' Gets raw high anonymity (L1) proxy data then calls make_proxy_list()
        Currently parses data from gatherproxy.com and letushide.com '''

        if not self.quiet:
            print '[*] Your accurate external IP: %s' % self.externalip

        # Scrape each proxy source site in turn.
        letushide_list = self.letushide_req()
        if not self.quiet:
            print '[*] letushide.com: %s proxies' % str(len(letushide_list))

        # Has a login now :(
        gatherproxy_list = self.gatherproxy_req()
        if not self.quiet:
            print '[*] gatherproxy.com: %s proxies' % str(
                len(gatherproxy_list))

        checkerproxy_list = self.checkerproxy_req()
        if not self.quiet:
            print '[*] checkerproxy.net: %s proxies' % str(
                len(checkerproxy_list))

        self.proxy_list.append(letushide_list)
        self.proxy_list.append(gatherproxy_list)
        self.proxy_list.append(checkerproxy_list)

        # Flatten list of lists (1 master list containing 1 list of ips per proxy website)
        self.proxy_list = [
            ips for proxy_site in self.proxy_list for ips in proxy_site
        ]
        self.proxy_list = list(set(self.proxy_list))  # Remove duplicates

        if not self.quiet:
            print '[*] %d unique high anonymity proxies found' % len(
                self.proxy_list)
            print '[*] Testing proxy speeds ...'
            print ''
            print '      Proxy           | CC  |       Domain          | Time/Errors'

        self.proxy_checker()
        # Persist the surviving proxies via a threaded file object.
        # r_list / prox_list are module-level globals filled elsewhere.
        f_raw = open('proxies.txt', "wb")
        f = FileObjectThread(f_raw, 'wb')
        for item in r_list:
            if item not in prox_list:
                prox_list.append(item.encode('utf-8'))
        for prox in prox_list:
            # Writing str to a binary-mode file: valid on Python 2 only.
            f.write("%s\n" % prox)
        f.close()
        sys.exit()
Example #21
0
def _patch_sys_std(name):
    """Replace ``sys.<name>`` with a ``FileObjectThread`` wrapper, once.

    If the attribute is already wrapped, nothing is changed.
    """
    from gevent.fileobject import FileObjectThread
    current = getattr(sys, name)
    if isinstance(current, FileObjectThread):
        return
    patch_item(sys, name, FileObjectThread(current))
Example #22
0
                        uniprot_element = self.driver.find_element_by_xpath('//*[@id="_summaries"]/div[3]/ul/li/p').text.strip()
                    except Exception,e:
                        pass
                elif entrez_element == 'empty' and gene_element == 'empty':
                    try:
                        uniprot_element = self.driver.find_element_by_xpath('//*[@id="_summaries"]/div[1]/ul/li/p').text.strip()
                    except Exception, e:
                        pass
                else:
                    try:
                        uniprot_element = self.driver.find_element_by_xpath('//*[@id="_summaries"]/div[2]/ul/li/p').text.strip()
                    except Exception, e:
                        pass

        fp = codecs.open('result1.txt', 'a+', 'utf-8')
        f = FileObjectThread(fp)
        f.write(gene+'##'+entrez_element+'##'+gene_element+'##'+uniprot_element+'##'+new_gene+'\n')
        f.close()


    def get_localization(self, gene, new_gene):

        compartment = 'empty'
        confidence = 'empty'
        goid = 'empty'
        goterm = 'empty'

        try:
            compartment = self.driver.find_element_by_xpath('//*[@id="compartmentsTable"]/tbody/tr[1]/td[1]').text.strip()
            confidence = self.driver.find_element_by_xpath('//*[@id="compartmentsTable"]/tbody/tr[1]/td[2]').text.strip()
        except Exception, e:
Example #23
0
class STDOUT(OutputModule):

    '''
    Prints event data to STDOUT.

    Prints incoming events to STDOUT. When <complete> is True,
    the complete event including headers is printed to STDOUT.

    You can optionally define the colors used.


    Parameters::

        - background_color(str)("RESET")
           |  The background color.
           |  Valid values: BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE, RESET

        - colorize(bool)(False)
           |  When True all STDOUT output is wrapped in between ANSI color
           |  escape sequences defined by `foreground_color`, `background_color`,
           |  `color_style`.

        - color_style(str)("NORMAL")
           |  The coloring style to use
           |  Valid values: DIM, NORMAL, BRIGHT

        - counter(bool)(False)
           |  Puts an incremental number for each event in front
           |  of each event.

        - foreground_color(str)("WHITE")
           |  The foreground color.
           |  Valid values: BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE

        - native_events(bool)(False)
           |  If True, outgoing events are native events.

        - parallel_streams(int)(1)
           |  The number of outgoing parallel data streams.

        - payload(str)(None)
           |  The string to submit.
           |  If defined takes precedence over `selection`.

        - pid(bool)(False)
           |  Includes the pid of the process producing the output.

        - prefix(str)("")*
           |  Puts the prefix in front of each printed event.

        - selection(str)(None)
           |  The event key to submit.
           |  If ``None`` the complete event is selected.


    Queues::

        - inbox
           |  Incoming events.
    '''

    def __init__(self, actor_config,
                 selection=None, payload=None, native_events=False, parallel_streams=1,
                 counter=False, prefix="", pid=False, colorize=False,
                 foreground_color="WHITE", background_color="RESET", color_style="NORMAL"):
        OutputModule.__init__(self, actor_config)

        # NOTE(review): the keyword parameters are not referenced directly in
        # this body -- presumably the framework captures them as self.kwargs
        # (see preHook/consume); confirm against OutputModule.
        self.__validateInput(foreground_color, background_color, color_style)
        self.pool.createQueue("inbox")
        self.registerConsumer(self.consume, "inbox")

        # Matches ANSI escape sequences so they can be stripped from
        # non-colorized output.
        self.ansi_escape = re.compile(r'\x1B\[[0-?]*[ -/]*[@-~]')

    def preHook(self):
        # Build the formatter and choose the string builder once, before
        # events start flowing.
        self.format = Format(
            self.kwargs.selection,
            self.kwargs.counter,
            self.kwargs.pid
        )

        if self.kwargs.colorize:
            init(autoreset=True)
            self.getString = self.__stringColor
        else:
            self.getString = self.__stringNoColor

        # Wrap stdout in a threaded file object so writes happen off the
        # event loop.
        self.f = FileObjectThread(sys.stdout)

    def consume(self, event):
        # Encode the selected part of the event, format it, and write it to
        # the wrapped stdout.
        data = self.encode(
            self.getDataToSubmit(
                event
            )
        )

        output = self.getString(
            getattr(Fore, event.kwargs.foreground_color),
            getattr(Back, event.kwargs.background_color),
            getattr(Style, event.kwargs.color_style),
            event.kwargs.prefix,
            self.format.do(data)
        )
        self.f.write(output)

    # def postHook(self):

    #     self.f.close()

    def __validateInput(self, f, b, s):
        # Reject color/style values colorama does not define.
        if f not in ["BLACK", "RED", "GREEN", "YELLOW", "BLUE", "MAGENTA", "CYAN", "WHITE"]:
            raise Exception("Foreground value is not correct.")
        if b not in ["BLACK", "RED", "GREEN", "YELLOW", "BLUE", "MAGENTA", "CYAN", "WHITE", "RESET"]:
            raise Exception("Background value is not correct.")
        if s not in ["DIM", "NORMAL", "BRIGHT"]:
            raise Exception("Style value is not correct.")

    def __stringColor(self, f, b, s, p, d):
        # Colorized variant: foreground, background, style, prefix, data.
        return "%s%s%s%s%s\n" % (
            f,
            b,
            s,
            p,
            self.format.do(d)
        )

    def __stringNoColor(self, f, b, s, p, d):
        # Plain variant: color arguments are ignored and any ANSI escape
        # sequences already present in the data are stripped.
        d = self.ansi_escape.sub('', str(d))

        return "%s%s\n" % (
            p,
            self.format.do(d)
        )
Example #24
0
                return False
        return True

    def limiter(self):
        ''' Kill the script if user supplied limit of successful proxy attempts (-s argument) is reached '''
        # Act only once the count of printed (successful) proxies reaches the
        # user-supplied -s limit.
        if self.print_counter >= int(self.show_num):
            # NOTE(review): `gevent.killall` is referenced but never called --
            # this line is a no-op attribute access. Killing greenlets would
            # require gevent.killall(<greenlets>); confirm intent.
            gevent.killall

            # Dump the de-duplicated proxy list to disk through a threaded
            # file object. r_list / prox_list are module-level globals.
            f_raw = open('proxies2.txt', "wb")
            f = FileObjectThread(f_raw, 'wb')
            for item in r_list:
                if item not in prox_list:
                    prox_list.append(item.encode('utf-8'))
            for prox in prox_list:
                # Writing str to a binary-mode file: valid on Python 2 only.
                f.write("%s\n" % prox)
            f.close()
            sys.exit()


# Script entry point: build the proxy finder from CLI arguments, run it,
# then dump the de-duplicated proxy list to proxies2.txt and exit.
# r_list / prox_list are module-level globals filled during the run.
P = find_http_proxy(parse_args())
P.run()
f_raw = open('proxies2.txt', "wb")
f = FileObjectThread(f_raw, 'wb')
for item in r_list:
    if item not in prox_list:
        prox_list.append(item.encode('utf-8'))
for prox in prox_list:
    # Writing str to a binary-mode file: valid on Python 2 only.
    f.write("%s\n" % prox)
f.close()
sys.exit()
Example #25
0
                return False
        return True

    def limiter(self):
        ''' Kill the script if user supplied limit of successful proxy attempts (-s argument) is reached '''
        # Act only once the count of printed (successful) proxies reaches the
        # user-supplied -s limit.
        if self.print_counter >= int(self.show_num):
            # NOTE(review): `gevent.killall` is referenced but never called --
            # this line is a no-op attribute access. Killing greenlets would
            # require gevent.killall(<greenlets>); confirm intent.
            gevent.killall

            # Dump the de-duplicated proxy list to disk through a threaded
            # file object. r_list / prox_list are module-level globals.
            f_raw = open('proxies2.txt', "wb" )
            f = FileObjectThread(f_raw, 'wb')
            for item in r_list:
                if item not in prox_list:
                    prox_list.append(item.encode('utf-8'))
            for prox in prox_list:
                # Writing str to a binary-mode file: valid on Python 2 only.
                f.write("%s\n" % prox)
            f.close()
            sys.exit()


# Script entry point: build the proxy finder from CLI arguments, run it,
# then dump the de-duplicated proxy list to proxies2.txt and exit.
# r_list / prox_list are module-level globals filled during the run.
P = find_http_proxy(parse_args())
P.run()
f_raw = open('proxies2.txt', "wb" )
f = FileObjectThread(f_raw, 'wb')
for item in r_list:
    if item not in prox_list:
        prox_list.append(item.encode('utf-8'))
for prox in prox_list:
    # Writing str to a binary-mode file: valid on Python 2 only.
    f.write("%s\n" % prox)
f.close()
sys.exit()
Example #26
0
    def open(cls, path, mode='r+b'):
        """Open ``path`` and return it wrapped in a gevent ``FileObjectThread``."""
        raw = super(GeventFileWrapper, cls).open(path, mode)

        from gevent.fileobject import FileObjectThread
        return FileObjectThread(raw, mode)