Example 1
    def parse_base(self, line):
        '''
        tshark line parser
        '''
        data = line.split()
        if len(data) >= 5:
            proto = data[4]

            # If we don't see the protocol yet in the protos dictionary, we need
            # to initialize it.  After that, we can then increment regardless.
            if proto not in self.protos:
                self.protos[proto] = 0
            self.protos[proto] += 1

            # If the counter timer is set to 0, then this is the first packet
            # we have parsed.  Set the counter to the current time so that we
            # don't send a single packet stat to the API.
            if self.counter == 0:
                self.counter = int(time.time())

            # Once we reach 60 seconds, we need to purge out the protocol counts
            # that we have counted.  Make an API call for each proto we have,
            # then reset the counter timer and the protos dictionary.
            if (int(time.time()) - self.counter) >= 60:
                for proto in self.protos:
                    log.debug('TSHARK: sending %s=%s' % (proto, self.protos[proto]))
                    self.api.stat(proto, self.protos[proto])
                self.counter = int(time.time())
                self.protos = {}
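parse_base above depends on instance state (protos, counter) and an api object exposing stat(name, value), none of which are shown. A minimal stand-alone sketch of the same technique (count the protocol column, flush the totals every 60 seconds) might look like this; the send_stat callable and the tshark invocation are assumptions for illustration:

    import subprocess
    import time

    FLUSH_INTERVAL = 60  # seconds, as in the example above

    def count_protocols(lines, send_stat):
        '''Count the 5th whitespace-separated field of each line and flush the
        per-protocol totals every FLUSH_INTERVAL seconds via send_stat().'''
        protos = {}
        window_start = 0
        for line in lines:
            fields = line.split()
            if len(fields) < 5:
                continue
            proto = fields[4]
            protos[proto] = protos.get(proto, 0) + 1
            if window_start == 0:
                window_start = int(time.time())
            if int(time.time()) - window_start >= FLUSH_INTERVAL:
                for name, count in protos.items():
                    send_stat(name, count)   # stands in for self.api.stat(...)
                window_start = int(time.time())
                protos = {}

    if __name__ == '__main__':
        # -l asks tshark to flush its standard output after every packet.
        proc = subprocess.Popen(['tshark', '-l'], stdout=subprocess.PIPE,
                                universal_newlines=True)
        count_protocols(proc.stdout, lambda k, v: print('%s=%s' % (k, v)))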
Example 2
    def parse_packet(self):
        '''
        This function will parse the needed data from the packet PSML XML
        definition and send the data to the API.
        '''
        # If the counter timer is set to 0, then this is the first packet
        # we have parsed.  Set the counter to the current time so that we
        # don't send a single packet stat to the API.
        if self.counter == 0:
            self.counter = int(time.time())

        # Next we instantiate a BeautifulSoup object to parse the packet and
        # pull out the protocol name.
        packet = soup(self.packet)
        proto = packet.findAll('section')[4].text

        # If we don't see the protocol yet in the protos dictionary, we need
        # to initialize it.  After that, we can then increment regardless.
        if proto not in self.protos:
            self.protos[proto] = 0
        self.protos[proto] += 1

        # Once we reach 60 seconds, we need to purge out the protocol counts
        # that we have counted.  Make an API call for each proto we have,
        # then reset the counter timer and the protos dictionary.
        if (int(time.time()) - self.counter) >= 60:
            for proto in self.protos:
                log.debug('TSHARK: sending %s=%s' %
                          (proto, self.protos[proto]))
                self.api.stat(proto, self.protos[proto])
            self.counter = int(time.time())
            self.protos = {}
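Here soup presumably wraps BeautifulSoup, and index 4 corresponds to the Protocol column of tshark's default PSML summary (No., Time, Source, Destination, Protocol, Length, Info). A self-contained sketch with a hand-written packet element shows the same extraction:

    from bs4 import BeautifulSoup

    # Hand-written PSML <packet> element in the default column order.
    PSML_PACKET = (
        '<packet>'
        '<section>1</section><section>0.000000</section>'
        '<section>10.0.0.1</section><section>10.0.0.2</section>'
        '<section>TCP</section><section>60</section>'
        '<section>55432 &gt; 80 [SYN]</section>'
        '</packet>'
    )

    packet = BeautifulSoup(PSML_PACKET, 'html.parser')
    print(packet.findAll('section')[4].text)   # -> TCP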
Example 3
    def parse_packet(self):
        '''
        This function will parse the needed data from the packet PSML XML
        definition and send the data to the API.
        '''
        # If the counter timer is set to 0, then this is the first packet
        # we have parsed.  Set the counter to the current time so that we
        # don't send a single packet stat to the API.
        if self.counter == 0:
            self.counter = int(time.time())

        # Next we instantiate a BeautifulSoup object to parse the packet and
        # pull out the protocol name.
        packet = soup(self.packet)
        proto = packet.findAll('section')[4].text


        # If we don't see the protocol yet in the protos dictionary, we need
        # to initialize it.  After that, we can then increment regardless.
        if proto not in self.protos:
            self.protos[proto] = 0
        self.protos[proto] += 1

        # Once we reach 60 seconds, we need to purge out the protocol counts
        # that we have counted.  Make an API call for each proto we have,
        # then reset the counter timer and the protos dictionary.
        if (int(time.time()) - self.counter) >= 60:
            for proto in self.protos:
                log.debug('TSHARK: sending %s=%s' % (proto, self.protos[proto]))
                self.api.stat(proto, self.protos[proto])
            self.counter = int(time.time())
            self.protos = {}
Example 4
        def when_available(fut):
            try:
                conn = fut.result()
            except (psycopg2.Error, psycopg2.OperationalError) as error:
                future.set_exc_info(sys.exc_info())
                if retry and not keep:
                    self.putconn(retry[0])
                return

            log.debug("Obtained connection: %s", conn.fileno)
            try:
                future_or_result = method(conn, *args, **kwargs)
            except (psycopg2.Error, psycopg2.OperationalError) as error:
                log.debug("Method failed synchronously")
                return self._retry(retry, when_available, conn, keep, future)

            if not async:
                if not keep:
                    self.putconn(conn)
                future.set_result(future_or_result)
                return

            def when_done(rfut):
                try:
                    result = rfut.result()
                except (psycopg2.Error, psycopg2.OperationalError) as error:
                    log.debug("Method failed Asynchronously")
                    return self._retry(retry, when_available, conn, keep, future)

                if not keep:
                    self.putconn(conn)
                future.set_result(result)

            self.ioloop.add_future(future_or_result, when_done)
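In this snippet, future, retry, keep, method, args, kwargs and async are free variables captured from the enclosing pool method (and async predates Python 3.7, where it became a reserved word). The core pattern, settling one future either from a plain return value or by chaining another future, can be sketched on its own; the isinstance check below stands in for the async flag, and the Tornado APIs are the ones the snippet itself uses:

    from tornado.concurrent import Future

    def settle(value_or_future, target, io_loop):
        '''Resolve target from a plain value or by chaining another Future.'''
        if not isinstance(value_or_future, Future):
            target.set_result(value_or_future)
            return

        def when_done(rfut):
            try:
                target.set_result(rfut.result())
            except Exception as error:
                target.set_exception(error)

        io_loop.add_future(value_or_future, when_done)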
Example 5
    def parse_base(self, line):
        '''
        tshark line parser
        '''
        data = line.split()
        if len(data) >= 5:
            proto = data[4]

            # If we don't see the protocol yet in the protos dictionary, we need
            # to initialize it.  After that, we can then increment regardless.
            if proto not in self.protos:
                self.protos[proto] = 0
            self.protos[proto] += 1

            # If the counter timer is set to 0, then this is the first packet
            # we have parsed.  Set the counter to the current time so that we
            # don't send a single packet stat to the API.
            if self.counter == 0:
                self.counter = int(time.time())

            # Once we reach 60 seconds, we need to purge out the protocol counts
            # that we have counted.  Make an API call for each proto we have,
            # then reset the counter timer and the protos dictionary.
            if (int(time.time()) - self.counter) >= 60:
                for proto in self.protos:
                    log.debug('TSHARK: sending %s=%s' %
                              (proto, self.protos[proto]))
                    self.api.stat(proto, self.protos[proto])
                self.counter = int(time.time())
                self.protos = {}
Example 6
    def add_dead(self, conn):
        log.debug("Adding dead connection")
        self.pending.discard(conn)
        self.dead.add(conn)

        # If everything is dead, abort anything pending.
        if not self.pending:
            self.abort_waiting_queue(Pool.DatabaseNotAvailable("No database connection available"))
Example 7
 def _new_connection(self):
     log.debug("Spawning new connection")
     conn = Connection(self.dsn,
                       connection_factory=self.connection_factory,
                       cursor_factory=self.cursor_factory,
                       ioloop=self.ioloop,
                       setsession=self.setsession)
     return self._connect_one(conn)
Example 8
 def release(self, conn):
     log.debug("About to release connection %s", conn.fileno)
     assert conn in self.busy, "Tried to release non-busy connection"
     self.busy.remove(conn)
     if conn.closed:
         self.dead.add(conn)
     else:
         self.add_free(conn)
Example 9
    def show_(self):
        log.debug("Received show")
        for i in range(5):
            line = self.get_line(i)

            line.setText(self.translations[i])
            self.set_color(line, "#FFFFFF")

        self.set_next()
Example 10
    def check_(self):
        log.debug("Received check")
        for i in range(5):
            line = self.get_line(i)
            ans, corr = line.text(), self.translations[i]

            self.set_color(line, "#00FF00" if ans == corr else "#FF0000")

        self.set_next()
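get_line, set_color and self.translations are not shown in these two snippets. Assuming the lines are PyQt QLineEdit widgets, the missing set_color helper could plausibly be a one-line stylesheet call; the following is a guess for illustration, not the original code:

    import sys
    from PyQt5.QtWidgets import QApplication, QLineEdit

    def set_color(line, color):
        # Tint the QLineEdit with the given hex colour via its stylesheet.
        line.setStyleSheet('QLineEdit { background-color: %s; }' % color)

    if __name__ == '__main__':
        app = QApplication(sys.argv)
        line = QLineEdit('answer')
        set_color(line, '#00FF00')
        line.show()
        app.exec_()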
Example 11
            def when_done(rfut):
                try:
                    result = rfut.result()
                except (psycopg2.Error, psycopg2.OperationalError) as error:
                    log.debug("Method failed Asynchronously")
                    return self._retry(retry, when_available, conn, keep, future)

                if not keep:
                    self.putconn(conn)
                future.set_result(result)
Example 12
 def on_connect(fut):
     if pending[0]:
         pending[0] -= 1
         return
     # all connection attempts are complete
     if self.conns.dead and self.raise_connect_errors:
         ecp = PartiallyConnectedError("%s connection(s) failed to connect" % len(self.conns.dead))
         future.set_exception(ecp)
     else:
         future.set_result(self)
     log.debug("All initial connection requests complete")
Example 13
 def parse(self, line):
     '''Ettercap line output parser.'''
     if 'USER' in line:
         usernames = self.ruser.findall(line)
         passwords = self.rpass.findall(line)
         infos = self.rinfo.findall(line)
         protos = self.rproto.findall(line)
         if len(usernames) > 0 and len(passwords) > 0:
             username = usernames[0]
             password = passwords[0]
             info = infos[0]
             proto = protos[0]
             log.debug('ETTERCAP: sending Account <%s>' % username)
             self.api.account(username, password, info, proto, 'ettercap')
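The compiled patterns ruser, rpass, rinfo and rproto are defined elsewhere and not shown. Purely for illustration, hypothetical patterns consistent with the calls above, run against a made-up ettercap-style credential line:

    import re

    # Hypothetical patterns; the real compiled regexes are defined elsewhere.
    ruser = re.compile(r'USER:\s*(\S+)')
    rpass = re.compile(r'PASS:\s*(\S+)')
    rinfo = re.compile(r'INFO:\s*(.*)$')
    rproto = re.compile(r'^(\w+)\s*:')

    sample = 'FTP : 10.0.0.5:21 -> USER: admin  PASS: secret  INFO: 220 Welcome'
    print(ruser.findall(sample))    # ['admin']
    print(rpass.findall(sample))    # ['secret']
    print(rproto.findall(sample))   # ['FTP']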
Example 14
 def acquire(self, pool_need_log=False):
     """Occupy free connection"""
     future = Future()
     while True:
         if self.free:
             conn = self.free.pop()
             if conn.valid:
                 self.busy.add(conn)
             else:
                 self.dead.add(conn)
                 continue
             future.set_result(conn)
             conn.connection_need_log = pool_need_log
             log.debug("Acquired free connection %s", conn.fileno)
             return future
         elif self.busy:
             log.debug("No free connections, and some are busy - put in waiting queue")
             self.waiting_queue.appendleft(future)
             return future
         elif self.pending:
             log.debug("No free connections, but some are pending - put in waiting queue")
             self.waiting_queue.appendleft(future)
             return future
         else:
             log.debug("All connections are dead")
             return None
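A caller would typically yield the returned future from inside a Tornado coroutine, treating None as the signal that every connection is dead. A usage sketch, with the surrounding pool object and error handling assumed for illustration:

    from tornado import gen

    @gen.coroutine
    def with_connection(conns):
        future = conns.acquire()
        if future is None:
            raise RuntimeError('all connections are dead')  # hypothetical handling
        conn = yield future
        try:
            pass  # ... run queries on conn ...
        finally:
            conns.release(conn)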
Example 15
 def parse(self, line):
     '''
     Driftnet output line parser. 
     '''
     # This parser is about as simple as they come.  Every line is simply a
     # filename of the image that driftnet carved out.  All we need to do is
     # open it up, send the data to the API, then remove the file.
     filename = line.strip('\r\n ')
     log.debug('DRIFTNET: sending image %s' % filename)
     self.api.image(filename)
     try:
         os.remove(filename)
     except:
         log.warn('DRIFTNET: could not remove %s' % filename)
Example 16
    def parse(self, line):
        '''Ettercap line output parser.'''
        log.debug('ETTERCAP: DEBUG0: %s' % line)
        if 'USER' in line:
            usernames = self.ruser.findall(line)
            passwords = self.rpass.findall(line)
            infos = self.rinfo.findall(line)
            protos = self.rproto.findall(line)
            hosts = self.rhost.findall(line)
            log.debug('ETTERCAP: DEBUG1: %s, %s, %s, %s' % (usernames, passwords, infos, protos))
            if len(usernames) > 0 and len(passwords) > 0:
                username = usernames[0]
                password = passwords[0]
                host     = hosts[0]
                proto    = protos[0]
                try:
                    info = infos[0]
                except:
                    info = ''
                if proto == 'FTP':
                    info = host
                log.debug('ETTERCAP: sending Account <%s>' % username)
                self.api.account(username, password, info, proto, 'ettercap')

        if 'COMMUNITY' in line:
            communities = self.rcommunity.findall(line)
            hosts = self.rhost.findall(line)
            host = (hosts[0].split(':'))[0]
            port = (hosts[0].split(':'))[1]
            protos = self.rproto.findall(line)
            if len(communities) > 0 and port == '161':
                community = communities[0]
                proto     = protos[0]
                log.debug('ETTERCAP: sending SNMP community: <%s>' % community)
                self.api.account('n/a', community, host, proto, 'ettercap')
Example 17
    def _reanimate_and_stretch_if_needed(self):
        if self.conns.dead:
            self._reanimate()
            return

        if self.conns.total == self.max_size:
            return  # max size reached
        if self.conns.free:
            return  # no point to stretch if there are free connections
        if self.conns.pending:
            if len(self.conns.pending) >= len(self.conns.waiting_queue):
                return  # there are enough outstanding connection requests

        log.debug("Stretching pool")
        self._new_connection()
Example 18
def cacheService(fileName, url, params):
    '''Try the cache before calling the service; on a cache miss, call the
    service and cache the response.
    Args:
        fileName: cache file name.
        url: service API URL.
        params: request params. Dict
    Returns:
        Response content as a JSON string, or '' if neither the cache nor
        the service returned any data.
    '''
    respContent = cache.getWithParams(fileName, params)
    if respContent != '' and config.enable_cache:
        log.debug('Return cache data, from :' +
                  cache.spliceFileName(fileName, params))
        return respContent
    respContent = callService(url, params)
    if respContent != '':
        cache.setWithParams(fileName, params, respContent)
    return respContent
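A call would then look like this; the cache name, endpoint and parameters are invented for illustration and do not refer to a real API:

    # Hypothetical cache name, endpoint and parameters, purely for illustration.
    data = cacheService('stock_basic', '/api/example-endpoint', {'scode': '000001'})
    if data:
        print(data)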
Example 19
    def _reanimate(self):
        assert self.conns.dead, "BUG: don't call reanimate when there is no one to reanimate"

        future = Future()

        if self.ioloop.time() - self._last_connect_time < self.reconnect_interval:
            log.debug("Not reconnecting - too soon")
            future.set_result(None)
            return future

        pending = [len(self.conns.dead)-1]

        def on_connect(fut):
            if pending[0]:
                pending[0] -= 1
                return
            future.set_result(None)

        while self.conns.dead:
            conn = self.conns.dead.pop()
            self.ioloop.add_future(self._connect_one(conn), on_connect)

        return future
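The pending list above is a one-element mutable counter shared with the nested callback (the code predates nonlocal-style rewrites). Isolated from the pool, and assuming the same Tornado Future and IOLoop APIs the snippet itself uses, the countdown pattern looks roughly like this:

    from tornado.concurrent import Future

    def wait_for_all(futures, io_loop):
        '''Return a Future that resolves once every future in futures does.'''
        result = Future()
        pending = [len(futures) - 1]    # mutable countdown cell for the closure

        def on_done(_):
            if pending[0]:
                pending[0] -= 1
                return
            result.set_result(None)

        for fut in futures:
            io_loop.add_future(fut, on_done)
        return result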
Example 20
def callService(url, params):
    '''Main function for calling cninfo services.
    Args:
        url: API URL. String
        params: request params. Dict
    Returns:
        '' if the service returns a non-200 result code;
        otherwise the response JSON string.
    '''
    params['access_token'] = getAccessToken()
    paramsbyte = bytes(urllib.parse.urlencode(params), 'utf8')
    response = urllib.request.urlopen(config.base_url + url, paramsbyte)
    respContentStr = response.read().decode('utf-8')
    respContent = json.loads(respContentStr)
    respCode = respContent['resultcode']
    if respCode == 401 or respCode == 404 or respCode == 405:
        log.debug('Token invalid. Updating it from service')
        getAccessTokenFromService()
        return callService(url, params)
    elif respCode != 200:
        log.error('API call failed: ' + respContent['resultmsg'])
        return ''
    else:
        return respContentStr
Example 21
 def acquire(self):
     """Occupy free connection"""
     future = Future()
     if self.free:
         conn = self.free.pop()
         self.busy.add(conn)
         future.set_result(conn)
         log.debug("Acquired free connection %s", conn.fileno)
         return future
     elif self.busy:
         log.debug("No free connections, and some are busy - put in waiting queue")
         self.waiting_queue.appendleft(future)
         return future
     elif self.pending:
         log.debug("No free connections, but some are pending - put in waiting queue")
         self.waiting_queue.appendleft(future)
         return future
     else:
         log.debug("All connections are dead")
         return None
Example 22
    def add_free(self, conn):
        self.pending.discard(conn)
        log.debug("Handling free connection %s", conn.fileno)

        if not self.waiting_queue:
            log.debug("No outstanding requests - adding to free pool")
            conn.last_used_time = time.time()
            self.free.append(conn)
            return

        log.debug("There are outstanding requests - resumed future from waiting queue")
        self.busy.add(conn)
        future = self.waiting_queue.pop()
        future.set_result(conn)
Example 23
    def parse(self, line):
        '''
        Driftnet output line parser. 
        '''
        # This parser is about as simple as they come.  Every line is simply a
        # filename of the image that driftnet carved out.  All we need to do is
        # open it up, send the data to the API, then remove the file.
        filename = line.strip('\r\n ')

        # Check if the picture is "p**n"
        try:
            im = Image.open(filename)
            skinratio = self.checkporn(im) * 100
            if skinratio > 30:
                log.debug('DRIFTNET: skipping image %s (detected as p**n - skin: %s%%)' % (filename, skinratio))
            else:
                log.debug('DRIFTNET: sending image %s (skin: %s%%)' % (filename, skinratio))
                self.api.image(filename)
        except:
            log.debug('DRIFTNET: skipping image %s (not readable)' % filename)
        try:
            os.remove(filename)
        except:
            log.warn('DRIFTNET: cannot remove %s' % filename)
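checkporn is not shown. One common heuristic, sketched here on the assumption that the method returns the fraction of skin-coloured pixels (0.0 to 1.0), classifies each pixel by its chroma values in YCbCr space; skin_ratio below is a hypothetical stand-in, not the original code:

    import sys
    from PIL import Image

    def skin_ratio(im):
        '''Rough skin-pixel fraction by Cb/Cr thresholds (a sketch, not the original).'''
        pixels = list(im.convert('YCbCr').getdata())
        skin = sum(1 for y, cb, cr in pixels
                   if 77 <= cb <= 127 and 133 <= cr <= 173)
        return float(skin) / max(len(pixels), 1)

    if __name__ == '__main__':
        print(skin_ratio(Image.open(sys.argv[1])))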
Example 24
    def run(self, args):
        assert (len(args) == 0), "Unexpected launcher command-line arguments: {!r}".format(args)

        assert os.path.exists(self.flags.java_path), \
            "JVM executable not found: {!r}".format(self.flags.java_path)

        self.extract()

        # Identify which profile to use:
        profiles = self.config["profiles"]

        if self.flags.profile is not None:
            # Profile explicitly specified by user must exist:
            assert (self.flags.profile in profiles), \
                "Invalid profile {!r}, use one of {}." \
                .format(self.flags.profile, sorted(profiles.keys()))
            profile_name = self.flags.profile
        else:
            # No explicit override, default is to use the program name if possible,
            # falling back to the configured default profile if needed.
            profile_name = base.get_program_name()
            if profile_name not in profiles:
                profile_name = self.config["default_profile"]
        profile = profiles[profile_name]

        # Compute the JVM arguments from explicit extension/overrides and from global map:
        jvm_args = list()
        if not self.flags.ignore_profile_jvm_args:
            jvm_args.extend(profile.get("jvm_args", tuple()))
        jvm_args.extend(arg_map[JVM_ARGS])

        # Recover program arguments from global map:
        program_args = arg_map[PROGRAM_ARGS]

        # Compute concrete classpath and set environment CLASSPATH accordingly:
        cp_entries = tuple(self.make_classpath_entries(profile))
        env = dict(os.environ)
        env["CLASSPATH"] = ":".join(cp_entries)

        # Handle --print-X launcher commands:
        should_exit = False  # becomes true if a special command is invoked (eg. --print-foo)
        if self.flags.print_classpath:
            print(":".join(cp_entries))
            should_exit = True
        if self.flags.print_config:
            print(base.json_encode(self.config))
            should_exit = True
        if should_exit:
            return os.EX_OK

        # Determine which Java class to invoke:
        class_name = profile["main_class"]
        if self.flags.class_name is not None:
            class_name = self.flags.class_name

        log.info("Using Java executable: {!r}", self.flags.java_path)
        log.info("Using JVM arguments: {!r}", jvm_args)
        log.info("Using Java main class: {!r}", class_name)
        log.info("Using classpath: {}", base.json_encode(cp_entries))
        log.info("Using Java program arguments: {!r}", program_args)
        log.debug("Using environment: {}", base.json_encode(env))

        args = list()
        args.append(self.flags.java_path)
        args.extend(jvm_args)
        args.append(class_name)
        args.extend(program_args)

        os.execve(self.flags.java_path, args, env)
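The launcher reads default_profile and a profiles map whose entries provide main_class and, optionally, jvm_args. A hypothetical minimal configuration (all names invented) would therefore look like:

    config = {
        "default_profile": "server",
        "profiles": {
            "server": {
                "main_class": "com.example.Main",        # hypothetical class
                "jvm_args": ["-Xmx2g", "-XX:+UseG1GC"],
            },
        },
    }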
Example 25
    def run(self, args):
        action = self.flags.do
        if (action is None) and (len(args) > 0):
            action, args = args[0], args[1:]
        if action is None:
            action = "classpath"
        assert (action is not None), ("Must specify an action to perform, eg. 'classpath'.")

        local_repo = self.flags.maven_local
        if local_repo == ":wkspc:":
            wkspc = workspace.find_workspace_root(path=os.getcwd())
            if wkspc is not None:
                local_repo = os.path.join(wkspc, "output", "maven_repository")
                log.info("Using workspace local repo {!r}", local_repo)
            else:
                local_repo = os.path.join(os.environ["HOME"], ".m2", "repository")
                log.info("No workspace found, using global repo {!r}", local_repo)
        assert os.path.exists(local_repo), \
            "Maven local repository not found: {!r}".format(local_repo)

        self._repo = maven_repo.MavenRepository(
            local = local_repo,
            remotes = self.flags.maven_remotes.split(","),
            exclusions = frozenset(self.flags.maven_remote_exclusions.split(",")),
        )

        self._scanner = maven_loader.ArtifactScanner(
            repo=self.repo,
            fetch_content=self.flags.fetch,
            force_unresolved=True,
        )

        targets = list(self.flags.targets.split(",")) + list(args)
        targets = list(filter(None, targets))
        target_artifacts = map(artifact.parse_artifact, targets)

        exclusions = self.flags.exclusions.split(",")
        exclusions = filter(None, exclusions)
        exclusion_artifacts = frozenset(map(artifact.parse_artifact_name, exclusions))

        deps = list()
        for target_artifact in target_artifacts:
            dep = maven_wrapper.Dependency(
                artifact=target_artifact,
                scope=self.flags.scope,
                exclusions=exclusion_artifacts,
            )
            deps.append(dep)

        log.debug("Scanning dependencies: {!r}", deps)
        classpath = self._scanner.scan(deps)

        list_file_path = self.flags.list_file
        if list_file_path is None:
            list_file_path = '/dev/null'
        with open(list_file_path, "wt") as ofile:
            self._scanner.write_dep_list(output=ofile)

        if action == "classpath":
            print(":".join(sorted(classpath)))

        elif action == "resolve":
            for artf, dep_chains in sorted(self._scanner.versions.items()):
                chain = dep_chains[0]
                selected = chain[0]
                print(selected.artifact)

        elif action == "explain":
            self._scanner.explain(artifact.parse_artifact_name(self.flags.explain))

        else:
            raise Error("Invalid action: {!r}".format(action))
Example 26
    def run(self, args):
        action = self.flags.do
        if (action is None) and (len(args) > 0):
            action, args = args[0], args[1:]
        if action is None:
            action = "classpath"
        assert (action is not None), (
            "Must specify an action to perform, eg. 'classpath'.")

        local_repo = self.flags.maven_local
        if local_repo == ":wkspc:":
            wkspc = workspace.find_workspace_root(path=os.getcwd())
            if wkspc is not None:
                local_repo = os.path.join(wkspc, "output", "maven_repository")
                log.info("Using workspace local repo {!r}", local_repo)
            else:
                local_repo = os.path.join(os.environ["HOME"], ".m2",
                                          "repository")
                log.info("No workspace found, using global repo {!r}",
                         local_repo)
        assert os.path.exists(local_repo), \
            "Maven local repository not found: {!r}".format(local_repo)

        self._repo = maven_repo.MavenRepository(
            local=local_repo,
            remotes=self.flags.maven_remotes.split(","),
            exclusions=frozenset(
                self.flags.maven_remote_exclusions.split(",")),
        )

        self._scanner = maven_loader.ArtifactScanner(
            repo=self.repo,
            fetch_content=self.flags.fetch,
            force_unresolved=True,
        )

        targets = list(self.flags.targets.split(",")) + list(args)
        targets = list(filter(None, targets))
        target_artifacts = map(artifact.parse_artifact, targets)

        exclusions = self.flags.exclusions.split(",")
        exclusions = filter(None, exclusions)
        exclusion_artifacts = frozenset(
            map(artifact.parse_artifact_name, exclusions))

        deps = list()
        for target_artifact in target_artifacts:
            dep = maven_wrapper.Dependency(
                artifact=target_artifact,
                scope=self.flags.scope,
                exclusions=exclusion_artifacts,
            )
            deps.append(dep)

        log.debug("Scanning dependencies: {!r}", deps)
        classpath = self._scanner.scan(deps)

        list_file_path = self.flags.list_file
        if list_file_path is None:
            list_file_path = '/dev/null'
        with open(list_file_path, "wt") as ofile:
            self._scanner.write_dep_list(output=ofile)

        if action == "classpath":
            print(":".join(sorted(classpath)))

        elif action == "resolve":
            for artf, dep_chains in sorted(self._scanner.versions.items()):
                chain = dep_chains[0]
                selected = chain[0]
                print(selected.artifact)

        elif action == "explain":
            self._scanner.explain(
                artifact.parse_artifact_name(self.flags.explain))

        else:
            raise Error("Invalid action: {!r}".format(action))