Example #1
def download_all(verbose=False):
    utils.makedirs(config.GEOIP_PATH)
    opener = build_opener()
    opener.addheaders = [('User-agent', 'IVRE/1.0 +https://ivre.rocks/')]
    for fname, url in viewitems(config.IPDATA_URLS):
        outfile = os.path.join(config.GEOIP_PATH, fname)
        if verbose:
            sys.stdout.write("Downloading %s to %s: " % (url, outfile))
            sys.stdout.flush()
        if url.endswith('.gz'):
            decode = zlib.decompress
        else:
            decode = lambda x: x
        with open(outfile, 'wb') as wdesc:
            udesc = opener.open(url)
            wdesc.write(decode(udesc.read()))
            if verbose:
                sys.stdout.write("done.\n")
    if verbose:
        sys.stdout.write("Unpacking: ")
        sys.stdout.flush()
    for func, args, kargs in PARSERS:
        func(*args, **kargs)
    if verbose:
        sys.stdout.write("done.\n")
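Every example on this page relies on utils.makedirs, whose definition is not shown here. A minimal equivalent helper, given as a sketch rather than IVRE's actual implementation, would simply wrap os.makedirs so that an already-existing directory is not treated as an error:

import os

def makedirs(dirname):
    # Create dirname and any missing parents; silently succeed if the
    # directory already exists (so it is safe to call repeatedly, as the
    # loops in these examples do).
    os.makedirs(dirname, exist_ok=True)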
Example #3
    def sync(self, agent):
        """This function should only be called from `agent.sync()`
        method. It stores the results of terminated scans according to
        the target status.

        """
        remout = agent.get_local_path('remoteoutput')
        for remfname in glob.glob(
                os.path.join(remout, self.visiblecategory + '.*.xml')):
            locfname = os.path.basename(remfname).split('.', 4)
            locfname[0] = self.category
            status = 'unknown'
            with open(remfname) as remfdesc:
                remfcontent = remfdesc.read()
                if '<status state="up"' in remfcontent:
                    status = 'up'
                elif '<status state="down"' in remfcontent:
                    if not self.storedown:
                        remfdesc.close()
                        os.unlink(remfname)
                        continue
                    status = 'down'
                del remfcontent
            locfname = os.path.join(
                self.outputpath,
                locfname[0],
                status,
                re.sub('[/@:]', '_', agent.name),
                *locfname[1:])
            utils.makedirs(os.path.dirname(locfname))
            os.rename(remfname, locfname)
Example #4
    def sync(self, agent):
        """This function should only be called from `agent.sync()`
        method. It stores the results of terminated scans according to
        the target status.

        """
        remout = agent.get_local_path('remoteoutput')
        for remfname in glob.glob(
                os.path.join(remout, self.visiblecategory + '.*.xml')):
            locfname = os.path.basename(remfname).split('.', 4)
            locfname[0] = self.category
            status = 'unknown'
            with open(remfname) as remfdesc:
                remfcontent = remfdesc.read()
                if '<status state="up"' in remfcontent:
                    status = 'up'
                elif '<status state="down"' in remfcontent:
                    if not self.storedown:
                        remfdesc.close()
                        os.unlink(remfname)
                        continue
                    status = 'down'
                del remfcontent
            locfname = os.path.join(self.outputpath, locfname[0], status,
                                    re.sub('[/@:]', '_', agent.name),
                                    *locfname[1:])
            utils.makedirs(os.path.dirname(locfname))
            os.rename(remfname, locfname)
Example #5
 def create_local_dirs(self):
     """Create local directories used to manage the agent"""
     for dirname in [
             'input', 'remoteinput', 'remotecur', 'remoteoutput',
             'remotedata'
     ]:
         utils.makedirs(self.get_local_path(dirname))
Example #6
 def sync(self, agent):
     remout = agent.get_local_path('remoteoutput')
     for remfname in glob.glob(
             os.path.join(remout, self.visiblecategory + '.*.xml')):
         locfname = os.path.basename(remfname).split('.', 4)
         locfname[0] = self.category
         status = 'unknown'
         with open(remfname) as remfdesc:
             remfcontent = remfdesc.read()
             if '<status state="up"' in remfcontent:
                 status = 'up'
             elif '<status state="down"' in remfcontent:
                 if not self.storedown:
                     remfdesc.close()
                     os.unlink(remfname)
                     continue
                 status = 'down'
             del remfcontent
         locfname = os.path.join(
             self.outputpath,
             locfname[0],
             status,
             re.sub('[/@:]', '_', agent.name),
             *locfname[1:])
         utils.makedirs(os.path.dirname(locfname))
         os.rename(remfname, locfname)
Example #7
    def add_agent(self,
                  masterid,
                  host,
                  remotepath,
                  rsync=None,
                  source=None,
                  maxwaiting=60):
        """Prepares an agent and adds it to the DB using
        `self._add_agent()`

        """
        if rsync is None:
            rsync = ["rsync"]
        if not remotepath.endswith('/'):
            remotepath += '/'
        if source is None:
            source = (remotepath if host is None else "%s:%s" %
                      (host, remotepath))
        master = self.get_master(masterid)
        localpath = tempfile.mkdtemp(prefix="", dir=master['path'])
        for dirname in ["input"] + [
                os.path.join("remote", dname)
                for dname in "input", "cur", "output"
        ]:
            utils.makedirs(os.path.join(localpath, dirname))
Example #8
    def add_agent(self, masterid, host, remotepath,
                  rsync=None, source=None, maxwaiting=60):
        """Prepares an agent and adds it to the DB using
        `self._add_agent()`

        """
        if rsync is None:
            rsync = ["rsync"]
        if not remotepath.endswith('/'):
            remotepath += '/'
        if source is None:
            source = (remotepath if host is None
                      else "%s:%s" % (host, remotepath))
        master = self.get_master(masterid)
        localpath = tempfile.mkdtemp(prefix="", dir=master['path'])
        for dirname in ["input"] + [os.path.join("remote", dname)
                                    for dname in "input", "cur",
                                    "output"]:
            utils.makedirs(os.path.join(localpath, dirname))
        agent = {
            "host": host,
            "path": {
                "remote": remotepath,
                "local": localpath,
            },
            "source": source,
            "rsync": rsync,
            "maxwaiting": maxwaiting,
            "scan": None,
            "sync": True,
            "master": masterid,
        }
        return self._add_agent(agent)
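A side note on the directory-creation loop above: the comprehension iterates over the bare tuple "input", "cur", "output", a form that only Python 2 accepts; Python 3 requires the tuple to be parenthesised. A runnable Python 3 sketch of the same loop, with a hypothetical localpath standing in for the temporary directory created by tempfile.mkdtemp:

import os

localpath = "/tmp/ivre-master-agent"  # hypothetical path, for illustration only
for dirname in ["input"] + [os.path.join("remote", dname)
                            for dname in ("input", "cur", "output")]:
    os.makedirs(os.path.join(localpath, dirname), exist_ok=True)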
Example #9
def worker(progname, directory, sensor=None):
    """This function is the main loop, creating the processes when
    needed and feeding them with the data from the files.

    """
    utils.makedirs(os.path.join(directory, "current"))
    procs = {}
    while not WANTDOWN:
        # We get the next file to handle
        fname = getnextfiles(directory, sensor=sensor, count=1)
        # ... if we don't, we sleep for a while
        if not fname:
            utils.LOGGER.debug("Sleeping for %d s", SLEEPTIME)
            time.sleep(SLEEPTIME)
            continue
        fname = fname[0]
        fname_sensor = fname.groupdict()['sensor']
        if fname_sensor in procs:
            proc = procs[fname_sensor]
        else:
            proc = create_process(progname, fname_sensor)
            procs[fname_sensor] = proc
        fname = fname.group()
        # Our "lock system": if we can move the file, it's ours
        try:
            shutil.move(os.path.join(directory, fname),
                        os.path.join(directory, "current"))
        except shutil.Error:
            continue
        if config.DEBUG:
            utils.LOGGER.debug("Handling %s", fname)
        fname = os.path.join(directory, "current", fname)
        fdesc = utils.open_file(fname)
        handled_ok = True
        for line in fdesc:
            try:
                proc.stdin.write(line)
            except ValueError:
                utils.LOGGER.warning(
                    "Error while handling line %r. "
                    "Trying again", line)
                proc = create_process(progname, fname_sensor)
                procs[fname_sensor] = proc
                # Second (and last) try
                try:
                    proc.stdin.write(line)
                    utils.LOGGER.warning("  ... OK")
                except ValueError:
                    handled_ok = False
                    utils.LOGGER.warning("  ... KO")
        fdesc.close()
        if handled_ok:
            os.unlink(fname)
            utils.LOGGER.debug('  ... OK')
        else:
            utils.LOGGER.debug('  ... KO')
    # SHUTDOWN
    for sensor in procs:
        procs[sensor].stdin.close()
        procs[sensor].wait()
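The loop above runs until the module-level WANTDOWN flag becomes true; the flag itself is not defined in this excerpt. A common way to flip such a flag, sketched here as an assumption about the surrounding module rather than IVRE's exact code, is a signal handler registered before worker() is called:

import signal

WANTDOWN = False

def shutdown(signum, frame):
    # Ask the worker loop to exit cleanly once the current file is done.
    global WANTDOWN
    WANTDOWN = True

signal.signal(signal.SIGINT, shutdown)
signal.signal(signal.SIGTERM, shutdown)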
Example #11
 def sync(self, agentid):
     agent = self.get_agent(agentid)
     master = self.get_master(agent['master'])
     subprocess.call(agent['rsync'] + [
         '-a',
         self.get_local_path(agent, 'input'),
         self.get_local_path(agent, os.path.join('remote', 'input'))
     ])
     subprocess.call(agent['rsync'] + [
         '-a', '--remove-source-files',
         self.get_local_path(agent, 'input'),
         self.get_remote_path(agent, 'input')
     ])
     for dname in ['input', 'cur']:
         subprocess.call(agent['rsync'] + [
             '-a', '--delete',
             self.get_remote_path(agent, dname),
             self.get_local_path(agent, os.path.join('remote', dname))
         ])
     subprocess.call(agent['rsync'] + [
         '-a', '--remove-source-files',
         self.get_remote_path(agent, 'output'),
         self.get_local_path(agent, os.path.join('remote', 'output'))
     ])
     outpath = self.get_local_path(agent, os.path.join('remote', 'output'))
     for fname in os.listdir(outpath):
         scanid = fname.split('-', 1)[0]
         scan = self.get_scan(self.str2id(scanid))
         storedir = os.path.join(
             master["path"],
             "output",
             scanid,
             str(agentid),
         )
         utils.makedirs(storedir)
         with tempfile.NamedTemporaryFile(prefix="",
                                          suffix=".xml",
                                          dir=storedir,
                                          delete=False) as fdesc:
             pass
         shutil.move(
             os.path.join(outpath, fname),
             fdesc.name
         )
         self.globaldb.nmap.store_scan(
             fdesc.name,
             categories=scan['target'].target.infos['categories'],
             source=agent['source'],
         )
         # TODO gettoarchive parameter
         self.incr_scan_results(self.str2id(scanid))
Example #12
 def sync(self, agentid):
     agent = self.get_agent(agentid)
     master = self.get_master(agent['master'])
     subprocess.call(agent['rsync'] + [
         '-a',
         self.get_local_path(agent, 'input'),
         self.get_local_path(agent, os.path.join('remote', 'input'))
     ])
     subprocess.call(agent['rsync'] + [
         '-a', '--remove-source-files',
         self.get_local_path(agent, 'input'),
         self.get_remote_path(agent, 'input')
     ])
     for dname in ['input', 'cur']:
         subprocess.call(agent['rsync'] + [
             '-a', '--delete',
             self.get_remote_path(agent, dname),
             self.get_local_path(agent, os.path.join('remote', dname))
         ])
     subprocess.call(agent['rsync'] + [
         '-a', '--remove-source-files',
         self.get_remote_path(agent, 'output'),
         self.get_local_path(agent, os.path.join('remote', 'output'))
     ])
     outpath = self.get_local_path(agent, os.path.join('remote', 'output'))
     for fname in os.listdir(outpath):
         scanid = fname.split('-', 1)[0]
         scan = self.get_scan(self.str2id(scanid))
         storedir = os.path.join(
             master["path"],
             "output",
             scanid,
             str(agentid),
         )
         utils.makedirs(storedir)
         with tempfile.NamedTemporaryFile(prefix="",
                                          suffix=".xml",
                                          dir=storedir,
                                          delete=False) as fdesc:
             pass
         shutil.move(
             os.path.join(outpath, fname),
             fdesc.name
         )
         self.globaldb.nmap.store_scan(
             fdesc.name,
             categories=scan['target'].target.infos['categories'],
             source=agent['source'],
         )
         # TODO gettoarchive parameter
         self.incr_scan_results(self.str2id(scanid))
Example #13
    def sync(self, agent):
        """This function should only be called from `agent.sync()`
        method. It stores the results of terminated scans according to
        the target status.

        """
        for remfname in glob.glob(
            os.path.join(
                agent.get_local_path("remoteoutput"), self.visiblecategory + ".*.xml*"
            )
        ):
            locfname = os.path.basename(remfname).split(".", 4)
            locfname[0] = self.category
            status = "unknown"
            with utils.open_file(remfname) as remfdesc:
                remfcontent = remfdesc.read()
                if b'<status state="up"' in remfcontent:
                    status = "up"
                elif b'<status state="down"' in remfcontent:
                    if not self.storedown:
                        remfdesc.close()
                        os.unlink(remfname)
                        continue
                    status = "down"
                del remfcontent
            locfname = os.path.join(
                self.outputpath,
                locfname[0],
                status,
                re.sub("[/@:]", "_", agent.name),
                *locfname[1:],
            )
            utils.makedirs(os.path.dirname(locfname))
            os.rename(remfname, locfname)
        for remfname in glob.glob(
            os.path.join(
                agent.get_local_path("remotedata"), self.visiblecategory + ".*.tar*"
            )
        ):
            locfname = os.path.basename(remfname).split(".", 4)
            locfname[0] = self.category
            locfname = os.path.join(
                self.outputpath,
                locfname[0],
                "data",
                re.sub("[/@:]", "_", agent.name),
                *locfname[1:],
            )
            utils.makedirs(os.path.dirname(locfname))
            os.rename(remfname, locfname)
Example #14
def download_all(verbose: bool = False) -> None:
    assert config.GEOIP_PATH is not None
    utils.makedirs(config.GEOIP_PATH)
    opener = build_opener()
    opener.addheaders = [("User-agent",
                          "IVRE/%s +https://ivre.rocks/" % VERSION)]
    for fname, url in config.IPDATA_URLS.items():
        if url is None:
            if not fname.startswith("GeoLite2-"):
                continue
            if fname.startswith("GeoLite2-dumps."):
                continue
            basename, ext = fname.split(".", 1)
            url = ("https://download.maxmind.com/app/geoip_download?"
                   "edition_id=%s&suffix=%s&license_key=%s" % (
                       basename,
                       ext,
                       config.MAXMIND_LICENSE_KEY,
                   ))
        outfile = os.path.join(config.GEOIP_PATH, fname)
        if verbose:
            sys.stdout.write("Downloading %s to %s: " % (url, outfile))
            sys.stdout.flush()
        with open(outfile, "wb") as wdesc:
            udesc = opener.open(url)
            wdesc.write(udesc.read())
            if verbose:
                sys.stdout.write("done.\n")
    if verbose:
        sys.stdout.write("Unpacking: ")
        sys.stdout.flush()
    for func, args, kargs in PARSERS:
        try:
            func(*args, **kargs)
        except Exception:
            utils.LOGGER.warning(
                "A parser failed: %s(%s, %s)",
                func.__name__,
                ", ".join(args),
                ", ".join("%s=%r" % k_v for k_v in kargs.items()),
                exc_info=True,
            )
    if verbose:
        sys.stdout.write("done.\n")
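PARSERS is used in several of these examples but never defined; the way it is unpacked shows it is an iterable of (callable, positional arguments, keyword arguments) triples (Example #19 instead keys such lists by file name). A self-contained sketch of the calling convention, using a made-up parser rather than one of IVRE's:

def parse_dummy(fname, sep=","):
    # Hypothetical parser, present only to illustrate the convention.
    print("parsing %s (sep=%r)" % (fname, sep))

PARSERS = [
    (parse_dummy, ["example-data.csv"], {"sep": ","}),
]

for func, args, kargs in PARSERS:
    func(*args, **kargs)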
Example #15
 def sync(self, agentid):
     agent = self.get_agent(agentid)
     master = self.get_master(agent["master"])
     subprocess.call(
         agent["rsync"]
         + ["-a", self.get_local_path(agent, "input"), self.get_local_path(agent, os.path.join("remote", "input"))]
     )
     subprocess.call(
         agent["rsync"]
         + ["-a", "--remove-source-files", self.get_local_path(agent, "input"), self.get_remote_path(agent, "input")]
     )
     for dname in ["input", "cur"]:
         subprocess.call(
             agent["rsync"]
             + [
                 "-a",
                 "--delete",
                 self.get_remote_path(agent, dname),
                 self.get_local_path(agent, os.path.join("remote", dname)),
             ]
         )
     subprocess.call(
         agent["rsync"]
         + [
             "-a",
             "--remove-source-files",
             self.get_remote_path(agent, "output"),
             self.get_local_path(agent, os.path.join("remote", "output")),
         ]
     )
     outpath = self.get_local_path(agent, os.path.join("remote", "output"))
     for fname in os.listdir(outpath):
         scanid = fname.split("-", 1)[0]
         scan = self.get_scan(self.str2id(scanid))
         storedir = os.path.join(master["path"], "output", scanid, str(agentid))
         utils.makedirs(storedir)
         with tempfile.NamedTemporaryFile(prefix="", suffix=".xml", dir=storedir, delete=False) as fdesc:
             pass
         shutil.move(os.path.join(outpath, fname), fdesc.name)
         self.globaldb.nmap.store_scan(
             fdesc.name, categories=scan["target"].target.infos["categories"], source=agent["source"]
         )
         # TODO gettoarchive parameter
         self.incr_scan_results(self.str2id(scanid))
Example #16
def download_all(verbose=False):
    utils.makedirs(config.GEOIP_PATH)
    opener = build_opener()
    opener.addheaders = [('User-agent',
                          'IVRE/%s +https://ivre.rocks/' % VERSION)]
    for fname, url in viewitems(config.IPDATA_URLS):
        if url is None:
            if not fname.startswith('GeoLite2-'):
                continue
            if fname.startswith('GeoLite2-dumps.'):
                continue
            basename, ext = fname.split('.', 1)
            url = ('https://download.maxmind.com/app/geoip_download?'
                   'edition_id=%s&suffix=%s&license_key=%s' % (
                       basename,
                       ext,
                       config.MAXMIND_LICENSE_KEY,
                   ))
        outfile = os.path.join(config.GEOIP_PATH, fname)
        if verbose:
            sys.stdout.write("Downloading %s to %s: " % (url, outfile))
            sys.stdout.flush()
        with open(outfile, 'wb') as wdesc:
            udesc = opener.open(url)
            wdesc.write(udesc.read())
            if verbose:
                sys.stdout.write("done.\n")
    if verbose:
        sys.stdout.write("Unpacking: ")
        sys.stdout.flush()
    for func, args, kargs in PARSERS:
        try:
            func(*args, **kargs)
        except Exception:
            utils.LOGGER.warning(
                "A parser failed: %s(%s, %s)",
                func.__name__,
                ', '.join(args),
                ', '.join('%s=%r' % k_v for k_v in viewitems(kargs)),
                exc_info=True,
            )
    if verbose:
        sys.stdout.write("done.\n")
Example #17
def download_all(verbose=False):
    utils.makedirs(config.GEOIP_PATH)
    for fname, url in URLS.iteritems():
        outfile = os.path.join(config.GEOIP_PATH, fname)
        if verbose:
            sys.stdout.write("Downloading %s to %s: " % (url, outfile))
            sys.stdout.flush()
        if url.endswith('.gz'):
            decode = zlib.decompress
        else:
            decode = lambda x: x
        with open(outfile, 'w') as wdesc:
            udesc = urllib.urlopen(url)
            wdesc.write(decode(udesc.read()))
            if verbose:
                sys.stdout.write("done.\n")
    if verbose:
        sys.stdout.write("Unpacking: ")
        sys.stdout.flush()
    for func, args, kargs in PARSERS:
        func(*args, **kargs)
    if verbose:
        sys.stdout.write("done.\n")
Example #19
def download_all(verbose: bool = False) -> None:
    assert config.GEOIP_PATH is not None
    utils.makedirs(config.GEOIP_PATH)
    opener = build_opener()
    new_files = set()
    for fname, url in config.IPDATA_URLS.items():
        opener.addheaders = [("User-Agent",
                              "IVRE/%s +https://ivre.rocks/" % VERSION)]
        if url is None:
            if not fname.startswith("GeoLite2-"):
                continue
            if fname.startswith("GeoLite2-dumps."):
                continue
            basename, ext = fname.split(".", 1)
            url = ("https://download.maxmind.com/app/geoip_download?"
                   "edition_id=%s&suffix=%s&license_key=%s" % (
                       basename,
                       ext,
                       config.MAXMIND_LICENSE_KEY,
                   ))
        outfile = os.path.join(config.GEOIP_PATH, fname)
        try:
            outstat = os.stat(outfile)
        except FileNotFoundError:
            pass
        else:
            opener.addheaders.append((
                "If-Modified-Since",
                time.strftime("%a, %d %b %Y %H:%M:%S GMT",
                              time.gmtime(outstat.st_mtime)),
            ))
        if verbose:
            sys.stdout.write("Downloading %s to %s: " % (url, outfile))
            sys.stdout.flush()
        try:
            with opener.open(url) as udesc, open(outfile, "wb") as wdesc:
                copyfileobj(udesc, wdesc)
        except HTTPError as exc:
            if exc.status == 304:
                if verbose:
                    sys.stdout.write("already downloaded.\n")
                continue
            raise
        new_files.add(fname)
        if verbose:
            sys.stdout.write("done.\n")
    if verbose:
        sys.stdout.write("Unpacking: ")
        sys.stdout.flush()
    for fname in new_files:
        for func, args, kargs in PARSERS.get(fname, []):
            print("%r(*%r, **%r)" % (func, args, kargs))
            try:
                func(*args, **kargs)
            except Exception:
                utils.LOGGER.warning(
                    "A parser failed: %s(%s, %s)",
                    func.__name__,
                    ", ".join(args),
                    ", ".join("%s=%r" % k_v for k_v in kargs.items()),
                    exc_info=True,
                )
    if verbose:
        sys.stdout.write("done.\n")
Example #20
def worker(progname, directory, sensor=None):
    """This function is the main loop, creating the processes when
    needed and feeding them with the data from the files.

    """
    utils.makedirs(os.path.join(directory, "current"))
    procs = {}
    while not WANTDOWN:
        # We get the next file to handle
        fname = getnextfiles(directory, sensor=sensor, count=1)
        # ... if we don't, we sleep for a while
        if not fname:
            if config.DEBUG:
                print "Sleeping for %d s" % SLEEPTIME,
                sys.stdout.flush()
            time.sleep(SLEEPTIME)
            if config.DEBUG:
                print "DONE"
            continue
        fname = fname[0]
        fname_sensor = fname.groupdict()['sensor']
        if fname_sensor in procs:
            proc = procs[fname_sensor]
        else:
            proc = create_process(progname, fname_sensor)
            procs[fname_sensor] = proc
        fname = fname.group()
        # Our "lock system": if we can move the file, it's ours
        try:
            shutil.move(os.path.join(directory, fname),
                        os.path.join(directory, "current"))
        except shutil.Error:
            continue
        if config.DEBUG:
            print "Handling %s" % fname,
            sys.stdout.flush()
        fname = os.path.join(directory, "current", fname)
        if fname.endswith('.gz'):
            fdesc = gzip.open(fname)
        else:
            fdesc = open(fname)
        handled_ok = True
        for line in fdesc:
            try:
                proc.stdin.write(line)
            except ValueError:
                proc = create_process(progname, fname_sensor)
                procs[fname_sensor] = proc
                # Second (and last) try
                try:
                    proc.stdin.write(line)
                except ValueError:
                    handled_ok = False
        fdesc.close()
        if handled_ok:
            os.unlink(fname)
        if config.DEBUG:
            if handled_ok:
                print "OK"
            else:
                print "KO!"
    # SHUTDOWN
    for sensor in procs:
        procs[sensor].stdin.close()
        procs[sensor].wait()
Example #21
 def create_local_dirs(self):
     for dirname in ['input', 'remoteinput', 'remotecur', 'remoteoutput']:
         utils.makedirs(self.get_local_path(dirname))
Example #23
 def create_local_dirs(self):
     """Create local directories used to manage the agent"""
     for dirname in ['input', 'remoteinput', 'remotecur', 'remoteoutput']:
         utils.makedirs(self.get_local_path(dirname))