Пример #1
0
def create_zipped_application_packages(basedir):
    """Package each fileinspect application directory into a .fi zip.

    Expands $LOGINSPECT_HOME in *basedir*, recreates it from scratch,
    strips stale .pyc files from the applications tree, then writes one
    PyZipFile archive (<app>.fi) per application directory.
    """
    basedir = basedir.replace('$LOGINSPECT_HOME', homing.LOGINSPECT_HOME)
    # Start from a clean output directory so stale archives never linger.
    if os.path.exists(basedir):
        shutil.rmtree(basedir)
    disk.prepare_path(basedir)

    apps_path = homing.home_join('storage/col/fileinspect_applications/')
    applications = [path for path in os.listdir(apps_path)
                    if os.path.isdir(os.path.join(apps_path, path))]

    # Remove compiled bytecode so PyZipFile recompiles fresh entries.
    for dirname, subdirs, files in os.walk(apps_path):
        for f in files:
            if f.endswith(".pyc"):
                os.unlink(os.path.join(dirname, f))

    for app in applications:
        outfilename = os.path.join(basedir, '%s.fi' % app)
        # Fix: construct the archive BEFORE the try block. Previously
        # zf was assigned inside try, so a failing PyZipFile() raised
        # NameError from the finally clause, masking the real error.
        zf = zipfile.PyZipFile(outfilename, mode='w')
        try:
            zf.writepy(os.path.join(apps_path, app))
        finally:
            zf.close()
    return
Пример #2
0
    def __init__(self, conn, server, config, db_file):
        """Initialise one FTP session: reject peers missing from the
        collector config, otherwise register every configured profile
        with the authorizer."""
        ftpserver.FTPHandler.__init__(self, conn, server)

        self.config = config = textual.utf8(config)
        self.ip = inet.get_ip(conn.getpeername())
        self.db_file = db_file

        self.config_ip = config_reader.get_config_ip(self.ip, config)
        if not self.config_ip:
            # Unknown device: tell the client what to do, then drop it.
            conn.send(
                'Please add your device %s to ftp_collector in LogInspect to send logs.\n'
                % self.ip)
            self.close()
            return

        self.profiles = config['client_map'][self.config_ip]

        # TODO use hashed password in config file
        self.authorizer = ftpserver.DummyAuthorizer()

        # basedir is loop-invariant; resolve $LOGINSPECT_HOME once.
        basedir = config['basedir'].replace('$LOGINSPECT_HOME',
                                            homing.LOGINSPECT_HOME)

        for username, details in self.profiles.iteritems():
            password = details['password']
            permission = details['permission']
            home = details['home'].lstrip('/')  # let home not be absolute path
            user_home = os.path.join(basedir, home)
            disk.prepare_path(user_home + '/')
            self.authorizer.add_user(username, password, user_home, permission)
Пример #3
0
            def __init__(self, conn, server, config, db_file, parser_name_only):
                """Per-connection FTP handler setup.

                Validates the peer IP against the collector config and
                registers one authorizer user per configured profile.

                NOTE(review): relies on `outself` from an enclosing
                scope (not visible here) to decrypt stored passwords.
                """
                ftpserver.FTPHandler.__init__(self, conn, server)

                self.config = config = textual.utf8(config)
                self.ip = inet.get_ip(conn.getpeername())
                self.db_file = db_file
                self.parser_name_only = parser_name_only
                self.config_ip = config_reader.get_config_ip(self.ip, config)
                if not self.config_ip:
                    # Unknown device: inform the client and end the session.
                    conn.send("Please add your device %s to ftp_collector in LogInspect to send logs.\n" % self.ip)
                    self.close()
                    return

                self.profiles = config["client_map"][self.config_ip]

                self.authorizer = ftpserver.DummyAuthorizer()

                for user, profile in self.profiles.iteritems():
                    # Passwords are stored encrypted; decrypt via the outer object.
                    password = outself.get_decrypted_password(profile["password"])

                    permission = profile["permission"]

                    basedir = config["basedir"].replace("$LOGINSPECT_HOME", homing.LOGINSPECT_HOME)
                    home = profile["home"].lstrip("/")  # let home not be absolute path

                    user_home = os.path.join(basedir, home)
                    disk.prepare_path(user_home + "/")

                    self.authorizer.add_user(user, password, user_home, permission)
def main():
    """Entry point: parse CLI arguments and start the filesystem watcher."""
    config = _parse_args()

    # The checksum shelve persists per-file cursors across restarts.
    db_file = homing.home_join('storage/col/filesystem_collector', 'checksums.shelve')
    disk.prepare_path(db_file)
    checksums = shelve.open(db_file, protocol=2)

    context = zmq.Context()
    watcher.monitor(config, checksums, context)
Пример #5
0
def generate_certificate(ip, object_name, sic_one_timer_password, secret_key):
    """Pull an OPSEC certificate and SSL authorization files for *ip*.

    Runs Check Point's opsec_pull_cert / opsec_putkey tools inside a
    per-ip directory under storage_path (chdirs into it — process-wide
    side effect) and verifies the expected output files exist.

    Returns a {'success': bool, 'msg': str} dict.
    """
    ip_path = os.path.join(storage_path, ip)
    # If all the cert files are present, no need to extract the certificates.
    if is_certificate_present(ip):  # fix: dropped '== True' comparison
        logging.warn("Certificates already present.")  # fix: typo 'Cretificates'
        return {'success': True, 'msg': 'Certificate already present'}
    elif not os.path.exists(ip_path):
        # Nothing on disk yet; a fresh directory is created below.
        pass
    else:
        # Partial leftovers: remove all and generate fresh certificates.
        remove_certificate_if_exists(ip)

    # If the path to ip is not present it is created.
    disk.prepare_path(ip_path + '/')

    os.chdir(ip_path)
    logging.warn("Creating Opsec Certificate...")
    sp = subprocess.Popen(
        [os.path.join(certificate_path, 'opsec_pull_cert'),
         '-h', ip, '-n', object_name, '-p', sic_one_timer_password],
        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    sp.communicate()
    if not os.path.exists(os.path.join(storage_path, ip, 'opsec.p12')):
        return {
            'success': False,
            'msg': 'couldnt retrieve certificate from the server, RESET SIC in the server \
                                          and try again'
        }
    logging.warn("Successfully Created Certificate : %r" % 'opsec.p12')

    logging.warn("Creating SSL Authorization Files...")
    sp = subprocess.Popen(
        [os.path.join(certificate_path, 'opsec_putkey'),
         '-ssl', '-port', '18184', '-p', secret_key, ip],
        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    sp.communicate()
    # NOTE(review): this check passes when only ONE of the two files
    # exists; 'or' looks like it should be 'and' — confirm with the
    # opsec_putkey output contract before changing behavior.
    if not (os.path.exists(os.path.join(storage_path, ip, 'sslauthkeys.C')) or
            os.path.exists(os.path.join(storage_path, ip, 'sslsess.C'))):
        return {
            'success': False,
            'msg': 'couldnt retrieve ssl authorization certificates. Did you forget to do a \
                                        fw putkey command for the LI on your server'
        }

    logging.warn("Successfully Created SSL Authorization Files : %r, %r" %
                 ('sslauthkeys.C', 'sslsess.C'))

    logging.warn("Process Complete")
    return {
        'success': True,
        'msg': 'Certificate Sucessfully Created. Happy Opsecing!'
    }
Пример #6
0
def fetch_job(sid, config, db_file):
    """Download new/changed files for one FTP source and dispatch each
    of them to the file handler pipeline."""
    log.debug('fetching files for sid:%s', sid)

    source = config['client_map'][sid]
    basedir = config['basedir'].replace('$LOGINSPECT_HOME',
                                        homing.LOGINSPECT_HOME)

    ip = source['ip']
    port = source['port']
    user = source['user']
    path = source['path']
    password = source['password']
    parser = source['parser']
    charset = source['charset']
    device_name = source['device_name']

    localdir = os.path.join(basedir, ip, base64.urlsafe_b64encode(sid))

    ftp = ftpclient.login(ip, port, user, password)

    for remotefile, mtime in ftpclient.fetch_file_mtime(ftp, path):
        disk.prepare_path(localdir + '/')

        vc = shelves.VersionChecker(db_file, sid, remotefile, mtime)
        if vc.is_older_version():
            continue  # this version was already ingested

        localfile = os.path.join(localdir,
                                 base64.urlsafe_b64encode(remotefile))
        log.info('Downloading remote file %r to %r', remotefile, localfile)

        try:
            ftpclient.download(ftp, remotefile, localfile)
        except Exception as err:
            # Best-effort: log the failure and move on to the next file.
            log.warn("fetching failed; remotefile=%s; sid=%s; error=%r",
                     remotefile, sid, err)
            continue

        col_ts = time.time()
        cursor = vc.get_old_cursor(localfile)
        if cursor < 0:
            continue  # negative cursor: nothing new to read

        file_handler.main(sid, col_ts, parser, localfile, charset,
                          device_name, source['normalizer'], source['repo'],
                          cursor, source.get('regex_pattern'),
                          source.get('regexparser_name'),
                          conf_path=config.get('wiring_conf_path') or None)
Пример #7
0
def get_config_file_path(ip, opsec_sic_name, lea_server_opsec_entity_sic_name):
    """Write a fresh lea.conf for *ip* under storage_path and return its path."""
    config_path = '%s%s/' % (storage_path, ip)
    lea_conf_path = os.path.join(config_path, 'lea.conf')
    opsecp12_path = os.path.join(config_path, 'opsec.p12')

    disk.prepare_path(config_path)

    # Fill the module-level template, then strip anything matching the
    # module-level cleanup `pattern` before writing.
    content = conf_template % ('ssl_opsec', ip, 18184, opsec_sic_name,
                               opsecp12_path,
                               lea_server_opsec_entity_sic_name)
    with open(lea_conf_path, 'w') as writer:
        writer.write(re.sub(pattern, '', content))

    return lea_conf_path
Пример #8
0
def extract(filepath, db, config, updater):
    """Extract an uploaded tar archive into an "extracted" directory
    next to *filepath* and record that directory in the repo document.

    Skips work if the repo already has an extracted_dir recorded.
    Supported mime subtypes: x-gzip, x-bzip2, x-tar. On a corrupt
    archive the repo record is reset, the upload and partial output are
    deleted, and the original exception is re-raised.

    Returns the destination directory path.
    """
    repo = config["repo"]
    repoinfo = get_repoinfo(db, repo)
    localdir = repoinfo.get("extracted_dir")
    if localdir:
        logging.warn("file already extracted at %r", localdir)
        return localdir

    logging.warn("extracting %r", filepath)
    updater.update_stat("extracting")
    cwd = os.path.abspath(os.curdir)

    destination = os.path.join(os.path.dirname(filepath), "extracted")
    disk.prepare_path(destination + '/')

    subtype = repoinfo["subtype"]
    try:
        mode = {"x-gzip": 'r:gz', "x-bzip2": 'r:bz2', "x-tar": 'r:'}[subtype]
    except KeyError:
        raise Exception(
            "File %s has mime subtype %s which is not supported. Only 'x-gzip' and 'x-bzip2' supported"
            % (filepath, subtype))

    try:
        tfile = tarfile.open(filepath, mode)
        try:
            tfile.extractall(destination)
        finally:
            # Fix: the archive handle was previously never closed.
            tfile.close()
    except:
        # Corrupt upload: reset the repo record, remove artifacts, and
        # propagate the original exception (bare except is deliberate
        # so cleanup runs for any failure, then re-raises).
        for key in ["downloaded_file", "subtype", "time"]:
            db.datatransport.update({"repo": repo}, {"$unset": {key: 1}})
        db.datatransport.update({"repo": repo}, {
            "$set": {
                "status":
                "Uploaded file is corrupted. Please upload valid file."
            }
        })
        os.unlink(filepath)
        shutil.rmtree(destination)
        raise

    db.datatransport.update({"repo": repo},
                            {"$set": {
                                "extracted_dir": destination
                            }})

    # NOTE(review): nothing in this function changes the working
    # directory, so this chdir is a no-op unless a callee chdirs —
    # confirm before removing.
    os.chdir(cwd)
    updater.update_stat("extracted")
    return destination
    def __init__(self, ip, object_name, sic_pwd, putkey_pwd):
        """Prepare a per-ip certificate workspace.

        Stores the connection/credential fields, resolves the Check
        Point tool paths, creates the certificate directory, and chdirs
        into it (a process-wide side effect).
        """
        self.ip = ip
        self.object_name = object_name
        self.sic_pwd = sic_pwd
        self.putkey_pwd = putkey_pwd

        # Trailing "/" — presumably tells disk.prepare_path to treat
        # this as a directory; verify against its implementation.
        self.cert_path = os.path.join(STORAGE_PATH, ip) + "/"
        self.opsec_pull_cert = os.path.join(PULLERS_PATH, "opsec_pull_cert")
        self.opsec_putkey = os.path.join(PULLERS_PATH, "opsec_putkey")

        # Make new dir for this ip to store its certificates.
        disk.prepare_path(self.cert_path)
        # cd to the newly created dir to perform certificate actions.
        os.chdir(self.cert_path)
Пример #10
0
    def fetch_job(self):
        """Set up one SCP fetch cycle for this source.

        Resolves the source from the runner's config, tracks whether
        this sid has been fetched before via a persistent shelve,
        derives a filesystem-safe directory name from the device IP,
        and opens the SCP connection. Returns early (best-effort) on a
        deleted source or a connection failure.
        """
        log.debug("fetching files for sid:%s", self.sid)

        config = self.fetcher_runner.get_config()
        try:
            source = config["client_map"][self.sid]
        except KeyError:
            # Source was removed from config while this job was queued.
            log.debug("source for sid=%s has been deleted" % (self.sid))
            return

        basedir = config["basedir"].replace('$LOGINSPECT_HOME',
                                            homing.LOGINSPECT_HOME)

        scp_shelves_file = os.path.join(basedir, "scp.shelves")
        disk.prepare_path(scp_shelves_file)

        # Remember which sids have been fetched at least once; the flag
        # presumably switches old-log handling on the first run — see
        # the VersionChecker old_logs usage elsewhere in this module.
        pd = pdict.PersistentDict(scp_shelves_file)
        if pd.get(self.sid):
            first_fetch = False
        else:
            first_fetch = True
            pd[self.sid] = True
            pd.sync()

        db_file = os.path.join(basedir, "checksums.pdict")

        remotepath = self.remotepath
        if remotepath.startswith('~'):
            # Rewrite '~/...' as './...' so the remote side resolves it
            # relative to the login home directory.
            remotepath = '.' + remotepath[1:]

        if '%' in self.device_ip:
            # '%' is replaced for the on-disk directory name; clean up
            # any directory created under the literal IP first.
            old_empty_dir = os.path.join(basedir, self.device_ip)
            if os.path.exists(old_empty_dir):
                try:
                    shutil.rmtree(old_empty_dir)
                except:
                    pass
            ip_dir = self.device_ip.replace("%", "_")
        else:
            ip_dir = self.device_ip

        localdir = os.path.join(basedir, ip_dir,
                                base64.urlsafe_b64encode(self.sid))

        try:
            password = self.get_decrypted_password(self.password)
            scp.setup(self.device_ip, self.port, self.user, password)
        except (ssh.SSHException, EOFError, SystemExit), err:
            log.warn("error while setting up connection; sid=%s", self.sid)
            return
Пример #11
0
def _get_conf_file_path(ip, opsec_sic_name, lea_server_opsec_entity_sic_name):
    """Create a lea.conf for *ip* under the opsec_fetcher storage tree
    and return its path."""
    config_path = homing.home_join('storage/col/opsec_fetcher/', ip,
                                   'lea.conf')
    disk.prepare_path(config_path)

    # NOTE(review): abspath("lea.conf") resolves against the process's
    # current working directory, not config_path — verify this is the
    # intended sslca file location.
    lines = [
        "lea_server auth_type ssl_opsec\n",
        "lea_server ip %s\n" % ip,
        "lea_server auth_port 18184\n",
        "opsec_sic_name %s\n" % opsec_sic_name,
        "opsec_sslca_file %s\n" % os.path.abspath("lea.conf"),
        "lea_server_opsec_entity_sic_name %s" %
        lea_server_opsec_entity_sic_name,
    ]
    with open(config_path, 'w') as conf_file:
        conf_file.writelines(lines)

    return config_path
Пример #12
0
    def initialize_memory(self, path_name):
        """
        Call this function if the memory feature for a fetcher
        is required.

        storage_path = storage/col/<path_name>
        sid_path = storage/col/<path_name>/<device_ip>
        mem_path = storage/col/<path_name>/<device_ip>/memory.mem

        These paths will be created when memory initialization is done.
        """
        # Make path to the storage dir; trailing "/" presumably marks
        # it as a directory for disk.prepare_path — confirm there.
        self.storage_path = os.path.join(COL_STORAGE_PATH, path_name) + "/"
        # Ensure path creation.
        disk.prepare_path(self.storage_path)
        # Make path for this sid (per-device subdirectory).
        self.sid_path = os.path.join(self.storage_path, self.device_ip)
        # Make a class variable to use with the pd (persistent dict).
        self.mem_path = os.path.join(self.sid_path, MEMORY_FILE)
        # Create memory.mem file at self.mem_path.
        disk.prepare_path(self.mem_path)
Пример #13
0
def listen(db, config, updater):
    """Serve an FTP endpoint that accepts the repo's upload.

    If a file was already downloaded, its path is returned immediately.
    Otherwise a single-user FTP server is started and blocks in
    serve_forever; DOWNLOADED_FILE is returned afterwards.
    """
    repo = config["repo"]
    localfile = get_downloaded_file(db, repo)
    if localfile:
        logging.warn("file already downloaded at %r", localfile)
        return localfile

    channel = config["upload_channel"]
    home = channel["home"].replace("$LOGINSPECT_HOME",
                                   homing.LOGINSPECT_HOME).replace("$repo",
                                                                   repo)
    disk.prepare_path(home + '/')

    # One configured user with full permissions on the upload home.
    authorizer = ftpserver.DummyAuthorizer()
    authorizer.add_user(channel["username"], channel["password"], home,
                        "elradfmwM")
    FTPHandler.authorizer = authorizer

    address = ('0.0.0.0', channel["port"])
    handler_factory = lambda conn, server: FTPHandler(conn, server, db, repo)
    ftpd = ftpserver.FTPServer(address, handler_factory)
    logging.warn("ftp server starting at %r", address)
    ftpd.serve_forever()
    return DOWNLOADED_FILE
Пример #14
0
class SCPFetcher(Fetcher):
    """Fetcher that downloads log files from remote hosts over SCP and
    feeds each new file to the file_handler pipeline."""

    def __init__(self, **args):
        super(SCPFetcher, self).__init__(**args)

    def fetch_job(self):
        """Run one fetch cycle for this source.

        Resolves the source from the runner's config, connects over
        SCP, downloads files whose mtime is newer than the recorded
        version, and dispatches each to file_handler.main. All
        connection/download failures are logged and skipped
        (best-effort), except GreenletExit which is propagated so the
        scheduler can cancel the job.
        """
        log.debug("fetching files for sid:%s", self.sid)

        config = self.fetcher_runner.get_config()
        try:
            source = config["client_map"][self.sid]
        except KeyError:
            # Source was removed from config while this job was queued.
            log.debug("source for sid=%s has been deleted" % (self.sid))
            return

        basedir = config["basedir"].replace('$LOGINSPECT_HOME',
                                            homing.LOGINSPECT_HOME)

        scp_shelves_file = os.path.join(basedir, "scp.shelves")
        disk.prepare_path(scp_shelves_file)

        # Track whether this sid was ever fetched: only the first fetch
        # passes old_logs to the VersionChecker (see below).
        pd = pdict.PersistentDict(scp_shelves_file)
        if pd.get(self.sid):
            first_fetch = False
        else:
            first_fetch = True
            pd[self.sid] = True
            pd.sync()

        db_file = os.path.join(basedir, "checksums.pdict")

        remotepath = self.remotepath
        if remotepath.startswith('~'):
            # Rewrite '~/...' as './...' so the remote side resolves it
            # relative to the login home directory.
            remotepath = '.' + remotepath[1:]

        if '%' in self.device_ip:
            # '%' is replaced for the on-disk directory name; clean up
            # any stale directory created under the literal IP first.
            old_empty_dir = os.path.join(basedir, self.device_ip)
            if os.path.exists(old_empty_dir):
                try:
                    shutil.rmtree(old_empty_dir)
                except:
                    pass
            ip_dir = self.device_ip.replace("%", "_")
        else:
            ip_dir = self.device_ip

        localdir = os.path.join(basedir, ip_dir,
                                base64.urlsafe_b64encode(self.sid))

        try:
            password = self.get_decrypted_password(self.password)
            scp.setup(self.device_ip, self.port, self.user, password)
        except (ssh.SSHException, EOFError, SystemExit), err:
            log.warn("error while setting up connection; sid=%s", self.sid)
            return

        try:
            for remotefile, mtime in scp.fetch_file_mtime(
                    remotepath, self.name_pattern):
                disk.prepare_path(localdir + '/')

                # First fetch honours the old_logs setting; later runs
                # rely purely on mtime-based versioning.
                if first_fetch:
                    vc = shelves.VersionChecker(db_file,
                                                self.sid,
                                                remotefile,
                                                mtime=mtime,
                                                old_logs=self.old_logs)
                else:
                    vc = shelves.VersionChecker(db_file,
                                                self.sid,
                                                remotefile,
                                                mtime=mtime)

                if vc.is_older_version():
                    continue

                localfile = os.path.join(localdir,
                                         base64.urlsafe_b64encode(remotefile))
                log.info('Downloading remote file %r to %r', remotefile,
                         localfile)

                try:
                    scp.scp_get(remotefile, localfile)
                except (ssh.SSHException, EOFError, SystemExit), err:
                    # Skip this file; the next cycle may retry it.
                    log.warn(
                        "fetching failed; sid=%s; remotefile=%s; error=%r",
                        self.sid, remotefile, err)
                    continue

                col_ts = time.time()
                cursor = vc.get_old_cursor(localfile)
                if cursor < 0:
                    # Negative cursor: nothing new to read in this file.
                    continue

                conf_path = self.fetcher_runner.get_field_value_from_config(
                    "wiring_conf_path") or None
                col_type = self.fetcher_runner.get_field_value_from_config(
                    "col_type")
                client_map = self.get_client_map()

                file_handler.main(self.sid,
                                  col_type,
                                  col_ts,
                                  self.parser,
                                  localfile,
                                  self.charset,
                                  self.device_name,
                                  client_map["normalizer"],
                                  client_map["repo"],
                                  cursor,
                                  client_map.get("regex_pattern"),
                                  client_map.get("regexparser_name"),
                                  self.device_ip,
                                  conf_path=conf_path,
                                  source_name=remotefile)
        except gevent.GreenletExit:
            # Propagate cooperative cancellation to the scheduler.
            raise
        except (Exception, ssh.SSHException, EOFError, SystemExit), err:
            log.warn('exception while running job; sid=%s; err=%r', self.sid,
                     err)
Пример #15
0
def create_shelve(db_file):
    """Ensure *db_file*'s parent path exists and open it as a
    pickle-backed persistent dict."""
    disk.prepare_path(db_file)
    return pdict.PersistentDict(db_file, 'c', format='pickle')
Пример #16
0
def _save_endings(endings, filename):
    """Pickle *endings* to *filename*, creating parent dirs as needed."""
    from pylib import disk
    import cPickle as pickle
    disk.prepare_path(filename)
    # Fix: the file object returned by open() was previously never
    # closed; use a context manager to guarantee it.
    with open(filename, "wb") as fp:
        pickle.dump(endings, fp, pickle.HIGHEST_PROTOCOL)
Пример #17
0
                pass
        ip_dir = ip.replace("%", "_")
    else:
        ip_dir = ip

    localdir = os.path.join(basedir, ip_dir, base64.urlsafe_b64encode(sid))

    try:
        scp.setup(ip, port, user, password)
    except (ssh.SSHException, EOFError, SystemExit), err:
        log.warn('error while setting up connection; sid=%s', sid)
        return

    try:
        for remotefile, mtime in scp.fetch_file_mtime(remotepath):
            disk.prepare_path(localdir + '/')
            vc = shelves.VersionChecker(db_file, sid, remotefile, mtime=mtime)
            if vc.is_older_version():
                continue

            localfile = os.path.join(localdir,
                                     base64.urlsafe_b64encode(remotefile))
            log.info('Downloading remote file %r to %r', remotefile, localfile)

            try:
                scp.scp_get(remotefile, localfile)
            except (ssh.SSHException, EOFError, SystemExit), err:
                log.warn("fetching failed; sid=%s; remotefile=%s; error=%r",
                         sid, remotefile, err)
                continue
Пример #18
0
    def pre_create(self):
        """Validate and build the 'newtestemail' notification entry for
        an alert rule before it is saved.

        Reads request params, removes any existing newtestemail entry,
        validates the template syntax and recipient addresses, writes
        the (placeholder-rewritten) template to a file under
        ALERT_TEMPLATES_PATH, and returns the updated notification
        list. Error returns are ((0, code), details) tuples.
        """
        id = is_mongokit_objectid(self.params.get("alert_id"))
        if id:
            alert = dboperation.read("AlertRules", {"_id": id}, True)
            if alert:
                notifications = alert.get("notification", [])
                if notifications:
                    # Drop any previous newtestemail entry; it is
                    # rebuilt below if the checkbox is on.
                    for notification in notifications:
                        if notification.get("type") == "newtestemail":
                            notifications.remove(notification)
                            break
                if self.params.get("notify_newtestemail") == "on":
                    email_template = textual.utf8(
                        self.params.get('email_template'))
                    try:
                        # Parsed only to validate syntax; the Template
                        # object itself is not used afterwards.
                        template = Template(email_template)
                    except TemplateSyntaxError:
                        return ((0, 800), {})

                    email_emails = self.params.get('email_emails')

                    if email_emails:
                        email_emails = json.loads(email_emails)
                        email_pattern = re.compile(
                            r"^[-!#$%&'*+/0-9=?A-Z^_a-z{|}~](\.?[-!#$%&'*+/0-9=?A-Z^_a-z{|}~])*@[a-zA-Z](-?[a-zA-Z0-9])*(\.[a-zA-Z](-?[a-zA-Z0-9])*)*$"
                        )
                        invalid_emails = []
                        for email in email_emails:
                            if not bool(email_pattern.match(email)):
                                invalid_emails.append(email)

                        if invalid_emails:
                            return ((0, 801), {
                                "errors": {
                                    "invalid_emails": invalid_emails
                                }
                            })
                    else:
                        # No recipients supplied at all.
                        return ((0, 801), {})

                    email_threshold_option = self.params.get(
                        "email_threshold_option")
                    email_threshold_value = self.params.get(
                        "email_threshold_value")
                    if email_threshold_value:
                        email_threshold_value = int(email_threshold_value)
                    template_file = ""
                    if email_template:
                        disk.prepare_path(ALERT_TEMPLATES_PATH)
                        user_id = dboperation.read(
                            "User", {'username': self.user.get_user_name()},
                            True)
                        template_file = 'alert_%s_%s.tmp' % (str(
                            user_id['_id']), base64.b32encode(alert["name"]))
                        template_file_path = os.path.join(
                            ALERT_TEMPLATES_PATH, template_file)
                        # Unescape HTML entities, then rewrite filter
                        # placeholders via _regex_replacer before saving.
                        email_template = Markup(
                            email_template.decode('utf-8')).unescape()
                        with open(template_file_path, 'w') as f:
                            email_template = email_template.encode('utf-8')
                            format_template = re.sub(
                                '\|\s*(readable|date|time|datetime)\s*}}',
                                self._regex_replacer, email_template)
                            f.write(format_template)
                    else:
                        # Empty template: persist a "<br>" placeholder.
                        email_template = "<br>"
                        disk.prepare_path(ALERT_TEMPLATES_PATH)
                        # NOTE(review): `name` is not defined anywhere in
                        # this method — this branch raises NameError when
                        # reached. The parallel branch above uses
                        # alert["name"]; confirm the intended value.
                        template_file = 'alert_%s_%s.tmp' % (
                            self.user.get_user_name().encode(
                                'ascii', 'ignore'), base64.b32encode(name))
                        template_file_path = os.path.join(
                            ALERT_TEMPLATES_PATH, template_file)
                        email_template = Markup(
                            email_template.decode('utf-8')).unescape()
                        with open(template_file_path, 'w') as f:
                            email_template = email_template.encode('utf-8')
                            format_template = re.sub(
                                '\|\s*(readable|date|time|datetime)\s*}}',
                                self._regex_replacer, email_template)
                            f.write(format_template)

                    notifications.append({'template_file':template_file,'type':'newtestemail', 'notify_newtestemail':True, 'email_emails':email_emails,\
                                          'email_template':email_template, 'threshold':email_threshold_value, 'threshold_option':email_threshold_option})

                return {"notification": notifications}