Example #1
0
    def backup_smb_dbs(self, private_dir, samdb, lp, logger):
        # First, determine if DB backend is MDB.  Assume not unless there is a
        # 'backendStore' attribute on @PARTITION containing the text 'mdb'
        store_label = "backendStore"
        res = samdb.search(base="@PARTITION",
                           scope=ldb.SCOPE_BASE,
                           attrs=[store_label])
        mdb_backend = store_label in res[0] and \
            str(res[0][store_label][0]) == 'mdb'

        sam_ldb_path = os.path.join(private_dir, 'sam.ldb')
        copy_function = None
        if mdb_backend:
            logger.info('MDB backend detected.  Using mdb backup function.')
            copy_function = self.offline_mdb_copy
        else:
            logger.info('Starting transaction on ' + sam_ldb_path)
            copy_function = self.offline_tdb_copy
            sam_obj = Ldb(sam_ldb_path, lp=lp)
            sam_obj.transaction_start()

        # sam.ldb itself is copied with the tdb function in both cases; the
        # per-partition files under sam.ldb.d get the backend-specific
        # copy_function below.
        logger.info('   backing up ' + sam_ldb_path)
        self.offline_tdb_copy(sam_ldb_path)
        sam_ldb_d = sam_ldb_path + '.d'
        for sam_file in os.listdir(sam_ldb_d):
            sam_file = os.path.join(sam_ldb_d, sam_file)
            if sam_file.endswith('.ldb'):
                logger.info('   backing up locked/related file ' + sam_file)
                copy_function(sam_file)
            else:
                logger.info('   copying locked/related file ' + sam_file)
                shutil.copyfile(sam_file, sam_file + self.backup_ext)

        if not mdb_backend:
            sam_obj.transaction_cancel()
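# --- Editor's sketch (not part of the example above) -----------------------
# backup_smb_dbs() relies on two helpers that are not shown here:
# offline_tdb_copy and offline_mdb_copy.  A minimal standalone sketch follows,
# assuming the 'tdbbackup' and 'mdb_copy' command-line tools are available;
# BACKUP_EXT is a hypothetical stand-in for self.backup_ext.
import subprocess

BACKUP_EXT = '.bak'

def offline_tdb_copy(path):
    # tdbbackup -s SUFFIX writes a consistent copy of a tdb/ldb file to
    # path + SUFFIX; the caller is expected to hold the transaction lock.
    subprocess.check_call(['tdbbackup', '-s', BACKUP_EXT, path])

def offline_mdb_copy(path):
    # mdb_copy -n (no subdirectory) duplicates a single-file LMDB database.
    subprocess.check_call(['mdb_copy', '-n', path, path + BACKUP_EXT])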
Example #2
0
    def backup_secrets(self, private_dir, lp, logger):
        secrets_path = os.path.join(private_dir, 'secrets')
        secrets_obj = Ldb(secrets_path + '.ldb', lp=lp)
        logger.info('Starting transaction on ' + secrets_path)
        secrets_obj.transaction_start()
        self.offline_tdb_copy(secrets_path + '.ldb')
        self.offline_tdb_copy(secrets_path + '.tdb')
        secrets_obj.transaction_cancel()
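# --- Editor's note ----------------------------------------------------------
# In both backup helpers the ldb transaction is used purely as a read lock:
# it is started before the files are copied and cancelled afterwards, so
# nothing is ever written back.  A minimal sketch of the same pattern, with a
# hypothetical path:
from samba import Ldb

db = Ldb('/tmp/example.ldb')
db.transaction_start()
try:
    pass  # copy the .ldb/.tdb files here while writers are blocked
finally:
    db.transaction_cancel()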
Example #3
0
def ldif_to_samdb(dburl, lp, ldif_file, forced_local_dsa=None):
    """Routine to import all objects and attributes that are relevent
    to the KCC algorithms from a previously exported LDIF file.

    The point of this function is to allow a programmer/debugger to
    import an LDIF file with non-security relevant information that
    was previously extracted from a DC database.  The LDIF file is used
    to create a temporary abbreviated database.  The KCC algorithm can
    then run against this abbreviated database for debug or test
    verification that the topology generated is computationally the
    same between different OSes and algorithms.

    :param dburl: path to the temporary abbreviated db to create
    :param ldif_file: path to the ldif file to import
    """
    if os.path.exists(dburl):
        raise LdifError("Specify a database (%s) that doesn't already exist." %
                        dburl)

    # Use ["modules:"] as we are attempting to build a sam
    # database as opposed to start it here.
    tmpdb = Ldb(url=dburl,
                session_info=system_session(),
                lp=lp,
                options=["modules:"])

    tmpdb.transaction_start()
    try:
        data = read_and_sub_file(ldif_file, None)
        tmpdb.add_ldif(data, None)
        if forced_local_dsa:
            tmpdb.modify_ldif("""dn: @ROOTDSE
changetype: modify
replace: dsServiceName
dsServiceName: CN=NTDS Settings,%s
            """ % forced_local_dsa)

        tmpdb.add_ldif("""dn: @MODULES
@LIST: rootdse,extended_dn_in,extended_dn_out_ldb,objectguid
-
""")

    except Exception as estr:
        tmpdb.transaction_cancel()
        raise LdifError("Failed to import %s: %s" % (ldif_file, estr))

    tmpdb.transaction_commit()

    # We have an abbreviated list of options here because we have built
    # an abbreviated database.  We use the rootdse and extended-dn
    # modules only during this re-open
    samdb = SamDB(url=dburl, session_info=system_session(), lp=lp)
    return samdb
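# --- Editor's sketch --------------------------------------------------------
# Hypothetical call showing how ldif_to_samdb() might be driven; the paths
# and the forced DSA DN are illustrative only.
from samba.param import LoadParm

lp = LoadParm()
test_db = ldif_to_samdb('/tmp/kcc-test.ldb', lp, '/tmp/dc-export.ldif',
                        forced_local_dsa='CN=DC1,CN=Servers,'
                                         'CN=Default-First-Site-Name,'
                                         'CN=Sites,CN=Configuration,'
                                         'DC=example,DC=com')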
Example #4
0
def ldif_to_samdb(dburl, lp, ldif_file, forced_local_dsa=None):
    """Routine to import all objects and attributes that are relevent
    to the KCC algorithms from a previously exported LDIF file.

    The point of this function is to allow a programmer/debugger to
    import an LDIF file with non-security relevant information that
    was previously extracted from a DC database.  The LDIF file is used
    to create a temporary abbreviated database.  The KCC algorithm can
    then run against this abbreviated database for debug or test
    verification that the topology generated is computationally the
    same between different OSes and algorithms.

    :param dburl: path to the temporary abbreviated db to create
    :param ldif_file: path to the ldif file to import
    """
    if os.path.exists(dburl):
        raise LdifError("Specify a database (%s) that doesn't already exist." %
                        dburl)

    # Use ["modules:"] as we are attempting to build a sam
    # database as opposed to start it here.
    tmpdb = Ldb(url=dburl, session_info=system_session(),
                lp=lp, options=["modules:"])

    tmpdb.transaction_start()
    try:
        data = read_and_sub_file(ldif_file, None)
        tmpdb.add_ldif(data, None)
        if forced_local_dsa:
            tmpdb.modify_ldif("""dn: @ROOTDSE
changetype: modify
replace: dsServiceName
dsServiceName: CN=NTDS Settings,%s
            """ % forced_local_dsa)

        tmpdb.add_ldif("""dn: @MODULES
@LIST: rootdse,extended_dn_in,extended_dn_out_ldb,objectguid
-
""")

    except Exception as estr:
        tmpdb.transaction_cancel()
        raise LdifError("Failed to import %s: %s" % (ldif_file, estr))

    tmpdb.transaction_commit()

    # We have an abbreviated list of options here because we have built
    # an abbreviated database.  We use the rootdse and extended-dn
    # modules only during this re-open
    samdb = SamDB(url=dburl, session_info=system_session(), lp=lp)
    return samdb
Example #5
0
    def run(self, sambaopts=None, targetdir=None):

        logger = logging.getLogger()
        logger.setLevel(logging.DEBUG)
        logger.addHandler(logging.StreamHandler(sys.stdout))

        # Get the absolute paths of all the directories we're going to backup
        lp = sambaopts.get_loadparm()

        paths = samba.provision.provision_paths_from_lp(lp, lp.get('realm'))
        if not (paths.samdb and os.path.exists(paths.samdb)):
            raise CommandError('No sam.ldb found.  This backup ' +
                               'tool is only for AD DCs')

        check_targetdir(logger, targetdir)

        samdb = SamDB(url=paths.samdb, session_info=system_session(), lp=lp)
        sid = get_sid_for_restore(samdb)

        backup_dirs = [
            paths.private_dir, paths.state_dir,
            os.path.dirname(paths.smbconf)
        ]  # etc dir
        logger.info('running backup on dirs: {}'.format(backup_dirs))

        # Recursively get all file paths in the backup directories
        all_files = []
        for backup_dir in backup_dirs:
            for (working_dir, _, filenames) in os.walk(backup_dir):
                if working_dir.startswith(paths.sysvol):
                    continue

                for filename in filenames:
                    if filename in all_files:
                        continue

                    # Assume existing backup files are from a previous backup.
                    # Delete and ignore.
                    if filename.endswith(self.backup_ext):
                        os.remove(os.path.join(working_dir, filename))
                        continue
                    all_files.append(os.path.join(working_dir, filename))

        # Backup secrets, sam.ldb and their downstream files
        self.backup_secrets(paths.private_dir, lp, logger)
        self.backup_smb_dbs(paths.private_dir, samdb, lp, logger)

        # Open the new backed up samdb, flag it as backed up, and write
        # the next SID so the restore tool can add objects.
        # WARNING: Don't change this code unless you know what you're doing.
        #          Writing to a .bak file only works because the DN being
        #          written to happens to be top level.
        samdb = SamDB(url=paths.samdb + self.backup_ext,
                      session_info=system_session(),
                      lp=lp)
        time_str = get_timestamp()
        add_backup_marker(samdb, "backupDate", time_str)
        add_backup_marker(samdb, "sidForRestore", sid)

        # Now handle all the LDB and TDB files that are not linked to
        # anything else.  Use transactions for LDBs.
        for path in all_files:
            if not os.path.exists(path + self.backup_ext):
                if path.endswith('.ldb'):
                    logger.info('Starting transaction on solo db: ' + path)
                    ldb_obj = Ldb(path, lp=lp)
                    ldb_obj.transaction_start()
                    logger.info('   running tdbbackup on the same file')
                    self.offline_tdb_copy(path)
                    ldb_obj.transaction_cancel()
                elif path.endswith('.tdb'):
                    logger.info('running tdbbackup on lone tdb file ' + path)
                    self.offline_tdb_copy(path)

        # Now make the backup tar file and add all
        # backed up files and any other files to it.
        temp_tar_dir = tempfile.mkdtemp(dir=targetdir,
                                        prefix='INCOMPLETEsambabackupfile')
        temp_tar_name = os.path.join(temp_tar_dir, "samba-backup.tar.bz2")
        tar = tarfile.open(temp_tar_name, 'w:bz2')

        logger.info('running offline ntacl backup of sysvol')
        sysvol_tar_fn = 'sysvol.tar.gz'
        sysvol_tar = os.path.join(temp_tar_dir, sysvol_tar_fn)
        backup_offline(paths.sysvol, sysvol_tar, samdb, paths.smbconf)
        tar.add(sysvol_tar, sysvol_tar_fn)
        os.remove(sysvol_tar)

        create_log_file(temp_tar_dir, lp, "offline", "localhost", True)
        backup_fn = os.path.join(temp_tar_dir, "backup.txt")
        tar.add(backup_fn, os.path.basename(backup_fn))
        os.remove(backup_fn)

        logger.info('building backup tar')
        for path in all_files:
            arc_path = self.get_arc_path(path, paths)

            if os.path.exists(path + self.backup_ext):
                logger.info('   adding backup ' + arc_path + self.backup_ext +
                            ' to tar and deleting file')
                tar.add(path + self.backup_ext, arcname=arc_path)
                os.remove(path + self.backup_ext)
            elif path.endswith('.ldb') or path.endswith('.tdb'):
                logger.info('   skipping ' + arc_path)
            else:
                logger.info('   adding misc file ' + arc_path)
                tar.add(path, arcname=arc_path)

        tar.close()
        os.rename(
            temp_tar_name,
            os.path.join(targetdir,
                         'samba-backup-{}.tar.bz2'.format(time_str)))
        os.rmdir(temp_tar_dir)
        logger.info('Backup succeeded.')
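# --- Editor's sketch --------------------------------------------------------
# Quick sanity check of the archive written by run() above.  The exact file
# name depends on get_timestamp(), so the path below is hypothetical.
import tarfile

backup_file = '/backups/samba-backup-2018-01-01T00-00-00.tar.bz2'
with tarfile.open(backup_file, 'r:bz2') as tar:
    names = tar.getnames()
    assert 'backup.txt' in names      # log written by create_log_file()
    assert 'sysvol.tar.gz' in names   # offline ntacl backup of sysvol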
Example #6
0
class OpenChangeDBWithLdbBackend(object):
    """The OpenChange database."""

    def __init__(self, url):
        self.url = url
        self.ldb = Ldb(self.url)
        self.nttime = samba.unix2nttime(int(time.time()))

    def reopen(self):
        self.ldb = Ldb(self.url)

    def remove(self):
        """Remove an existing OpenChangeDB file."""
        if os.path.exists(self.url):
            os.remove(self.url)
        self.reopen()

    def setup(self, names=None):
        self.ldb.add_ldif("""
dn: @OPTIONS
checkBaseOnSearch: TRUE

dn: @INDEXLIST
@IDXATTR: cn

dn: @ATTRIBUTES
cn: CASE_INSENSITIVE
dn: CASE_INSENSITIVE

""")
        self.reopen()
        if names:
            self.add_rootDSE(names.ocserverdn, names.firstorg, names.firstou)

    def add_rootDSE(self, ocserverdn, firstorg, firstou):
        self.ldb.add({"dn": "@ROOTDSE",
                      "defaultNamingContext": "CN=%s,CN=%s,%s" % (firstou, firstorg, ocserverdn),
                      "rootDomainNamingContext": ocserverdn,
                      "vendorName": "OpenChange Team (http://www.openchange.org)"})

    def add_server(self, names):
        self.ldb.add({"dn": names.ocserverdn,
                      "objectClass": ["top", "server"],
                      "cn": names.netbiosname,
                      "GlobalCount": "1",
                      "ChangeNumber": "1",
                      "ReplicaID": "1"})
        self.ldb.add({"dn": "CN=%s,%s" % (names.firstorg, names.ocserverdn),
                      "objectClass": ["top", "org"],
                      "cn": names.firstorg})
        self.ldb.add({"dn": "CN=%s,CN=%s,%s" % (names.firstou, names.firstorg, names.ocserverdn),
                      "objectClass": ["top", "ou"],
                      "cn": names.firstou})

    def add_root_public_folder(self, dn, fid, change_num, SystemIdx, childcount):
        self.ldb.add({"dn": dn,
                      "objectClass": ["publicfolder"],
                      "cn": fid,
                      "PidTagFolderId": fid,
                      "PidTagChangeNumber": change_num,
                      "PidTagDisplayName": "Public Folder Root",
                      "PidTagCreationTime": "%d" % self.nttime,
                      "PidTagLastModificationTime": "%d" % self.nttime,
                      "PidTagSubFolders": str(childcount != 0).upper(),
                      "PidTagFolderChildCount": str(childcount),
                      "SystemIdx": str(SystemIdx)})

    def add_sub_public_folder(self, dn, parentfid, fid, change_num, name, SystemIndex, childcount):
        self.ldb.add({"dn": dn,
                      "objectClass": ["publicfolder"],
                      "cn": fid,
                      "PidTagFolderId": fid,
                      "PidTagParentFolderId": parentfid,
                      "PidTagChangeNumber": change_num,
                      "PidTagDisplayName": name,
                      "PidTagCreationTime": "%d" % self.nttime,
                      "PidTagLastModificationTime": "%d" % self.nttime,
                      "PidTagAttributeHidden": str(0),
                      "PidTagAttributeReadOnly": str(0),
                      "PidTagAttributeSystem": str(0),
                      "PidTagContainerClass": "IPF.Note (check this)",
                      "PidTagSubFolders": str(childcount != 0).upper(),
                      "PidTagFolderChildCount": str(childcount),
                      "FolderType": str(1),
                      "SystemIdx": str(SystemIndex)})

    def add_one_public_folder(self, parent_fid, path, children, SystemIndex, names, dn_prefix = None):

        name = path[-1]
        GlobalCount = self.get_message_GlobalCount(names.netbiosname)
        ChangeNumber = self.get_message_ChangeNumber(names.netbiosname)
        ReplicaID = self.get_message_ReplicaID(names.netbiosname)
        if dn_prefix is None:
            dn_prefix = "CN=publicfolders,CN=%s,CN=%s,%s" % (names.firstou, names.firstorg, names.ocserverdn)
        fid = gen_mailbox_folder_fid(GlobalCount, ReplicaID)
        dn = "CN=%s,%s" % (fid, dn_prefix)
        change_num = gen_mailbox_folder_fid(ChangeNumber, ReplicaID)
        childcount = len(children)
        print "\t* %-40s: 0x%.16x (%s)" % (name, int(fid, 10), fid)
        if parent_fid == 0:
            self.add_root_public_folder(dn, fid, change_num, SystemIndex, childcount)
        else:
            self.add_sub_public_folder(dn, parent_fid, fid, change_num, name, SystemIndex, childcount)

        GlobalCount += 1
        self.set_message_GlobalCount(names.netbiosname, GlobalCount=GlobalCount)
        ChangeNumber += 1
        self.set_message_ChangeNumber(names.netbiosname, ChangeNumber=ChangeNumber)

        for name, grandchildren in children.iteritems():
            self.add_one_public_folder(fid, path + (name,), grandchildren[0], grandchildren[1], names, dn)

    def add_public_folders(self, names):
        pfstoreGUID = str(uuid.uuid4())
        self.ldb.add({"dn": "CN=publicfolders,CN=%s,CN=%s,%s" % (names.firstou, names.firstorg, names.ocserverdn),
                      "objectClass": ["container"],
                      "cn": "publicfolders",
                      "StoreGUID": pfstoreGUID,
                      "ReplicaID": str(1)})
        public_folders = _public_folders_meta(names)
        self.add_one_public_folder(0, ("Public Folder Root",), public_folders[0], public_folders[1], names)

    def lookup_server(self, cn, attributes=[]):
        # Step 1. Search Server object
        filter = "(&(objectClass=server)(cn=%s))" % cn
        res = self.ldb.search("", scope=ldb.SCOPE_SUBTREE,
                           expression=filter, attrs=attributes)
        if len(res) != 1:
            raise NoSuchServer(cn)
        return res[0]

    def lookup_mailbox_user(self, server, username, attributes=[]):
        """Check if a user already exists in openchange database.

        :param server: Server object name
        :param username: Username object
        :return: LDB Object of the user
        """
        server_dn = self.lookup_server(server, []).dn

        # Step 2. Search User object
        filter = "(&(objectClass=mailbox)(cn=%s))" % (username)
        return self.ldb.search(server_dn, scope=ldb.SCOPE_SUBTREE,
                           expression=filter, attrs=attributes)

    def lookup_public_folder(self, server, displayname, attributes=[]):
        """Retrieve the record for a public folder matching a specific display name

        :param server: Server Object Name
        :param displayname: Display Name of the Folder
        :param attributes: Requested Attributes
        :return: LDB Object of the Folder
        """
        server_dn = self.lookup_server(server, []).dn

        filter = "(&(objectClass=publicfolder)(PidTagDisplayName=%s))" % (displayname)
        return self.ldb.search(server_dn, scope=ldb.SCOPE_SUBTREE,
                               expression=filter, attrs=attributes)

    def get_message_attribute(self, server, attribute):
        """Retrieve attribute value from given message database (server).

        :param server: Server object name
        """
        return int(self.lookup_server(server, [attribute])[attribute][0], 10)

    def get_message_ReplicaID(self, server):
        """Retrieve current mailbox Replica ID for given message database (server).

        :param server: Server object name
        """
        return self.get_message_attribute(server, "ReplicaID")

    def get_message_GlobalCount(self, server):
        """Retrieve current mailbox Global Count for given message database (server).

        :param server: Server object name
        """
        return self.get_message_attribute(server, "GlobalCount")


    def set_message_GlobalCount(self, server, GlobalCount):
        """Update current mailbox GlobalCount for given message database (server).

        :param server: Server object name
        :param GlobalCount: new mailbox GlobalCount value
        """
        server_dn = self.lookup_server(server, []).dn

        newGlobalCount = """
dn: %s
changetype: modify
replace: GlobalCount
GlobalCount: %d
""" % (server_dn, GlobalCount)

        self.ldb.transaction_start()
        try:
            self.ldb.modify_ldif(newGlobalCount)
        finally:
            self.ldb.transaction_commit()

    def get_message_ChangeNumber(self, server):
        """Retrieve current mailbox Global Count for given message database (server).

        :param server: Server object name
        """
        return self.get_message_attribute(server, "ChangeNumber")


    def set_message_ChangeNumber(self, server, ChangeNumber):
        """Update current mailbox ChangeNumber for given message database (server).

        :param server: Server object name
        :param ChangeNumber: new mailbox ChangeNumber value
        """
        server_dn = self.lookup_server(server, []).dn

        newChangeNumber = """
dn: %s
changetype: modify
replace: ChangeNumber
ChangeNumber: %d
""" % (server_dn, ChangeNumber)

        self.ldb.transaction_start()
        try:
            self.ldb.modify_ldif(newChangeNumber)
        finally:
            self.ldb.transaction_commit()
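# --- Editor's sketch --------------------------------------------------------
# Minimal driver for the class above.  Real provisioning passes a richer
# 'names' object; the stand-in below only carries the attributes these
# methods read, and every value is illustrative.
class FakeNames(object):
    netbiosname = "SERVER1"
    firstorg = "First Organization"
    firstou = "First Administrative Group"
    ocserverdn = "CN=SERVER1,CN=servers,CN=config"

names = FakeNames()
db = OpenChangeDBWithLdbBackend("/tmp/openchange.ldb")
db.setup(names=names)
db.add_server(names)
# add_public_folders() additionally needs _public_folders_meta() and
# gen_mailbox_folder_fid() from this module, so it is not exercised here.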
Example #7
0
class Schema(object):
    def __init__(self, setup_path, domain_sid, schemadn=None,
                 serverdn=None, files=None, prefixmap=None):
        """Load schema for the SamDB from the AD schema files and samba4_schema.ldif
        
        :param samdb: Load a schema into a SamDB.
        :param setup_path: Setup path function.
        :param schemadn: DN of the schema
        :param serverdn: DN of the server
        
        Returns the schema data loaded, to avoid double-parsing when it later needs to be added to the db
        """

        self.schemadn = schemadn
        self.ldb = Ldb()
        self.schema_data = read_ms_schema(setup_path('ad-schema/MS-AD_Schema_2K8_R2_Attributes.txt'),
                                          setup_path('ad-schema/MS-AD_Schema_2K8_R2_Classes.txt'))

        if files is not None:
            for file in files:
                self.schema_data += open(file, 'r').read()

        self.schema_data = substitute_var(self.schema_data, {"SCHEMADN": schemadn})
        check_all_substituted(self.schema_data)

        self.schema_dn_modify = read_and_sub_file(setup_path("provision_schema_basedn_modify.ldif"),
                                                  {"SCHEMADN": schemadn,
                                                   "SERVERDN": serverdn,
                                                   })

        descr = b64encode(get_schema_descriptor(domain_sid))
        self.schema_dn_add = read_and_sub_file(setup_path("provision_schema_basedn.ldif"),
                                               {"SCHEMADN": schemadn,
                                                "DESCRIPTOR": descr
                                                })

        self.prefixmap_data = open(setup_path("prefixMap.txt"), 'r').read()

        if prefixmap is not None:
            for map in prefixmap:
                self.prefixmap_data += "%s\n" % map

        self.prefixmap_data = b64encode(self.prefixmap_data)

        # We don't actually add this ldif, just parse it
        prefixmap_ldif = "dn: cn=schema\nprefixMap:: %s\n\n" % self.prefixmap_data
        self.ldb.set_schema_from_ldif(prefixmap_ldif, self.schema_data)

    def write_to_tmp_ldb(self, schemadb_path):
        self.ldb.connect(schemadb_path)
        self.ldb.transaction_start()
    
        self.ldb.add_ldif("""dn: @ATTRIBUTES
linkID: INTEGER

dn: @INDEXLIST
@IDXATTR: linkID
@IDXATTR: attributeSyntax
""")
        # These bits of LDIF are supplied when the Schema object is created
        self.ldb.add_ldif(self.schema_dn_add)
        self.ldb.modify_ldif(self.schema_dn_modify)
        self.ldb.add_ldif(self.schema_data)
        self.ldb.transaction_commit()

    # Return a hash with the forward (link) attribute as the key and the backlink attribute as the value
    def linked_attributes(self):
        return get_linked_attributes(self.schemadn, self.ldb)

    def dnsyntax_attributes(self):
        return get_dnsyntax_attributes(self.schemadn, self.ldb)
Example #8
0
class OpenChangeDBWithLdbBackend(object):
    """The OpenChange database."""
    def __init__(self, url):
        self.url = url
        self.ldb = Ldb(self.url)
        self.nttime = samba.unix2nttime(int(time.time()))

    def reopen(self):
        self.ldb = Ldb(self.url)

    def remove(self):
        """Remove an existing OpenChangeDB file."""
        if os.path.exists(self.url):
            os.remove(self.url)
        self.reopen()

    def setup(self, names=None):
        self.ldb.add_ldif("""
dn: @OPTIONS
checkBaseOnSearch: TRUE

dn: @INDEXLIST
@IDXATTR: cn

dn: @ATTRIBUTES
cn: CASE_INSENSITIVE
dn: CASE_INSENSITIVE

""")
        self.reopen()
        if names:
            self.add_rootDSE(names.ocserverdn, names.firstorg, names.firstou)

    def add_rootDSE(self, ocserverdn, firstorg, firstou):
        self.ldb.add({
            "dn":
            "@ROOTDSE",
            "defaultNamingContext":
            "CN=%s,CN=%s,%s" % (firstou, firstorg, ocserverdn),
            "rootDomainNamingContext":
            ocserverdn,
            "vendorName":
            "OpenChange Team (http://www.openchange.org)"
        })

    def add_server(self, names):
        self.ldb.add({
            "dn": names.ocserverdn,
            "objectClass": ["top", "server"],
            "cn": names.netbiosname,
            "GlobalCount": "1",
            "ChangeNumber": "1",
            "ReplicaID": "1"
        })
        self.ldb.add({
            "dn": "CN=%s,%s" % (names.firstorg, names.ocserverdn),
            "objectClass": ["top", "org"],
            "cn": names.firstorg
        })
        self.ldb.add({
            "dn":
            "CN=%s,CN=%s,%s" %
            (names.firstou, names.firstorg, names.ocserverdn),
            "objectClass": ["top", "ou"],
            "cn":
            names.firstou
        })

    def add_root_public_folder(self, dn, fid, change_num, SystemIdx,
                               childcount):
        self.ldb.add({
            "dn": dn,
            "objectClass": ["publicfolder"],
            "cn": fid,
            "PidTagFolderId": fid,
            "PidTagChangeNumber": change_num,
            "PidTagDisplayName": "Public Folder Root",
            "PidTagCreationTime": "%d" % self.nttime,
            "PidTagLastModificationTime": "%d" % self.nttime,
            "PidTagSubFolders": str(childcount != 0).upper(),
            "PidTagFolderChildCount": str(childcount),
            "SystemIdx": str(SystemIdx)
        })

    def add_sub_public_folder(self, dn, parentfid, fid, change_num, name,
                              SystemIndex, childcount):
        self.ldb.add({
            "dn": dn,
            "objectClass": ["publicfolder"],
            "cn": fid,
            "PidTagFolderId": fid,
            "PidTagParentFolderId": parentfid,
            "PidTagChangeNumber": change_num,
            "PidTagDisplayName": name,
            "PidTagCreationTime": "%d" % self.nttime,
            "PidTagLastModificationTime": "%d" % self.nttime,
            "PidTagAttributeHidden": str(0),
            "PidTagAttributeReadOnly": str(0),
            "PidTagAttributeSystem": str(0),
            "PidTagContainerClass": "IPF.Note (check this)",
            "PidTagSubFolders": str(childcount != 0).upper(),
            "PidTagFolderChildCount": str(childcount),
            "FolderType": str(1),
            "SystemIdx": str(SystemIndex)
        })

    def add_one_public_folder(self,
                              parent_fid,
                              path,
                              children,
                              SystemIndex,
                              names,
                              dn_prefix=None):

        name = path[-1]
        GlobalCount = self.get_message_GlobalCount(names.netbiosname)
        ChangeNumber = self.get_message_ChangeNumber(names.netbiosname)
        ReplicaID = self.get_message_ReplicaID(names.netbiosname)
        if dn_prefix is None:
            dn_prefix = "CN=publicfolders,CN=%s,CN=%s,%s" % (
                names.firstou, names.firstorg, names.ocserverdn)
        fid = gen_mailbox_folder_fid(GlobalCount, ReplicaID)
        dn = "CN=%s,%s" % (fid, dn_prefix)
        change_num = gen_mailbox_folder_fid(ChangeNumber, ReplicaID)
        childcount = len(children)
        print "\t* %-40s: 0x%.16x (%s)" % (name, int(fid, 10), fid)
        if parent_fid == 0:
            self.add_root_public_folder(dn, fid, change_num, SystemIndex,
                                        childcount)
        else:
            self.add_sub_public_folder(dn, parent_fid, fid, change_num, name,
                                       SystemIndex, childcount)

        GlobalCount += 1
        self.set_message_GlobalCount(names.netbiosname,
                                     GlobalCount=GlobalCount)
        ChangeNumber += 1
        self.set_message_ChangeNumber(names.netbiosname,
                                      ChangeNumber=ChangeNumber)

        for name, grandchildren in children.iteritems():
            self.add_one_public_folder(fid, path + (name, ), grandchildren[0],
                                       grandchildren[1], names, dn)

    def add_public_folders(self, names):
        pfstoreGUID = str(uuid.uuid4())
        self.ldb.add({
            "dn":
            "CN=publicfolders,CN=%s,CN=%s,%s" %
            (names.firstou, names.firstorg, names.ocserverdn),
            "objectClass": ["container"],
            "cn":
            "publicfolders",
            "StoreGUID":
            pfstoreGUID,
            "ReplicaID":
            str(1)
        })
        public_folders = _public_folders_meta(names)
        self.add_one_public_folder(0, ("Public Folder Root", ),
                                   public_folders[0], public_folders[1], names)

    def lookup_server(self, cn, attributes=[]):
        # Step 1. Search Server object
        filter = "(&(objectClass=server)(cn=%s))" % cn
        res = self.ldb.search("",
                              scope=ldb.SCOPE_SUBTREE,
                              expression=filter,
                              attrs=attributes)
        if len(res) != 1:
            raise NoSuchServer(cn)
        return res[0]

    def lookup_mailbox_user(self, server, username, attributes=[]):
        """Check if a user already exists in openchange database.

        :param server: Server object name
        :param username: Username object
        :return: LDB Object of the user
        """
        server_dn = self.lookup_server(server, []).dn

        # Step 2. Search User object
        filter = "(&(objectClass=mailbox)(cn=%s))" % (username)
        return self.ldb.search(server_dn,
                               scope=ldb.SCOPE_SUBTREE,
                               expression=filter,
                               attrs=attributes)

    def lookup_public_folder(self, server, displayname, attributes=[]):
        """Retrieve the record for a public folder matching a specific display name

        :param server: Server Object Name
        :param displayname: Display Name of the Folder
        :param attributes: Requested Attributes
        :return: LDB Object of the Folder
        """
        server_dn = self.lookup_server(server, []).dn

        filter = "(&(objectClass=publicfolder)(PidTagDisplayName=%s))" % (
            displayname)
        return self.ldb.search(server_dn,
                               scope=ldb.SCOPE_SUBTREE,
                               expression=filter,
                               attrs=attributes)

    def get_message_attribute(self, server, attribute):
        """Retrieve attribute value from given message database (server).

        :param server: Server object name
        """
        return int(self.lookup_server(server, [attribute])[attribute][0], 10)

    def get_message_ReplicaID(self, server):
        """Retrieve current mailbox Replica ID for given message database (server).

        :param server: Server object name
        """
        return self.get_message_attribute(server, "ReplicaID")

    def get_message_GlobalCount(self, server):
        """Retrieve current mailbox Global Count for given message database (server).

        :param server: Server object name
        """
        return self.get_message_attribute(server, "GlobalCount")

    def set_message_GlobalCount(self, server, GlobalCount):
        """Update current mailbox GlobalCount for given message database (server).

        :param server: Server object name
        :param GlobalCount: new mailbox GlobalCount value
        """
        server_dn = self.lookup_server(server, []).dn

        newGlobalCount = """
dn: %s
changetype: modify
replace: GlobalCount
GlobalCount: %d
""" % (server_dn, GlobalCount)

        self.ldb.transaction_start()
        try:
            self.ldb.modify_ldif(newGlobalCount)
        finally:
            self.ldb.transaction_commit()

    def get_message_ChangeNumber(self, server):
        """Retrieve current mailbox Global Count for given message database (server).

        :param server: Server object name
        """
        return self.get_message_attribute(server, "ChangeNumber")

    def set_message_ChangeNumber(self, server, ChangeNumber):
        """Update current mailbox ChangeNumber for given message database (server).

        :param server: Server object name
        :param ChangeNumber: new mailbox ChangeNumber value
        """
        server_dn = self.lookup_server(server, []).dn

        newChangeNumber = """
dn: %s
changetype: modify
replace: ChangeNumber
ChangeNumber: %d
""" % (server_dn, ChangeNumber)

        self.ldb.transaction_start()
        try:
            self.ldb.modify_ldif(newChangeNumber)
        finally:
            self.ldb.transaction_commit()
Example #9
0
class Schema(object):
    def __init__(self,
                 setup_path,
                 domain_sid,
                 schemadn=None,
                 serverdn=None,
                 files=None,
                 prefixmap=None):
        """Load schema for the SamDB from the AD schema files and samba4_schema.ldif
        
        :param samdb: Load a schema into a SamDB.
        :param setup_path: Setup path function.
        :param schemadn: DN of the schema
        :param serverdn: DN of the server
        
        Returns the schema data loaded, to avoid double-parsing when it later needs to be added to the db
        """

        self.schemadn = schemadn
        self.ldb = Ldb()
        self.schema_data = read_ms_schema(
            setup_path('ad-schema/MS-AD_Schema_2K8_Attributes.txt'),
            setup_path('ad-schema/MS-AD_Schema_2K8_Classes.txt'))

        if files is not None:
            for file in files:
                self.schema_data += open(file, 'r').read()

        self.schema_data = substitute_var(self.schema_data,
                                          {"SCHEMADN": schemadn})
        check_all_substituted(self.schema_data)

        self.schema_dn_modify = read_and_sub_file(
            setup_path("provision_schema_basedn_modify.ldif"), {
                "SCHEMADN": schemadn,
                "SERVERDN": serverdn,
            })

        descr = b64encode(get_schema_descriptor(domain_sid))
        self.schema_dn_add = read_and_sub_file(
            setup_path("provision_schema_basedn.ldif"), {
                "SCHEMADN": schemadn,
                "DESCRIPTOR": descr
            })

        self.prefixmap_data = open(setup_path("prefixMap.txt"), 'r').read()

        if prefixmap is not None:
            for map in prefixmap:
                self.prefixmap_data += "%s\n" % map

        self.prefixmap_data = b64encode(self.prefixmap_data)

        # We don't actually add this ldif, just parse it
        prefixmap_ldif = "dn: cn=schema\nprefixMap:: %s\n\n" % self.prefixmap_data
        self.ldb.set_schema_from_ldif(prefixmap_ldif, self.schema_data)

    def write_to_tmp_ldb(self, schemadb_path):
        self.ldb.connect(schemadb_path)
        self.ldb.transaction_start()

        self.ldb.add_ldif("""dn: @ATTRIBUTES
linkID: INTEGER

dn: @INDEXLIST
@IDXATTR: linkID
@IDXATTR: attributeSyntax
""")
        # These bits of LDIF are supplied when the Schema object is created
        self.ldb.add_ldif(self.schema_dn_add)
        self.ldb.modify_ldif(self.schema_dn_modify)
        self.ldb.add_ldif(self.schema_data)
        self.ldb.transaction_commit()

    # Return a hash with the forward (link) attribute as the key and the backlink attribute as the value
    def linked_attributes(self):
        return get_linked_attributes(self.schemadn, self.ldb)

    def dnsyntax_attributes(self):
        return get_dnsyntax_attributes(self.schemadn, self.ldb)
Example #10
0
opts = parser.parse_args()[0]
lp = sambaopts.get_loadparm()
smbconf = lp.configfile

if not opts.database:
	print "Parameter database is mandatory"
	sys.exit(1)
creds = credopts.get_credentials(lp)
creds.set_kerberos_state(DONT_USE_KERBEROS)
session = system_session()
empty = ldb.Message()
newname="%s.new"%(opts.database)
if os.path.exists(newname):
	os.remove(newname)
old_ldb = Ldb(opts.database, session_info=session, credentials=creds,lp=lp)
new_ldb = Ldb(newname,session_info=session, credentials=creds,lp=lp)

new_ldb.transaction_start()
res = old_ldb.search(expression="(dn=*)",base="", scope=SCOPE_SUBTREE)
for i in range(0,len(res)):
	if str(res[i].dn) == "@BASEINFO":
		continue
	if str(res[i].dn).startswith("@INDEX:"):
		continue
	delta = new_ldb.msg_diff(empty,res[i])
	delta.dn = res[i].dn
	delta.remove("distinguishedName")
	new_ldb.add(delta)

new_ldb.transaction_commit()
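# --- Editor's sketch --------------------------------------------------------
# The script above rebuilds the database into '<database>.new' but leaves the
# original in place.  A cautious follow-up (not in the original) keeps a copy
# of the old file and moves the rebuilt one over it:
import shutil
import time

old_copy = "%s.%d.old" % (opts.database, int(time.time()))
shutil.move(opts.database, old_copy)
shutil.move(newname, opts.database)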
Example #11
0
opts = parser.parse_args()[0]
lp = sambaopts.get_loadparm()
smbconf = lp.configfile

if not opts.database:
    print "Parameter database is mandatory"
    sys.exit(1)
creds = credopts.get_credentials(lp)
creds.set_kerberos_state(DONT_USE_KERBEROS)
session = system_session()
empty = ldb.Message()
newname = "%s.new" % (opts.database)
if os.path.exists(newname):
    os.remove(newname)
old_ldb = Ldb(opts.database, session_info=session, credentials=creds, lp=lp)
new_ldb = Ldb(newname, session_info=session, credentials=creds, lp=lp)

new_ldb.transaction_start()
res = old_ldb.search(expression="(dn=*)", base="", scope=SCOPE_SUBTREE)
for i in range(0, len(res)):
    if str(res[i].dn) == "@BASEINFO":
        continue
    if str(res[i].dn).startswith("@INDEX:"):
        continue
    delta = new_ldb.msg_diff(empty, res[i])
    delta.dn = res[i].dn
    delta.remove("distinguishedName")
    new_ldb.add(delta)

new_ldb.transaction_commit()