Example #1
    def load_db(self, args, sync_database):
        """Log in with the supplied credentials and populate the search index
        and object store from the fabric. When sync_database is True, start a
        daemon thread that keeps the database in sync with APIC events.
        """
        self.session.set_login_credentials(args)
        # fabric = Fabric.get_deep(self.session.session)[0]
        # fabric.populate_children(deep=True, include_concrete=True)

        fabric = None
        if not self.initialized:
            if not APIC:
                fabric = Fabric.get_deep(self.session.session,
                                         include_concrete=True)[0]
                self.index.add_atk_objects(fabric)
                self.store.add_atk_objects(fabric)
                self.initialized = True
            else:
                self.index.session = self.session.session
                self.store.session = self.session.session
        print("done loading initial database")
        if sync_database is True:
            print("in updating")
            self.update_db_thread = Update_db_on_event(self)
            self.update_db_thread.subscribed_classes = []
            self.update_db_thread.session = self.session.session
            self.update_db_thread.index = self.index
            self.update_db_thread.store = self.store
            # fabric is only set on the first load when the APIC flag is off;
            # guard against None before subscribing to events.
            if fabric is not None:
                self.update_db_thread.subscribed_classes = fabric.update_db(
                    self.session.session,
                    self.update_db_thread.subscribed_classes, True)
            self.update_db_thread.daemon = True
            self.update_db_thread.start()
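The example above hands a session, index, and store to an Update_db_on_event worker and starts it as a daemon thread, but the worker itself is not shown in these snippets. The sketch below is only an assumed shape: the attributes mirror what load_db assigns, while the event-handling body is left as a placeholder rather than a guess at the toolkit's event API.

import threading


class Update_db_on_event(threading.Thread):
    """Sketch only: a daemon worker that keeps the index/store in sync."""

    def __init__(self, owner):
        super(Update_db_on_event, self).__init__()
        self.owner = owner            # the object whose load_db() started us
        self.session = None           # APIC session shared with the owner
        self.index = None             # search index to update on events
        self.store = None             # object store to update on events
        self.subscribed_classes = []  # classes returned by fabric.update_db()

    def run(self):
        # A real worker would block on the toolkit's event feed for the
        # subscribed classes and apply each change to self.index/self.store.
        # That event API is not shown in these examples, so it is omitted.
        pass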
Example #2
    def load_db(self, args, sync_database):
        self.session.set_login_credentials(args)
        # fabric = Fabric.get_deep(self.session.session)[0]
        # fabric.populate_children(deep=True, include_concrete=True)

        fabric = None
        if not self.initialized:
            if not APIC:
                fabric = Fabric.get_deep(self.session.session, include_concrete=True)[0]
                self.index.add_atk_objects(fabric)
                self.store.add_atk_objects(fabric)
                self.initialized = True
            else:
                self.index.session = self.session.session
                self.store.session = self.session.session
        print "done loading initial database"
        if sync_database is True:
            print "in updating"
            self.update_db_thread = Update_db_on_event(self)
            self.update_db_thread.subscribed_classes = []
            self.update_db_thread.session = self.session.session
            self.update_db_thread.index = self.index
            self.update_db_thread.store = self.store
            # fabric is only set on the first load when the APIC flag is off;
            # guard against None before subscribing to events.
            if fabric is not None:
                self.update_db_thread.subscribed_classes = fabric.update_db(
                    self.session.session,
                    self.update_db_thread.subscribed_classes, True)
            self.update_db_thread.daemon = True
            self.update_db_thread.start()
Example #3
    def load_db(self, force_reload=False):
        """
        Load the search data from a saved search.p file if it exists;
        otherwise build it by reading the APIC.

        If force_reload is True, the data is reloaded from the APIC and saved,
        regardless of whether the search.p file already exists.

        :param force_reload: If True, rebuild the database from the APIC.
        """
        # TODO: provide a way to save multiple different APIC dBs.
        if not self.file_exists(self.save_file) or force_reload:
            print('load from APIC', end=' ')
            fabric = Fabric.get(self.session)[0]
            print('.', end=' ')
            fabric.populate_children(deep=True, include_concrete=True)
            print('...done')

            searchables = fabric.get_searchable()
            print('Indexing', end=' ')
            self.index_searchables(searchables)
            print('.', end=' ')
            self.save_db()
            print('...done')
        else:
            print('loading from file', end=' ')
            # use a context manager so the pickle file is closed after loading
            with open(self.save_file, "rb") as p_file:
                print('.', end=' ')
                (self.by_key_value, self.by_key, self.by_value) = pickle.load(p_file)
            print('..done')

        self.keywords = self.get_keywords()
        self.values = self.get_values()
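load_db above unpacks a tuple of three dictionaries from search.p, which implies a save-side counterpart that pickles them in the same order. save_db() is not shown in these examples, so the following is a minimal sketch under that assumption, not the actual implementation.

    def save_db(self):
        # Assumed counterpart of load_db above: pickle the three index
        # dictionaries as one tuple, so pickle.load() in load_db() can
        # unpack them in the same order. Sketch only.
        import pickle  # presumably imported at module level in the real code

        with open(self.save_file, "wb") as p_file:
            pickle.dump((self.by_key_value, self.by_key, self.by_value), p_file)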
Example #4
    def load_db(self, force_reload=False):
        """
        Will load the search data from a saved search.p file if it exists, otherwise it will
        create a new one by reading the APIC

        If the force_reload option is true, it will reload the data from the APIC and save it irrespective of whether
        the search.p file already exists

        :param force_reload:
        """
        # TODO: provide a way to save multiple different APIC dBs.
        if not self.file_exists(self.save_file) or force_reload:
            print('load from APIC', end=' ')
            fabric = Fabric.get(self.session)[0]
            print('.', end=' ')
            fabric.populate_children(deep=True, include_concrete=True)
            print('...done')

            searchables = fabric.get_searchable()
            print('Indexing', end=' ')
            self.index_searchables(searchables)
            print('.', end=' ')
            self.save_db()
            print('...done')
        else:
            print('loading from file', end=' ')
            # use a context manager so the pickle file is closed after loading
            with open(self.save_file, "rb") as p_file:
                print('.', end=' ')
                (self.by_key_value, self.by_key,
                 self.by_value) = pickle.load(p_file)
            print('..done')

        self.keywords = self.get_keywords()
        self.values = self.get_values()
Example #5
    def load_db(self, args):
        """Log in with the supplied credentials and load the full fabric,
        including concrete objects, into the search index and object store.
        """
        self.session.set_login_credentials(args)
        fabric = Fabric.get(self.session.session)[0]
        fabric.populate_children(deep=True, include_concrete=True)

        self.index.add_atk_objects(fabric)
        self.store.add_atk_objects(fabric)
        self.initialized = True
Example #6
    def load_db(self, args):
        """Log in with the supplied credentials and either load the fabric
        eagerly into the index and store, or attach the live session to them,
        depending on the APIC flag.
        """
        self.session.set_login_credentials(args)
        # fabric = Fabric.get_deep(self.session.session)[0]
        # fabric.populate_children(deep=True, include_concrete=True)

        if not APIC:
            fabric = Fabric.get_deep(self.session.session, include_concrete=True)[0]
            self.index.add_atk_objects(fabric)
            self.store.add_atk_objects(fabric)
            self.initialized = True
        else:
            self.index.session = self.session.session
            self.store.session = self.session.session
Example #7
    def load_db(self, args):
        self.session.set_login_credentials(args)
        # fabric = Fabric.get_deep(self.session.session)[0]
        # fabric.populate_children(deep=True, include_concrete=True)

        if not APIC:
            fabric = Fabric.get_deep(self.session.session,
                                     include_concrete=True)[0]
            self.index.add_atk_objects(fabric)
            self.store.add_atk_objects(fabric)
            self.initialized = True
        else:
            self.index.session = self.session.session
            self.store.session = self.session.session
Example #8
    def load_db(args=None):
        """
        Load the data from the APIC and prepare the database.

        :param args: credentials used to create the SearchDb session
        """
        sdb = SearchDb(args)
        print('loading from APIC', end=' ')
        fabric = Fabric.get(sdb.session)[0]
        fabric.populate_children(deep=True, include_concrete=True)
        print('-done')

        print('Indexing', end=' ')
        searchables = fabric.get_searchable()
        sdb._index_searchables(searchables)
        sdb._create_object_directory(fabric)
        sdb.keywords = sdb.get_keywords()
        sdb.values = sdb.get_values()
        sdb._cross_reference_objects()
        print('-done')
        return sdb
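This variant builds and returns a SearchDb rather than mutating self, which suggests it is used as a factory (likely a staticmethod). A hypothetical driver is sketched below; the Credentials setup is borrowed from the main() examples that follow, and exposing load_db as an attribute of SearchDb is an assumption.

# Hypothetical usage of the factory-style load_db above (assumptions noted):
creds = Credentials('apic', 'Search tool for APIC.')   # as in main() below
args = creds.get()
sdb = SearchDb.load_db(args)   # assumes load_db is exposed on SearchDb
print(sdb.keywords)            # keyword list populated by load_db
print(sdb.values)              # value list populated by load_db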
Example #9
def main():
    """
    Main execution path when run from the command line
    """
    # Get all the arguments
    description = 'Search tool for APIC.'
    creds = Credentials('apic', description)
    creds.add_argument('--force',
                       action="store_true",
                       default=False,
                       help='Force a rebuild of the search index')

    args = creds.get()
    print(args)
    # load all objects
    session = SearchSession(args)
    try:
        fabric = Fabric.get(session.session)[0]
    except (LoginError, Timeout, ConnectionError):
        print('%% Could not login to APIC')
        sys.exit(0)

    fabric.populate_children(deep=True, include_concrete=True)

    index = SearchIndexLookup()
    store = SearchObjectStore()

    index.add_atk_objects(fabric)
    store.add_atk_objects(fabric)

    uids = index.search(args.find)
    result = store.get_by_uids_short(uids)

    count = 0
    for res in result:
        count += 1
        print(res)
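main() above is described as the command-line entry point, but the usual module guard is not shown in these snippets. The standard idiom assumed to invoke it is below; the arguments it consumes (including args.find) are parsed inside main() itself.

# Assumed script entry point (not shown in the snippets above):
if __name__ == '__main__':
    main()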
Example #10
def main():
    """
    Main execution path when run from the command line
    """
    # Get all the arguments
    description = 'Search tool for APIC.'
    creds = Credentials('apic', description)
    creds.add_argument('--force',
                       action="store_true",
                       default=False,
                       help='Force a rebuild of the search index')

    args = creds.get()
    print(args)
    # load all objects
    session = SearchSession(args)
    try:
        fabric = Fabric.get(session.session)[0]
    except (LoginError, Timeout, ConnectionError):
        print('%% Could not login to APIC')
        sys.exit(0)

    fabric.populate_children(deep=True, include_concrete=True)

    index = SearchIndexLookup()
    store = SearchObjectStore()

    index.add_atk_objects(fabric)
    store.add_atk_objects(fabric)

    uids = index.search(args.find)
    result = store.get_by_uids_short(uids)

    count = 0
    for res in result:
        count += 1
        print(res)