# Example 1
    def load_db(self, force_reload=False):
        """
        Load the search data from a saved search.p file if it exists,
        otherwise build a new index by reading the APIC.

        If ``force_reload`` is True, the data is re-read from the APIC and
        saved regardless of whether the search.p file already exists.

        :param force_reload: when True, bypass the cached file and re-read the APIC
        """
        # TODO: provide a way to save multiple different APIC dBs.
        if not self.file_exists(self.save_file) or force_reload:
            print('load from APIC', end=' ')
            fabric = Fabric.get(self.session)[0]
            print('.', end=' ')
            fabric.populate_children(deep=True, include_concrete=True)
            print('...done')

            searchables = fabric.get_searchable()
            print('Indexing', end=' ')
            self.index_searchables(searchables)
            print('.', end=' ')
            self.save_db()
            print('...done')
        else:
            print('loading from file', end=' ')
            # Context manager guarantees the file handle is closed
            # (the original opened it and never closed it).
            with open(self.save_file, "rb") as p_file:
                print('.', end=' ')
                # NOTE(review): pickle.load can execute arbitrary code if the
                # save file is untrusted -- assumed to be a local, trusted cache.
                (self.by_key_value, self.by_key, self.by_value) = pickle.load(p_file)
            print('..done')

        self.keywords = self.get_keywords()
        self.values = self.get_values()
# Example 2
    def load_db(self, force_reload=False):
        """
        Load the search data from a saved search.p file if it exists,
        otherwise build a new index by reading the APIC.

        If ``force_reload`` is True, the data is re-read from the APIC and
        saved regardless of whether the search.p file already exists.

        :param force_reload: when True, bypass the cached file and re-read the APIC
        """
        # TODO: provide a way to save multiple different APIC dBs.
        if not self.file_exists(self.save_file) or force_reload:
            print('load from APIC', end=' ')
            fabric = Fabric.get(self.session)[0]
            print('.', end=' ')
            fabric.populate_children(deep=True, include_concrete=True)
            print('...done')

            searchables = fabric.get_searchable()
            print('Indexing', end=' ')
            self.index_searchables(searchables)
            print('.', end=' ')
            self.save_db()
            print('...done')
        else:
            print('loading from file', end=' ')
            # 'with' closes the pickle file even on error; the original
            # leaked the open handle.
            with open(self.save_file, "rb") as p_file:
                print('.', end=' ')
                # NOTE(review): pickle.load on untrusted data is unsafe --
                # the save file is assumed to be a local, trusted cache.
                (self.by_key_value, self.by_key,
                 self.by_value) = pickle.load(p_file)
            print('..done')

        self.keywords = self.get_keywords()
        self.values = self.get_values()
    def load_db(self, args):
        """
        Log in with *args*, read the full fabric tree from the APIC, and
        feed it to both the search index and the object store.

        :param args: credentials object passed to the session
        """
        self.session.set_login_credentials(args)
        root = Fabric.get(self.session.session)[0]
        root.populate_children(deep=True, include_concrete=True)

        # Populate the index first, then the store (same order as before).
        for backend in (self.index, self.store):
            backend.add_atk_objects(root)
        self.initialized = True
# Example 4
    def load_db(args=None):
        """
        Load the data from the APIC and prepare the dB.

        Builds a SearchDb, pulls the full fabric tree from the APIC, then
        indexes, cross-references, and returns the populated instance.

        :param args: credential/options object forwarded to SearchDb
        :return: the fully initialized SearchDb instance
        """
        sdb = SearchDb(args)
        print('loading from APIC', end=' ')
        fabric = Fabric.get(sdb.session)[0]
        fabric.populate_children(deep=True, include_concrete=True)
        print('-done')

        print('Indexing', end=' ')
        searchables = fabric.get_searchable()
        sdb._index_searchables(searchables)
        sdb._create_object_directory(fabric)
        sdb.keywords = sdb.get_keywords()
        sdb.values = sdb.get_values()
        sdb._cross_reference_objects()
        print('-done')
        return sdb
# Example 5
def main():
    """
    Main execution path when run from the command line.

    Parses credentials/arguments, logs in to the APIC, builds the search
    index and object store from the fabric tree, then prints every result
    matching ``args.find``.
    """
    # Get all the arguments
    description = 'Search tool for APIC.'
    creds = Credentials('apic', description)
    creds.add_argument('--force',
                       action="store_true",
                       default=False,
                       help='Force a rebuild of the search index')

    args = creds.get()
    print(args)
    # load all objects
    session = SearchSession(args)
    try:
        fabric = Fabric.get(session.session)[0]
    except (LoginError, Timeout, ConnectionError):
        print('%% Could not login to APIC')
        # NOTE(review): exits with status 0 on login failure -- kept as-is,
        # but a nonzero exit code may be more conventional; confirm callers.
        sys.exit(0)

    fabric.populate_children(deep=True, include_concrete=True)

    index = SearchIndexLookup()
    store = SearchObjectStore()

    index.add_atk_objects(fabric)
    store.add_atk_objects(fabric)

    uids = index.search(args.find)
    result = store.get_by_uids_short(uids)

    # The original kept a result counter that was never read; just print.
    for res in result:
        print(res)
# Example 6
def main():
    """
    Main execution path when run from the command line.

    Parses credentials/arguments, logs in to the APIC, builds the search
    index and object store from the fabric tree, then prints every result
    matching ``args.find``.
    """
    # Get all the arguments
    description = 'Search tool for APIC.'
    creds = Credentials('apic', description)
    creds.add_argument('--force',
                       action="store_true",
                       default=False,
                       help='Force a rebuild of the search index')

    args = creds.get()
    print(args)
    # load all objects
    session = SearchSession(args)
    try:
        fabric = Fabric.get(session.session)[0]
    except (LoginError, Timeout, ConnectionError):
        print('%% Could not login to APIC')
        # NOTE(review): exits with status 0 on login failure -- kept as-is,
        # but a nonzero exit code may be more conventional; confirm callers.
        sys.exit(0)

    fabric.populate_children(deep=True, include_concrete=True)

    index = SearchIndexLookup()
    store = SearchObjectStore()

    index.add_atk_objects(fabric)
    store.add_atk_objects(fabric)

    uids = index.search(args.find)
    result = store.get_by_uids_short(uids)

    # The original kept a result counter that was never read; just print.
    for res in result:
        print(res)