Example #1
    def initialize(self, config, repository_url, repository_type):
        """Initializes the mongostore by connecting to the mongodb, creating the project in the project collection \
        and setting up processes (see: :class:`pyvcsshark.datastores.mongostore.CommitStorageProcess`, which
        read commits out of the commitqueue, process them and store them into the mongodb.

        :param config: all configuration
        :param repository_url: url of the repository, which is to be analyzed
        :param repository_type: type of the repository, which is to be analyzed (e.g. "git")
        """

        logger.setLevel(config.debug_level)
        logger.info("Initializing MongoStore...")

        # Create queue for multiprocessing
        self.commit_queue = multiprocessing.JoinableQueue()
        # We assume that the user we authenticate with is in the admin database
        logger.info("Connecting to MongoDB...")

        uri = create_mongodb_uri_string(config.db_user, config.db_password,
                                        config.db_hostname, config.db_port,
                                        config.db_authentication,
                                        config.ssl_enabled)
        connect(config.db_database, host=uri, connect=False)

        # Get project_id
        try:
            project_id = Project.objects(name=config.project_name).get().id
        except DoesNotExist:
            logger.error('Project with name "%s" does not exist in database!' %
                         config.project_name)
            sys.exit(1)

        # Check if the vcs system already exists and upsert it
        vcs_system_id = VCSSystem.objects(url=repository_url).upsert_one(
            url=repository_url,
            repository_type=repository_type,
            last_updated=datetime.datetime.today(),
            project_id=project_id).id

        # Get the last commit by date of the project (if there is any)
        last_commit = Commit.objects(vcs_system_id=vcs_system_id)\
            .only('committer_date').order_by('-committer_date').first()

        if last_commit is not None:
            last_commit_date = last_commit.committer_date
        else:
            last_commit_date = None

        # Start the workers; they will wait until something is put into the queue and then process it
        for i in range(self.NUMBER_OF_PROCESSES):
            name = "StorageProcess-%d" % i
            process = CommitStorageProcess(self.commit_queue, vcs_system_id,
                                           last_commit_date, config, name)
            process.daemon = True
            process.start()

        logger.info("Starting storage Process...")
Example #2
    def __init__(self, queue, vcs_system_id, config, name):
        multiprocessing.Process.__init__(self)
        uri = create_mongodb_uri_string(config.db_user, config.db_password,
                                        config.db_hostname, config.db_port,
                                        config.db_authentication,
                                        config.ssl_enabled)
        connect(config.db_database, host=uri, connect=False)
        self.queue = queue
        self.vcs_system_id = vcs_system_id
        self.proc_name = name
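
The matching run() method is not part of this excerpt. A sketch of how such a worker usually drains a JoinableQueue, assuming a poison-pill shutdown (store_commit is a hypothetical helper, not necessarily what pyvcsshark does):

    def run(self):
        while True:
            commit = self.queue.get()
            if commit is None:  # assumed poison pill that signals shutdown
                self.queue.task_done()
                break
            self.store_commit(commit)  # hypothetical helper that writes the commit to MongoDB
            self.queue.task_done()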
Example #3
def main(args):
    if args.log_level and hasattr(logging, args.log_level):
        log.setLevel(getattr(logging, args.log_level))

    uri = create_mongodb_uri_string(args.db_user, args.db_password,
                                    args.db_hostname, args.db_port,
                                    args.db_authentication, args.ssl)
    connect(args.db_database, host=uri)

    c = SmartsharkPlugin(args)
    c.start_mining(args.release_commit)
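
Assuming the usual SmartSHARK argument names (derived from the args attributes above), an invocation could look like this; the script name and all values are placeholders:

python smartshark_plugin.py --db-user user --db-password secret \
    --db-hostname localhost --db-port 27017 --db-authentication admin \
    --db-database smartshark --release-commit 0a1b2c3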
Example #4
def main(args):
    # timing
    start = timeit.default_timer()

    if args.log_level and hasattr(logging, args.log_level):
        log.setLevel(getattr(logging, args.log_level))

    uri = create_mongodb_uri_string(args.db_user, args.db_password, args.db_hostname, args.db_port,
                                    args.db_authentication, args.ssl)
    connect(args.db_database, host=uri)

    # Get the id of the project whose commits should be labeled
    try:
        project_id = Project.objects(name=args.project_name).get().id
    except DoesNotExist:
        log.error('Project %s not found!' % args.project_name)
        sys.exit(1)

    vcs = VCSSystem.objects(project_id=project_id).get()

    log.info("Starting commit labeling")

    # import every approach defined or all
    if args.approaches == 'all':
        # just list every module in the package and import it
        basepath = os.path.dirname(os.path.abspath(__file__))
        for app in os.listdir(os.path.join(basepath, 'approaches/')):
            if app.endswith('.py') and app != '__init__.py':
                __import__('approaches.{}'.format(app[:-3]))
    else:
        # if we have a list of approaches import only those
        for app in args.approaches.split(','):
            __import__('approaches.{}'.format(app))

    # add specific configs
    labelshark = LabelSHARK()
    commit_count = Commit.objects(vcs_system_id=vcs.id).count()

    for i, commit in enumerate(
            Commit.objects(vcs_system_id=vcs.id).only(
                'id', 'revision_hash', 'vcs_system_id', 'message',
                'linked_issue_ids', 'parents', 'fixed_issue_ids',
                'szz_issue_ids').timeout(False)):
        if i % 100 == 0:
            log.info("%i/%i  commits finished", i, commit_count)
        labelshark.set_commit(commit)
        labels = labelshark.get_labels()

        #log.info('commit: {}, labels: {}'.format(commit.revision_hash, labels))

        # save the labels
        if labels:
            tmp = {'set__labels__{}'.format(k): v for k, v in labels}
            Commit.objects(id=commit.id).upsert_one(**tmp)

    end = timeit.default_timer() - start
    log.info("Finished commit labeling in {:.5f}s".format(end))
Example #5
    def start(self, cfg):
        """
        Starts the collection process

        :param cfg: holds all configuration parameters. Object of class :class:`~issueshark.config.Config`
        """
        logger.setLevel(cfg.get_debug_level())
        start_time = timeit.default_timer()

        # Connect to mongodb
        uri = create_mongodb_uri_string(cfg.user, cfg.password, cfg.host,
                                        cfg.port, cfg.authentication_db,
                                        cfg.ssl_enabled)
        connect(cfg.database, host=uri)

        # Get the project for which issue data is collected
        try:
            project_id = Project.objects(name=cfg.project_name).get().id
        except DoesNotExist:
            logger.error('Project %s not found!' % cfg.project_name)
            sys.exit(1)

        # Create issue system if not already there
        try:
            issue_system = IssueSystem.objects(url=cfg.tracking_url).get()
        except DoesNotExist:
            issue_system = IssueSystem(project_id=project_id,
                                       url=cfg.tracking_url).save()
        issue_system.last_updated = datetime.datetime.now()
        issue_system.save()

        # Find correct backend
        backend = BaseBackend.find_fitting_backend(cfg, issue_system.id,
                                                   project_id)
        logger.debug("Using backend: %s" % backend.identifier)

        # Process the issues for the corresponding project_id
        backend.process()

        elapsed = timeit.default_timer() - start_time
        logger.info("Execution time: %0.5f s" % elapsed)
Example #6
    def __init__(self,
                 logger,
                 database,
                 user,
                 password,
                 host,
                 port,
                 authentication,
                 ssl,
                 project_name,
                 vcs_url,
                 repo_path,
                 repo_from_db=False):
        self._log = logger
        self._repo_path = repo_path
        self._project_name = project_name

        uri = create_mongodb_uri_string(user, password, host, port,
                                        authentication, ssl)
        connect(database, host=uri)

        pr = Project.objects.get(name=project_name)

        if vcs_url:
            vcs = VCSSystem.objects.get(project_id=pr.id, url=vcs_url)
        else:
            vcs = VCSSystem.objects.get(project_id=pr.id)

        its = IssueSystem.objects.get(project_id=pr.id)

        if 'jira' not in its.url:
            raise Exception('only jira issue systems are supported!')

        self._vcs_id = vcs.id
        self._its_id = its.id
        self._jira_key = its.url.split('project=')[-1]

        # we need to extract the repository from the MongoDB
        if repo_from_db:
            self.extract_repository(vcs, repo_path, project_name)
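
The expression its.url.split('project=')[-1] takes everything after the project= query parameter as the JIRA project key; for example (the URL shape is an assumption):

'https://issues.apache.org/jira/rest/api/2/search?project=KAFKA'.split('project=')[-1]
# -> 'KAFKA'

Example #7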
    def __init__(self, input_path, output, project_name, revision, url,
                 makefile_contents, db_name, db_host, db_port, db_user,
                 db_password, db_authentication, debug_level, ssl_enabled):
        """
        Main runner of the mecoshark app

        :param input_path: path to the revision that is used as input
        :param output: path to an output directory, where files can be stored
        :param project_name:
        :param revision: string of the revision hash
        :param url: url of the project that is analyzed
        :param makefile_contents: contents of the makefile (e.g., for the c processor)
        :param db_name: name of the database
        :param db_host: name of the host where the mongodb is running
        :param db_port: port on which the mongodb listens on
        :param db_user: username of the mongodb user
        :param db_password: password for the mongodb user
        :param db_authentication: name of the database that is used as authentication
        :param debug_level: debug level as defined in :mod:`logging`
        :param ssl_enabled: enables ssl for the connection to the mongodb

        .. WARNING:: URL must be the same as the url that was stored in the mongodb by vcsSHARK!
        """
        home_folder = os.path.expanduser('~') + "/"
        logger.setLevel(debug_level)
        self.project_name = project_name
        self.debug_level = debug_level
        self.input_path = input_path.replace("~", home_folder)
        self.output_path = output.replace("~", home_folder)
        self.makefile_contents = makefile_contents
        self.revision = revision
        self.url = url

        uri = create_mongodb_uri_string(db_user, db_password, db_host, db_port,
                                        db_authentication, ssl_enabled)
        # connect to mongodb
        connect(db_name, host=uri)
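
The replace("~", home_folder) trick above also rewrites tildes that appear in the middle of a path; os.path.expanduser is the idiomatic alternative and expands only a leading tilde. A minimal sketch:

import os

input_path = os.path.expanduser('~/projects/kafka')  # expands only the leading '~'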
Example #8
    def initialize(self, config, repository_url, repository_type):
        """Initializes the mongostore by connecting to the mongodb, creating the project in the project collection \
        and setting up processes (see: :class:`pyvcsshark.datastores.mongostore.CommitStorageProcess`, which
        read commits out of the commitqueue, process them and store them into the mongodb.

        :param config: all configuration
        :param repository_url: url of the repository, which is to be analyzed
        :param repository_type: type of the repository, which is to be analyzed (e.g. "git")
        """

        logger.setLevel(config.debug_level)
        logger.info("Initializing MongoStore...")

        # Create queue for multiprocessing
        self.commit_queue = multiprocessing.JoinableQueue()

        # we need an extra queue for branches because all commits need to be finished before we can process branches
        self.branch_queue = multiprocessing.JoinableQueue()
        self.config = config
        self.cores_per_job = config.cores_per_job

        # We assume that the user we authenticate with is in the admin database
        logger.info("Connecting to MongoDB...")

        uri = create_mongodb_uri_string(config.db_user, config.db_password,
                                        config.db_hostname, config.db_port,
                                        config.db_authentication,
                                        config.ssl_enabled)
        connect(config.db_database, host=uri, connect=False)

        # Get project_id
        try:
            project_id = Project.objects(name=config.project_name).get().id
        except DoesNotExist:
            logger.error('Project with name "%s" does not exist in database!' %
                         config.project_name)
            sys.exit(1)

        # Check if the vcs system already exists and upsert it
        vcs_system = VCSSystem.objects(url=repository_url).upsert_one(
            url=repository_url,
            repository_type=repository_type,
            last_updated=datetime.datetime.today(),
            project_id=project_id)
        self.vcs_system_id = vcs_system.id

        # Tar.gz name based on project name
        tar_gz_name = '{}.tar.gz'.format(config.project_name)

        # Tar.gz of repository folder
        with tarfile.open(tar_gz_name, "w:gz") as tar:
            tar.add(config.path, arcname=config.project_name)

        # Add repository to gridfs if not existent
        if vcs_system.repository_file.grid_id is None:
            logger.info('Copying project to gridfs...')

            # Store in gridfs
            with open(tar_gz_name, 'rb') as tar_file:
                vcs_system.repository_file.put(tar_file,
                                               content_type='application/gzip',
                                               filename=tar_gz_name)
                vcs_system.save()
        else:
            # replace the file if it already exists
            logger.info('Replacing project file in gridfs...')
            with open(tar_gz_name, 'rb') as tar_file:
                vcs_system.repository_file.replace(
                    tar_file,
                    content_type='application/gzip',
                    filename=tar_gz_name)
                vcs_system.save()

        # Delete tar.gz file
        os.remove(tar_gz_name)

        # Get the last commit by date of the project (if there is any)
        last_commit = Commit.objects(vcs_system_id=self.vcs_system_id)\
            .only('committer_date').order_by('-committer_date').first()

        if last_commit is not None:
            last_commit_date = last_commit.committer_date
        else:
            last_commit_date = None

        # Start the workers; they will wait until something is put into the queue and then process it
        for i in range(self.cores_per_job):
            name = "StorageProcess-%d" % i
            process = CommitStorageProcess(self.commit_queue,
                                           self.vcs_system_id,
                                           last_commit_date, self.config, name)
            process.daemon = True
            process.start()

        logger.info("Starting storage Process...")
Example #9

def setup_logging(default_path='logging.json', default_level=logging.INFO):
    """Sets up logging from a JSON dictConfig file, falling back to basicConfig."""
    # the default path is an assumption; the original snippet started mid-docstring
    path = default_path
    if os.path.exists(path):
        with open(path, 'rt') as f:
            config = json.load(f)
        logging.config.dictConfig(config)
    else:
        logging.basicConfig(level=default_level)


def start():
    start = timeit.default_timer()
    setup_logging()
    logger = logging.getLogger("main")
    logger.info("Starting pymweSHARK...")

    parser = get_base_argparser('', '0.0.1')
    parser.add_argument('--output', help='Output Folder', required=True)

    parser.add_argument('--debug', help='Sets the debug level.', default='DEBUG',
                        choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'])
						
	print("Hello World")

    args = parser.parse_args()
    uri = create_mongodb_uri_string(args.db_user, args.db_password,
                                    args.db_hostname, args.db_port,
                                    args.db_authentication, args.ssl)
    connect(args.db_database, host=uri)


if __name__ == "__main__":
    start()
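
setup_logging feeds the parsed JSON to logging.config.dictConfig. A minimal configuration that this loader would accept looks like the following; the handler and format choices are assumptions:

import logging.config

logging.config.dictConfig({
    'version': 1,
    'formatters': {
        'default': {'format': '%(asctime)s %(name)s %(levelname)s %(message)s'},
    },
    'handlers': {
        'console': {'class': 'logging.StreamHandler', 'formatter': 'default'},
    },
    'root': {'level': 'INFO', 'handlers': ['console']},
})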
Example #10
import sys
import pandas as pd
import re

from mongoengine import connect, DoesNotExist
from pycoshark.mongomodels import Commit, FileAction, File, CodeEntityState, Project, VCSSystem, Hunk, Issue, Event
from pycoshark.utils import create_mongodb_uri_string
from datetime import datetime

# 'mongo' is assumed to be a local settings object that holds the credentials
uri = create_mongodb_uri_string(mongo.user, mongo.pwd, mongo.host, mongo.port,
                                mongo.db, False)
connect(mongo.db, host=uri, alias='default')

date_start = datetime(2017, 1, 1)
date_end = datetime(2018, 1, 1)

vcs_systems = [
    ('archiva', 'refs/remotes/origin/master'),
    ('cayenne', 'refs/remotes/origin/master'),
    ('commons-math', 'refs/remotes/origin/master'),
    ('deltaspike', 'refs/remotes/origin/master'),
    ('falcon', 'refs/remotes/origin/master'),
    ('kafka', 'refs/remotes/origin/trunk'),
    ('kylin', 'refs/remotes/origin/master'),
    ('nutch', 'refs/remotes/origin/master'),
    ('storm', 'refs/remotes/origin/master'),
    ('struts', 'refs/remotes/origin/master'),
    ('tez', 'refs/remotes/origin/master'),
    ('tika', 'refs/remotes/origin/master'),
    ('wss4j', 'refs/remotes/origin/trunk'),
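
date_start and date_end suggest that the script goes on to select commits per vcs system inside that window. A sketch of such a query, using the field names from the other examples:

project = Project.objects(name='kafka').only('id').get()
vcs = VCSSystem.objects(project_id=project.id).only('id').get()
commits = Commit.objects(vcs_system_id=vcs.id,
                         committer_date__gte=date_start,
                         committer_date__lt=date_end).only('revision_hash')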
Example #11
    def start(self, cfg):
        """
        Executes the linkSHARK.
        :param cfg: configuration object that is used
        """
        self._log.setLevel(cfg.get_debug_level())
        start_time = timeit.default_timer()

        uri = create_mongodb_uri_string(cfg.user, cfg.password, cfg.host,
                                        cfg.port, cfg.authentication_db,
                                        cfg.ssl_enabled)
        connect(cfg.database, host=uri)

        # Get the id of the project whose commits should be linked
        try:
            project_id = Project.objects(name=cfg.project_name).get().id
        except DoesNotExist:
            self._log.error('Project %s not found!' % cfg.project_name)
            sys.exit(1)

        vcs_system = VCSSystem.objects(project_id=project_id).get()
        self._itss = []
        self._log.info('found the following issue tracking systems:')
        for its in IssueSystem.objects(project_id=project_id).order_by('url'):
            self._log.info(its.url)
            self._itss.append(its)

        if len(cfg.correct_key) > 0:
            correct_keys_per_its = cfg.correct_key.split(';')
            if len(correct_keys_per_its) != len(self._itss):
                self._log.critical(
                    '--correct-key must specify a correct key for every issue tracking system'
                )
                sys.exit(1)
            for i, correct_key in enumerate(correct_keys_per_its):
                self._correct_key[self._itss[i].url] = correct_key
        if len(cfg.broken_keys) > 0:
            broken_keys_per_its = cfg.broken_keys.split(';')

            if len(broken_keys_per_its) != len(self._itss):
                self._log.critical(
                    '--broken-keys must specify broken keys for every issue tracking system; if an ITS has no broken keys, repeat its correct key instead'
                )
                sys.exit(1)
            for i, broken_keys in enumerate(broken_keys_per_its):
                self._broken_keys[self._itss[i].url] = broken_keys.split(',')

        self._log.info("Starting issue linking")
        commit_count = Commit.objects(vcs_system_id=vcs_system.id).count()

        issue_map = {}
        for i, issue_system in enumerate(self._itss):
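            # note: correct_keys_per_its is only defined when --correct-key is
            # passed; without it the lookup below raises a NameError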
            project_id_string = correct_keys_per_its[i]

            for issue in Issue.objects(issue_system_id=issue_system.id):
                if issue.external_id.startswith(project_id_string):
                    try:
                        issue_number = [
                            int(s) for s in issue.external_id.split('-')
                            if s.isdigit()
                        ][0]
                    except IndexError:
                        self._log.error(
                            'could not parse an issue number from %s; SZZ currently only supports JIRA keys, so some issues may not be linked correctly',
                            issue.external_id)
                        continue
                    if issue_number not in issue_map:
                        issue_map[issue_number] = [issue]
                    else:
                        issue_map[issue_number].append(issue)

        for i, commit in enumerate(
                Commit.objects(vcs_system_id=vcs_system.id).only(
                    'id', 'revision_hash', 'vcs_system_id', 'message',
                    'author_id', 'committer_id')):
            if i % 100 == 0:
                self._log.info("%i/%i  commits finished", i, commit_count)
            issue_links = self._get_issue_links(commit)
            if len(issue_links) > 0:
                commit.linked_issue_ids = issue_links
                commit.save()
            szz_links = self._get_szz_issue_links(commit, issue_map)
            if len(szz_links) > 0:
                commit.szz_issue_ids = szz_links
                commit.save()

        elapsed = timeit.default_timer() - start_time
        self._log.info("Execution time: %0.5f s" % elapsed)
Example #12
def main(args):
    # timing
    start = timeit.default_timer()

    if args.log_level and hasattr(logging, args.log_level):
        log.setLevel(getattr(logging, args.log_level))

    uri = create_mongodb_uri_string(args.db_user, args.db_password,
                                    args.db_hostname, args.db_port,
                                    args.db_authentication, args.ssl)
    connect(args.db_database, host=uri)

    vcs = VCSSystem.objects.get(url=args.url)

    itss = []
    if args.issue_systems == 'all':
        for its in IssueSystem.objects.filter(project_id=vcs.project_id):
            itss.append(its)
    else:
        for url in args.issue_systems.split(','):
            its = IssueSystem.objects.get(url=url)
            itss.append(its)

    log.info("Starting commit labeling")

    # import every approach defined or all
    if args.approaches == 'all':
        # just list every module in the package and import it
        basepath = os.path.dirname(os.path.abspath(__file__))
        for app in os.listdir(os.path.join(basepath, 'approaches/')):
            if app.endswith('.py') and app != '__init__.py':
                __import__('approaches.{}'.format(app[:-3]))
    else:
        # if we have a list of approaches import only those
        for app in args.approaches.split(','):
            __import__('approaches.{}'.format(app))

    # add specific configs
    config = {'itss': itss, 'args': args}
    a = LabelSHARK()
    a.configure(config)

    if args.linking_approach:
        log.info('using approach {} for issue links'.format(
            args.linking_approach))

    for commit in Commit.objects.filter(vcs_system_id=vcs.id):
        a.set_commit(commit)
        labels = a.get_labels()
        issue_links = a.get_issue_links()

        # we get a dict of approach_name => [issue_link_ids]
        for k, v in issue_links.items():
            # log.info('commit: {}, links: {}, from approach: {}'.format(commit.revision_hash, v, k))
            if args.linking_approach and k == args.linking_approach:
                if v:
                    log.info('commit: {}, linked to: {}'.format(
                        commit.revision_hash, ','.join([str(l) for l in v])))
                commit.linked_issue_ids = v
                commit.save()

        log.info('commit: {}, labels: {}'.format(commit.revision_hash, labels))

        # save the labels
        if labels:
            tmp = {'set__labels__{}'.format(k): v for k, v in labels}
            Commit.objects(id=commit.id).upsert_one(**tmp)

    end = timeit.default_timer() - start
    log.info("Finished commit labeling in {:.5f}s".format(end))
Example #13
    def start(self, cfg):
        """
        Starts the program

        :param cfg: configuration of class :class:`mailingshark.config.Config`
        """
        logger.setLevel(cfg.get_debug_level())
        start_time = timeit.default_timer()

        # Connect to mongodb
        uri = create_mongodb_uri_string(cfg.user, cfg.password, cfg.host, cfg.port, cfg.authentication_db,
                                        cfg.ssl_enabled)
        connect(cfg.database, host=uri)

        # Get the project for which issue data is collected
        try:
            project_id = Project.objects(name=cfg.project_name).get().id
        except DoesNotExist:
            logger.error('Project not found. Use vcsSHARK beforehand!')
            sys.exit(1)

        # Get or create the mailing list in the database
        try:
            mailing_list = MailingList.objects(project_id=project_id, name=cfg.mailing_url).get()
        except DoesNotExist:
            mailing_list = MailingList(project_id=project_id, name=cfg.mailing_url)
        mailing_list_id = mailing_list.save().id

        # Find correct backend
        backend = BaseDataCollector.find_fitting_backend(cfg, project_id)
        logger.debug("Using backend: %s" % backend.identifier)

        # Get a list of all file paths to boxes
        found_files = backend.download_mail_boxes(mailing_list)
        logger.debug("Got the following files: %s" % found_files)

        # Unpack boxes (if necessary)
        boxes_to_analyze = self._unpack_files(found_files, cfg.temporary_dir)
        logger.info("Analyzing the following files: %s" % boxes_to_analyze)

        stored_messages, non_stored = (0, 0)
        for path_to_box in boxes_to_analyze:
            box = mailbox.mbox(path_to_box, create=False)
            logger.info("Analyzing: %s" % path_to_box)
            for i in range(0, len(box)):
                try:
                    parsed_message = ParsedMessage(cfg, box.get(i))
                    logger.debug('Got the following message: %s' % parsed_message)
                    self._store_message(parsed_message, mailing_list_id)
                    stored_messages += 1
                except Exception as e:
                    logger.error("Could not parse message. Error: %s" % e)
                    non_stored += 1

        # Update mailing list
        mailing_list.last_updated = datetime.datetime.now()
        mailing_list.save()

        logger.info("%d messages stored in database %s" % (stored_messages, cfg.database))
        logger.info("%d messages ignored by the parser" % non_stored)

        elapsed = timeit.default_timer() - start_time
        logger.info("Execution time: %0.5f s" % elapsed)
import csv
import numpy as np
import seaborn as sns

from mongoengine import connect
from pycoshark.mongomodels import Project, VCSSystem
from pycoshark.utils import create_mongodb_uri_string


# Database credentials
user = something
password = something
host = something
port = something
authentication_db = 'smartshark'
database = "smartshark"
ssl_enabled = None

# Establish connection
uri = create_mongodb_uri_string(user, password, host, port, authentication_db, ssl_enabled)
connect(database, host=uri)

# Fetch the project id (and optionally the vcs system id) for each project below
# The only() call decides which fields are actually retrieved from the MongoDB.
# Always restrict this to the fields that you require!

projects = ['ant-ivy', 'archiva', 'calcite', 'cayenne', 'commons-bcel', 'commons-beanutils', 'commons-codec', 'commons-collections',
            'commons-compress', 'commons-configuration', 'commons-dbcp', 'commons-digester', 'commons-io',
            'commons-jcs', 'commons-jexl', 'commons-lang', 'commons-math', 'commons-net', 'commons-rdf', 'commons-scxml']

rows_list = []

for projectName in projects:
    project = Project.objects(name=projectName).only('id').get()
    #vcs_system = VCSSystem.objects(project_id=project.id).only('id','url').get()
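
The loop body is truncated here; presumably it appends one record per project to rows_list so the list can become a DataFrame via the pandas import above. A sketch of such a continuation (the column names are assumptions):

    rows_list.append({'project': projectName, 'project_id': project.id})

df = pd.DataFrame(rows_list)

Example #15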
def start():
    """
    Compares the commits and code_entity_states of two MongoDBs, where the first MongoDB
    is condensed with the memeSHARK and the second MongoDB is verbose.
    """
    setup_logging()
    logger = logging.getLogger("main")
    logger.info("Starting consistency checker...")

    parser = argparse.ArgumentParser(description='DB consistency checker.')

    parser.add_argument('-v',
                        '--version',
                        help='Shows the version',
                        action='version',
                        version='0.1.0')
    parser.add_argument('-U1',
                        '--db-user1',
                        help='Database user name',
                        default=None)
    parser.add_argument('-P1',
                        '--db-password1',
                        help='Database user password',
                        default=None)
    parser.add_argument('-DB1',
                        '--db-database1',
                        help='Database name',
                        default='smartshark')
    parser.add_argument(
        '-H1',
        '--db-hostname1',
        help='Name of the host, where the database server is running',
        default='localhost')
    parser.add_argument('-p1',
                        '--db-port1',
                        help='Port, where the database server is listening',
                        default=27017,
                        type=int)
    parser.add_argument('-a1',
                        '--db-authentication1',
                        help='Name of the authentication database',
                        default=None)
    parser.add_argument('--ssl1',
                        help='Enables SSL',
                        default=False,
                        action='store_true')

    parser.add_argument('-U2',
                        '--db-user2',
                        help='Database user name',
                        default=None)
    parser.add_argument('-P2',
                        '--db-password2',
                        help='Database user password',
                        default=None)
    parser.add_argument('-DB2',
                        '--db-database2',
                        help='Database name',
                        default='smartshark_backup')
    parser.add_argument(
        '-H2',
        '--db-hostname2',
        help='Name of the host, where the database server is running',
        default='localhost')
    parser.add_argument('-p2',
                        '--db-port2',
                        help='Port, where the database server is listening',
                        default=27017,
                        type=int)
    parser.add_argument('-a2',
                        '--db-authentication2',
                        help='Name of the authentication database',
                        default=None)
    parser.add_argument('--ssl2',
                        help='Enables SSL',
                        default=False,
                        action='store_true')

    parser.add_argument(
        '--debug',
        help='Sets the debug level.',
        default='DEBUG',
        choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'])
    parser.add_argument('--project-name1',
                        help='Name of the project.',
                        default=None)
    parser.add_argument('--project-name2',
                        help='Name of the project.',
                        default=None)

    args = parser.parse_args()

    logger.info(args)

    logger.info("connecting to database 1 (condensed)...")
    uri1 = create_mongodb_uri_string(args.db_user1, args.db_password1,
                                     args.db_hostname1, args.db_port1,
                                     args.db_authentication1, args.ssl1)
    logger.info(uri1)
    connect(args.db_database1, host=uri1, alias='default')

    logger.info("connecting to database 2 (verbose)...")
    uri2 = create_mongodb_uri_string(args.db_user2, args.db_password2,
                                     args.db_hostname2, args.db_port2,
                                     args.db_authentication2, args.ssl2)
    logger.info(uri2)
    connect(args.db_database2, host=uri2, alias='db-verbose')

    # fetch all verbose commits
    commits_verbose = []
    with switch_db(Commit, 'db-verbose') as CommitVerbose:
        # fetch only commits for selected project
        try:
            project_id = Project.objects(name=args.project_name2).get().id
        except DoesNotExist:
            logger.error('Project %s not found!' % args.project_name2)
            sys.exit(1)
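        # note: Project and VCSSystem are resolved via the default alias here,
        # i.e. against the condensed database, even inside this switch_db block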
        vcs_systems = VCSSystem.objects(project_id=project_id).get().id
        logger.info("vcs_system_id: %s", vcs_systems)
        commit_objects = Commit.objects(vcs_system_id=vcs_systems)

        for cur_commit_verbose in commit_objects:
            commits_verbose.append(cur_commit_verbose)

    with switch_db(VCSSystem, 'default') as VCSSystemCondensed:
        # fetch only commits for selected project
        try:
            project_id = Project.objects(name=args.project_name1).get().id
        except DoesNotExist:
            logger.error('Project %s not found!' % args.project_name1)
            sys.exit(1)
        vcs_systems_condensed = VCSSystemCondensed.objects(
            project_id=project_id).get().id

    # fetch files verbose
    with switch_db(File, 'db-verbose') as FilesVerbose:
        files_verbose = {}
        for cur_file_verbose in FilesVerbose.objects(
                vcs_system_id=vcs_systems):
            files_verbose[cur_file_verbose.id] = cur_file_verbose.path

    with switch_db(File, 'default') as FilesCondensed:
        files_condensed = {}
        for cur_file_condensed in FilesCondensed.objects(
                vcs_system_id=vcs_systems_condensed):
            files_condensed[cur_file_condensed.id] = cur_file_condensed.path

    num_commits_verbose = len(commits_verbose)
    logger.info("num commits verbose: %i", num_commits_verbose)
    for commit_nr, commit_verbose in enumerate(commits_verbose):
        logger.info("processing commit %s (%i / %i)", commit_verbose.id,
                    commit_nr + 1, num_commits_verbose)
        # fetch verbose CES
        ces_verbose = {}
        ces_verbose_by_id = {}
        with switch_db(CodeEntityState,
                       'db-verbose') as CodeEntityStateVerbose:
            for cur_ces_verbose in CodeEntityStateVerbose.objects(
                    commit_id=commit_verbose.id):
                ces_verbose[
                    cur_ces_verbose.long_name +
                    files_verbose[cur_ces_verbose.file_id]] = cur_ces_verbose
                ces_verbose_by_id[cur_ces_verbose.id] = cur_ces_verbose

        # fetch same commit in condensed DB
        with switch_db(Commit, 'default') as CommitCondensed:
            try:
                commit_condensed = CommitCondensed.objects(
                    revision_hash=commit_verbose.revision_hash,
                    vcs_system_id=vcs_systems_condensed).get()
            except DoesNotExist:
                logger.info("commit %s not found in condensed db",
                            commit_verbose.revision_hash)
                continue

        # fetch CES from condensed DB
        ces_condensed = {}
        ces_condensed_by_id = {}
        with switch_db(CodeEntityState, 'default') as CodeEntityStateCondensed:
            for ces_id in commit_condensed.code_entity_states:
                cur_ces_condensed = CodeEntityStateCondensed.objects(
                    id=ces_id).get()
                ces_condensed[cur_ces_condensed.long_name + files_condensed[
                    cur_ces_condensed.file_id]] = cur_ces_condensed
                ces_condensed_by_id[cur_ces_condensed.id] = cur_ces_condensed

        logger.info("num CES verbose  : %i", len(ces_verbose.keys()))
        logger.info("num CES condensed: %i", len(ces_condensed.keys()))

        ces_unequal = 0
        # compare CES
        for long_name_verbose, cur_ces_verbose in ces_verbose.items():
            if long_name_verbose not in ces_condensed:
                logger.error(
                    "CES with long_name %s not found in condensed DB!",
                    long_name_verbose)
                ces_unequal += 1
                continue

            cur_ces_condensed = ces_condensed[long_name_verbose]
            old, new = compare_dicts(cur_ces_verbose, cur_ces_condensed, {
                'id', 's_key', 'commit_id', 'ce_parent_id', 'cg_ids', 'file_id'
            })
            if len(new.keys()) > 0 or len(old.keys()) > 0:
                logger.error(
                    "CES with long_name %s (id verbose: %s /id condensed %s) not equal!",
                    long_name_verbose, cur_ces_verbose.id,
                    cur_ces_condensed.id)
                logger.error("verbose  : %s", old)
                logger.error("condensed: %s", new)
                ces_unequal += 1
                continue

            # check if the CES parents are equal as well
            ces_parent_verbose = ces_verbose_by_id.get(
                cur_ces_verbose.ce_parent_id)
            ces_parent_condensed = ces_condensed_by_id.get(
                cur_ces_condensed.ce_parent_id)
            if ces_parent_verbose is None or ces_parent_condensed is None:
                continue
            old, new = compare_dicts(
                ces_parent_verbose, ces_parent_condensed,
                {'id', 's_key', 'commit_id', 'ce_parent_id', 'cg_ids', 'file_id'})
            if len(new.keys()) > 0 or len(old.keys()) > 0:
                logger.error("ce_parent of CES with long_name %s not equal!",
                             long_name_verbose)
                logger.error("verbose  : %s", old)
                logger.error("condensed: %s", new)
                ces_unequal += 1
                continue

        logger.info("num CES from verbose not matched: %i", ces_unequal)