Example #1
def whoosh_index(repo_location, full_index):
    from kallithea.lib.indexers.daemon import WhooshIndexingDaemon
    celerylib.get_session()  # initialize database connection

    index_location = config['index_dir']
    WhooshIndexingDaemon(index_location=index_location,
                         repo_location=repo_location) \
                         .run(full_index=full_index)
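# Hedged usage sketch (not part of the original module): these functions are
# Celery task bodies, and the later examples dispatch such tasks through a
# run_task helper. Assuming that helper is importable here, scheduling a
# full re-index would look roughly like:
#
#     run_task(whoosh_index, repo_location, True)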
Example #2
def whoosh_index(repo_location, full_index):
    from kallithea.lib.indexers.daemon import WhooshIndexingDaemon
    DBS = get_session()

    index_location = config['index_dir']
    WhooshIndexingDaemon(index_location=index_location,
                         repo_location=repo_location, sa=DBS) \
                         .run(full_index=full_index)
Example #3
def whoosh_index(repo_location, full_index):
    from kallithea.lib.indexers.daemon import WhooshIndexingDaemon
    DBS = get_session()

    index_location = config['index_dir']
    WhooshIndexingDaemon(index_location=index_location,
                         repo_location=repo_location, sa=DBS) \
                         .run(full_index=full_index)
Example #4
def get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit=100):
    log = get_logger(get_commits_stats)
    DBS = get_session()
    lockkey = __get_lockkey('get_commits_stats', repo_name, ts_min_y, ts_max_y)
    lockkey_path = config['app_conf']['cache_dir']

    log.info('running task with lockkey %s', lockkey)

    try:
        lock = DaemonLock(file_=jn(lockkey_path, lockkey))

        # for JS data compatibility, strip double quotes from the person key
        akc = lambda k: person(k).replace('"', "")

        co_day_auth_aggr = {}
        commits_by_day_aggregate = {}
        repo = Repository.get_by_repo_name(repo_name)
        if repo is None:
            return True

        repo = repo.scm_instance
        repo_size = repo.count()
        # return if repo has no revisions
        if repo_size < 1:
            lock.release()
            return True

        skip_date_limit = True
        parse_limit = int(config['app_conf'].get('commit_parse_limit'))
        last_rev = None
        last_cs = None
        timegetter = itemgetter('time')

        dbrepo = DBS.query(Repository)\
            .filter(Repository.repo_name == repo_name).scalar()
        cur_stats = DBS.query(Statistics)\
            .filter(Statistics.repository == dbrepo).scalar()

        if cur_stats is not None:
            last_rev = cur_stats.stat_on_revision

        if last_rev == repo.get_changeset().revision and repo_size > 1:
            # pass silently without doing any work if the last parsed
            # revision (the db marker) is already the repository's current tip
            lock.release()
            return True

        if cur_stats:
            commits_by_day_aggregate = OrderedDict(
                json.loads(cur_stats.commit_activity_combined))
            co_day_auth_aggr = json.loads(cur_stats.commit_activity)

        log.debug('starting parsing %s', parse_limit)
        lmktime = mktime

        last_rev = last_rev + 1 if last_rev is not None and last_rev >= 0 else 0
        log.debug('Getting revisions from %s to %s',
                  last_rev, last_rev + parse_limit)
        for cs in repo[last_rev:last_rev + parse_limit]:
            log.debug('parsing %s', cs)
            last_cs = cs  # remember last parsed changeset
            tt = cs.date.timetuple()
            k = lmktime(tt[:3] + (0, 0, 0, 0, 0, 0))

            if akc(cs.author) in co_day_auth_aggr:
                try:
                    l = [
                        timegetter(x)
                        for x in co_day_auth_aggr[akc(cs.author)]['data']
                    ]
                    time_pos = l.index(k)
                except ValueError:
                    time_pos = None

                if time_pos is not None and time_pos >= 0:

                    datadict = \
                        co_day_auth_aggr[akc(cs.author)]['data'][time_pos]

                    datadict["commits"] += 1
                    datadict["added"] += len(cs.added)
                    datadict["changed"] += len(cs.changed)
                    datadict["removed"] += len(cs.removed)

                else:
                    if ts_min_y <= k <= ts_max_y or skip_date_limit:

                        datadict = {
                            "time": k,
                            "commits": 1,
                            "added": len(cs.added),
                            "changed": len(cs.changed),
                            "removed": len(cs.removed),
                        }
                        co_day_auth_aggr[akc(cs.author)]['data']\
                            .append(datadict)

            else:
                if ts_min_y <= k <= ts_max_y or skip_date_limit:
                    co_day_auth_aggr[akc(cs.author)] = {
                        "label":
                        akc(cs.author),
                        "data": [{
                            "time": k,
                            "commits": 1,
                            "added": len(cs.added),
                            "changed": len(cs.changed),
                            "removed": len(cs.removed),
                        }],
                        "schema": ["commits"],
                    }

            # gather all data by day
            if k in commits_by_day_aggregate:
                commits_by_day_aggregate[k] += 1
            else:
                commits_by_day_aggregate[k] = 1

        overview_data = sorted(commits_by_day_aggregate.items(),
                               key=itemgetter(0))

        if not co_day_auth_aggr:
            co_day_auth_aggr[akc(repo.contact)] = {
                "label": akc(repo.contact),
                "data": [0, 1],
                "schema": ["commits"],
            }

        stats = cur_stats if cur_stats else Statistics()
        stats.commit_activity = json.dumps(co_day_auth_aggr)
        stats.commit_activity_combined = json.dumps(overview_data)

        log.debug('last revision %s', last_rev)
        leftovers = len(repo.revisions[last_rev:])
        log.debug('revisions to parse %s', leftovers)

        if last_rev == 0 or leftovers < parse_limit:
            log.debug('getting code trending stats')
            stats.languages = json.dumps(__get_codes_stats(repo_name))

        try:
            stats.repository = dbrepo
            stats.stat_on_revision = last_cs.revision if last_cs else 0
            DBS.add(stats)
            DBS.commit()
        except:
            log.error(traceback.format_exc())
            DBS.rollback()
            lock.release()
            return False

        # final release
        lock.release()

        # execute another task if celery is enabled
        if len(repo.revisions) > 1 and CELERY_ON and recurse_limit > 0:
            recurse_limit -= 1
            run_task(get_commits_stats, repo_name, ts_min_y, ts_max_y,
                     recurse_limit)
        if recurse_limit <= 0:
            log.debug('Breaking recursion: recurse limit reached')
        return True
    except LockHeld:
        log.info('LockHeld')
        return 'Task with key %s already running' % lockkey
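# The lock discipline used above, distilled into a sketch (DaemonLock,
# LockHeld, jn, lockkey and lockkey_path are the example's own names; the
# ellipsis stands for the statistics work):
#
#     try:
#         lock = DaemonLock(file_=jn(lockkey_path, lockkey))
#         ...                # do the work; release before every early return
#         lock.release()     # final release on the success path
#         return True
#     except LockHeld:
#         return 'Task with key %s already running' % lockkey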
Example #5
def create_repo_fork(form_data, cur_user):
    """
    Creates a fork of a repository using internal VCS methods

    :param form_data:
    :param cur_user:
    """
    from kallithea.model.repo import RepoModel
    from kallithea.model.user import UserModel

    log = get_logger(create_repo_fork)
    DBS = get_session()

    base_path = Repository.base_path()
    cur_user = UserModel(DBS)._get_user(cur_user)

    repo_name = form_data['repo_name']  # fork in this case
    repo_name_full = form_data['repo_name_full']

    repo_type = form_data['repo_type']
    owner = cur_user
    private = form_data['private']
    clone_uri = form_data.get('clone_uri')
    repo_group = form_data['repo_group']
    landing_rev = form_data['landing_rev']
    copy_fork_permissions = form_data.get('copy_permissions')

    try:
        fork_of = RepoModel(DBS)._get_repo(form_data.get('fork_parent_id'))

        RepoModel(DBS)._create_repo(
            repo_name=repo_name_full,
            repo_type=repo_type,
            description=form_data['description'],
            owner=owner,
            private=private,
            clone_uri=clone_uri,
            repo_group=repo_group,
            landing_rev=landing_rev,
            fork_of=fork_of,
            copy_fork_permissions=copy_fork_permissions)
        action_logger(cur_user, 'user_forked_repo:%s' % repo_name_full,
                      fork_of.repo_name, '', DBS)
        DBS.commit()

        update_after_clone = form_data['update_after_clone']  # FIXME - unused!
        source_repo_path = os.path.join(base_path, fork_of.repo_name)

        # now create this repo on Filesystem
        RepoModel(DBS)._create_filesystem_repo(
            repo_name=repo_name,
            repo_type=repo_type,
            repo_group=RepoModel(DBS)._get_repo_group(repo_group),
            clone_uri=source_repo_path,
        )
        repo = Repository.get_by_repo_name(repo_name_full)
        log_create_repository(repo.get_dict(), created_by=owner.username)

        # update repo changeset caches initially
        repo.update_changeset_cache()

        # set new created state
        repo.set_state(Repository.STATE_CREATED)
        DBS.commit()
    except Exception as e:
        log.warning('Exception %s occurred when forking repository, '
                    'doing cleanup...', e)
        # roll back things manually!
        repo = Repository.get_by_repo_name(repo_name_full)
        if repo:
            Repository.delete(repo.repo_id)
            DBS.commit()
            RepoModel(DBS)._delete_filesystem_repo(repo)
        raise
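# Cleanup pattern worth noting in the except branch above: delete the
# half-created database record and commit that deletion first, then remove
# the repository from the filesystem, and finally re-raise so the task is
# still reported as failed.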
Example #6
def create_repo(form_data, cur_user):
    from kallithea.model.repo import RepoModel
    from kallithea.model.user import UserModel
    from kallithea.model.db import Setting

    log = get_logger(create_repo)
    DBS = get_session()

    cur_user = UserModel(DBS)._get_user(cur_user)

    owner = cur_user
    repo_name = form_data['repo_name']
    repo_name_full = form_data['repo_name_full']
    repo_type = form_data['repo_type']
    description = form_data['repo_description']
    private = form_data['repo_private']
    clone_uri = form_data.get('clone_uri')
    repo_group = form_data['repo_group']
    landing_rev = form_data['repo_landing_rev']
    copy_fork_permissions = form_data.get('copy_permissions')
    copy_group_permissions = form_data.get('repo_copy_permissions')
    fork_of = form_data.get('fork_parent_id')
    state = form_data.get('repo_state', Repository.STATE_PENDING)

    # repo creation defaults, private and repo_type are filled in form
    defs = Setting.get_default_repo_settings(strip_prefix=True)
    enable_statistics = defs.get('repo_enable_statistics')
    enable_locking = defs.get('repo_enable_locking')
    enable_downloads = defs.get('repo_enable_downloads')

    try:
        repo = RepoModel(DBS)._create_repo(
            repo_name=repo_name_full,
            repo_type=repo_type,
            description=description,
            owner=owner,
            private=private,
            clone_uri=clone_uri,
            repo_group=repo_group,
            landing_rev=landing_rev,
            fork_of=fork_of,
            copy_fork_permissions=copy_fork_permissions,
            copy_group_permissions=copy_group_permissions,
            enable_statistics=enable_statistics,
            enable_locking=enable_locking,
            enable_downloads=enable_downloads,
            state=state)

        action_logger(cur_user, 'user_created_repo',
                      form_data['repo_name_full'], '', DBS)

        DBS.commit()
        # now create this repo on Filesystem
        RepoModel(DBS)._create_filesystem_repo(
            repo_name=repo_name,
            repo_type=repo_type,
            repo_group=RepoModel(DBS)._get_repo_group(repo_group),
            clone_uri=clone_uri,
        )
        repo = Repository.get_by_repo_name(repo_name_full)
        log_create_repository(repo.get_dict(), created_by=owner.username)

        # update repo changeset caches initially
        repo.update_changeset_cache()

        # set new created state
        repo.set_state(Repository.STATE_CREATED)
        DBS.commit()
    except Exception as e:
        log.warning('Exception %s occurred when creating repository, '
                    'doing cleanup...', e)
        # roll back things manually!
        repo = Repository.get_by_repo_name(repo_name_full)
        if repo:
            Repository.delete(repo.repo_id)
            DBS.commit()
            RepoModel(DBS)._delete_filesystem_repo(repo)
        raise
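# A hypothetical invocation sketch (all values are illustrative; the keys are
# exactly the ones create_repo reads above):
#
#     form_data = {
#         'repo_name': 'myrepo',
#         'repo_name_full': 'mygroup/myrepo',
#         'repo_type': 'hg',
#         'repo_description': '',
#         'repo_private': False,
#         'repo_group': None,
#         'repo_landing_rev': 'rev:tip',
#     }
#     create_repo(form_data, cur_user='admin')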
Example #7
def get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit=100):
    DBS = celerylib.get_session()
    lockkey = celerylib.__get_lockkey('get_commits_stats', repo_name, ts_min_y,
                                      ts_max_y)
    # config['app_conf'] fallback: backward compatibility for TurboGears < 2.4
    lockkey_path = config.get('cache_dir') or config['app_conf']['cache_dir']

    log.info('running task with lockkey %s', lockkey)

    try:
        lock = celerylib.DaemonLock(os.path.join(lockkey_path, lockkey))

        co_day_auth_aggr = {}
        commits_by_day_aggregate = {}
        repo = Repository.get_by_repo_name(repo_name)
        if repo is None:
            return True

        repo = repo.scm_instance
        repo_size = repo.count()
        # return if repo has no revisions
        if repo_size < 1:
            lock.release()
            return True

        skip_date_limit = True
        parse_limit = int(config.get('commit_parse_limit'))
        last_rev = None
        last_cs = None
        timegetter = itemgetter('time')

        dbrepo = DBS.query(Repository) \
            .filter(Repository.repo_name == repo_name).scalar()
        cur_stats = DBS.query(Statistics) \
            .filter(Statistics.repository == dbrepo).scalar()

        if cur_stats is not None:
            last_rev = cur_stats.stat_on_revision

        if last_rev == repo.get_changeset().revision and repo_size > 1:
            # pass silently without doing any work if the last parsed
            # revision (the db marker) is already the repository's current tip
            lock.release()
            return True

        if cur_stats:
            commits_by_day_aggregate = OrderedDict(
                ext_json.loads(cur_stats.commit_activity_combined))
            co_day_auth_aggr = ext_json.loads(cur_stats.commit_activity)

        log.debug('starting parsing %s', parse_limit)

        last_rev = last_rev + 1 if last_rev and last_rev >= 0 else 0
        log.debug('Getting revisions from %s to %s', last_rev,
                  last_rev + parse_limit)
        for cs in repo[last_rev:last_rev + parse_limit]:
            log.debug('parsing %s', cs)
            last_cs = cs  # remember last parsed changeset
            tt = cs.date.timetuple()
            k = mktime(tt[:3] + (0, 0, 0, 0, 0, 0))

            if akc(cs.author) in co_day_auth_aggr:
                try:
                    l = [
                        timegetter(x)
                        for x in co_day_auth_aggr[akc(cs.author)]['data']
                    ]
                    time_pos = l.index(k)
                except ValueError:
                    time_pos = None

                if time_pos is not None and time_pos >= 0:
                    datadict = \
                        co_day_auth_aggr[akc(cs.author)]['data'][time_pos]

                    datadict["commits"] += 1
                    datadict["added"] += len(cs.added)
                    datadict["changed"] += len(cs.changed)
                    datadict["removed"] += len(cs.removed)

                else:
                    if ts_min_y <= k <= ts_max_y or skip_date_limit:

                        datadict = {
                            "time": k,
                            "commits": 1,
                            "added": len(cs.added),
                            "changed": len(cs.changed),
                            "removed": len(cs.removed),
                        }
                        co_day_auth_aggr[akc(cs.author)]['data'] \
                            .append(datadict)

            else:
                if ts_min_y <= k <= ts_max_y or skip_date_limit:
                    co_day_auth_aggr[akc(cs.author)] = {
                        "label":
                        akc(cs.author),
                        "data": [{
                            "time": k,
                            "commits": 1,
                            "added": len(cs.added),
                            "changed": len(cs.changed),
                            "removed": len(cs.removed),
                        }],
                        "schema": ["commits"],
                    }

            # gather all data by day
            if k in commits_by_day_aggregate:
                commits_by_day_aggregate[k] += 1
            else:
                commits_by_day_aggregate[k] = 1

        overview_data = sorted(commits_by_day_aggregate.items(),
                               key=itemgetter(0))

        if not co_day_auth_aggr:
            co_day_auth_aggr[akc(repo.contact)] = {
                "label": akc(repo.contact),
                "data": [0, 1],
                "schema": ["commits"],
            }

        stats = cur_stats if cur_stats else Statistics()
        stats.commit_activity = ascii_bytes(ext_json.dumps(co_day_auth_aggr))
        stats.commit_activity_combined = ascii_bytes(
            ext_json.dumps(overview_data))

        log.debug('last revision %s', last_rev)
        leftovers = len(repo.revisions[last_rev:])
        log.debug('revisions to parse %s', leftovers)

        if last_rev == 0 or leftovers < parse_limit:
            log.debug('getting code trending stats')
            stats.languages = ascii_bytes(
                ext_json.dumps(__get_codes_stats(repo_name)))

        try:
            stats.repository = dbrepo
            stats.stat_on_revision = last_cs.revision if last_cs else 0
            DBS.add(stats)
            DBS.commit()
        except:
            log.error(traceback.format_exc())
            DBS.rollback()
            lock.release()
            return False

        # final release
        lock.release()

        # execute another task if celery is enabled
        if (len(repo.revisions) > 1 and kallithea.CELERY_APP
                and recurse_limit > 0):
            get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit - 1)
        elif recurse_limit <= 0:
            log.debug('Not recursing - limit has been reached')
        else:
            log.debug('Not recursing')
    except celerylib.LockHeld:
        log.info('Task with key %s already running', lockkey)
        return 'Task with key %s already running' % lockkey
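# The day-bucket key above maps a changeset date to local midnight, so two
# changesets on the same day collapse to the same key. A self-contained,
# standard-library-only check of that trick (illustrative, not part of the
# task):
#
#     from time import mktime
#     from datetime import datetime
#
#     tt = datetime(2020, 5, 17, 13, 45).timetuple()
#     k = mktime(tt[:3] + (0, 0, 0, 0, 0, 0))  # epoch seconds at local midnight
#     assert k == mktime(datetime(2020, 5, 17, 23, 59).timetuple()[:3]
#                        + (0,) * 6)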
Example #8
def get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit=100):
    log = get_logger(get_commits_stats)
    DBS = get_session()
    lockkey = __get_lockkey('get_commits_stats', repo_name, ts_min_y,
                            ts_max_y)
    lockkey_path = config['app_conf']['cache_dir']

    log.info('running task with lockkey %s', lockkey)

    try:
        lock = DaemonLock(file_=jn(lockkey_path, lockkey))

        # for JS data compatibility, strip double quotes from the person key
        akc = lambda k: person(k).replace('"', "")

        co_day_auth_aggr = {}
        commits_by_day_aggregate = {}
        repo = Repository.get_by_repo_name(repo_name)
        if repo is None:
            return True

        repo = repo.scm_instance
        repo_size = repo.count()
        # return if repo has no revisions
        if repo_size < 1:
            lock.release()
            return True

        skip_date_limit = True
        parse_limit = int(config['app_conf'].get('commit_parse_limit'))
        last_rev = None
        last_cs = None
        timegetter = itemgetter('time')

        dbrepo = DBS.query(Repository) \
            .filter(Repository.repo_name == repo_name).scalar()
        cur_stats = DBS.query(Statistics) \
            .filter(Statistics.repository == dbrepo).scalar()

        if cur_stats is not None:
            last_rev = cur_stats.stat_on_revision

        if last_rev == repo.get_changeset().revision and repo_size > 1:
            # pass silently without doing any work if the last parsed
            # revision (the db marker) is already the repository's current tip
            lock.release()
            return True

        if cur_stats:
            commits_by_day_aggregate = OrderedDict(
                json.loads(cur_stats.commit_activity_combined))
            co_day_auth_aggr = json.loads(cur_stats.commit_activity)

        log.debug('starting parsing %s', parse_limit)
        lmktime = mktime

        last_rev = last_rev + 1 if last_rev is not None and last_rev >= 0 else 0
        log.debug('Getting revisions from %s to %s',
                  last_rev, last_rev + parse_limit)
        for cs in repo[last_rev:last_rev + parse_limit]:
            log.debug('parsing %s', cs)
            last_cs = cs  # remember last parsed changeset
            tt = cs.date.timetuple()
            k = lmktime(tt[:3] + (0, 0, 0, 0, 0, 0))

            if akc(cs.author) in co_day_auth_aggr:
                try:
                    l = [timegetter(x) for x in
                         co_day_auth_aggr[akc(cs.author)]['data']]
                    time_pos = l.index(k)
                except ValueError:
                    time_pos = None

                if time_pos is not None and time_pos >= 0:

                    datadict = \
                        co_day_auth_aggr[akc(cs.author)]['data'][time_pos]

                    datadict["commits"] += 1
                    datadict["added"] += len(cs.added)
                    datadict["changed"] += len(cs.changed)
                    datadict["removed"] += len(cs.removed)

                else:
                    if ts_min_y <= k <= ts_max_y or skip_date_limit:

                        datadict = {
                            "time": k,
                            "commits": 1,
                            "added": len(cs.added),
                            "changed": len(cs.changed),
                            "removed": len(cs.removed),
                        }
                        co_day_auth_aggr[akc(cs.author)]['data'] \
                            .append(datadict)

            else:
                if ts_min_y <= k <= ts_max_y or skip_date_limit:
                    co_day_auth_aggr[akc(cs.author)] = {
                        "label": akc(cs.author),
                        "data": [{
                            "time": k,
                            "commits": 1,
                            "added": len(cs.added),
                            "changed": len(cs.changed),
                            "removed": len(cs.removed),
                        }],
                        "schema": ["commits"],
                    }

            # gather all data by day
            if k in commits_by_day_aggregate:
                commits_by_day_aggregate[k] += 1
            else:
                commits_by_day_aggregate[k] = 1

        overview_data = sorted(commits_by_day_aggregate.items(),
                               key=itemgetter(0))

        if not co_day_auth_aggr:
            co_day_auth_aggr[akc(repo.contact)] = {
                "label": akc(repo.contact),
                "data": [0, 1],
                "schema": ["commits"],
            }

        stats = cur_stats if cur_stats else Statistics()
        stats.commit_activity = json.dumps(co_day_auth_aggr)
        stats.commit_activity_combined = json.dumps(overview_data)

        log.debug('last revision %s', last_rev)
        leftovers = len(repo.revisions[last_rev:])
        log.debug('revisions to parse %s', leftovers)

        if last_rev == 0 or leftovers < parse_limit:
            log.debug('getting code trending stats')
            stats.languages = json.dumps(__get_codes_stats(repo_name))

        try:
            stats.repository = dbrepo
            stats.stat_on_revision = last_cs.revision if last_cs else 0
            DBS.add(stats)
            DBS.commit()
        except:
            log.error(traceback.format_exc())
            DBS.rollback()
            lock.release()
            return False

        # final release
        lock.release()

        # execute another task if celery is enabled
        if len(repo.revisions) > 1 and CELERY_ON and recurse_limit > 0:
            recurse_limit -= 1
            run_task(get_commits_stats, repo_name, ts_min_y, ts_max_y,
                     recurse_limit)
        if recurse_limit <= 0:
            log.debug('Breaking recursion: recurse limit reached')
        return True
    except LockHeld:
        log.info('LockHeld')
        return 'Task with key %s already running' % lockkey
Example #9
def create_repo_fork(form_data, cur_user):
    """
    Creates a fork of a repository using internal VCS methods

    :param form_data:
    :param cur_user:
    """
    from kallithea.model.repo import RepoModel
    from kallithea.model.user import UserModel

    log = get_logger(create_repo_fork)
    DBS = get_session()

    base_path = Repository.base_path()
    cur_user = UserModel(DBS)._get_user(cur_user)

    repo_name = form_data['repo_name']  # fork in this case
    repo_name_full = form_data['repo_name_full']

    repo_type = form_data['repo_type']
    owner = cur_user
    private = form_data['private']
    clone_uri = form_data.get('clone_uri')
    repo_group = form_data['repo_group']
    landing_rev = form_data['landing_rev']
    copy_fork_permissions = form_data.get('copy_permissions')

    try:
        fork_of = RepoModel(DBS)._get_repo(form_data.get('fork_parent_id'))

        RepoModel(DBS)._create_repo(
            repo_name=repo_name_full,
            repo_type=repo_type,
            description=form_data['description'],
            owner=owner,
            private=private,
            clone_uri=clone_uri,
            repo_group=repo_group,
            landing_rev=landing_rev,
            fork_of=fork_of,
            copy_fork_permissions=copy_fork_permissions
        )
        action_logger(cur_user, 'user_forked_repo:%s' % repo_name_full,
                      fork_of.repo_name, '', DBS)
        DBS.commit()

        update_after_clone = form_data['update_after_clone'] # FIXME - unused!
        source_repo_path = os.path.join(base_path, fork_of.repo_name)

        # now create this repo on Filesystem
        RepoModel(DBS)._create_filesystem_repo(
            repo_name=repo_name,
            repo_type=repo_type,
            repo_group=RepoModel(DBS)._get_repo_group(repo_group),
            clone_uri=source_repo_path,
        )
        repo = Repository.get_by_repo_name(repo_name_full)
        log_create_repository(repo.get_dict(), created_by=owner.username)

        # update repo changeset caches initially
        repo.update_changeset_cache()

        # set new created state
        repo.set_state(Repository.STATE_CREATED)
        DBS.commit()
    except Exception as e:
        log.warning('Exception %s occurred when forking repository, '
                    'doing cleanup...', e)
        # roll back things manually!
        repo = Repository.get_by_repo_name(repo_name_full)
        if repo:
            Repository.delete(repo.repo_id)
            DBS.commit()
            RepoModel(DBS)._delete_filesystem_repo(repo)
        raise

    # an odd legacy fix to make Celery fail the task when an exception
    # occurs; as a nested function defined here it has no effect
    def on_failure(self, *args, **kwargs):
        pass

    return True
Example #10
def create_repo(form_data, cur_user):
    from kallithea.model.repo import RepoModel
    from kallithea.model.user import UserModel
    from kallithea.model.db import Setting

    log = get_logger(create_repo)
    DBS = get_session()

    cur_user = UserModel(DBS)._get_user(cur_user)

    owner = cur_user
    repo_name = form_data['repo_name']
    repo_name_full = form_data['repo_name_full']
    repo_type = form_data['repo_type']
    description = form_data['repo_description']
    private = form_data['repo_private']
    clone_uri = form_data.get('clone_uri')
    repo_group = form_data['repo_group']
    landing_rev = form_data['repo_landing_rev']
    copy_fork_permissions = form_data.get('copy_permissions')
    copy_group_permissions = form_data.get('repo_copy_permissions')
    fork_of = form_data.get('fork_parent_id')
    state = form_data.get('repo_state', Repository.STATE_PENDING)

    # repo creation defaults, private and repo_type are filled in form
    defs = Setting.get_default_repo_settings(strip_prefix=True)
    enable_statistics = defs.get('repo_enable_statistics')
    enable_locking = defs.get('repo_enable_locking')
    enable_downloads = defs.get('repo_enable_downloads')

    try:
        repo = RepoModel(DBS)._create_repo(
            repo_name=repo_name_full,
            repo_type=repo_type,
            description=description,
            owner=owner,
            private=private,
            clone_uri=clone_uri,
            repo_group=repo_group,
            landing_rev=landing_rev,
            fork_of=fork_of,
            copy_fork_permissions=copy_fork_permissions,
            copy_group_permissions=copy_group_permissions,
            enable_statistics=enable_statistics,
            enable_locking=enable_locking,
            enable_downloads=enable_downloads,
            state=state
        )

        action_logger(cur_user, 'user_created_repo',
                      form_data['repo_name_full'], '', DBS)

        DBS.commit()
        # now create this repo on Filesystem
        RepoModel(DBS)._create_filesystem_repo(
            repo_name=repo_name,
            repo_type=repo_type,
            repo_group=RepoModel(DBS)._get_repo_group(repo_group),
            clone_uri=clone_uri,
        )
        repo = Repository.get_by_repo_name(repo_name_full)
        log_create_repository(repo.get_dict(), created_by=owner.username)

        # update repo changeset caches initially
        repo.update_changeset_cache()

        # set new created state
        repo.set_state(Repository.STATE_CREATED)
        DBS.commit()
    except Exception as e:
        log.warning('Exception %s occurred when creating repository, '
                    'doing cleanup...', e)
        # roll back things manually!
        repo = Repository.get_by_repo_name(repo_name_full)
        if repo:
            Repository.delete(repo.repo_id)
            DBS.commit()
            RepoModel(DBS)._delete_filesystem_repo(repo)
        raise

    # an odd legacy fix to make Celery fail the task when an exception
    # occurs; as a nested function defined here it has no effect
    def on_failure(self, *args, **kwargs):
        pass

    return True