Example #1
    def __call__(self, environ, start_response):
        """
        Parse the request body as JSON, look up the method on the
        controller and if it exists, dispatch to it.
        """
        if 'CONTENT_LENGTH' not in environ:
            log.debug("No Content-Length")
            return jsonrpc_error(message="No Content-Length in request")
        else:
            length = int(environ['CONTENT_LENGTH'] or 0)
            log.debug('Content-Length: %s' % length)

        if length == 0:
            log.debug("Content-Length is 0")
            return jsonrpc_error(message="Content-Length is 0")

        raw_body = environ['wsgi.input'].read(length)

        try:
            json_body = json.loads(urllib.unquote_plus(raw_body))
        except ValueError as e:
            # catch JSON parse errors here
            return jsonrpc_error(message="JSON parse error ERR:%s RAW:%r" \
                                 % (e, urllib.unquote_plus(raw_body)))
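
A minimal sketch of the round trip this handler expects, using only the standard library (shown with Python 3's urllib.parse; the handler above targets Python 2's urllib). The field names mirror the _build_data test helper shown in a later example; the values are made up.

import json
from urllib.parse import quote_plus, unquote_plus

payload = {"id": 1, "api_key": "secret", "method": "pull", "args": {"repoid": "demo"}}
raw_body = quote_plus(json.dumps(payload))      # what would arrive on wsgi.input
json_body = json.loads(unquote_plus(raw_body))  # what __call__ reconstructs
assert json_body["method"] == "pull"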
Example #2
def pre_push(ui, repo, **kwargs):
    # pre-push hook, currently used to ban pushing when the
    # repository is locked
    try:
        rc_extras = json.loads(os.environ.get('RC_SCM_DATA', "{}"))
    except Exception:
        rc_extras = {}
    extras = dict(repo.ui.configitems('rhodecode_extras'))

    if 'username' in extras:
        username = extras['username']
        repository = extras['repository']
        scm = extras['scm']
        locked_by = extras['locked_by']
    elif 'username' in rc_extras:
        username = rc_extras['username']
        repository = rc_extras['repository']
        scm = rc_extras['scm']
        locked_by = rc_extras['locked_by']
    else:
        raise Exception('Missing data in repo.ui and os.environ')

    usr = User.get_by_username(username)
    if locked_by[0] and usr.user_id != int(locked_by[0]):
        locked_by = User.get(locked_by[0]).username
        raise HTTPLockedRC(repository, locked_by)
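
For context, a hypothetical sketch of how RC_SCM_DATA could be seeded into the environment before Mercurial invokes this hook. The key names are the ones the hook reads; the values are invented, and the two-element locked_by layout is inferred from how pre_push indexes it.

import json
import os

os.environ['RC_SCM_DATA'] = json.dumps({
    'username': 'admin',
    'repository': 'demo-repo',
    'scm': 'hg',
    'locked_by': [None, None],  # no lock holder, so the push is allowed
})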
Example #3
    def _compare_error(self, id_, expected, given):
        expected = jsonify({
            'id': id_,
            'error': expected,
            'result': None
        })
        given = json.loads(given)
        self.assertEqual(expected, given)
Example #4
    def test_api_get_repo_nodes(self, name, ret_type):
        rev = 'tip'
        path = '/'
        id_, params = _build_data(self.apikey, 'get_repo_nodes',
                                  repoid=self.REPO, revision=rev,
                                  root_path=path,
                                  ret_type=ret_type)
        response = api_call(self, params)

        # we don't verify the actual return types here since they are
        # tested elsewhere
        expected = json.loads(response.body)['result']
        self._compare_ok(id_, expected, given=response.body)
Example #5
def log_pull_action(ui, repo, **kwargs):
    """
    Logs the user's last pull action

    :param ui: mercurial ui object
    :param repo: mercurial repository object
    """
    try:
        rc_extras = json.loads(os.environ.get('RC_SCM_DATA', "{}"))
    except Exception:
        rc_extras = {}
    extras = dict(repo.ui.configitems('rhodecode_extras'))
    if 'username' in extras:
        username = extras['username']
        repository = extras['repository']
        scm = extras['scm']
        make_lock = extras['make_lock']
        ip = extras['ip']
    elif 'username' in rc_extras:
        username = rc_extras['username']
        repository = rc_extras['repository']
        scm = rc_extras['scm']
        make_lock = rc_extras['make_lock']
        ip = rc_extras['ip']
    else:
        raise Exception('Missing data in repo.ui and os.environ')
    user = User.get_by_username(username)
    action = 'pull'
    action_logger(user, action, repository, ip, commit=True)
    # extension hook call
    from rhodecode import EXTENSIONS
    callback = getattr(EXTENSIONS, 'PULL_HOOK', None)

    if isfunction(callback):
        kw = {}
        kw.update(extras)
        callback(**kw)

    if make_lock is True:
        Repository.lock(Repository.get_by_repo_name(repository), user.user_id)
        #msg = 'Made lock on repo `%s`' % repository
        #sys.stdout.write(msg)

    return 0
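
A hypothetical PULL_HOOK callback as it might appear in an rcextensions module; it receives the extras dict as keyword arguments, matching the callback(**kw) call above. The body is illustrative only.

def PULL_HOOK(username=None, repository=None, scm=None, ip=None, **kwargs):
    # e.g. forward the pull event to an external audit log
    print('pull by %s on %s (%s) from %s' % (username, repository, scm, ip))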
Example #6
def _extract_extras(env=None):
    """
    Extracts the rc extras data from os.environ and wraps it into a named
    AttributeDict object
    """
    if not env:
        env = os.environ

    try:
        rc_extras = json.loads(env['RC_SCM_DATA'])
    except Exception:
        print os.environ
        print >> sys.stderr, traceback.format_exc()
        rc_extras = {}

    try:
        for k in ['username', 'repository', 'locked_by', 'scm', 'make_lock',
                  'action', 'ip']:
            rc_extras[k]  # access only, to raise KeyError on any missing key
    except KeyError as e:
        raise Exception('Missing key %s in os.environ %s' % (e, rc_extras))

    return AttributeDict(rc_extras)
Example #7
    def index(self, repo_name):
        c.dbrepo = dbrepo = c.rhodecode_db_repo
        c.following = self.scm_model.is_following_repo(repo_name,
                                                self.rhodecode_user.user_id)

        def url_generator(**kw):
            return url('shortlog_home', repo_name=repo_name, size=10, **kw)

        c.repo_changesets = RepoPage(c.rhodecode_repo, page=1,
                                     items_per_page=10, url=url_generator)

        if self.rhodecode_user.username == 'default':
            # for the default (anonymous) user we don't need to pass credentials
            username = ''
            password = ''
        else:
            username = str(self.rhodecode_user.username)
            password = '******'

        parsed_url = urlparse(url.current(qualified=True))

        default_clone_uri = '{scheme}://{user}{pass}{netloc}{path}'

        uri_tmpl = config.get('clone_uri', default_clone_uri)
        uri_tmpl = uri_tmpl.replace('{', '%(').replace('}', ')s')
        decoded_path = safe_unicode(urllib.unquote(parsed_url.path))
        uri_dict = {
           'user': username,
           'pass': password,
           'scheme': parsed_url.scheme,
           'netloc': parsed_url.netloc,
           'path': decoded_path
        }

        uri = uri_tmpl % uri_dict
        # generate another clone url by id
        uri_dict.update(
         {'path': decoded_path.replace(repo_name, '_%s' % c.dbrepo.repo_id)}
        )
        uri_id = uri_tmpl % uri_dict

        c.clone_repo_url = uri
        c.clone_repo_url_id = uri_id
        c.repo_tags = OrderedDict()
        for name, hash_ in c.rhodecode_repo.tags.items()[:10]:
            try:
                c.repo_tags[name] = c.rhodecode_repo.get_changeset(hash_)
            except ChangesetError:
                c.repo_tags[name] = EmptyChangeset(hash_)

        c.repo_branches = OrderedDict()
        for name, hash_ in c.rhodecode_repo.branches.items()[:10]:
            try:
                c.repo_branches[name] = c.rhodecode_repo.get_changeset(hash_)
            except ChangesetError:
                c.repo_branches[name] = EmptyChangeset(hash_)

        td = date.today() + timedelta(days=1)
        td_1m = td - timedelta(days=calendar.mdays[td.month])
        td_1y = td - timedelta(days=365)

        ts_min_m = mktime(td_1m.timetuple())
        ts_min_y = mktime(td_1y.timetuple())
        ts_max_y = mktime(td.timetuple())

        if dbrepo.enable_statistics:
            c.show_stats = True
            c.no_data_msg = _('No data loaded yet')
            run_task(get_commits_stats, c.dbrepo.repo_name, ts_min_y, ts_max_y)
        else:
            c.show_stats = False
            c.no_data_msg = _('Statistics are disabled for this repository')
        c.ts_min = ts_min_m
        c.ts_max = ts_max_y

        stats = self.sa.query(Statistics)\
            .filter(Statistics.repository == dbrepo)\
            .scalar()

        c.stats_percentage = 0

        if stats and stats.languages:
            c.no_data = dbrepo.enable_statistics is False
            lang_stats_d = json.loads(stats.languages)
            c.commit_data = stats.commit_activity
            c.overview_data = stats.commit_activity_combined

            lang_stats = ((x, {"count": y,
                               "desc": LANGUAGES_EXTENSIONS_MAP.get(x)})
                          for x, y in lang_stats_d.items())

            c.trending_languages = json.dumps(
                sorted(lang_stats, reverse=True, key=lambda k: k[1])[:10]
            )
            last_rev = stats.stat_on_revision + 1
            c.repo_last_rev = c.rhodecode_repo.count()\
                if c.rhodecode_repo.revisions else 0
            if last_rev == 0 or c.repo_last_rev == 0:
                pass
            else:
                c.stats_percentage = '%.2f' % ((float((last_rev)) /
                                                c.repo_last_rev) * 100)
        else:
            c.commit_data = json.dumps({})
            c.overview_data = json.dumps([[ts_min_y, 0], [ts_max_y, 10]])
            c.trending_languages = json.dumps({})
            c.no_data = True

        c.enable_downloads = dbrepo.enable_downloads
        if c.enable_downloads:
            c.download_options = self._get_download_links(c.rhodecode_repo)

        c.readme_data, c.readme_file = self.__get_readme_data(
            c.rhodecode_db_repo.repo_name, c.rhodecode_repo
        )
        return render('summary/summary.html')
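
A small illustration of the clone-url template trick used above: the configured {placeholder} template is rewritten to %(placeholder)s so a plain dict interpolation can fill it. The values below are made up; in the controller the user/pass slots carry the username and a masked password.

uri_tmpl = '{scheme}://{user}{pass}{netloc}{path}'
uri_tmpl = uri_tmpl.replace('{', '%(').replace('}', ')s')
print(uri_tmpl % {'scheme': 'https', 'user': 'joe', 'pass': '@',
                  'netloc': 'code.example.com', 'path': '/demo-repo'})
# -> https://joe@code.example.com/demo-repo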
Example #8
def get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit=100):
    log = get_logger(get_commits_stats)
    DBS = get_session()
    lockkey = __get_lockkey('get_commits_stats', repo_name, ts_min_y, ts_max_y)
    lockkey_path = config['app_conf']['cache_dir']

    log.info('running task with lockkey %s' % lockkey)

    try:
        lock = l = DaemonLock(file_=jn(lockkey_path, lockkey))

        # for js data compatibility, strip double quotes from the person key
        akc = lambda k: person(k).replace('"', "")

        co_day_auth_aggr = {}
        commits_by_day_aggregate = {}
        repo = Repository.get_by_repo_name(repo_name)
        if repo is None:
            return True

        repo = repo.scm_instance
        repo_size = repo.count()
        # return if the repo has no revisions
        if repo_size < 1:
            lock.release()
            return True

        skip_date_limit = True
        parse_limit = int(config['app_conf'].get('commit_parse_limit'))
        last_rev = None
        last_cs = None
        timegetter = itemgetter('time')

        dbrepo = DBS.query(Repository)\
            .filter(Repository.repo_name == repo_name).scalar()
        cur_stats = DBS.query(Statistics)\
            .filter(Statistics.repository == dbrepo).scalar()

        if cur_stats is not None:
            last_rev = cur_stats.stat_on_revision

        if last_rev == repo.get_changeset().revision and repo_size > 1:
            # pass silently without doing any work if the last parsed revision
            # (the marker stored in the db) is already the repository's
            # current last revision
            lock.release()
            return True

        if cur_stats:
            commits_by_day_aggregate = OrderedDict(
                json.loads(cur_stats.commit_activity_combined))
            co_day_auth_aggr = json.loads(cur_stats.commit_activity)

        log.debug('starting parsing %s' % parse_limit)
        lmktime = mktime

        last_rev = last_rev + 1 if last_rev >= 0 else 0
        log.debug('Getting revisions from %s to %s' %
                  (last_rev, last_rev + parse_limit))
        for cs in repo[last_rev:last_rev + parse_limit]:
            log.debug('parsing %s' % cs)
            last_cs = cs  # remember last parsed changeset
            k = lmktime([
                cs.date.timetuple()[0],
                cs.date.timetuple()[1],
                cs.date.timetuple()[2], 0, 0, 0, 0, 0, 0
            ])

            if akc(cs.author) in co_day_auth_aggr:
                try:
                    l = [
                        timegetter(x)
                        for x in co_day_auth_aggr[akc(cs.author)]['data']
                    ]
                    time_pos = l.index(k)
                except ValueError:
                    time_pos = None

                if time_pos is not None and time_pos >= 0:

                    datadict = \
                        co_day_auth_aggr[akc(cs.author)]['data'][time_pos]

                    datadict["commits"] += 1
                    datadict["added"] += len(cs.added)
                    datadict["changed"] += len(cs.changed)
                    datadict["removed"] += len(cs.removed)

                else:
                    if k >= ts_min_y and k <= ts_max_y or skip_date_limit:

                        datadict = {
                            "time": k,
                            "commits": 1,
                            "added": len(cs.added),
                            "changed": len(cs.changed),
                            "removed": len(cs.removed),
                        }
                        co_day_auth_aggr[akc(cs.author)]['data']\
                            .append(datadict)

            else:
                if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
                    co_day_auth_aggr[akc(cs.author)] = {
                        "label":
                        akc(cs.author),
                        "data": [{
                            "time": k,
                            "commits": 1,
                            "added": len(cs.added),
                            "changed": len(cs.changed),
                            "removed": len(cs.removed),
                        }],
                        "schema": ["commits"],
                    }

            #gather all data by day
            if k in commits_by_day_aggregate:
                commits_by_day_aggregate[k] += 1
            else:
                commits_by_day_aggregate[k] = 1

        overview_data = sorted(commits_by_day_aggregate.items(),
                               key=itemgetter(0))

        if not co_day_auth_aggr:
            co_day_auth_aggr[akc(repo.contact)] = {
                "label": akc(repo.contact),
                "data": [0, 1],
                "schema": ["commits"],
            }

        stats = cur_stats if cur_stats else Statistics()
        stats.commit_activity = json.dumps(co_day_auth_aggr)
        stats.commit_activity_combined = json.dumps(overview_data)

        log.debug('last revision %s' % last_rev)
        leftovers = len(repo.revisions[last_rev:])
        log.debug('revisions to parse %s' % leftovers)

        if last_rev == 0 or leftovers < parse_limit:
            log.debug('getting code trending stats')
            stats.languages = json.dumps(__get_codes_stats(repo_name))

        try:
            stats.repository = dbrepo
            stats.stat_on_revision = last_cs.revision if last_cs else 0
            DBS.add(stats)
            DBS.commit()
        except Exception:
            log.error(traceback.format_exc())
            DBS.rollback()
            lock.release()
            return False

        # final release
        lock.release()

        # execute another task if celery is enabled
        if len(repo.revisions) > 1 and CELERY_ON and recurse_limit > 0:
            recurse_limit -= 1
            run_task(get_commits_stats, repo_name, ts_min_y, ts_max_y,
                     recurse_limit)
        if recurse_limit <= 0:
            log.debug('Breaking recursive mode due to reach of recurse limit')
        return True
    except LockHeld:
        log.info('LockHeld')
        return 'Task with key %s already running' % lockkey
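
A sketch of the two JSON shapes this task persists on the Statistics row; the field names follow the dicts built in the loop above, while the author and the numbers are invented.

import json

commit_activity = {
    "Jane Doe <jane@example.com>": {
        "label": "Jane Doe <jane@example.com>",
        "data": [{"time": 1325376000.0, "commits": 3,
                  "added": 5, "changed": 2, "removed": 1}],
        "schema": ["commits"],
    },
}
commit_activity_combined = [(1325376000.0, 3), (1325462400.0, 1)]

stats_commit_activity = json.dumps(commit_activity)
stats_commit_activity_combined = json.dumps(commit_activity_combined)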
Example #9
    def index(self, repo_name):
        c.dbrepo = dbrepo = c.rhodecode_db_repo
        c.following = self.scm_model.is_following_repo(
            repo_name, self.rhodecode_user.user_id)

        def url_generator(**kw):
            return url('shortlog_home', repo_name=repo_name, size=10, **kw)

        c.repo_changesets = RepoPage(c.rhodecode_repo,
                                     page=1,
                                     items_per_page=10,
                                     url=url_generator)

        if self.rhodecode_user.username == 'default':
            # for the default (anonymous) user we don't need to pass credentials
            username = ''
            password = ''
        else:
            username = str(self.rhodecode_user.username)
            password = '******'

        parsed_url = urlparse(url.current(qualified=True))

        default_clone_uri = '{scheme}://{user}{pass}{netloc}{path}'

        uri_tmpl = config.get('clone_uri', default_clone_uri)
        uri_tmpl = uri_tmpl.replace('{', '%(').replace('}', ')s')
        decoded_path = safe_unicode(urllib.unquote(parsed_url.path))
        uri_dict = {
            'user': username,
            'pass': password,
            'scheme': parsed_url.scheme,
            'netloc': parsed_url.netloc,
            'path': decoded_path
        }

        uri = uri_tmpl % uri_dict
        # generate another clone url by id
        uri_dict.update({
            'path':
            decoded_path.replace(repo_name, '_%s' % c.dbrepo.repo_id)
        })
        uri_id = uri_tmpl % uri_dict

        c.clone_repo_url = uri
        c.clone_repo_url_id = uri_id
        c.repo_tags = OrderedDict()
        for name, hash_ in c.rhodecode_repo.tags.items()[:10]:
            try:
                c.repo_tags[name] = c.rhodecode_repo.get_changeset(hash_)
            except ChangesetError:
                c.repo_tags[name] = EmptyChangeset(hash_)

        c.repo_branches = OrderedDict()
        for name, hash_ in c.rhodecode_repo.branches.items()[:10]:
            try:
                c.repo_branches[name] = c.rhodecode_repo.get_changeset(hash_)
            except ChangesetError:
                c.repo_branches[name] = EmptyChangeset(hash_)

        td = date.today() + timedelta(days=1)
        td_1m = td - timedelta(days=calendar.mdays[td.month])
        td_1y = td - timedelta(days=365)

        ts_min_m = mktime(td_1m.timetuple())
        ts_min_y = mktime(td_1y.timetuple())
        ts_max_y = mktime(td.timetuple())

        if dbrepo.enable_statistics:
            c.show_stats = True
            c.no_data_msg = _('No data loaded yet')
            run_task(get_commits_stats, c.dbrepo.repo_name, ts_min_y, ts_max_y)
        else:
            c.show_stats = False
            c.no_data_msg = _('Statistics are disabled for this repository')
        c.ts_min = ts_min_m
        c.ts_max = ts_max_y

        stats = self.sa.query(Statistics)\
            .filter(Statistics.repository == dbrepo)\
            .scalar()

        c.stats_percentage = 0

        if stats and stats.languages:
            c.no_data = dbrepo.enable_statistics is False
            lang_stats_d = json.loads(stats.languages)
            c.commit_data = stats.commit_activity
            c.overview_data = stats.commit_activity_combined

            lang_stats = ((x, {
                "count": y,
                "desc": LANGUAGES_EXTENSIONS_MAP.get(x)
            }) for x, y in lang_stats_d.items())

            c.trending_languages = json.dumps(
                sorted(lang_stats, reverse=True, key=lambda k: k[1])[:10])
            last_rev = stats.stat_on_revision + 1
            c.repo_last_rev = c.rhodecode_repo.count()\
                if c.rhodecode_repo.revisions else 0
            if last_rev == 0 or c.repo_last_rev == 0:
                pass
            else:
                c.stats_percentage = '%.2f' % ((float(
                    (last_rev)) / c.repo_last_rev) * 100)
        else:
            c.commit_data = json.dumps({})
            c.overview_data = json.dumps([[ts_min_y, 0], [ts_max_y, 10]])
            c.trending_languages = json.dumps({})
            c.no_data = True

        c.enable_downloads = dbrepo.enable_downloads
        if c.enable_downloads:
            c.download_options = self._get_download_links(c.rhodecode_repo)

        c.readme_data, c.readme_file = self.__get_readme_data(
            c.rhodecode_db_repo.repo_name, c.rhodecode_repo)
        return render('summary/summary.html')
Example #10
    def index(self, repo_name):

        e = request.environ
        c.dbrepo = dbrepo = c.rhodecode_db_repo

        c.following = self.scm_model.is_following_repo(repo_name,
                                                self.rhodecode_user.user_id)

        def url_generator(**kw):
            return url('shortlog_home', repo_name=repo_name, size=10, **kw)

        c.repo_changesets = RepoPage(c.rhodecode_repo, page=1,
                                     items_per_page=10, url=url_generator)

        if self.rhodecode_user.username == 'default':
            # for the default (anonymous) user we don't need to pass credentials
            username = ''
            password = ''
        else:
            username = str(self.rhodecode_user.username)
            password = '******'

        if e.get('wsgi.url_scheme') == 'https':
            split_s = 'https://'
        else:
            split_s = 'http://'

        qualified_uri = [split_s] + [url.current(qualified=True)\
                                     .split(split_s)[-1]]
        uri = u'%(proto)s%(user)s%(pass)s%(rest)s' \
                % {'user': username,
                     'pass': password,
                     'proto': qualified_uri[0],
                     'rest': qualified_uri[1]}
        c.clone_repo_url = uri
        c.repo_tags = OrderedDict()
        for name, hash in c.rhodecode_repo.tags.items()[:10]:
            try:
                c.repo_tags[name] = c.rhodecode_repo.get_changeset(hash)
            except ChangesetError:
                c.repo_tags[name] = EmptyChangeset(hash)

        c.repo_branches = OrderedDict()
        for name, hash in c.rhodecode_repo.branches.items()[:10]:
            try:
                c.repo_branches[name] = c.rhodecode_repo.get_changeset(hash)
            except ChangesetError:
                c.repo_branches[name] = EmptyChangeset(hash)

        td = date.today() + timedelta(days=1)
        td_1m = td - timedelta(days=calendar.mdays[td.month])
        td_1y = td - timedelta(days=365)

        ts_min_m = mktime(td_1m.timetuple())
        ts_min_y = mktime(td_1y.timetuple())
        ts_max_y = mktime(td.timetuple())

        if dbrepo.enable_statistics:
            c.show_stats = True
            c.no_data_msg = _('No data loaded yet')
            run_task(get_commits_stats, c.dbrepo.repo_name, ts_min_y, ts_max_y)
        else:
            c.show_stats = False
            c.no_data_msg = _('Statistics are disabled for this repository')
        c.ts_min = ts_min_m
        c.ts_max = ts_max_y

        stats = self.sa.query(Statistics)\
            .filter(Statistics.repository == dbrepo)\
            .scalar()

        c.stats_percentage = 0

        if stats and stats.languages:
            c.no_data = dbrepo.enable_statistics is False
            lang_stats_d = json.loads(stats.languages)
            c.commit_data = stats.commit_activity
            c.overview_data = stats.commit_activity_combined

            lang_stats = ((x, {"count": y,
                               "desc": LANGUAGES_EXTENSIONS_MAP.get(x)})
                          for x, y in lang_stats_d.items())

            c.trending_languages = json.dumps(
                sorted(lang_stats, reverse=True, key=lambda k: k[1])[:10]
            )
            last_rev = stats.stat_on_revision + 1
            c.repo_last_rev = c.rhodecode_repo.count()\
                if c.rhodecode_repo.revisions else 0
            if last_rev == 0 or c.repo_last_rev == 0:
                pass
            else:
                c.stats_percentage = '%.2f' % ((float((last_rev)) /
                                                c.repo_last_rev) * 100)
        else:
            c.commit_data = json.dumps({})
            c.overview_data = json.dumps([[ts_min_y, 0], [ts_max_y, 10]])
            c.trending_languages = json.dumps({})
            c.no_data = True

        c.enable_downloads = dbrepo.enable_downloads
        if c.enable_downloads:
            c.download_options = self._get_download_links(c.rhodecode_repo)

        return render('summary/summary.html')
Example #11
def get_commits_stats(repo_name, ts_min_y, ts_max_y):
    log = get_logger(get_commits_stats)
    DBS = get_session()
    lockkey = __get_lockkey('get_commits_stats', repo_name, ts_min_y,
                            ts_max_y)
    lockkey_path = config['here']

    log.info('running task with lockkey %s' % lockkey)

    try:
        lock = l = DaemonLock(file_=jn(lockkey_path, lockkey))

        # for js data compatibility, strip double quotes from the person key
        akc = lambda k: person(k).replace('"', "")

        co_day_auth_aggr = {}
        commits_by_day_aggregate = {}
        repo = Repository.get_by_repo_name(repo_name)
        if repo is None:
            return True

        repo = repo.scm_instance
        repo_size = repo.count()
        # return if the repo has no revisions
        if repo_size < 1:
            lock.release()
            return True

        skip_date_limit = True
        parse_limit = int(config['app_conf'].get('commit_parse_limit'))
        last_rev = None
        last_cs = None
        timegetter = itemgetter('time')

        dbrepo = DBS.query(Repository)\
            .filter(Repository.repo_name == repo_name).scalar()
        cur_stats = DBS.query(Statistics)\
            .filter(Statistics.repository == dbrepo).scalar()

        if cur_stats is not None:
            last_rev = cur_stats.stat_on_revision

        if last_rev == repo.get_changeset().revision and repo_size > 1:
            # pass silently without doing any work if the last parsed revision
            # (the marker stored in the db) is already the repository's
            # current last revision
            lock.release()
            return True

        if cur_stats:
            commits_by_day_aggregate = OrderedDict(json.loads(
                                        cur_stats.commit_activity_combined))
            co_day_auth_aggr = json.loads(cur_stats.commit_activity)

        log.debug('starting parsing %s' % parse_limit)
        lmktime = mktime

        last_rev = last_rev + 1 if last_rev >= 0 else 0
        log.debug('Getting revisions from %s to %s' % (
             last_rev, last_rev + parse_limit)
        )
        for cs in repo[last_rev:last_rev + parse_limit]:
            log.debug('parsing %s' % cs)
            last_cs = cs  # remember last parsed changeset
            k = lmktime([cs.date.timetuple()[0], cs.date.timetuple()[1],
                          cs.date.timetuple()[2], 0, 0, 0, 0, 0, 0])

            if akc(cs.author) in co_day_auth_aggr:
                try:
                    l = [timegetter(x) for x in
                         co_day_auth_aggr[akc(cs.author)]['data']]
                    time_pos = l.index(k)
                except ValueError:
                    time_pos = False

                if time_pos is not False and time_pos >= 0:

                    datadict = \
                        co_day_auth_aggr[akc(cs.author)]['data'][time_pos]

                    datadict["commits"] += 1
                    datadict["added"] += len(cs.added)
                    datadict["changed"] += len(cs.changed)
                    datadict["removed"] += len(cs.removed)

                else:
                    if k >= ts_min_y and k <= ts_max_y or skip_date_limit:

                        datadict = {"time": k,
                                    "commits": 1,
                                    "added": len(cs.added),
                                    "changed": len(cs.changed),
                                    "removed": len(cs.removed),
                                   }
                        co_day_auth_aggr[akc(cs.author)]['data']\
                            .append(datadict)

            else:
                if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
                    co_day_auth_aggr[akc(cs.author)] = {
                                        "label": akc(cs.author),
                                        "data": [{"time":k,
                                                 "commits":1,
                                                 "added":len(cs.added),
                                                 "changed":len(cs.changed),
                                                 "removed":len(cs.removed),
                                                 }],
                                        "schema": ["commits"],
                                        }

            #gather all data by day
            if k in commits_by_day_aggregate:
                commits_by_day_aggregate[k] += 1
            else:
                commits_by_day_aggregate[k] = 1

        overview_data = sorted(commits_by_day_aggregate.items(),
                               key=itemgetter(0))

        if not co_day_auth_aggr:
            co_day_auth_aggr[akc(repo.contact)] = {
                "label": akc(repo.contact),
                "data": [0, 1],
                "schema": ["commits"],
            }

        stats = cur_stats if cur_stats else Statistics()
        stats.commit_activity = json.dumps(co_day_auth_aggr)
        stats.commit_activity_combined = json.dumps(overview_data)

        log.debug('last revision %s' % last_rev)
        leftovers = len(repo.revisions[last_rev:])
        log.debug('revisions to parse %s' % leftovers)

        if last_rev == 0 or leftovers < parse_limit:
            log.debug('getting code trending stats')
            stats.languages = json.dumps(__get_codes_stats(repo_name))

        try:
            stats.repository = dbrepo
            stats.stat_on_revision = last_cs.revision if last_cs else 0
            DBS.add(stats)
            DBS.commit()
        except Exception:
            log.error(traceback.format_exc())
            DBS.rollback()
            lock.release()
            return False

        # final release
        lock.release()

        # execute another task if celery is enabled
        if len(repo.revisions) > 1 and CELERY_ON:
            run_task(get_commits_stats, repo_name, ts_min_y, ts_max_y)
        return True
    except LockHeld:
        log.info('LockHeld')
        return 'Task with key %s already running' % lockkey
Example #12
def _build_data(apikey, method, **kw):
    """
    Builds a JSON-RPC API call payload with a randomly generated id

    :param apikey: api key used to authorize the call
    :param method: name of the API method to call
    :param kw: keyword arguments passed through as the method args
    """
    random_id = random.randrange(1, 9999)
    return random_id, json.dumps({
        "id": random_id,
        "api_key": apikey,
        "method": method,
        "args": kw
    })

jsonify = lambda obj: json.loads(json.dumps(obj))


def crash(*args, **kwargs):
    raise Exception('Total Crash !')


def api_call(test_obj, params):
    response = test_obj.app.post(API_URL, content_type='application/json',
                                 params=params)
    return response


TEST_USERS_GROUP = 'test_users_group'
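
A hypothetical test method showing how the helpers above fit together; it assumes the surrounding test base class (self.apikey, _compare_ok and the WSGI test app behind api_call) and a made-up 'ping' API method.

    def test_api_hypothetical_call(self):
        id_, params = _build_data(self.apikey, 'ping')
        response = api_call(self, params)
        expected = json.loads(response.body)['result']
        self._compare_ok(id_, expected, given=response.body)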

Example #13
def handle_git_receive(repo_path, revs, env, hook_type='post'):
    """
    A really hacky method that is run by the git pre- and post-receive hooks
    and logs a push action together with the pushed revisions. It's executed
    by a subprocess and thus needs all the info required to create an
    on-the-fly pylons environment, connect to the database and run the
    logging code. Hacky as sh*t but works.

    :param repo_path: full filesystem path of the repository
    :param revs: lines from the git hook's stdin ("<old-rev> <new-rev> <ref>")
    :param env: environment dict carrying the serialized RHODECODE_EXTRAS
    :param hook_type: 'pre' or 'post', selects which hook logic to run
    """
    from paste.deploy import appconfig
    from sqlalchemy import engine_from_config
    from rhodecode.config.environment import load_environment
    from rhodecode.model import init_model
    from rhodecode.model.db import RhodeCodeUi
    from rhodecode.lib.utils import make_ui
    extras = json.loads(env['RHODECODE_EXTRAS'])

    path, ini_name = os.path.split(extras['config'])
    conf = appconfig('config:%s' % ini_name, relative_to=path)
    load_environment(conf.global_conf, conf.local_conf)

    engine = engine_from_config(conf, 'sqlalchemy.db1.')
    init_model(engine)

    baseui = make_ui('db')
    # fix if it's not a bare repo
    if repo_path.endswith(os.sep + '.git'):
        repo_path = repo_path[:-5]

    repo = Repository.get_by_full_path(repo_path)
    if not repo:
        raise OSError('Repository %s not found in database'
                      % (safe_str(repo_path)))

    _hooks = dict(baseui.configitems('hooks')) or {}

    for k, v in extras.items():
        baseui.setconfig('rhodecode_extras', k, v)
    repo = repo.scm_instance
    repo.ui = baseui

    if hook_type == 'pre':
        pre_push(baseui, repo)

    # if push hook is enabled via web interface
    elif hook_type == 'post' and _hooks.get(RhodeCodeUi.HOOK_PUSH):

        rev_data = []
        for l in revs:
            old_rev, new_rev, ref = l.split(' ')
            _ref_data = ref.split('/')
            if _ref_data[1] in ['tags', 'heads']:
                rev_data.append({'old_rev': old_rev,
                                 'new_rev': new_rev,
                                 'ref': ref,
                                 'type': _ref_data[1],
                                 'name': _ref_data[2].strip()})

        git_revs = []
        for push_ref in rev_data:
            _type = push_ref['type']
            if _type == 'heads':
                if push_ref['old_rev'] == EmptyChangeset().raw_id:
                    cmd = "for-each-ref --format='%(refname)' 'refs/heads/*'"
                    heads = repo.run_git_command(cmd)[0]
                    heads = heads.replace(push_ref['ref'], '')
                    heads = ' '.join(map(lambda c: c.strip('\n').strip(),
                                         heads.splitlines()))
                    cmd = (('log %(new_rev)s' % push_ref) +
                           ' --reverse --pretty=format:"%H" --not ' + heads)
                    git_revs += repo.run_git_command(cmd)[0].splitlines()

                elif push_ref['new_rev'] == EmptyChangeset().raw_id:
                    #delete branch case
                    git_revs += ['delete_branch=>%s' % push_ref['name']]
                else:
                    cmd = (('log %(old_rev)s..%(new_rev)s' % push_ref) +
                           ' --reverse --pretty=format:"%H"')
                    git_revs += repo.run_git_command(cmd)[0].splitlines()

            elif _type == 'tags':
                git_revs += ['tag=>%s' % push_ref['name']]

        log_push_action(baseui, repo, _git_revs=git_revs)
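
Each element of revs is one line from git's receive hook stdin, in the form "<old-sha> <new-sha> <ref>". A made-up example of what the parsing above extracts from such a line:

line = '0' * 40 + ' 9fceb02d0ae598e95dc970b74767f19372d61af8 refs/heads/master'
old_rev, new_rev, ref = line.split(' ')
_ref_data = ref.split('/')  # ['refs', 'heads', 'master'] -> type 'heads', name 'master'
# an all-zero old_rev is git's null sha for a freshly created ref, which is what the
# EmptyChangeset().raw_id comparison above treats as a new branch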
Example #14
def log_push_action(ui, repo, **kwargs):
    """
    Maps the user's last push action to the newly pushed changeset ids
    (invoked as a mercurial hook and reused for git pushes)

    :param ui: mercurial ui object
    :param repo: repo object containing the `ui` object
    """

    try:
        rc_extras = json.loads(os.environ.get('RC_SCM_DATA', "{}"))
    except Exception:
        rc_extras = {}

    extras = dict(repo.ui.configitems('rhodecode_extras'))
    if 'username' in extras:
        username = extras['username']
        repository = extras['repository']
        scm = extras['scm']
        make_lock = extras['make_lock']
    elif 'username' in rc_extras:
        username = rc_extras['username']
        repository = rc_extras['repository']
        scm = rc_extras['scm']
        make_lock = rc_extras['make_lock']
    else:
        raise Exception('Missing data in repo.ui and os.environ')

    action = 'push' + ':%s'

    if scm == 'hg':
        node = kwargs['node']

        def get_revs(repo, rev_opt):
            if rev_opt:
                revs = revrange(repo, rev_opt)

                if len(revs) == 0:
                    return (nullrev, nullrev)
                return (max(revs), min(revs))
            else:
                return (len(repo) - 1, 0)

        stop, start = get_revs(repo, [node + ':'])
        h = binascii.hexlify
        revs = [h(repo[r].node()) for r in xrange(start, stop + 1)]
    elif scm == 'git':
        revs = kwargs.pop('_git_revs', [])

    action = action % ','.join(revs)

    action_logger(username, action, repository, extras['ip'], commit=True)

    # extension hook call
    from rhodecode import EXTENSIONS
    callback = getattr(EXTENSIONS, 'PUSH_HOOK', None)
    if isfunction(callback):
        kw = {'pushed_revs': revs}
        kw.update(extras)
        callback(**kw)

    if make_lock is False:
        Repository.unlock(Repository.get_by_repo_name(repository))
        msg = 'Released lock on repo `%s`\n' % repository
        sys.stdout.write(msg)

    return 0