def statistics(self, repo_name):
    if c.db_repo.enable_statistics:
        c.show_stats = True
        c.no_data_msg = _('No data ready yet')
    else:
        c.show_stats = False
        c.no_data_msg = _('Statistics are disabled for this repository')

    td = date.today() + timedelta(days=1)
    td_1m = td - timedelta(days=calendar.mdays[td.month])
    td_1y = td - timedelta(days=365)

    ts_min_m = mktime(td_1m.timetuple())
    ts_min_y = mktime(td_1y.timetuple())
    ts_max_y = mktime(td.timetuple())
    c.ts_min = ts_min_m
    c.ts_max = ts_max_y

    stats = Statistics.query() \
        .filter(Statistics.repository == c.db_repo) \
        .scalar()
    c.stats_percentage = 0
    if stats and stats.languages:
        c.no_data = False is c.db_repo.enable_statistics
        lang_stats_d = json.loads(stats.languages)
        c.commit_data = json.loads(stats.commit_activity)
        c.overview_data = json.loads(stats.commit_activity_combined)

        lang_stats = ((x, {"count": y,
                           "desc": LANGUAGES_EXTENSIONS_MAP.get(x)})
                      for x, y in lang_stats_d.items())

        c.trending_languages = (
            sorted(lang_stats, reverse=True, key=lambda k: k[1])[:10]
        )
        last_rev = stats.stat_on_revision + 1
        c.repo_last_rev = c.db_repo_scm_instance.count() \
            if c.db_repo_scm_instance.revisions else 0

        if last_rev == 0 or c.repo_last_rev == 0:
            pass
        else:
            c.stats_percentage = '%.2f' % ((float((last_rev)) /
                                            c.repo_last_rev) * 100)
    else:
        c.commit_data = {}
        c.overview_data = ([[ts_min_y, 0], [ts_max_y, 10]])
        c.trending_languages = {}
        c.no_data = True

    recurse_limit = 500  # don't recurse more than 500 times when parsing
    get_commits_stats(c.db_repo.repo_name, ts_min_y, ts_max_y, recurse_limit)
    return render('summary/statistics.html')
def _handle_request(self, environ, start_response):
    start = time.time()
    ip_addr = self.ip_addr = self._get_ip_addr(environ)
    self._req_id = None
    if 'CONTENT_LENGTH' not in environ:
        log.debug("No Content-Length")
        return jsonrpc_error(retid=self._req_id,
                             message="No Content-Length in request")
    else:
        length = environ['CONTENT_LENGTH'] or 0
        length = int(environ['CONTENT_LENGTH'])
        log.debug('Content-Length: %s' % length)

    if length == 0:
        log.debug("Content-Length is 0")
        return jsonrpc_error(retid=self._req_id,
                             message="Content-Length is 0")

    raw_body = environ['wsgi.input'].read(length)

    try:
        json_body = json.loads(raw_body)
    except ValueError, e:
        # catch JSON errors Here
        return jsonrpc_error(retid=self._req_id,
                             message="JSON parse error ERR:%s RAW:%r"
                                     % (e, raw_body))
def index(self, repo_name):
    _load_changelog_summary()

    username = ''
    if self.authuser.username != User.DEFAULT_USER:
        username = safe_str(self.authuser.username)

    _def_clone_uri = _def_clone_uri_by_id = c.clone_uri_tmpl
    if '{repo}' in _def_clone_uri:
        _def_clone_uri_by_id = _def_clone_uri.replace(
            '{repo}', '_{repoid}')
    elif '{repoid}' in _def_clone_uri:
        _def_clone_uri_by_id = _def_clone_uri.replace(
            '_{repoid}', '{repo}')

    c.clone_repo_url = c.db_repo.clone_url(user=username,
                                           uri_tmpl=_def_clone_uri)
    c.clone_repo_url_id = c.db_repo.clone_url(
        user=username, uri_tmpl=_def_clone_uri_by_id)

    if c.db_repo.enable_statistics:
        c.show_stats = True
    else:
        c.show_stats = False

    stats = self.sa.query(Statistics)\
        .filter(Statistics.repository == c.db_repo)\
        .scalar()

    c.stats_percentage = 0

    if stats and stats.languages:
        c.no_data = False is c.db_repo.enable_statistics
        lang_stats_d = json.loads(stats.languages)

        lang_stats = ((x, {"count": y,
                           "desc": LANGUAGES_EXTENSIONS_MAP.get(x)})
                      for x, y in lang_stats_d.items())

        c.trending_languages = json.dumps(
            sorted(lang_stats, reverse=True, key=lambda k: k[1])[:10])
    else:
        c.no_data = True
        c.trending_languages = json.dumps([])

    c.enable_downloads = c.db_repo.enable_downloads
    c.readme_data, c.readme_file = \
        self.__get_readme_data(c.db_repo)
    return render('summary/summary.html')
def index(self, repo_name):
    _load_changelog_summary()

    if request.authuser.is_default_user:
        username = ''
    else:
        username = safe_str(request.authuser.username)

    _def_clone_uri = _def_clone_uri_by_id = c.clone_uri_tmpl
    if '{repo}' in _def_clone_uri:
        _def_clone_uri_by_id = _def_clone_uri.replace('{repo}', '_{repoid}')
    elif '{repoid}' in _def_clone_uri:
        _def_clone_uri_by_id = _def_clone_uri.replace('_{repoid}', '{repo}')
    c.clone_repo_url = c.db_repo.clone_url(user=username,
                                           uri_tmpl=_def_clone_uri)
    c.clone_repo_url_id = c.db_repo.clone_url(user=username,
                                              uri_tmpl=_def_clone_uri_by_id)

    if c.db_repo.enable_statistics:
        c.show_stats = True
    else:
        c.show_stats = False

    stats = Statistics.query() \
        .filter(Statistics.repository == c.db_repo) \
        .scalar()

    c.stats_percentage = 0

    if stats and stats.languages:
        c.no_data = False is c.db_repo.enable_statistics
        lang_stats_d = json.loads(stats.languages)

        lang_stats = ((x, {"count": y,
                           "desc": LANGUAGES_EXTENSIONS_MAP.get(x)})
                      for x, y in lang_stats_d.items())

        c.trending_languages = (
            sorted(lang_stats, reverse=True, key=lambda k: k[1])[:10]
        )
    else:
        c.no_data = True
        c.trending_languages = []

    c.enable_downloads = c.db_repo.enable_downloads
    c.readme_data, c.readme_file = \
        self.__get_readme_data(c.db_repo)
    return render('summary/summary.html')
def _request(self, url, body=None, headers=None,
             method=None, noformat=False,
             empty_response_ok=False):
    _headers = {
        "Content-type": "application/json",
        "Accept": "application/json"
    }
    if self.user and self.passwd:
        authstring = base64.b64encode("%s:%s" % (self.user, self.passwd))
        _headers["Authorization"] = "Basic %s" % authstring
    if headers:
        _headers.update(headers)
    log.debug("Sent crowd: \n%s" % (formatted_json({
        "url": url,
        "body": body,
        "headers": _headers
    })))
    request = urllib2.Request(url, body, _headers)
    if method:
        request.get_method = lambda: method

    global msg
    msg = ""
    try:
        rdoc = self.opener.open(request)
        msg = "".join(rdoc.readlines())
        if not msg and empty_response_ok:
            rval = {}
            rval["status"] = True
            rval["error"] = "Response body was empty"
        elif not noformat:
            rval = json.loads(msg)
            rval["status"] = True
        else:
            rval = "".join(rdoc.readlines())
    except Exception, e:
        if not noformat:
            rval = {
                "status": False,
                "body": body,
                "error": str(e) + "\n" + msg
            }
        else:
            rval = None
    return rval
def api_call(apikey, apihost, method=None, **kw):
    """
    Api_call wrapper for RhodeCode.

    :param apikey:
    :param apihost:
    :param format: formatting, pretty means prints and pprint of json
     json returns unparsed json
    :param method:
    :returns: json response from server
    """
    import random
    import urllib2
    import pprint

    def _build_data(random_id):
        """
        Builds API data with given random ID

        :param random_id:
        """
        return {
            "id": random_id,
            "api_key": apikey,
            "method": method,
            "args": kw
        }

    if not method:
        raise Exception('please specify method name !')

    id_ = random.randrange(1, 9999)
    req = urllib2.Request('%s/_admin/api' % apihost,
                          data=json.dumps(_build_data(id_)),
                          headers={'content-type': 'text/plain'})
    ret = urllib2.urlopen(req)
    raw_json = ret.read()
    json_data = json.loads(raw_json)
    id_ret = json_data['id']
    if id_ret == id_:
        return json_data
    else:
        _formatted_json = pprint.pformat(json_data)
        raise Exception('something went wrong. '
                        'ID mismatch got %s, expected %s | %s'
                        % (id_ret, id_, _formatted_json))
def api_call(apikey, apihost, method=None, **kw):
    """
    Api_call wrapper for Kallithea.

    :param apikey:
    :param apihost:
    :param format: formatting, pretty means prints and pprint of json
     json returns unparsed json
    :param method:
    :returns: json response from server
    """
    def _build_data(random_id):
        """
        Builds API data with given random ID

        :param random_id:
        """
        return {
            "id": random_id,
            "api_key": apikey,
            "method": method,
            "args": kw
        }

    if not method:
        raise Exception('please specify method name !')

    apihost = apihost.rstrip('/')
    id_ = random.randrange(1, 9999)
    req = urllib2.Request('%s/_admin/api' % apihost,
                          data=json.dumps(_build_data(id_)),
                          headers={'content-type': 'text/plain'})
    ret = urllib2.urlopen(req)
    raw_json = ret.read()
    json_data = json.loads(raw_json)
    id_ret = json_data['id']
    if id_ret == id_:
        return json_data
    else:
        _formatted_json = pprint.pformat(json_data)
        raise Exception('something went wrong. '
                        'ID mismatch got %s, expected %s | %s' % (
                            id_ret, id_, _formatted_json))
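# --- Usage sketch (not part of the original sources): a minimal, hedged
# example of calling api_call() above, mirroring the hook script further
# down. The API key, host URL and repo name are placeholder assumptions.
import json

def _example_api_call_usage():
    apikey = 'CHANGEME'                       # a real user/admin API key
    apihost = 'http://kallithea.example.com'  # base URL of the instance
    # keyword arguments become the "args" dict of the JSON-RPC payload
    json_resp = api_call(apikey, apihost, 'get_repo', repoid='myrepo')
    if json_resp['error']:
        print json.dumps(json_resp['error'], indent=4, sort_keys=True)
    else:
        print json.dumps(json_resp['result'], indent=4, sort_keys=True)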
def _extract_extras(env=None):
    """
    Extracts the rc extras data from os.environ, and wraps it into named
    AttributeDict object
    """
    if not env:
        env = os.environ

    try:
        rc_extras = json.loads(env['KALLITHEA_EXTRAS'])
    except KeyError:
        rc_extras = {}

    try:
        for k in ['username', 'repository', 'locked_by', 'scm', 'make_lock',
                  'action', 'ip']:
            rc_extras[k]
    except KeyError, e:
        raise Exception('Missing key %s in os.environ %s' % (e, rc_extras))
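# --- Test sketch (not part of the original sources): a hedged example of
# driving _extract_extras() above. The payload keys mirror the ones the
# function checks; the concrete values are made-up placeholders, and the
# return value is whatever the (truncated) function ultimately produces,
# per its docstring an AttributeDict wrapping this data.
import json
import os

os.environ['KALLITHEA_EXTRAS'] = json.dumps({
    'username': 'demo',          # placeholder values
    'repository': 'myrepo',
    'locked_by': None,
    'scm': 'hg',
    'make_lock': None,
    'action': 'push',
    'ip': '127.0.0.1',
})
extras = _extract_extras()  # raises if any of the expected keys is missing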
def _request(self, url, body=None, headers=None,
             method=None, noformat=False,
             empty_response_ok=False):
    _headers = {"Content-type": "application/json",
                "Accept": "application/json"}
    if self.user and self.passwd:
        authstring = base64.b64encode("%s:%s" % (self.user, self.passwd))
        _headers["Authorization"] = "Basic %s" % authstring
    if headers:
        _headers.update(headers)
    log.debug("Sent crowd: \n%s", formatted_json({"url": url,
                                                  "body": body,
                                                  "headers": _headers}))
    request = urllib2.Request(url, body, _headers)
    if method:
        request.get_method = lambda: method

    global msg
    msg = ""
    try:
        rdoc = self.opener.open(request)
        msg = "".join(rdoc.readlines())
        if not msg and empty_response_ok:
            rval = {}
            rval["status"] = True
            rval["error"] = "Response body was empty"
        elif not noformat:
            rval = json.loads(msg)
            rval["status"] = True
        else:
            rval = "".join(rdoc.readlines())
    except Exception as e:
        if not noformat:
            rval = {"status": False,
                    "body": body,
                    "error": str(e) + "\n" + msg}
        else:
            rval = None
    return rval
def main(argv=None):
    #print ','.join(os.environ)
    #print os.environ['RC_SCM_DATA']
    rc_scm_data = json.loads(os.environ['KALLITHEA_EXTRAS'])
    username = rc_scm_data['username']
    repoid = rc_scm_data['repository']
    apihost = rc_scm_data['server_url']

    apikey = 'CHANGEME'
    method = 'get_repo'
    margs = {'repoid': repoid}

    #print 'Calling method %s(%s) => %s' % (method, margs, apihost)
    json_resp = api_call(apikey, apihost, method, **margs)

    if json_resp['error']:
        json_data = json_resp['error']
    else:
        json_data = json_resp['result']
    #print 'Server response \n%s' % (json.dumps(json_data, indent=4, sort_keys=True))

    #print 'members:', json_data['members']

    # if we are the owner, allow pushing
    #if json_data['owner'] == username:
    #    return 0

    # if we, or the default user, have write or admin permissions, allow pushing
    for member in json_data['members']:
        #if (member['username'] == 'default' or member['username'] == username) and (member['permission'] == 'repository.write' or member['permission'] == 'repository.admin'):
        if (member['name'] == 'default' or member['name'] == username) and (
                member['permission'] == 'repository.write'
                or member['permission'] == 'repository.admin'):
            return 0

    # otherwise deny pushing
    print 'You do not have the proper permissions to write to this repository! Ask the owner to add you.'
    return 1
def get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit=100):
    log = get_logger(get_commits_stats)
    DBS = get_session()
    lockkey = __get_lockkey('get_commits_stats', repo_name, ts_min_y,
                            ts_max_y)
    lockkey_path = config['app_conf']['cache_dir']

    log.info('running task with lockkey %s' % lockkey)

    try:
        lock = l = DaemonLock(file_=jn(lockkey_path, lockkey))

        # for js data compatibility cleans the key for person from '
        akc = lambda k: person(k).replace('"', "")

        co_day_auth_aggr = {}
        commits_by_day_aggregate = {}
        repo = Repository.get_by_repo_name(repo_name)
        if repo is None:
            return True

        repo = repo.scm_instance
        repo_size = repo.count()
        # return if repo have no revisions
        if repo_size < 1:
            lock.release()
            return True

        skip_date_limit = True
        parse_limit = int(config['app_conf'].get('commit_parse_limit'))
        last_rev = None
        last_cs = None
        timegetter = itemgetter('time')

        dbrepo = DBS.query(Repository)\
            .filter(Repository.repo_name == repo_name).scalar()
        cur_stats = DBS.query(Statistics)\
            .filter(Statistics.repository == dbrepo).scalar()

        if cur_stats is not None:
            last_rev = cur_stats.stat_on_revision

        if last_rev == repo.get_changeset().revision and repo_size > 1:
            # pass silently without any work if we're not on first revision or
            # current state of parsing revision(from db marker) is the
            # last revision
            lock.release()
            return True

        if cur_stats:
            commits_by_day_aggregate = OrderedDict(
                json.loads(cur_stats.commit_activity_combined))
            co_day_auth_aggr = json.loads(cur_stats.commit_activity)

        log.debug('starting parsing %s' % parse_limit)
        lmktime = mktime

        last_rev = last_rev + 1 if last_rev >= 0 else 0
        log.debug('Getting revisions from %s to %s' %
                  (last_rev, last_rev + parse_limit))
        for cs in repo[last_rev:last_rev + parse_limit]:
            log.debug('parsing %s' % cs)
            last_cs = cs  # remember last parsed changeset
            k = lmktime([
                cs.date.timetuple()[0], cs.date.timetuple()[1],
                cs.date.timetuple()[2], 0, 0, 0, 0, 0, 0
            ])

            if akc(cs.author) in co_day_auth_aggr:
                try:
                    l = [
                        timegetter(x)
                        for x in co_day_auth_aggr[akc(cs.author)]['data']
                    ]
                    time_pos = l.index(k)
                except ValueError:
                    time_pos = None

                if time_pos >= 0 and time_pos is not None:
                    datadict = \
                        co_day_auth_aggr[akc(cs.author)]['data'][time_pos]

                    datadict["commits"] += 1
                    datadict["added"] += len(cs.added)
                    datadict["changed"] += len(cs.changed)
                    datadict["removed"] += len(cs.removed)
                else:
                    if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
                        datadict = {
                            "time": k,
                            "commits": 1,
                            "added": len(cs.added),
                            "changed": len(cs.changed),
                            "removed": len(cs.removed),
                        }
                        co_day_auth_aggr[akc(cs.author)]['data']\
                            .append(datadict)
            else:
                if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
                    co_day_auth_aggr[akc(cs.author)] = {
                        "label": akc(cs.author),
                        "data": [{
                            "time": k,
                            "commits": 1,
                            "added": len(cs.added),
                            "changed": len(cs.changed),
                            "removed": len(cs.removed),
                        }],
                        "schema": ["commits"],
                    }

            #gather all data by day
            if k in commits_by_day_aggregate:
                commits_by_day_aggregate[k] += 1
            else:
                commits_by_day_aggregate[k] = 1

        overview_data = sorted(commits_by_day_aggregate.items(),
                               key=itemgetter(0))

        if not co_day_auth_aggr:
            co_day_auth_aggr[akc(repo.contact)] = {
                "label": akc(repo.contact),
                "data": [0, 1],
                "schema": ["commits"],
            }

        stats = cur_stats if cur_stats else Statistics()
        stats.commit_activity = json.dumps(co_day_auth_aggr)
        stats.commit_activity_combined = json.dumps(overview_data)

        log.debug('last revision %s' % last_rev)
        leftovers = len(repo.revisions[last_rev:])
        log.debug('revisions to parse %s' % leftovers)

        if last_rev == 0 or leftovers < parse_limit:
            log.debug('getting code trending stats')
            stats.languages = json.dumps(__get_codes_stats(repo_name))

        try:
            stats.repository = dbrepo
            stats.stat_on_revision = last_cs.revision if last_cs else 0
            DBS.add(stats)
            DBS.commit()
        except:
            log.error(traceback.format_exc())
            DBS.rollback()
            lock.release()
            return False

        # final release
        lock.release()

        # execute another task if celery is enabled
        if len(repo.revisions) > 1 and CELERY_ON and recurse_limit > 0:
            recurse_limit -= 1
            run_task(get_commits_stats, repo_name, ts_min_y, ts_max_y,
                     recurse_limit)
        if recurse_limit <= 0:
            log.debug('Breaking recursive mode due to reach of recurse limit')
        return True
    except LockHeld:
        log.info('LockHeld')
        return 'Task with key %s already running' % lockkey
def _handle_request(self, environ, start_response):
    start = time.time()
    ip_addr = self.ip_addr = self._get_ip_addr(environ)
    self._req_id = None
    if 'CONTENT_LENGTH' not in environ:
        log.debug("No Content-Length")
        return jsonrpc_error(retid=self._req_id,
                             message="No Content-Length in request")
    else:
        length = environ['CONTENT_LENGTH'] or 0
        length = int(environ['CONTENT_LENGTH'])
        log.debug('Content-Length: %s', length)

    if length == 0:
        log.debug("Content-Length is 0")
        return jsonrpc_error(retid=self._req_id,
                             message="Content-Length is 0")

    raw_body = environ['wsgi.input'].read(length)

    try:
        json_body = json.loads(raw_body)
    except ValueError as e:
        # catch JSON errors Here
        return jsonrpc_error(retid=self._req_id,
                             message="JSON parse error ERR:%s RAW:%r"
                                     % (e, raw_body))

    # check AUTH based on API key
    try:
        self._req_api_key = json_body['api_key']
        self._req_id = json_body['id']
        self._req_method = json_body['method']
        self._request_params = json_body['args']
        if not isinstance(self._request_params, dict):
            self._request_params = {}

        log.debug('method: %s, params: %s',
                  self._req_method, self._request_params)
    except KeyError as e:
        return jsonrpc_error(retid=self._req_id,
                             message='Incorrect JSON query missing %s' % e)

    # check if we can find this session using api_key
    try:
        u = User.get_by_api_key(self._req_api_key)
        if u is None:
            return jsonrpc_error(retid=self._req_id,
                                 message='Invalid API key')

        auth_u = AuthUser(dbuser=u)
        if not AuthUser.check_ip_allowed(auth_u, ip_addr):
            return jsonrpc_error(retid=self._req_id,
                                 message='request from IP:%s not allowed'
                                         % (ip_addr,))
        else:
            log.info('Access for IP:%s allowed', ip_addr)
    except Exception as e:
        return jsonrpc_error(retid=self._req_id,
                             message='Invalid API key')

    self._error = None
    try:
        self._func = self._find_method()
    except AttributeError as e:
        return jsonrpc_error(retid=self._req_id,
                             message=str(e))

    # now that we have a method, add self._req_params to
    # self.kargs and dispatch control to WGIController
    argspec = inspect.getargspec(self._func)
    arglist = argspec[0][1:]
    defaults = map(type, argspec[3] or [])
    default_empty = types.NotImplementedType

    # kw arguments required by this method
    func_kwargs = dict(izip_longest(reversed(arglist), reversed(defaults),
                                    fillvalue=default_empty))

    # this is little trick to inject logged in user for
    # perms decorators to work they expect the controller class to have
    # authuser attribute set
    self.authuser = auth_u

    # This attribute will need to be first param of a method that uses
    # api_key, which is translated to instance of user at that name
    USER_SESSION_ATTR = 'apiuser'

    if USER_SESSION_ATTR not in arglist:
        return jsonrpc_error(
            retid=self._req_id,
            message='This method [%s] does not support '
                    'authentication (missing %s param)' % (
                        self._func.__name__, USER_SESSION_ATTR)
        )

    # get our arglist and check if we provided them as args
    for arg, default in func_kwargs.iteritems():
        if arg == USER_SESSION_ATTR:
            # USER_SESSION_ATTR is something translated from API key and
            # this is checked before so we don't need validate it
            continue

        # skip the required param check if it's default value is
        # NotImplementedType (default_empty)
        if default == default_empty and arg not in self._request_params:
            return jsonrpc_error(
                retid=self._req_id,
                message=(
                    'Missing non optional `%s` arg in JSON DATA' % arg
                )
            )

    self._rpc_args = {USER_SESSION_ATTR: u}
    self._rpc_args.update(self._request_params)
    self._rpc_args['action'] = self._req_method
    self._rpc_args['environ'] = environ
    self._rpc_args['start_response'] = start_response

    status = []
    headers = []
    exc_info = []

    def change_content(new_status, new_headers, new_exc_info=None):
        status.append(new_status)
        headers.extend(new_headers)
        exc_info.append(new_exc_info)

    output = WSGIController.__call__(self, environ, change_content)
    output = list(output)
    headers.append(('Content-Length', str(len(output[0]))))
    replace_header(headers, 'Content-Type', 'application/json')
    start_response(status[0], headers, exc_info[0])
    log.info('IP: %s Request to %s time: %.3fs' % (
        self._get_ip_addr(environ),
        safe_unicode(_get_access_path(environ)), time.time() - start)
    )
    return output
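# --- Payload sketch (not part of the original sources): the request body
# that _handle_request() above expects has the same shape api_call()'s
# _build_data() produces; the concrete field values here are placeholders.
import json

example_body = json.dumps({
    "id": 1234,                    # echoed back so callers can match responses
    "api_key": "CHANGEME",         # resolved via User.get_by_api_key()
    "method": "get_repo",          # looked up by self._find_method()
    "args": {"repoid": "myrepo"},  # must be a dict; checked against the
                                   # target method's signature
})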
def _dispatch(self, state, remainder=None):
    """
    Parse the request body as JSON, look up the method on the
    controller and if it exists, dispatch to it.
    """
    # Since we are here we should respond as JSON
    response.content_type = 'application/json'

    environ = state.request.environ
    start = time.time()
    ip_addr = request.ip_addr = self._get_ip_addr(environ)
    self._req_id = None
    if 'CONTENT_LENGTH' not in environ:
        log.debug("No Content-Length")
        raise JSONRPCErrorResponse(retid=self._req_id,
                                   message="No Content-Length in request")
    else:
        length = environ['CONTENT_LENGTH'] or 0
        length = int(environ['CONTENT_LENGTH'])
        log.debug('Content-Length: %s', length)

    if length == 0:
        raise JSONRPCErrorResponse(retid=self._req_id,
                                   message="Content-Length is 0")

    raw_body = environ['wsgi.input'].read(length)

    try:
        json_body = json.loads(raw_body)
    except ValueError as e:
        # catch JSON errors Here
        raise JSONRPCErrorResponse(
            retid=self._req_id,
            message="JSON parse error ERR:%s RAW:%r" % (e, raw_body))

    # check AUTH based on API key
    try:
        self._req_api_key = json_body['api_key']
        self._req_id = json_body['id']
        self._req_method = json_body['method']
        self._request_params = json_body['args']
        if not isinstance(self._request_params, dict):
            self._request_params = {}

        log.debug('method: %s, params: %s', self._req_method,
                  self._request_params)
    except KeyError as e:
        raise JSONRPCErrorResponse(
            retid=self._req_id,
            message='Incorrect JSON query missing %s' % e)

    # check if we can find this session using api_key
    try:
        u = User.get_by_api_key(self._req_api_key)
        if u is None:
            raise JSONRPCErrorResponse(retid=self._req_id,
                                       message='Invalid API key')

        auth_u = AuthUser(dbuser=u)
        if not AuthUser.check_ip_allowed(auth_u, ip_addr):
            raise JSONRPCErrorResponse(
                retid=self._req_id,
                message='request from IP:%s not allowed' % (ip_addr, ))
        else:
            log.info('Access for IP:%s allowed', ip_addr)
    except Exception as e:
        raise JSONRPCErrorResponse(retid=self._req_id,
                                   message='Invalid API key')

    self._error = None
    try:
        self._func = self._find_method()
    except AttributeError as e:
        raise JSONRPCErrorResponse(retid=self._req_id,
                                   message=str(e))

    # now that we have a method, add self._req_params to
    # self.kargs and dispatch control to WGIController
    argspec = inspect.getargspec(self._func)
    arglist = argspec[0][1:]
    defaults = map(type, argspec[3] or [])
    default_empty = types.NotImplementedType

    # kw arguments required by this method
    func_kwargs = dict(
        itertools.izip_longest(reversed(arglist),
                               reversed(defaults),
                               fillvalue=default_empty))

    # this is little trick to inject logged in user for
    # perms decorators to work they expect the controller class to have
    # authuser attribute set
    request.authuser = request.user = auth_u

    # This attribute will need to be first param of a method that uses
    # api_key, which is translated to instance of user at that name
    USER_SESSION_ATTR = 'apiuser'

    # get our arglist and check if we provided them as args
    for arg, default in func_kwargs.iteritems():
        if arg == USER_SESSION_ATTR:
            # USER_SESSION_ATTR is something translated from API key and
            # this is checked before so we don't need validate it
            continue

        # skip the required param check if it's default value is
        # NotImplementedType (default_empty)
        if default == default_empty and arg not in self._request_params:
            raise JSONRPCErrorResponse(
                retid=self._req_id,
                message='Missing non optional `%s` arg in JSON DATA' % arg,
            )

    extra = set(self._request_params).difference(func_kwargs)
    if extra:
        raise JSONRPCErrorResponse(
            retid=self._req_id,
            message='Unknown %s arg in JSON DATA' %
                    ', '.join('`%s`' % arg for arg in extra),
        )

    self._rpc_args = {}
    self._rpc_args.update(self._request_params)
    self._rpc_args['action'] = self._req_method
    self._rpc_args['environ'] = environ

    log.info(
        'IP: %s Request to %s time: %.3fs' %
        (self._get_ip_addr(environ),
         safe_unicode(_get_access_path(environ)), time.time() - start))

    state.set_action(self._rpc_call, [])
    state.set_params(self._rpc_args)
    return state
def _dispatch(self, state, remainder=None):
    """
    Parse the request body as JSON, look up the method on the
    controller and if it exists, dispatch to it.
    """
    # Since we are here we should respond as JSON
    response.content_type = 'application/json'

    environ = state.request.environ
    start = time.time()
    ip_addr = request.ip_addr = self._get_ip_addr(environ)
    self._req_id = None
    if 'CONTENT_LENGTH' not in environ:
        log.debug("No Content-Length")
        raise JSONRPCErrorResponse(retid=self._req_id,
                                   message="No Content-Length in request")
    else:
        length = environ['CONTENT_LENGTH'] or 0
        length = int(environ['CONTENT_LENGTH'])
        log.debug('Content-Length: %s', length)

    if length == 0:
        raise JSONRPCErrorResponse(retid=self._req_id,
                                   message="Content-Length is 0")

    raw_body = environ['wsgi.input'].read(length)

    try:
        json_body = json.loads(raw_body)
    except ValueError as e:
        # catch JSON errors Here
        raise JSONRPCErrorResponse(retid=self._req_id,
                                   message="JSON parse error ERR:%s RAW:%r"
                                           % (e, raw_body))

    # check AUTH based on API key
    try:
        self._req_api_key = json_body['api_key']
        self._req_id = json_body['id']
        self._req_method = json_body['method']
        self._request_params = json_body['args']
        if not isinstance(self._request_params, dict):
            self._request_params = {}

        log.debug('method: %s, params: %s', self._req_method,
                  self._request_params)
    except KeyError as e:
        raise JSONRPCErrorResponse(retid=self._req_id,
                                   message='Incorrect JSON query missing %s' % e)

    # check if we can find this session using api_key
    try:
        u = User.get_by_api_key(self._req_api_key)
        if u is None:
            raise JSONRPCErrorResponse(retid=self._req_id,
                                       message='Invalid API key')

        auth_u = AuthUser(dbuser=u)
        if not AuthUser.check_ip_allowed(auth_u, ip_addr):
            raise JSONRPCErrorResponse(retid=self._req_id,
                                       message='request from IP:%s not allowed'
                                               % (ip_addr,))
        else:
            log.info('Access for IP:%s allowed', ip_addr)
    except Exception as e:
        raise JSONRPCErrorResponse(retid=self._req_id,
                                   message='Invalid API key')

    self._error = None
    try:
        self._func = self._find_method()
    except AttributeError as e:
        raise JSONRPCErrorResponse(retid=self._req_id,
                                   message=str(e))

    # now that we have a method, add self._req_params to
    # self.kargs and dispatch control to WGIController
    argspec = inspect.getargspec(self._func)
    arglist = argspec[0][1:]
    defaults = map(type, argspec[3] or [])
    default_empty = types.NotImplementedType

    # kw arguments required by this method
    func_kwargs = dict(itertools.izip_longest(reversed(arglist),
                                              reversed(defaults),
                                              fillvalue=default_empty))

    # this is little trick to inject logged in user for
    # perms decorators to work they expect the controller class to have
    # authuser attribute set
    request.authuser = request.user = auth_u

    # This attribute will need to be first param of a method that uses
    # api_key, which is translated to instance of user at that name
    USER_SESSION_ATTR = 'apiuser'

    # get our arglist and check if we provided them as args
    for arg, default in func_kwargs.iteritems():
        if arg == USER_SESSION_ATTR:
            # USER_SESSION_ATTR is something translated from API key and
            # this is checked before so we don't need validate it
            continue

        # skip the required param check if it's default value is
        # NotImplementedType (default_empty)
        if default == default_empty and arg not in self._request_params:
            raise JSONRPCErrorResponse(
                retid=self._req_id,
                message='Missing non optional `%s` arg in JSON DATA' % arg,
            )

    extra = set(self._request_params).difference(func_kwargs)
    if extra:
        raise JSONRPCErrorResponse(
            retid=self._req_id,
            message='Unknown %s arg in JSON DATA' %
                    ', '.join('`%s`' % arg for arg in extra),
        )

    self._rpc_args = {}
    self._rpc_args.update(self._request_params)
    self._rpc_args['action'] = self._req_method
    self._rpc_args['environ'] = environ

    log.info('IP: %s Request to %s time: %.3fs' % (
        self._get_ip_addr(environ),
        safe_unicode(_get_access_path(environ)), time.time() - start)
    )

    state.set_action(self._rpc_call, [])
    state.set_params(self._rpc_args)
    return state
def get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit=100):
    log = get_logger(get_commits_stats)
    DBS = get_session()
    lockkey = __get_lockkey('get_commits_stats', repo_name, ts_min_y,
                            ts_max_y)
    lockkey_path = config['app_conf']['cache_dir']

    log.info('running task with lockkey %s', lockkey)

    try:
        lock = l = DaemonLock(file_=jn(lockkey_path, lockkey))

        # for js data compatibility cleans the key for person from '
        akc = lambda k: person(k).replace('"', "")

        co_day_auth_aggr = {}
        commits_by_day_aggregate = {}
        repo = Repository.get_by_repo_name(repo_name)
        if repo is None:
            return True

        repo = repo.scm_instance
        repo_size = repo.count()
        # return if repo have no revisions
        if repo_size < 1:
            lock.release()
            return True

        skip_date_limit = True
        parse_limit = int(config['app_conf'].get('commit_parse_limit'))
        last_rev = None
        last_cs = None
        timegetter = itemgetter('time')

        dbrepo = DBS.query(Repository) \
            .filter(Repository.repo_name == repo_name).scalar()
        cur_stats = DBS.query(Statistics) \
            .filter(Statistics.repository == dbrepo).scalar()

        if cur_stats is not None:
            last_rev = cur_stats.stat_on_revision

        if last_rev == repo.get_changeset().revision and repo_size > 1:
            # pass silently without any work if we're not on first revision or
            # current state of parsing revision(from db marker) is the
            # last revision
            lock.release()
            return True

        if cur_stats:
            commits_by_day_aggregate = OrderedDict(json.loads(
                cur_stats.commit_activity_combined))
            co_day_auth_aggr = json.loads(cur_stats.commit_activity)

        log.debug('starting parsing %s', parse_limit)
        lmktime = mktime

        last_rev = last_rev + 1 if last_rev >= 0 else 0
        log.debug('Getting revisions from %s to %s',
                  last_rev, last_rev + parse_limit
        )
        for cs in repo[last_rev:last_rev + parse_limit]:
            log.debug('parsing %s', cs)
            last_cs = cs  # remember last parsed changeset
            k = lmktime([cs.date.timetuple()[0], cs.date.timetuple()[1],
                         cs.date.timetuple()[2], 0, 0, 0, 0, 0, 0])

            if akc(cs.author) in co_day_auth_aggr:
                try:
                    l = [timegetter(x) for x in
                         co_day_auth_aggr[akc(cs.author)]['data']]
                    time_pos = l.index(k)
                except ValueError:
                    time_pos = None

                if time_pos >= 0 and time_pos is not None:
                    datadict = \
                        co_day_auth_aggr[akc(cs.author)]['data'][time_pos]

                    datadict["commits"] += 1
                    datadict["added"] += len(cs.added)
                    datadict["changed"] += len(cs.changed)
                    datadict["removed"] += len(cs.removed)
                else:
                    if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
                        datadict = {"time": k,
                                    "commits": 1,
                                    "added": len(cs.added),
                                    "changed": len(cs.changed),
                                    "removed": len(cs.removed),
                                    }
                        co_day_auth_aggr[akc(cs.author)]['data'] \
                            .append(datadict)
            else:
                if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
                    co_day_auth_aggr[akc(cs.author)] = {
                        "label": akc(cs.author),
                        "data": [{"time":k,
                                  "commits":1,
                                  "added":len(cs.added),
                                  "changed":len(cs.changed),
                                  "removed":len(cs.removed),
                                  }],
                        "schema": ["commits"],
                    }

            #gather all data by day
            if k in commits_by_day_aggregate:
                commits_by_day_aggregate[k] += 1
            else:
                commits_by_day_aggregate[k] = 1

        overview_data = sorted(commits_by_day_aggregate.items(),
                               key=itemgetter(0))

        if not co_day_auth_aggr:
            co_day_auth_aggr[akc(repo.contact)] = {
                "label": akc(repo.contact),
                "data": [0, 1],
                "schema": ["commits"],
            }

        stats = cur_stats if cur_stats else Statistics()
        stats.commit_activity = json.dumps(co_day_auth_aggr)
        stats.commit_activity_combined = json.dumps(overview_data)

        log.debug('last revision %s', last_rev)
        leftovers = len(repo.revisions[last_rev:])
        log.debug('revisions to parse %s', leftovers)

        if last_rev == 0 or leftovers < parse_limit:
            log.debug('getting code trending stats')
            stats.languages = json.dumps(__get_codes_stats(repo_name))

        try:
            stats.repository = dbrepo
            stats.stat_on_revision = last_cs.revision if last_cs else 0
            DBS.add(stats)
            DBS.commit()
        except:
            log.error(traceback.format_exc())
            DBS.rollback()
            lock.release()
            return False

        # final release
        lock.release()

        # execute another task if celery is enabled
        if len(repo.revisions) > 1 and CELERY_ON and recurse_limit > 0:
            recurse_limit -= 1
            run_task(get_commits_stats, repo_name, ts_min_y, ts_max_y,
                     recurse_limit)
        if recurse_limit <= 0:
            log.debug('Breaking recursive mode due to reach of recurse limit')
        return True
    except LockHeld:
        log.info('LockHeld')
        return 'Task with key %s already running' % lockkey