def whoosh_index(repo_location, full_index):
    #log = whoosh_index.get_logger()
    from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon

    index_location = config['index_dir']
    WhooshIndexingDaemon(index_location=index_location,
                         repo_location=repo_location, sa=get_session())\
        .run(full_index=full_index)
def reset_user_password(user_email):
    from rhodecode.lib import auth

    log = get_logger(reset_user_password)
    DBS = get_session()

    try:
        try:
            user = User.get_by_email(user_email)
            new_passwd = auth.PasswordGenerator().gen_password(
                8, auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
            if user:
                user.password = auth.get_crypt_password(new_passwd)
                user.api_key = auth.generate_api_key(user.username)
                DBS.add(user)
                DBS.commit()
                log.info('change password for %s' % user_email)
            if new_passwd is None:
                raise Exception('unable to generate new password')
        except:
            log.error(traceback.format_exc())
            DBS.rollback()

        run_task(send_email, user_email,
                 'Your new password',
                 'Your new RhodeCode password:%s' % (new_passwd))
        log.info('send new password mail to %s' % user_email)
    except:
        log.error('Failed to update user password')
        log.error(traceback.format_exc())

    return True
def send_password_link(user_email):
    from rhodecode.model.notification import EmailNotificationModel

    log = get_logger(send_password_link)
    DBS = get_session()

    try:
        user = User.get_by_email(user_email)
        if user:
            log.debug('password reset user found %s' % user)
            link = url('reset_password_confirmation', key=user.api_key,
                       qualified=True)
            reg_type = EmailNotificationModel.TYPE_PASSWORD_RESET
            body = EmailNotificationModel().get_email_tmpl(
                reg_type, **{'user': user.short_contact,
                             'reset_url': link})
            log.debug('sending email')
            run_task(send_email, user_email,
                     _("password reset link"), body)
            log.info('send new password mail to %s' % user_email)
        else:
            log.debug("password reset email %s not found" % user_email)
    except:
        log.error(traceback.format_exc())
        return False

    return True
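# Example (a sketch, not part of the original module): how a controller might
# queue the task above. `run_task(func, *args)` is the dispatch pattern used
# throughout this file; the email address below is purely illustrative.
def _example_trigger_password_link():
    run_task(send_password_link, 'user@example.com')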
def whoosh_index(repo_location, full_index):
    from rhodecode.lib.indexers.daemon import WhooshIndexingDaemon

    log = get_logger(whoosh_index)
    DBS = get_session()

    index_location = config['index_dir']
    WhooshIndexingDaemon(index_location=index_location,
                         repo_location=repo_location, sa=DBS)\
        .run(full_index=full_index)
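# Example (illustrative only): triggering a full rebuild of the Whoosh index
# for everything under the configured repositories root. `get_repos_path` is
# defined later in this file; passing True forces a full re-index.
def _example_rebuild_whoosh_index():
    repos_path = get_repos_path()
    run_task(whoosh_index, repos_path, True)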
def send_email(recipients, subject, body='', html_body=''):
    """
    Sends an email with defined parameters from the .ini files.

    :param recipients: list of recipients, if this is empty the defined email
        address from field 'email_to' is used instead
    :param subject: subject of the mail
    :param body: body of the mail
    :param html_body: html version of body
    """
    log = get_logger(send_email)
    DBS = get_session()

    email_config = config
    subject = "%s %s" % (email_config.get('email_prefix', ''), subject)
    if not recipients:
        # if recipients are not defined we send to email_config + all admins
        admins = [
            u.email for u in User.query().filter(User.admin == True).all()
        ]
        recipients = [email_config.get('email_to')] + admins

    mail_from = email_config.get('app_email_from', 'RhodeCode')
    user = email_config.get('smtp_username')
    passwd = email_config.get('smtp_password')
    mail_server = email_config.get('smtp_server')
    mail_port = email_config.get('smtp_port')
    tls = str2bool(email_config.get('smtp_use_tls'))
    ssl = str2bool(email_config.get('smtp_use_ssl'))
    debug = str2bool(email_config.get('debug'))
    smtp_auth = email_config.get('smtp_auth')

    if not mail_server:
        log.error("SMTP mail server not configured - cannot send mail")
        return False

    try:
        m = SmtpMailer(mail_from, user, passwd, mail_server, smtp_auth,
                       mail_port, ssl, tls, debug=debug)
        m.send(recipients, subject, body, html_body)
    except:
        log.error('Mail sending failed')
        log.error(traceback.format_exc())
        return False
    return True
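# Example (addresses illustrative): sending a plain-text notification. Passing
# an empty recipients list would instead fan out to 'email_to' plus all admin
# accounts, as the fallback branch above shows.
def _example_notify_admins():
    run_task(send_email, ['devops@example.com'],
             'Disk space warning',
             'The repository volume is nearly full.')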
def create_repo_fork(form_data, cur_user):
    from rhodecode.model.repo import RepoModel

    try:
        log = create_repo_fork.get_logger()
    except:
        log = logging.getLogger(__name__)

    repo_model = RepoModel(get_session())
    repo_model.create(form_data, cur_user, just_db=True, fork=True)
    repo_name = form_data['repo_name']
    repos_path = get_repos_path()
    repo_path = os.path.join(repos_path, repo_name)
    repo_fork_path = os.path.join(repos_path, form_data['fork_name'])
    alias = form_data['repo_type']

    log.info('creating repo fork %s as %s', repo_name, repo_path)
    backend = get_backend(alias)
    backend(str(repo_fork_path), create=True, src_url=str(repo_path))
def send_email(recipients, subject, body):
    """
    Sends an email with defined parameters from the .ini files.

    :param recipients: list of recipients, if this is empty the defined email
        address from field 'email_to' is used instead
    :param subject: subject of the mail
    :param body: body of the mail
    """
    try:
        log = send_email.get_logger()
    except:
        log = logging.getLogger(__name__)

    sa = get_session()
    email_config = config

    if not recipients:
        # if recipients are not defined we send to email_config + all admins
        admins = [
            u.email for u in sa.query(User).filter(User.admin == True).all()
        ]
        recipients = [email_config.get('email_to')] + admins

    mail_from = email_config.get('app_email_from')
    user = email_config.get('smtp_username')
    passwd = email_config.get('smtp_password')
    mail_server = email_config.get('smtp_server')
    mail_port = email_config.get('smtp_port')
    tls = str2bool(email_config.get('smtp_use_tls'))
    ssl = str2bool(email_config.get('smtp_use_ssl'))
    debug = str2bool(config.get('debug'))
    smtp_auth = email_config.get('smtp_auth')

    try:
        m = SmtpMailer(mail_from, user, passwd, mail_server, smtp_auth,
                       mail_port, ssl, tls, debug=debug)
        m.send(recipients, subject, body)
    except:
        log.error('Mail sending failed')
        log.error(traceback.format_exc())
        return False
    return True
def create_repo_fork(form_data, cur_user):
    """
    Creates a fork of repository using internal VCS methods

    :param form_data:
    :param cur_user:
    """
    from rhodecode.model.repo import RepoModel

    log = get_logger(create_repo_fork)
    DBS = get_session()

    base_path = Repository.base_path()

    fork_repo = RepoModel(DBS).create(form_data, cur_user,
                                      just_db=True, fork=True)

    alias = form_data['repo_type']
    org_repo_name = form_data['org_path']
    fork_name = form_data['repo_name_full']
    update_after_clone = form_data['update_after_clone']
    source_repo_path = os.path.join(base_path, org_repo_name)
    destination_fork_path = os.path.join(base_path, fork_name)

    log.info('creating fork of %s as %s', source_repo_path,
             destination_fork_path)
    backend = get_backend(alias)
    backend(safe_str(destination_fork_path), create=True,
            src_url=safe_str(source_repo_path),
            update_after_clone=update_after_clone)
    log_create_repository(fork_repo.get_dict(), created_by=cur_user.username)

    action_logger(cur_user, 'user_forked_repo:%s' % fork_name,
                  org_repo_name, '', DBS)
    action_logger(cur_user, 'user_created_fork:%s' % fork_name,
                  fork_name, '', DBS)
    # finally commit at latest possible stage
    DBS.commit()
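# Example (all values hypothetical): the shape of `form_data` this version of
# the fork task reads directly. Only the keys used in the body above are
# shown; RepoModel.create will expect its own form fields on top of these.
_example_fork_form_data = {
    'repo_type': 'hg',
    'org_path': 'project/original-repo',
    'repo_name_full': 'project/forked-repo',
    'update_after_clone': False,
}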
def reset_user_password(user_email):
    try:
        log = reset_user_password.get_logger()
    except:
        log = logging.getLogger(__name__)

    from rhodecode.lib import auth

    try:
        try:
            sa = get_session()
            user = sa.query(User).filter(User.email == user_email).scalar()
            new_passwd = auth.PasswordGenerator().gen_password(
                8, auth.PasswordGenerator.ALPHABETS_BIG_SMALL)
            if user:
                user.password = auth.get_crypt_password(new_passwd)
                user.api_key = auth.generate_api_key(user.username)
                sa.add(user)
                sa.commit()
                log.info('change password for %s', user_email)
            if new_passwd is None:
                raise Exception('unable to generate new password')
        except:
            log.error(traceback.format_exc())
            sa.rollback()

        run_task(send_email, user_email,
                 "Your new RhodeCode password",
                 'Your new RhodeCode password:%s' % (new_passwd))
        log.info('send new password mail to %s', user_email)
    except:
        log.error('Failed to update user password')
        log.error(traceback.format_exc())

    return True
def send_password_link(user_email):
    try:
        log = send_password_link.get_logger()
    except:
        log = logging.getLogger(__name__)

    from rhodecode.lib import auth

    try:
        sa = get_session()
        user = sa.query(User).filter(User.email == user_email).scalar()

        if user:
            link = url('reset_password_confirmation', key=user.api_key,
                       qualified=True)
            tmpl = """
Hello %s

We received a request to create a new password for your account.

You can generate it by clicking following URL:

%s

If you didn't request new password please ignore this email.
            """
            run_task(send_email, user_email,
                     "RhodeCode password reset link",
                     tmpl % (user.short_contact, link))
            log.info('send new password mail to %s', user_email)
    except:
        log.error('Failed to update user password')
        log.error(traceback.format_exc())
        return False

    return True
def get_commits_stats(repo_name, ts_min_y, ts_max_y, recurse_limit=100):
    log = get_logger(get_commits_stats)
    DBS = get_session()
    lockkey = __get_lockkey('get_commits_stats', repo_name, ts_min_y,
                            ts_max_y)
    lockkey_path = config['app_conf']['cache_dir']

    log.info('running task with lockkey %s' % lockkey)

    try:
        lock = l = DaemonLock(file_=jn(lockkey_path, lockkey))

        # for js data compatibility cleans the key for person from '
        akc = lambda k: person(k).replace('"', "")

        co_day_auth_aggr = {}
        commits_by_day_aggregate = {}
        repo = Repository.get_by_repo_name(repo_name)
        if repo is None:
            return True

        repo = repo.scm_instance
        repo_size = repo.count()
        # return if repo has no revisions
        if repo_size < 1:
            lock.release()
            return True

        skip_date_limit = True
        parse_limit = int(config['app_conf'].get('commit_parse_limit'))
        last_rev = None
        last_cs = None
        timegetter = itemgetter('time')

        dbrepo = DBS.query(Repository)\
            .filter(Repository.repo_name == repo_name).scalar()
        cur_stats = DBS.query(Statistics)\
            .filter(Statistics.repository == dbrepo).scalar()

        if cur_stats is not None:
            last_rev = cur_stats.stat_on_revision

        if last_rev == repo.get_changeset().revision and repo_size > 1:
            # pass silently without any work if we're not on first revision or
            # current state of parsing revision(from db marker) is the
            # last revision
            lock.release()
            return True

        if cur_stats:
            commits_by_day_aggregate = OrderedDict(
                json.loads(cur_stats.commit_activity_combined))
            co_day_auth_aggr = json.loads(cur_stats.commit_activity)

        log.debug('starting parsing %s' % parse_limit)
        lmktime = mktime

        last_rev = last_rev + 1 if last_rev >= 0 else 0
        log.debug('Getting revisions from %s to %s' %
                  (last_rev, last_rev + parse_limit))
        for cs in repo[last_rev:last_rev + parse_limit]:
            log.debug('parsing %s' % cs)
            last_cs = cs  # remember last parsed changeset
            k = lmktime([
                cs.date.timetuple()[0], cs.date.timetuple()[1],
                cs.date.timetuple()[2], 0, 0, 0, 0, 0, 0
            ])

            if akc(cs.author) in co_day_auth_aggr:
                try:
                    l = [
                        timegetter(x) for x in
                        co_day_auth_aggr[akc(cs.author)]['data']
                    ]
                    time_pos = l.index(k)
                except ValueError:
                    time_pos = None

                if time_pos >= 0 and time_pos is not None:

                    datadict = \
                        co_day_auth_aggr[akc(cs.author)]['data'][time_pos]

                    datadict["commits"] += 1
                    datadict["added"] += len(cs.added)
                    datadict["changed"] += len(cs.changed)
                    datadict["removed"] += len(cs.removed)

                else:
                    if k >= ts_min_y and k <= ts_max_y or skip_date_limit:

                        datadict = {
                            "time": k,
                            "commits": 1,
                            "added": len(cs.added),
                            "changed": len(cs.changed),
                            "removed": len(cs.removed),
                        }
                        co_day_auth_aggr[akc(cs.author)]['data']\
                            .append(datadict)

            else:
                if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
                    co_day_auth_aggr[akc(cs.author)] = {
                        "label": akc(cs.author),
                        "data": [{
                            "time": k,
                            "commits": 1,
                            "added": len(cs.added),
                            "changed": len(cs.changed),
                            "removed": len(cs.removed),
                        }],
                        "schema": ["commits"],
                    }

            #gather all data by day
            if k in commits_by_day_aggregate:
                commits_by_day_aggregate[k] += 1
            else:
                commits_by_day_aggregate[k] = 1

        overview_data = sorted(commits_by_day_aggregate.items(),
                               key=itemgetter(0))

        if not co_day_auth_aggr:
            co_day_auth_aggr[akc(repo.contact)] = {
                "label": akc(repo.contact),
                "data": [0, 1],
                "schema": ["commits"],
            }

        stats = cur_stats if cur_stats else Statistics()
        stats.commit_activity = json.dumps(co_day_auth_aggr)
        stats.commit_activity_combined = json.dumps(overview_data)

        log.debug('last revision %s' % last_rev)
        leftovers = len(repo.revisions[last_rev:])
        log.debug('revisions to parse %s' % leftovers)

        if last_rev == 0 or leftovers < parse_limit:
            log.debug('getting code trending stats')
            stats.languages = json.dumps(__get_codes_stats(repo_name))

        try:
            stats.repository = dbrepo
            stats.stat_on_revision = last_cs.revision if last_cs else 0
            DBS.add(stats)
            DBS.commit()
        except:
            log.error(traceback.format_exc())
            DBS.rollback()
            lock.release()
            return False

        # final release
        lock.release()

        # execute another task if celery is enabled
        if len(repo.revisions) > 1 and CELERY_ON and recurse_limit > 0:
            recurse_limit -= 1
            run_task(get_commits_stats, repo_name, ts_min_y, ts_max_y,
                     recurse_limit)
        if recurse_limit <= 0:
            log.debug('Breaking recursive mode due to reach of recurse limit')
        return True
    except LockHeld:
        log.info('LockHeld')
        return 'Task with key %s already running' % lockkey
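# The lock key helper used above is not shown in this section. A minimal
# sketch of what it must provide - a stable, filename-safe key per
# (task, arguments) combination. Only the call signature is taken from the
# uses above; the md5-based naming scheme is an assumption.
from hashlib import md5

def __get_lockkey(func, *fargs, **fkwargs):
    params = list(fargs)
    params.extend(['%s-%s' % ar for ar in fkwargs.items()])

    func_name = str(func.__name__) if hasattr(func, '__name__') else str(func)

    # one lock file per unique task invocation signature
    lockkey = 'task_%s.lock' % \
        md5(func_name + '-' + '-'.join(map(str, params))).hexdigest()
    return lockkey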
def create_repo_fork(form_data, cur_user):
    """
    Creates a fork of repository using internal VCS methods

    :param form_data:
    :param cur_user:
    """
    from rhodecode.model.repo import RepoModel
    from rhodecode.model.user import UserModel

    log = get_logger(create_repo_fork)
    DBS = get_session()

    base_path = Repository.base_path()
    cur_user = UserModel(DBS)._get_user(cur_user)

    fork_name = form_data['repo_name_full']
    repo_type = form_data['repo_type']
    description = form_data['description']
    owner = cur_user
    private = form_data['private']
    clone_uri = form_data.get('clone_uri')
    repos_group = form_data['repo_group']
    landing_rev = form_data['landing_rev']
    copy_fork_permissions = form_data.get('copy_permissions')
    fork_of = RepoModel(DBS)._get_repo(form_data.get('fork_parent_id'))

    fork_repo = RepoModel(DBS).create_repo(
        fork_name, repo_type, description, owner, private, clone_uri,
        repos_group, landing_rev, just_db=True, fork_of=fork_of,
        copy_fork_permissions=copy_fork_permissions)

    update_after_clone = form_data['update_after_clone']

    source_repo_path = os.path.join(base_path, fork_of.repo_name)
    destination_fork_path = os.path.join(base_path, fork_name)

    log.info('creating fork of %s as %s', source_repo_path,
             destination_fork_path)
    backend = get_backend(repo_type)

    if repo_type == 'git':
        r = backend(safe_str(destination_fork_path), create=True,
                    src_url=safe_str(source_repo_path),
                    update_after_clone=update_after_clone,
                    bare=True)
        # add rhodecode hook into this repo
        ScmModel().install_git_hook(repo=r)
    elif repo_type == 'hg':
        r = backend(safe_str(destination_fork_path), create=True,
                    src_url=safe_str(source_repo_path),
                    update_after_clone=update_after_clone)
    else:
        raise Exception('Unknown backend type %s' % repo_type)

    log_create_repository(fork_repo.get_dict(), created_by=cur_user.username)

    action_logger(cur_user, 'user_forked_repo:%s' % fork_name,
                  fork_of.repo_name, '', DBS)
    action_logger(cur_user, 'user_created_fork:%s' % fork_name,
                  fork_name, '', DBS)
    # finally commit at latest possible stage
    DBS.commit()
    fork_repo.update_changeset_cache()
def get_repos_path():
    sa = get_session()
    q = sa.query(RhodeCodeUi).filter(RhodeCodeUi.ui_key == '/').one()
    return q.ui_value
def create_repo_fork(form_data, cur_user):
    """
    Creates a fork of repository using internal VCS methods

    :param form_data:
    :param cur_user:
    """
    from rhodecode.model.repo import RepoModel
    from rhodecode.model.user import UserModel

    log = get_logger(create_repo_fork)
    DBS = get_session()

    cur_user = UserModel(DBS)._get_user(cur_user)
    owner = cur_user

    repo_name = form_data['repo_name']  # fork in this case
    repo_name_full = form_data['repo_name_full']
    repo_type = form_data['repo_type']
    description = form_data['description']
    private = form_data['private']
    clone_uri = form_data.get('clone_uri')
    repo_group = safe_int(form_data['repo_group'])
    landing_rev = form_data['landing_rev']
    copy_fork_permissions = form_data.get('copy_permissions')
    fork_id = safe_int(form_data.get('fork_parent_id'))

    try:
        fork_of = RepoModel(DBS)._get_repo(fork_id)
        RepoModel(DBS)._create_repo(
            repo_name=repo_name_full,
            repo_type=repo_type,
            description=description,
            owner=owner,
            private=private,
            clone_uri=clone_uri,
            repo_group=repo_group,
            landing_rev=landing_rev,
            fork_of=fork_of,
            copy_fork_permissions=copy_fork_permissions)
        action_logger(cur_user, 'user_forked_repo:%s' % repo_name_full,
                      fork_of.repo_name, '', DBS)
        DBS.commit()

        base_path = Repository.base_path()
        source_repo_path = os.path.join(base_path, fork_of.repo_name)

        # now create this repo on Filesystem
        RepoModel(DBS)._create_filesystem_repo(
            repo_name=repo_name,
            repo_type=repo_type,
            repo_group=RepoModel(DBS)._get_repo_group(repo_group),
            clone_uri=source_repo_path,
        )
        repo = Repository.get_by_repo_name(repo_name_full)
        log_create_repository(created_by=owner.username, **repo.get_dict())

        # update repo commit caches initially
        config = repo._config
        config.set('extensions', 'largefiles', '')
        repo.update_commit_cache(config=config)

        # set new created state
        repo.set_state(Repository.STATE_CREATED)
        DBS.commit()
    except Exception as e:
        log.warning('Exception %s occurred when forking repository, '
                    'doing cleanup...', e)
        # rollback things manually !
        repo = Repository.get_by_repo_name(repo_name_full)
        if repo:
            Repository.delete(repo.repo_id)
            DBS.commit()
            RepoModel(DBS)._delete_filesystem_repo(repo)
        raise

    # it's an odd fix to make celery fail task when exception occurs
    def on_failure(self, *args, **kwargs):
        pass

    return True
def create_repo(form_data, cur_user):
    from rhodecode.model.repo import RepoModel
    from rhodecode.model.user import UserModel
    from rhodecode.model.settings import SettingsModel

    log = get_logger(create_repo)
    DBS = get_session()

    cur_user = UserModel(DBS)._get_user(cur_user)
    owner = cur_user

    repo_name = form_data['repo_name']
    repo_name_full = form_data['repo_name_full']
    repo_type = form_data['repo_type']
    description = form_data['repo_description']
    private = form_data['repo_private']
    clone_uri = form_data.get('clone_uri')
    repo_group = safe_int(form_data['repo_group'])
    landing_rev = form_data['repo_landing_rev']
    copy_fork_permissions = form_data.get('copy_permissions')
    copy_group_permissions = form_data.get('repo_copy_permissions')
    fork_of = form_data.get('fork_parent_id')
    state = form_data.get('repo_state', Repository.STATE_PENDING)

    # repo creation defaults, private and repo_type are filled in form
    defs = SettingsModel().get_default_repo_settings(strip_prefix=True)
    enable_statistics = form_data.get(
        'enable_statistics', defs.get('repo_enable_statistics'))
    enable_locking = form_data.get(
        'enable_locking', defs.get('repo_enable_locking'))
    enable_downloads = form_data.get(
        'enable_downloads', defs.get('repo_enable_downloads'))

    try:
        RepoModel(DBS)._create_repo(
            repo_name=repo_name_full,
            repo_type=repo_type,
            description=description,
            owner=owner,
            private=private,
            clone_uri=clone_uri,
            repo_group=repo_group,
            landing_rev=landing_rev,
            fork_of=fork_of,
            copy_fork_permissions=copy_fork_permissions,
            copy_group_permissions=copy_group_permissions,
            enable_statistics=enable_statistics,
            enable_locking=enable_locking,
            enable_downloads=enable_downloads,
            state=state)

        action_logger(cur_user, 'user_created_repo', repo_name_full, '', DBS)
        DBS.commit()

        # now create this repo on Filesystem
        RepoModel(DBS)._create_filesystem_repo(
            repo_name=repo_name,
            repo_type=repo_type,
            repo_group=RepoModel(DBS)._get_repo_group(repo_group),
            clone_uri=clone_uri,
        )
        repo = Repository.get_by_repo_name(repo_name_full)
        log_create_repository(created_by=owner.username, **repo.get_dict())

        # update repo commit caches initially
        repo.update_commit_cache()

        # set new created state
        repo.set_state(Repository.STATE_CREATED)
        DBS.commit()
    except Exception as e:
        log.warning('Exception %s occurred when creating repository, '
                    'doing cleanup...', e)
        # rollback things manually !
        repo = Repository.get_by_repo_name(repo_name_full)
        if repo:
            Repository.delete(repo.repo_id)
            DBS.commit()
            RepoModel(DBS)._delete_filesystem_repo(repo)
        raise

    # it's an odd fix to make celery fail task when exception occurs
    def on_failure(self, *args, **kwargs):
        pass

    return True
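# Example (every value hypothetical, keys inferred from the reads above): a
# minimal `form_data` payload for create_repo. Keys not listed fall back to
# the defaults pulled from SettingsModel.
_example_create_repo_form_data = {
    'repo_name': 'my-repo',
    'repo_name_full': 'group/my-repo',
    'repo_type': 'git',
    'repo_description': 'demo repository',
    'repo_private': False,
    'repo_group': 3,
    'repo_landing_rev': 'tip',
}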
def get_commits_stats(repo_name, ts_min_y, ts_max_y):
    log = get_logger(get_commits_stats)
    DBS = get_session()
    lockkey = __get_lockkey('get_commits_stats', repo_name, ts_min_y,
                            ts_max_y)
    lockkey_path = config['here']

    log.info('running task with lockkey %s' % lockkey)

    try:
        lock = l = DaemonLock(file_=jn(lockkey_path, lockkey))

        # for js data compatibility cleans the key for person from '
        akc = lambda k: person(k).replace('"', "")

        co_day_auth_aggr = {}
        commits_by_day_aggregate = {}
        repo = Repository.get_by_repo_name(repo_name)
        if repo is None:
            return True

        repo = repo.scm_instance
        repo_size = repo.count()
        # return if repo has no revisions
        if repo_size < 1:
            lock.release()
            return True

        skip_date_limit = True
        parse_limit = int(config['app_conf'].get('commit_parse_limit'))
        last_rev = None
        last_cs = None
        timegetter = itemgetter('time')

        dbrepo = DBS.query(Repository)\
            .filter(Repository.repo_name == repo_name).scalar()
        cur_stats = DBS.query(Statistics)\
            .filter(Statistics.repository == dbrepo).scalar()

        if cur_stats is not None:
            last_rev = cur_stats.stat_on_revision

        if last_rev == repo.get_changeset().revision and repo_size > 1:
            # pass silently without any work if we're not on first revision or
            # current state of parsing revision(from db marker) is the
            # last revision
            lock.release()
            return True

        if cur_stats:
            commits_by_day_aggregate = OrderedDict(json.loads(
                cur_stats.commit_activity_combined))
            co_day_auth_aggr = json.loads(cur_stats.commit_activity)

        log.debug('starting parsing %s' % parse_limit)
        lmktime = mktime

        last_rev = last_rev + 1 if last_rev >= 0 else 0
        log.debug('Getting revisions from %s to %s' % (
            last_rev, last_rev + parse_limit)
        )
        for cs in repo[last_rev:last_rev + parse_limit]:
            log.debug('parsing %s' % cs)
            last_cs = cs  # remember last parsed changeset
            k = lmktime([cs.date.timetuple()[0], cs.date.timetuple()[1],
                         cs.date.timetuple()[2], 0, 0, 0, 0, 0, 0])

            if akc(cs.author) in co_day_auth_aggr:
                try:
                    l = [timegetter(x) for x in
                         co_day_auth_aggr[akc(cs.author)]['data']]
                    time_pos = l.index(k)
                except ValueError:
                    time_pos = False

                if time_pos >= 0 and time_pos is not False:

                    datadict = \
                        co_day_auth_aggr[akc(cs.author)]['data'][time_pos]

                    datadict["commits"] += 1
                    datadict["added"] += len(cs.added)
                    datadict["changed"] += len(cs.changed)
                    datadict["removed"] += len(cs.removed)

                else:
                    if k >= ts_min_y and k <= ts_max_y or skip_date_limit:

                        datadict = {"time": k,
                                    "commits": 1,
                                    "added": len(cs.added),
                                    "changed": len(cs.changed),
                                    "removed": len(cs.removed),
                                    }
                        co_day_auth_aggr[akc(cs.author)]['data']\
                            .append(datadict)

            else:
                if k >= ts_min_y and k <= ts_max_y or skip_date_limit:
                    co_day_auth_aggr[akc(cs.author)] = {
                        "label": akc(cs.author),
                        "data": [{"time": k,
                                  "commits": 1,
                                  "added": len(cs.added),
                                  "changed": len(cs.changed),
                                  "removed": len(cs.removed),
                                  }],
                        "schema": ["commits"],
                    }

            #gather all data by day
            if k in commits_by_day_aggregate:
                commits_by_day_aggregate[k] += 1
            else:
                commits_by_day_aggregate[k] = 1

        overview_data = sorted(commits_by_day_aggregate.items(),
                               key=itemgetter(0))

        if not co_day_auth_aggr:
            co_day_auth_aggr[akc(repo.contact)] = {
                "label": akc(repo.contact),
                "data": [0, 1],
                "schema": ["commits"],
            }

        stats = cur_stats if cur_stats else Statistics()
        stats.commit_activity = json.dumps(co_day_auth_aggr)
        stats.commit_activity_combined = json.dumps(overview_data)

        log.debug('last revision %s' % last_rev)
        leftovers = len(repo.revisions[last_rev:])
        log.debug('revisions to parse %s' % leftovers)

        if last_rev == 0 or leftovers < parse_limit:
            log.debug('getting code trending stats')
            stats.languages = json.dumps(__get_codes_stats(repo_name))

        try:
            stats.repository = dbrepo
            stats.stat_on_revision = last_cs.revision if last_cs else 0
            DBS.add(stats)
            DBS.commit()
        except:
            log.error(traceback.format_exc())
            DBS.rollback()
            lock.release()
            return False

        # final release
        lock.release()

        # execute another task if celery is enabled
        if len(repo.revisions) > 1 and CELERY_ON:
            run_task(get_commits_stats, repo_name, ts_min_y, ts_max_y)

        return True
    except LockHeld:
        log.info('LockHeld')
        return 'Task with key %s already running' % lockkey
def create_repo_fork(form_data, cur_user):
    """
    Creates a fork of repository using internal VCS methods

    :param form_data:
    :param cur_user:
    """
    from rhodecode.model.repo import RepoModel
    from rhodecode.model.user import UserModel

    log = get_logger(create_repo_fork)
    DBS = get_session()

    base_path = Repository.base_path()
    cur_user = UserModel(DBS)._get_user(cur_user)

    fork_name = form_data['repo_name_full']
    repo_type = form_data['repo_type']
    description = form_data['description']
    owner = cur_user
    private = form_data['private']
    clone_uri = form_data.get('clone_uri')
    repos_group = form_data['repo_group']
    landing_rev = form_data['landing_rev']
    copy_fork_permissions = form_data.get('copy_permissions')
    fork_of = RepoModel(DBS)._get_repo(form_data.get('fork_parent_id'))

    fork_repo = RepoModel(DBS).create_repo(
        fork_name, repo_type, description, owner, private, clone_uri,
        repos_group, landing_rev, just_db=True, fork_of=fork_of,
        copy_fork_permissions=copy_fork_permissions
    )

    update_after_clone = form_data['update_after_clone']

    source_repo_path = os.path.join(base_path, fork_of.repo_name)
    destination_fork_path = os.path.join(base_path, fork_name)

    log.info('creating fork of %s as %s', source_repo_path,
             destination_fork_path)
    backend = get_backend(repo_type)

    if repo_type == 'git':
        backend(safe_str(destination_fork_path), create=True,
                src_url=safe_str(source_repo_path),
                update_after_clone=update_after_clone,
                bare=True)
    elif repo_type == 'hg':
        backend(safe_str(destination_fork_path), create=True,
                src_url=safe_str(source_repo_path),
                update_after_clone=update_after_clone)
    else:
        raise Exception('Unknown backend type %s' % repo_type)

    log_create_repository(fork_repo.get_dict(), created_by=cur_user.username)

    action_logger(cur_user, 'user_forked_repo:%s' % fork_name,
                  fork_of.repo_name, '', DBS)
    action_logger(cur_user, 'user_created_fork:%s' % fork_name,
                  fork_name, '', DBS)
    # finally commit at latest possible stage
    DBS.commit()