def repo_restored_cb(sender, **kwargs):
    """Log a 'recover' user activity when a library is restored from trash.

    Expected kwargs: 'repo_id' of the restored library and 'operator'
    (the user who performed the restore).
    """
    repo_id, op_user = kwargs['repo_id'], kwargs['operator']
    repo = seafile_api.get_repo(repo_id)
    org_id = get_org_id_by_repo_id(repo_id)

    # Collect everyone the repo is shared to, plus its owner.
    if org_id > 0:
        audience = seafile_api.org_get_shared_users_by_repo(org_id, repo_id)
        owner = seafile_api.get_org_repo_owner(repo_id)
    else:
        audience = seafile_api.get_shared_users_by_repo(repo_id)
        owner = seafile_api.get_repo_owner(repo_id)

    if owner not in audience:
        audience.append(owner)

    record = {
        'op_type': 'recover',
        'obj_type': 'repo',
        'timestamp': datetime.datetime.utcnow(),
        'repo_id': repo_id,
        'repo_name': repo.repo_name,
        'path': '/',
        'op_user': op_user,
        'related_users': audience,
        'org_id': org_id,
    }

    # Imported lazily so module import does not pull in the session machinery.
    from utils import SeafEventsSession
    session = SeafEventsSession()
    seafevents.save_user_activity(session, record)
    session.close()
def get_related_users_by_repo(repo_id, org_id=None):
    """Return every user who can view this library.

    The result covers, in order of collection:
      1. users the repo has been shared to,
      2. the repo owner,
      3. members of groups the repo has been shared to.
    Each user appears at most once.
    """
    if org_id and org_id > 0:
        users = list(seafile_api.org_get_shared_users_by_repo(org_id, repo_id))
        owner = seafile_api.get_org_repo_owner(repo_id)
    else:
        users = list(seafile_api.get_shared_users_by_repo(repo_id))
        owner = seafile_api.get_repo_owner(repo_id)

    if owner not in users:
        users.append(owner)

    # Fold in members of every group the repo is shared to, skipping
    # anyone already collected above.
    for group in get_shared_groups_by_repo(repo_id, org_id):
        for member in ccnet_api.get_group_members(group.id):
            name = member.user_name
            if name not in users:
                users.append(name)

    return users
def DraftPublishEventHandler(session, msg):
    """Translate a draft-publish event message into a user-activity record.

    The message body is six tab-separated fields:
    op_type, obj_type, repo_id, op_user, path, old_path.
    Malformed messages are logged and dropped.
    """
    fields = msg['content'].split('\t')
    if len(fields) != 6:
        logging.warning("got bad message: %s", fields)
        return

    op_type, obj_type, repo_id, op_user, path, old_path = fields
    repo = seafile_api.get_repo(repo_id)

    record = {
        "timestamp": datetime.datetime.utcfromtimestamp(msg['ctime']),
        "op_type": op_type,
        "obj_type": obj_type,
        "repo_id": repo_id,
        "repo_name": repo.name if repo else '',
        "op_user": op_user,
        "path": path,
        "old_path": old_path,
    }

    # Audience = share list plus the owner.
    org_id = get_org_id_by_repo_id(repo_id)
    if org_id > 0:
        users = list(seafile_api.org_get_shared_users_by_repo(org_id, repo_id))
        owner = seafile_api.get_org_repo_owner(repo_id)
    else:
        users = list(seafile_api.get_shared_users_by_repo(repo_id))
        owner = seafile_api.get_repo_owner(repo_id)
    if owner not in users:
        users.append(owner)
    if not users:
        return

    record["related_users"] = users
    save_user_activity(session, record)
def repo_restored_cb(sender, **kwargs):
    """Record a 'recover' user activity when a library is restored from trash.

    Expected kwargs: 'repo_id' and 'operator' (the user who restored it).
    """
    repo_id = kwargs['repo_id']
    operator = kwargs['operator']
    repo = seafile_api.get_repo(repo_id)
    org_id = get_org_id_by_repo_id(repo_id)

    if org_id > 0:
        related_users = seafile_api.org_get_shared_users_by_repo(org_id, repo_id)
        repo_owner = seafile_api.get_org_repo_owner(repo_id)
    else:
        related_users = seafile_api.get_shared_users_by_repo(repo_id)
        repo_owner = seafile_api.get_repo_owner(repo_id)

    # FIX: guard the append so the owner is not duplicated when already
    # present in the share list (matches the other activity handlers).
    if repo_owner not in related_users:
        related_users.append(repo_owner)

    record = {
        'op_type': 'recover',
        'obj_type': 'repo',
        'timestamp': datetime.datetime.utcnow(),
        'repo_id': repo_id,
        'repo_name': repo.repo_name,
        'path': '/',
        'op_user': operator,
        # FIX: was '[related_users]', which stored a nested list instead of
        # a flat list of user names as every other handler records.
        'related_users': related_users,
        'org_id': org_id,
    }

    # Imported lazily to avoid model import during Django setup.
    from utils import SeafEventsSession
    session = SeafEventsSession()
    seafevents.save_user_activity(session, record)
    session.close()
def clean_up_repo_trash_cb(sender, **kwargs):
    """When a repo trash is deleted, record a 'clean-up-trash' activity.

    Expected kwargs: 'org_id', 'operator', 'repo_id', 'repo_name',
    'repo_owner', and optionally 'days' (retention window cleaned).
    """
    org_id = kwargs['org_id']
    operator = kwargs['operator']
    repo_id = kwargs['repo_id']
    days = kwargs.get('days', None)
    repo_name = kwargs['repo_name']
    repo_owner = kwargs['repo_owner']

    if org_id > 0:
        related_users = seafile_api.org_get_shared_users_by_repo(org_id, repo_id)
    else:
        related_users = seafile_api.get_shared_users_by_repo(repo_id)
        org_id = -1  # normalize non-org repos to -1 for the record

    # FIX: guard the append so the owner is not duplicated when already
    # present in the share list (consistent with the other handlers).
    if repo_owner not in related_users:
        related_users.append(repo_owner)

    record = {
        'op_type': 'clean-up-trash',
        'obj_type': 'repo',
        'timestamp': datetime.datetime.utcnow(),
        'repo_id': repo_id,
        'repo_name': repo_name,
        'path': '/',
        'days': days,
        'op_user': operator,
        'related_users': related_users,
        'org_id': org_id,
    }

    # Imported lazily to avoid model import during Django setup.
    from utils import SeafEventsSession
    session = SeafEventsSession()
    seafevents.save_user_activity(session, record)
    session.close()
def repo_created_cb(sender, **kwargs):
    """Record a 'create' activity for a new library and, when a matching
    library template is configured, pre-create its directory skeleton.

    Expected kwargs: 'org_id', 'creator', 'repo_id', 'repo_name',
    'library_template'.
    """
    org_id = kwargs['org_id']
    creator = kwargs['creator']
    repo_id = kwargs['repo_id']
    repo_name = kwargs['repo_name']

    # Session import stays local to avoid model import during Django setup.
    # TODO: Don't register signal/handlers during Seahub start.
    if org_id > 0:
        audience = seafile_api.org_get_shared_users_by_repo(org_id, repo_id)
    else:
        audience = seafile_api.get_shared_users_by_repo(repo_id)
        org_id = -1

    if creator not in audience:
        audience.append(creator)

    record = {
        'op_type': 'create',
        'obj_type': 'repo',
        'timestamp': datetime.datetime.utcnow(),
        'repo_id': repo_id,
        'repo_name': repo_name,
        'path': '/',
        'op_user': creator,
        'related_users': audience,
        'org_id': org_id,
    }

    from utils import SeafEventsSession
    session = SeafEventsSession()
    seafevents.save_user_activity(session, record)
    session.close()

    # Optionally seed the new library from a configured template.
    templates = getattr(settings, 'LIBRARY_TEMPLATES', {})
    template = kwargs['library_template']
    if templates and template:
        if isinstance(template, unicode):  # Python 2: normalize to bytes
            template = template.encode('utf-8')
        try:
            for dir_path in templates[template]:
                seafile_api.mkdir_with_parents(repo_id, '/',
                                               dir_path.strip('/'),
                                               audience)
        except Exception as e:
            logger.error(e)
def repo_created_cb(sender, **kwargs):
    """Record a 'create' activity for a new library and, when a matching
    library template is configured, pre-create its directory skeleton.

    Expected kwargs: 'org_id', 'creator', 'repo_id', 'repo_name',
    'library_template'.
    """
    org_id = kwargs['org_id']
    creator = kwargs['creator']
    repo_id = kwargs['repo_id']
    repo_name = kwargs['repo_name']

    # Move here to avoid model import during Django setup.
    # TODO: Don't register signal/handlers during Seahub start.
    if org_id > 0:
        related_users = seafile_api.org_get_shared_users_by_repo(org_id, repo_id)
    else:
        related_users = seafile_api.get_shared_users_by_repo(repo_id)
        org_id = -1

    # FIX: guard the append so the creator is not duplicated when the repo
    # is already shared to them (matches the sibling repo_created_cb).
    if creator not in related_users:
        related_users.append(creator)

    record = {
        'op_type': 'create',
        'obj_type': 'repo',
        'timestamp': datetime.datetime.utcnow(),
        'repo_id': repo_id,
        'repo_name': repo_name,
        'path': '/',
        'op_user': creator,
        'related_users': related_users,
        'org_id': org_id,
    }

    from utils import SeafEventsSession
    session = SeafEventsSession()
    seafevents.save_user_activity(session, record)
    session.close()

    # Optionally seed the new library from a configured template.
    LIBRARY_TEMPLATES = getattr(settings, 'LIBRARY_TEMPLATES', {})
    library_template = kwargs['library_template']
    if LIBRARY_TEMPLATES and library_template:
        if isinstance(library_template, unicode):  # Python 2: normalize to bytes
            library_template = library_template.encode('utf-8')
        try:
            dir_path_list = LIBRARY_TEMPLATES[library_template]
            for dir_path in dir_path_list:
                seafile_api.mkdir_with_parents(repo_id, '/',
                                               dir_path.strip('/'),
                                               related_users)
        except Exception as e:
            logger.error(e)
def repo_deleted_cb(sender, **kwargs):
    """When a repo is deleted, an event would be added to every user in all
    groups to which this repo is shared.
    """
    org_id = kwargs['org_id']
    op_user = kwargs['operator']
    owner = kwargs['repo_owner']
    repo_id = kwargs['repo_id']
    repo_name = kwargs['repo_name']

    if org_id and org_id > 0:
        audience = seafile_api.org_get_shared_users_by_repo(org_id, repo_id)
    else:
        audience = seafile_api.get_shared_users_by_repo(repo_id)
        org_id = -1  # normalize non-org repos to -1

    if owner not in audience:
        audience.append(owner)

    record = {
        'op_type': 'delete',
        'obj_type': 'repo',
        'timestamp': datetime.datetime.utcnow(),
        'repo_id': repo_id,
        'repo_name': repo_name,
        'path': '/',
        'op_user': op_user,
        # org_id is already either a positive org id or -1 at this point.
        'related_users': audience,
        'org_id': org_id,
    }

    # Imported lazily to avoid model import during Django setup.
    from .utils import SeafEventsSession
    session = SeafEventsSession()
    seafevents.save_user_activity(session, record)
    session.close()

    # KEEPER
    logging.info("REPO DELETE EVENT repo_name: %s, repo_id: %s" % (repo_name, repo_id))
def clean_up_repo_trash_cb(sender, **kwargs):
    """When a repo trash is deleted, record a 'clean-up-trash' activity.

    Expected kwargs: 'org_id', 'operator', 'repo_id', 'repo_name',
    'repo_owner', and optionally 'days' (retention window cleaned).
    """
    org_id = kwargs['org_id']
    operator = kwargs['operator']
    repo_id = kwargs['repo_id']
    days = kwargs.get('days', None)
    repo_name = kwargs['repo_name']
    repo_owner = kwargs['repo_owner']

    # The share APIs here return objects; keep only the user names.
    if org_id > 0:
        related_users = [
            r.user for r in seafile_api.org_get_shared_users_by_repo(
                org_id, repo_id)
        ]
    else:
        related_users = [
            r.user for r in seafile_api.get_shared_users_by_repo(repo_id)
        ]
        org_id = -1  # normalize non-org repos to -1 for the record

    # FIX: guard the append so the owner is not duplicated when already
    # present in the share list (consistent with the other handlers).
    if repo_owner not in related_users:
        related_users.append(repo_owner)

    record = {
        'op_type': 'clean-up-trash',
        'obj_type': 'repo',
        'timestamp': datetime.datetime.utcnow(),
        'repo_id': repo_id,
        'repo_name': repo_name,
        'path': '/',
        'days': days,
        'op_user': operator,
        'related_users': related_users,
        'org_id': org_id,
    }

    # Imported lazily to avoid model import during Django setup.
    from utils import SeafEventsSession
    session = SeafEventsSession()
    seafevents.save_user_activity(session, record)
    session.close()
def RepoUpdateEventHandler(session, msg):
    # Handle a repo-update message: diff the new commit against its parent,
    # propagate renames/moves into existing DB records, then store activity
    # (and optionally file-history) records for everyone the repo is shared to.
    # Message body is three tab-separated fields; field[1] is the repo id and
    # field[2] the new commit id.
    elements = msg.body.split('\t')
    if len(elements) != 3:
        logging.warning("got bad message: %s", elements)
        return
    repo_id = elements[1]
    commit_id = elements[2]
    # Python 2: decode byte strings to unicode before use.
    if isinstance(repo_id, str):
        repo_id = repo_id.decode('utf8')
    if isinstance(commit_id, str):
        commit_id = commit_id.decode('utf8')
    # Try loading the commit as version 1 first, then fall back to version 0.
    commit = commit_mgr.load_commit(repo_id, 1, commit_id)
    if commit is None:
        commit = commit_mgr.load_commit(repo_id, 0, commit_id)

    # TODO: maybe handle merge commit.
    # Only process linear history: skip merge commits (two parents).
    if commit is not None and commit.parent_id and not commit.second_parent_id:
        parent = commit_mgr.load_commit(repo_id, commit.version, commit.parent_id)
        if parent is not None:
            differ = CommitDiffer(repo_id, commit.version, parent.root_id,
                                  commit.root_id, True, True)
            added_files, deleted_files, added_dirs, deleted_dirs, modified_files,\
                renamed_files, moved_files, renamed_dirs, moved_dirs = differ.diff_to_unicode()

            # Renames/moves invalidate stored paths; rewrite them in place
            # (0 = file records, 1 = directory records).
            if renamed_files or renamed_dirs or moved_files or moved_dirs:
                changer = ChangeFilePathHandler()
                for r_file in renamed_files:
                    changer.update_db_records(repo_id, r_file.path, r_file.new_path, 0)
                for r_dir in renamed_dirs:
                    changer.update_db_records(repo_id, r_dir.path, r_dir.new_path, 1)
                for m_file in moved_files:
                    changer.update_db_records(repo_id, m_file.path, m_file.new_path, 0)
                for m_dir in moved_dirs:
                    changer.update_db_records(repo_id, m_dir.path, m_dir.new_path, 1)
                changer.close_session()

            # Audience = share list plus the owner; bail out if empty.
            users = []
            org_id = get_org_id_by_repo_id(repo_id)
            if org_id > 0:
                users = seafile_api.org_get_shared_users_by_repo(org_id, repo_id)
                owner = seafile_api.get_org_repo_owner(repo_id)
            else:
                users = seafile_api.get_shared_users_by_repo(repo_id)
                owner = seafile_api.get_repo_owner(repo_id)
            if owner not in users:
                users = users + [owner]
            if not users:
                return

            time = datetime.datetime.utcfromtimestamp(msg.ctime)
            if added_files or deleted_files or added_dirs or deleted_dirs or \
                    modified_files or renamed_files or moved_files or renamed_dirs or moved_dirs:
                # Real content changes: record per-file history (if the file
                # history feature is enabled) plus activity records.
                if appconfig.fh.enabled:
                    records = generate_filehistory_records(
                        added_files, deleted_files, added_dirs, deleted_dirs,
                        modified_files, renamed_files, moved_files,
                        renamed_dirs, moved_dirs, commit, repo_id, parent, time)
                    save_file_histories(session, records)
                records = generate_activity_records(
                    added_files, deleted_files, added_dirs, deleted_dirs,
                    modified_files, renamed_files, moved_files, renamed_dirs,
                    moved_dirs, commit, repo_id, parent, users, time)
                save_user_activities(session, records)
            else:
                # No file changes at all: the commit presumably renamed the
                # library itself.
                save_repo_rename_activity(session, commit, repo_id, parent,
                                          org_id, users, time)
def do_work(self):
    # Process one repo from the global repo list: figure out which commits
    # have already produced activity records and regenerate records for the
    # remaining (newer) commits via self.diff_and_update().
    # Returns -1 when there are no more repos at the current position.
    self._current_commit_position = 0
    repo = seafile_api.get_repo_list(self._current_repo_position, 1)
    if not repo:
        return -1
    repo = repo[0]
    logging.info('Start processing repo :%s', repo.repo_id)
    org_id = get_org_id_by_repo_id(repo.repo_id)
    repo_id = repo.repo_id

    # Audience = share list plus owner (share API returns objects; keep
    # only the user names).
    if org_id > 0:
        users_obj = seafile_api.org_get_shared_users_by_repo(org_id, repo_id)
        owner = seafile_api.get_org_repo_owner(repo_id)
    else:
        users_obj = seafile_api.get_shared_users_by_repo(repo_id)
        owner = seafile_api.get_repo_owner(repo_id)
    users = [e.user for e in users_obj] + [owner]

    self._last_commit_id = None
    if repo_id in self._history_repo.keys():
        # Repo was seen before: scan the commit list in pages of 100 to find
        # where the previously-recorded commits sit, and resume just after
        # that position.
        commit_ids = self.get_repo_last_commits(repo_id)
        count = 0
        k = 0
        bk = False
        while True:
            temp = [
                e.id for e in seafile_api.get_commit_list(
                    repo_id, k * 100, 100)
            ]
            if not temp:
                break
            # avoid two commit at the same time
            for commit_id in commit_ids:
                if commit_id[0] in temp:
                    count += 1
                    if count == len(commit_ids):
                        self._current_commit_position = k * 100 + temp.index(
                            commit_id[0]) + 1
                        self._last_commit_id = commit_id[0]
                        bk = True
                        break
            if bk:
                break
            k += 1
    else:
        # keeping _current_commit_position zero will restore all activity records of the repo
        commit_objs = seafile_api.get_commit_list(
            repo_id, self._current_commit_position, 1)
        current_commit_id = [e.id for e in commit_objs][0]
        self._last_commit_id = current_commit_id
        self.diff_and_update(repo_id, current_commit_id, org_id, users)

    start_commit_position = self._current_commit_position
    count_offest = 0
    while True:
        # get last commit and another commits
        # avoid current_commit_position expired by generate new record
        commit_objs = seafile_api.get_commit_list(
            repo_id, self._current_commit_position - 1, 5)
        commit_ids = [e.id for e in commit_objs]
        if not commit_objs or len(commit_objs) == 1:
            break
        if self._last_commit_id not in commit_ids or commit_objs[
                -1].id == self._last_commit_id:
            # Last processed commit fell out of this window (new commits
            # arrived) or is the oldest in the window: skip ahead a page.
            self._current_commit_position += 4
            count_offest = 4
        else:
            # Advance to the commit right after the last processed one and
            # regenerate its activity records.
            offset = commit_ids.index(self._last_commit_id)
            self._current_commit_position += offset
            current_commit_id = commit_ids[offset + 1]
            self._last_commit_id = commit_ids[offset + 1]
            self.diff_and_update(repo_id, current_commit_id, org_id, users)
            count_offest = 1
    count = self._current_commit_position - start_commit_position - count_offest
    logging.info("%s recover %s activity records" % (repo_id, count))
def RepoUpdateEventHandler(session, msg):
    # KEEPER variant of the repo-update handler: diff the new commit against
    # its parent, propagate renames/moves, store activity/file-history
    # records, then update the KEEPER catalog entry and (optionally) notify
    # the collab server.
    elements = msg['content'].split('\t')
    if len(elements) != 3:
        logging.warning("got bad message: %s", elements)
        return
    repo_id = elements[1]
    commit_id = elements[2]
    # Try loading the commit as version 1 first, then fall back to version 0.
    commit = commit_mgr.load_commit(repo_id, 1, commit_id)
    if commit is None:
        commit = commit_mgr.load_commit(repo_id, 0, commit_id)

    # TODO: maybe handle merge commit.
    # Only process linear history: skip merge commits (two parents).
    if commit is not None and commit.parent_id and not commit.second_parent_id:
        parent = commit_mgr.load_commit(repo_id, commit.version, commit.parent_id)
        if parent is not None:
            differ = CommitDiffer(repo_id, commit.version, parent.root_id,
                                  commit.root_id, True, True)
            added_files, deleted_files, added_dirs, deleted_dirs, modified_files,\
                renamed_files, moved_files, renamed_dirs, moved_dirs = differ.diff()

            # Renames/moves invalidate stored paths; rewrite them in place
            # (0 = file records, 1 = directory records).
            if renamed_files or renamed_dirs or moved_files or moved_dirs:
                changer = ChangeFilePathHandler()
                for r_file in renamed_files:
                    changer.update_db_records(repo_id, r_file.path, r_file.new_path, 0)
                for r_dir in renamed_dirs:
                    changer.update_db_records(repo_id, r_dir.path, r_dir.new_path, 1)
                for m_file in moved_files:
                    changer.update_db_records(repo_id, m_file.path, m_file.new_path, 0)
                for m_dir in moved_dirs:
                    changer.update_db_records(repo_id, m_dir.path, m_dir.new_path, 1)
                changer.close_session()

            # Audience = share list plus the owner; bail out if empty.
            users = []
            org_id = get_org_id_by_repo_id(repo_id)
            if org_id > 0:
                users = seafile_api.org_get_shared_users_by_repo(org_id, repo_id)
                owner = seafile_api.get_org_repo_owner(repo_id)
            else:
                users = seafile_api.get_shared_users_by_repo(repo_id)
                owner = seafile_api.get_repo_owner(repo_id)
            if owner not in users:
                users = users + [owner]
            if not users:
                return

            time = datetime.datetime.utcfromtimestamp(msg['ctime'])
            if added_files or deleted_files or added_dirs or deleted_dirs or \
                    modified_files or renamed_files or moved_files or renamed_dirs or moved_dirs:
                # Real content changes: record per-file history (if enabled)
                # plus activity records.
                if appconfig.fh.enabled:
                    records = generate_filehistory_records(
                        added_files, deleted_files, added_dirs, deleted_dirs,
                        modified_files, renamed_files, moved_files,
                        renamed_dirs, moved_dirs, commit, repo_id, parent, time)
                    save_file_histories(session, records)
                records = generate_activity_records(
                    added_files, deleted_files, added_dirs, deleted_dirs,
                    modified_files, renamed_files, moved_files, renamed_dirs,
                    moved_dirs, commit, repo_id, parent, users, time)
                save_user_activities(session, records)
            else:
                # No file changes: the commit presumably renamed the library.
                save_repo_rename_activity(session, commit, repo_id, parent,
                                          org_id, users, time)

    # TODO check: catalog entry update
    # KEEPER
    # NOTE(review): nesting level of the KEEPER block below is ambiguous in
    # the flattened source; it is reconstructed at function level so catalog
    # updates run for every repo-update message — confirm against upstream.
    logging.info("REPO UPDATED EVENT repo_id: %s" % repo_id)
    logging.info("Trying to create/update keeper catalog entry for repo_id: %s..." % repo_id)
    if bool(generate_catalog_entry_by_repo_id(repo_id)):
        logging.info("Success!")
    else:
        logging.error("Something went wrong...")

    if appconfig.enable_collab_server:
        send_message_to_collab_server(repo_id)