def get_dir_files_last_modified(self, repo_id, parent_dir, dir_id=None):
    """Return a dict mapping each file name under ``parent_dir`` of repo
    ``repo_id`` to its last modified timestamp.

    Results are cached per (repo_id, parent_dir); the cached entry stores
    the directory id it was computed for, so any change to the directory
    (which changes its id) invalidates the cache and forces a recompute.

    Arguments:
    - `repo_id`: id of the repo to look in.
    - `parent_dir`: directory path whose files are inspected.
    - `dir_id`: optional pre-fetched id of ``parent_dir``; looked up via
      seafile_api when not given.
    """
    if not dir_id:
        dir_id = seafile_api.get_dir_id_by_path(repo_id, parent_dir)
    parent_dir_hash = calc_file_path_hash(parent_dir)
    if not dir_id:
        # Directory does not exist (any more); nothing to report.
        return {}

    try:
        cached = super(DirFilesLastModifiedInfoManager, self).get(
            repo_id=repo_id, parent_dir_hash=parent_dir_hash)
    except self.model.DoesNotExist:
        cached = None

    if cached is not None and cached.dir_id == dir_id:
        # Cache hit and still valid.
        return json.loads(cached.last_modified_info)

    if cached is not None:
        # Stale entry: directory changed since the cache was written.
        cached.delete()
    return self._calc_dir_files_last_modified(repo_id, parent_dir,
                                              parent_dir_hash, dir_id)
def get_file_last_modified(self, repo_id, file_path):
    """Return ``(email, last_modified)`` for ``file_path`` in ``repo_id``.

    A cached record keyed on the path hash is used when its stored file
    id still matches the current one; otherwise the stale record is
    deleted and the value recomputed.

    Arguments:
    - `repo_id`: id of the repo containing the file.
    - `file_path`: full path of the file inside the repo.
    """
    path_hash = calc_file_path_hash(file_path)
    file_id = seafile_api.get_file_id_by_path(repo_id, file_path)
    try:
        cached = super(FileLastModifiedInfoManager, self).get(
            repo_id=repo_id, file_path_hash=path_hash)
    except self.model.DoesNotExist:
        cached = None

    if cached is not None and cached.file_id == file_id:
        # Cache is up to date.
        return cached.email, cached.last_modified

    if cached is not None:
        # File changed since the record was written; drop the stale row.
        cached.delete()
    return self._calc_file_last_modified(repo_id, file_path,
                                         path_hash, file_id)
def get_file_last_modified(self, repo_id, file_path):
    """Look up who last modified ``file_path`` and when.

    Returns an ``(email, last_modified)`` pair, served from the cache
    table when the stored file id matches the current one and recomputed
    otherwise.

    Arguments:
    - `repo_id`: id of the repo containing the file.
    - `file_path`: full path of the file inside the repo.
    """
    file_path_hash = calc_file_path_hash(file_path)
    file_id = seafile_api.get_file_id_by_path(repo_id, file_path)
    try:
        record = super(FileLastModifiedInfoManager, self).get(
            repo_id=repo_id, file_path_hash=file_path_hash)
    except self.model.DoesNotExist:
        # No cache row yet.
        record = None
    else:
        if record.file_id != file_id:
            # Cache row is outdated; discard it.
            record.delete()
            record = None

    if record is None:
        return self._calc_file_last_modified(
            repo_id, file_path, file_path_hash, file_id)
    return record.email, record.last_modified
def get_dir_files_last_modified(self, repo_id, parent_dir, dir_id=None):
    """Return ``{file_name: last_modified_timestamp}`` for every file
    under ``parent_dir`` of repo ``repo_id``.

    Uses a per-directory cache keyed on the directory id: if the stored
    id no longer matches, the entry is deleted and recomputed.

    Arguments:
    - `repo_id`: id of the repo to look in.
    - `parent_dir`: directory path whose files are inspected.
    - `dir_id`: optional pre-fetched id of ``parent_dir``.
    """
    if not dir_id:
        dir_id = seafile_api.get_dir_id_by_path(repo_id, parent_dir)
    parent_dir_hash = calc_file_path_hash(parent_dir)
    if not dir_id:
        # Directory is gone; nothing to calculate.
        return {}

    try:
        record = super(DirFilesLastModifiedInfoManager, self).get(
            repo_id=repo_id, parent_dir_hash=parent_dir_hash)
    except self.model.DoesNotExist:
        # No cache yet: compute (and store) from scratch.
        return self._calc_dir_files_last_modified(repo_id, parent_dir,
                                                  parent_dir_hash, dir_id)

    if record.dir_id == dir_id:
        # Cache is valid.
        return json.loads(record.last_modified_info)

    # Cache is outdated: the directory changed since it was written.
    record.delete()
    return self._calc_dir_files_last_modified(repo_id, parent_dir,
                                              parent_dir_hash, dir_id)
def group_recommend(request):
    """Get or post file/directory discussions to a group.

    POST: validate a GroupRecommendForm, then for each selected group
    the user belongs to, create a GroupMessage with its attachment and
    FileDiscuss record and emit the ``grpmsg_added`` signal.
    GET: render the discussion list for the given repo path as HTML.
    Responses are always JSON.
    """
    content_type = "application/json; charset=utf-8"
    result = {}

    if request.method == "POST":
        form = GroupRecommendForm(request.POST)
        if form.is_valid():
            repo_id = form.cleaned_data["repo_id"]
            attach_type = form.cleaned_data["attach_type"]
            path = form.cleaned_data["path"]
            message = form.cleaned_data["message"]
            # groups is a group_id list, e.g. [u'1', u'7']
            groups = request.POST.getlist("groups")
            username = request.user.username

            groups_not_in = []
            groups_posted_to = []
            for group_id in groups:
                # Reject malformed group ids outright.
                try:
                    group_id = int(group_id)
                except ValueError:
                    result["error"] = _(u"Error: wrong group id")
                    return HttpResponse(json.dumps(result), status=400,
                                        content_type=content_type)

                group = get_group(group_id)
                if not group:
                    result["error"] = _(u"Error: the group does not exist.")
                    return HttpResponse(json.dumps(result), status=400,
                                        content_type=content_type)

                # TODO: Check whether repo is in the group and Im in the group
                if not is_group_user(group_id, username):
                    groups_not_in.append(group.group_name)
                    continue

                # Save message to group.
                gm = GroupMessage(group_id=group_id, from_email=username,
                                  message=message)
                gm.save()

                # Notify listeners about the new group message.
                grpmsg_added.send(sender=GroupMessage, group_id=group_id,
                                  from_email=username, message=message)

                # Save attachment.
                ma = MessageAttachment(group_message=gm, repo_id=repo_id,
                                       attach_type=attach_type, path=path,
                                       src="recommend")
                ma.save()

                # Save discussion.
                fd = FileDiscuss(group_message=gm, repo_id=repo_id, path=path)
                fd.save()

                group_url = reverse("group_discuss", args=[group_id])
                groups_posted_to.append(
                    u'<a href="%(url)s" target="_blank">%(name)s</a>'
                    % {"url": group_url, "name": group.group_name})

            if len(groups_posted_to) > 0:
                result["success"] = _(u"Successfully posted to %(groups)s.") \
                    % {"groups": ", ".join(groups_posted_to)}
            if len(groups_not_in) > 0:
                result["error"] = _(u"Error: you are not in group %s.") \
                    % (", ".join(groups_not_in))
        else:
            result["error"] = str(form.errors)
            return HttpResponse(json.dumps(result), status=400,
                                content_type=content_type)
    else:
        # request.method == 'GET'
        repo_id = request.GET.get("repo_id")
        path = request.GET.get("path", None)
        repo = get_repo(repo_id)
        if not repo:
            result["error"] = _(u"Error: the library does not exist.")
            return HttpResponse(json.dumps(result), status=400,
                                content_type=content_type)
        if path is None:
            result["error"] = _(u"Error: no path.")
            return HttpResponse(json.dumps(result), status=400,
                                content_type=content_type)

        # Collect discussions & their replies for this path.
        path_hash = calc_file_path_hash(path)
        discussions = FileDiscuss.objects.filter(path_hash=path_hash,
                                                 repo_id=repo_id)
        msg_ids = [e.group_message_id for e in discussions]

        grp_msgs = GroupMessage.objects.filter(
            id__in=msg_ids).order_by("-timestamp")
        msg_replies = MessageReply.objects.filter(reply_to__in=grp_msgs)
        for msg in grp_msgs:
            msg.replies = []
            for reply in msg_replies:
                if msg.id == reply.reply_to_id:
                    msg.replies.append(reply)
            msg.reply_cnt = len(msg.replies)
            # Only the three most recent replies are shown inline.
            msg.replies = msg.replies[-3:]

        ctx = {}
        ctx["messages"] = grp_msgs
        html = render_to_string("group/discussion_list.html", ctx)
        result["html"] = html

    return HttpResponse(json.dumps(result), content_type=content_type)
def save(self, *args, **kwargs):
    """Persist the discussion record.

    Derives ``path_hash`` from ``path`` the first time the object is
    saved without one, then delegates to the parent ``save``.
    """
    if not self.path_hash:
        self.path_hash = calc_file_path_hash(self.path)
    super(FileDiscuss, self).save(*args, **kwargs)
def save(self, *args, **kwargs):
    """Persist the record, filling ``path_hash`` from ``path`` if unset.

    The hash helper is imported inside the method — presumably to avoid
    a circular import at module load time; confirm before hoisting it.
    """
    if not self.path_hash:
        from seahub.utils import calc_file_path_hash
        self.path_hash = calc_file_path_hash(self.path)
    super(FileDiscuss, self).save(*args, **kwargs)
def get_files_last_modified(self, files_list):
    """Batch calculate file last modification times.

    Arguments:
    - `files_list`: A list of (repo_id, file_path, file_id) tuples.
      For example:
      [
          (u'66a7aaaf-0b59-4c22-9f7a-52606e8fbee3', u'/Chrys (1).jpg',
           u'c5ee20b7ecf5c44bd184cf64c775aad769f50399'),
          (u'66a7aaaf-0b59-4c22-9f7a-52606e8fbee3', u'/foo.pdf',
           u'f78b579f757cec44a99d420331a06ad752b30153'),
          ...
      ]

    Returns:
      A dict mapping 'repo_id|file_path|file_id' keys (fields separated
      by '|') to last modification timestamps. For example:
      {
          u'66a7...fbee3|/Chrys (1).jpg|c5ee...0399': 1374549194,
          u'66a7...fbee3|/foo.pdf|f78b...0153': 1362471870,
          ...
      }
    """
    # Collect the path hashes so every cached record can be fetched in a
    # single query.
    filepath_hash_set = set()
    for repo_id, file_path, file_id in files_list:
        filepath_hash_set.add(calc_file_path_hash(file_path))

    m_infos = super(FileLastModifiedInfoManager, self).filter(
        file_path_hash__in=list(filepath_hash_set))

    ret_dict = {}
    for f in files_list:
        repo_id, file_path, file_id = f
        for info in m_infos:
            if repo_id != info.repo_id or file_path != info.file_path:
                continue
            # Got the record in db.
            ret_key = '|'.join(f)
            if file_id != info.file_id:
                # Record is outdated, need to re-calculate.
                info.delete()
                email, last_modified = self._calc_file_last_modified(
                    info.repo_id, info.file_path, info.file_path_hash,
                    file_id)
                ret_dict[ret_key] = last_modified
            else:
                # Record is valid.
                ret_dict[ret_key] = info.last_modified

    # Process the remaining files that had no cached record.
    for f in files_list:
        ret_key = '|'.join(f)
        # `in` instead of dict.has_key(): has_key is deprecated in
        # Python 2.7 and removed in Python 3.
        if ret_key in ret_dict:
            continue
        repo_id, file_path, file_id = f
        path_hash = calc_file_path_hash(file_path)
        email, last_modified = self._calc_file_last_modified(
            repo_id, file_path, path_hash, file_id)
        ret_dict[ret_key] = last_modified

    return ret_dict
def group_recommend(request):
    """Get or post file/directory discussions to a group.

    POST posts a recommendation message (plus attachment and discussion
    records) to each selected group the user is a member of; GET returns
    the rendered discussion list for a repo path. All responses are JSON.
    """
    content_type = 'application/json; charset=utf-8'
    result = {}

    if request.method == 'POST':
        form = GroupRecommendForm(request.POST)
        if not form.is_valid():
            result['error'] = str(form.errors)
            return HttpResponse(json.dumps(result), status=400,
                                content_type=content_type)

        repo_id = form.cleaned_data['repo_id']
        attach_type = form.cleaned_data['attach_type']
        path = form.cleaned_data['path']
        message = form.cleaned_data['message']
        # groups is a group_id list, e.g. [u'1', u'7']
        groups = request.POST.getlist('groups')
        username = request.user.username

        groups_not_in = []
        groups_posted_to = []
        for group_id in groups:
            # Check group id format.
            try:
                group_id = int(group_id)
            except ValueError:
                result['error'] = _(u'Error: wrong group id')
                return HttpResponse(json.dumps(result), status=400,
                                    content_type=content_type)

            group = get_group(group_id)
            if not group:
                result['error'] = _(u'Error: the group does not exist.')
                return HttpResponse(json.dumps(result), status=400,
                                    content_type=content_type)

            # TODO: Check whether repo is in the group and Im in the group
            if not is_group_user(group_id, username):
                groups_not_in.append(group.group_name)
                continue

            # Save message to group.
            gm = GroupMessage(group_id=group_id, from_email=username,
                              message=message)
            gm.save()

            # Send signal.
            grpmsg_added.send(sender=GroupMessage, group_id=group_id,
                              from_email=username, message=message)

            # Save attachment.
            ma = MessageAttachment(group_message=gm, repo_id=repo_id,
                                   attach_type=attach_type, path=path,
                                   src='recommend')
            ma.save()

            # Save discussion.
            fd = FileDiscuss(group_message=gm, repo_id=repo_id, path=path)
            fd.save()

            group_url = reverse('group_discuss', args=[group_id])
            groups_posted_to.append(
                u'<a href="%(url)s" target="_blank">%(name)s</a>'
                % {'url': group_url, 'name': group.group_name})

        if len(groups_posted_to) > 0:
            result['success'] = _(u'Successfully posted to %(groups)s.') % {
                'groups': ', '.join(groups_posted_to)}
        if len(groups_not_in) > 0:
            result['error'] = _(u'Error: you are not in group %s.') % (
                ', '.join(groups_not_in))
    else:
        # request.method == 'GET'
        repo_id = request.GET.get('repo_id')
        path = request.GET.get('path', None)
        repo = get_repo(repo_id)
        if not repo:
            result['error'] = _(u'Error: the library does not exist.')
            return HttpResponse(json.dumps(result), status=400,
                                content_type=content_type)
        if path is None:
            result['error'] = _(u'Error: no path.')
            return HttpResponse(json.dumps(result), status=400,
                                content_type=content_type)

        # Get discussions & replies.
        path_hash = calc_file_path_hash(path)
        discussions = FileDiscuss.objects.filter(path_hash=path_hash,
                                                 repo_id=repo_id)
        msg_ids = [e.group_message_id for e in discussions]

        grp_msgs = GroupMessage.objects.filter(
            id__in=msg_ids).order_by('-timestamp')
        msg_replies = MessageReply.objects.filter(reply_to__in=grp_msgs)
        for msg in grp_msgs:
            msg.replies = []
            for reply in msg_replies:
                if msg.id == reply.reply_to_id:
                    msg.replies.append(reply)
            msg.reply_cnt = len(msg.replies)
            # Keep only the three most recent replies for display.
            msg.replies = msg.replies[-3:]

        ctx = {}
        ctx['messages'] = grp_msgs
        html = render_to_string("group/discussion_list.html", ctx)
        result['html'] = html

    return HttpResponse(json.dumps(result), content_type=content_type)
def get_files_last_modified(self, files_list):
    """Batch calculate file last modification times.

    Arguments:
    - `files_list`: A list of (repo_id, file_path, file_id) tuples.
      For example:
      [
          (u'66a7aaaf-0b59-4c22-9f7a-52606e8fbee3', u'/Chrys (1).jpg',
           u'c5ee20b7ecf5c44bd184cf64c775aad769f50399'),
          (u'66a7aaaf-0b59-4c22-9f7a-52606e8fbee3', u'/foo.pdf',
           u'f78b579f757cec44a99d420331a06ad752b30153'),
          ...
      ]

    Returns:
      A dict mapping 'repo_id|file_path|file_id' keys (fields separated
      by '|') to last modification timestamps. For example:
      {
          u'66a7...fbee3|/Chrys (1).jpg|c5ee...0399': 1374549194,
          u'66a7...fbee3|/foo.pdf|f78b...0153': 1362471870,
          ...
      }
    """
    # Gather path hashes so all cached records come back in one query.
    filepath_hash_set = set()
    for repo_id, file_path, file_id in files_list:
        filepath_hash_set.add(calc_file_path_hash(file_path))

    m_infos = super(FileLastModifiedInfoManager, self).filter(
        file_path_hash__in=list(filepath_hash_set))

    ret_dict = {}
    for f in files_list:
        repo_id, file_path, file_id = f
        for info in m_infos:
            if repo_id != info.repo_id or file_path != info.file_path:
                continue
            # Got the record in db.
            ret_key = '|'.join(f)
            if file_id != info.file_id:
                # Record is outdated, need to re-calculate.
                info.delete()
                email, last_modified = self._calc_file_last_modified(
                    info.repo_id, info.file_path, info.file_path_hash,
                    file_id)
                ret_dict[ret_key] = last_modified
            else:
                # Record is valid.
                ret_dict[ret_key] = info.last_modified

    # Process the remaining files that had no cached record.
    for f in files_list:
        ret_key = '|'.join(f)
        # `in` instead of dict.has_key(): has_key is deprecated in
        # Python 2.7 and removed in Python 3.
        if ret_key in ret_dict:
            continue
        repo_id, file_path, file_id = f
        path_hash = calc_file_path_hash(file_path)
        email, last_modified = self._calc_file_last_modified(
            repo_id, file_path, path_hash, file_id)
        ret_dict[ret_key] = last_modified

    return ret_dict