def _total_pages_b(document):
    """Return the directory listing used to count a document's pages.

    Prefers the '700x' rendition directory, falls back to 'normal', and
    finally to an empty list when neither directory can be listed.

    The original used a bare ``except:`` as the final fallback, but a
    failure while listing '/normal' happened inside the ``except OSError``
    handler and therefore propagated instead of reaching it; the nested
    try below makes the fallback actually apply to both directories.
    """
    try:
        path = document.get_root_path() + '/700x'
        elems = fss.listdir(path)
    except OSError:
        # '700x' renditions missing/unreadable -- try the 'normal' ones.
        try:
            path = document.get_root_path() + '/normal'
            elems = fss.listdir(path)
        except OSError:
            # Neither directory is available: report an empty listing
            # rather than raising.
            elems = []
    return elems
def test_upload_doc_local(self):
    """End-to-end check of a locally uploaded document.

    Verifies the model fields, the rendered document-list row (id, title,
    owner, status, page count, privacy icon) and that the on-disk layout
    matches ``create_structure``'s expectation.

    Changes vs. original: deprecated ``assertEquals`` alias replaced by
    ``assertEqual``; the triple ``fss.listdir(root_path)`` call is hoisted
    into one call (the ``files`` local was previously assigned but unused).
    """
    # Create
    document_exists = exists_document(self.title)
    self.assertTrue(document_exists)
    # Model fields match what the upload form was given.
    self.assertEqual(self.document.public, self.public)
    self.assertEqual(self.document.title, self.title)
    self.assertEqual(self.document.notes, self.notes)
    # After upload the browser must land on the document list page.
    document_list_url = \
        self.live_server_url + reverse('documents.views.list_documents')
    self.assertEqual(self.browser.url, document_list_url)
    # First table row is the freshly uploaded document.
    document_xpath = '/html/body/div/div[2]/table/tbody/tr[1]'
    document_tr = self.browser.find_by_xpath(document_xpath)
    document_id = document_tr['data-id']
    self.assertEqual(int(document_id), self.document.id)
    document_title_xpath = '//*[@id="documents_cell"]/span[1]'
    document_title = self.browser.find_by_xpath(document_title_xpath)
    self.assertEqual(document_title.value, self.title)
    # The owner column must show the logged-in user's profile name.
    profile_xpath = '/html/body/div/div[1]/div/ul[2]/li[4]/a'
    profile_link = self.browser.find_by_xpath(profile_xpath)
    owner_xpath = '/html/body/div/div[2]/table/tbody/tr[1]/td[4]/a'
    owner_link = self.browser.find_by_xpath(owner_xpath)
    self.assertEqual(profile_link.value, owner_link.value)
    status_xpath = '/html/body/div/div[2]/table/tbody/tr/td[5]/div'
    status_div = self.browser.find_by_xpath(status_xpath)
    self.assertEqual(status_div.value, self.document.status)
    numpages_xpath = '/html/body/div/div[2]/table/tbody/tr[1]/td[6]/div'
    numpages_div = self.browser.find_by_xpath(numpages_xpath)
    self.assertEqual(int(numpages_div.value), self.document.page_count)
    # A public document is rendered with the "open eye" icon.
    privacy_icon_xpath = '//*[@id="privacy"]/i'
    privacy_icon = self.browser.find_by_xpath(privacy_icon_xpath)
    self.assertTrue(privacy_icon.has_class('icon-eye-open'))
    # On-disk layout: every expected dir/file must exist under the root.
    structure = create_structure(self.document)
    root_path = self.document.get_root_path()
    dirs, files = fss.listdir(root_path)  # [0]: dirs, [1]: files
    for d in dirs:
        dir_path = os.path.join(root_path, d)
        for f in structure['dirs'][d]:
            self.assertIn(f, fss.listdir(dir_path)[1])
    for f in structure['files']:
        self.assertIn(f, files)
    # import time; time.sleep(3)
    self.browser.quit()
def generate_visible(self):
    """Regenerate the "-visible" text and PDF renditions.

    Concatenates the per-page text files of the visible pages (in numeric
    page order) into ``<slug>-visible.txt`` and extracts the same pages
    from the source PDF into ``<slug>-visible.pdf`` via pdftk.

    Returns:
        Tuple ``(txt_url, pdf_url)`` of the generated files.
    """
    visible_pages = [p.page for p in self.pages_set.filter(visible=True)]
    abs_path = "%s/%s" % (self.get_root_path(), self.slug)
    # txt
    all_txt = fs.open("%s-visible.txt" % abs_path, "w")
    pages = {}
    for f in fss.listdir(self.get_root_path())[1]:  # [0]: dirs, [1]: files
        if f[-4:] == ".txt" and f != "%s.txt" % self.slug:
            m = RE_PAGE.match(f)
            if m:
                k = int(m.group(1))
                if k in visible_pages:
                    pages[k] = f
    # Concatenate in numeric page order; plain dict iteration would follow
    # listdir insertion order, which is not guaranteed to be page order.
    for k in sorted(pages):
        f = pages[k]
        tmp_txt = "%s/%s" % (self.get_root_path(), f)
        fss.write_file(tmp_txt, all_txt)
    fss.close_file(all_txt)
    # pdf
    pages_str = " ".join(map(str, visible_pages))
    # NOTE(review): shell=True with interpolated paths is shell-injection
    # prone if the slug or root path ever contain metacharacters --
    # consider subprocess.run with an argument list.
    command = "pdftk {}.pdf cat {} output {}-visible.pdf".format(
        abs_path, pages_str, abs_path)
    Popen(command, shell=True, stdout=PIPE).stdout.read()
    abs_url = "%s/%s" % (self.get_root_url(), self.slug)
    return ("%s-visible.txt" % abs_url, "%s-visible.pdf" % abs_url)
def _total_pages_a(document):
    """Return absolute paths of the per-page ``.txt`` files for *document*.

    Skips the concatenated ``<slug>.txt`` file and any timestamped
    version file (``*-<20 digits>.txt``).
    """
    version_re = re.compile(r'^.*-([0-9]{20})\.txt$')
    root = document.get_root_path()
    combined_name = "%s.txt" % document.slug
    elems = []
    for entry in fss.listdir(root):
        if not entry.endswith('.txt'):
            continue
        if entry == combined_name or version_re.match(entry):
            continue
        elems.append(root + '/' + entry)
    return elems
def _total_pages_a(document):
    """Return full paths of the individual page text files of *document*.

    Excludes the combined ``<slug>.txt`` file and versioned
    ``*-<20-digit timestamp>.txt`` files.
    """
    version_re = re.compile(r'^.*-([0-9]{20})\.txt$')
    path = document.get_root_path()
    combined = "%s.txt" % document.slug

    def _is_page_txt(name):
        # A page file ends in .txt, is not the combined file, and carries
        # no 20-digit version suffix.
        return (name[-4:] == '.txt'
                and name != combined
                and not version_re.match(name))

    return [path + '/' + name
            for name in fss.listdir(path) if _is_page_txt(name)]
def regenerate(self):
    """Rebuild ``<slug>.txt`` by re-concatenating all per-page text files.

    NOTE(review): unlike ``generate_visible``, ``all_txt`` here is a path
    string rather than an ``fs.open`` handle, yet it is passed to
    ``fss.write_file``/``fss.close_file`` -- confirm that those helpers
    accept a path.
    """
    # reconcatenate all text files
    all_txt = "%s/%s.txt" % (self.get_root_path(), self.slug)
    pages = {}
    for f in fss.listdir(self.get_root_path())[1]:  # [0]: dirs, [1]: files
        if f[-4:] == ".txt" and f != "%s.txt" % self.slug:
            m = RE_PAGE.match(f)
            if m:
                pages[int(m.group(1))] = f
    # Write pages in numeric order; bare dict iteration would follow
    # listdir insertion order, which need not be page order.
    for k in sorted(pages):
        tmp_txt = "%s/%s" % (self.get_root_path(), pages[k])
        fss.write_file(tmp_txt, all_txt)
    fss.close_file(all_txt)
def generate(self):
    """Build the combined text file, Page rows and the initial Editions.

    Scans the root directory for per-page ``.txt`` files, concatenates
    them (in numeric page order) into ``<slug>.txt``, creates a ``Page``
    row per page, snapshots each page as its "original" version and
    records two ``Edition`` rows ("Original" and "Current").
    """
    # concatenate all text files
    ts = datetime.now()
    all_txt = "%s/%s.txt" % (self.get_root_path(), self.slug)
    self.page_count = 0
    self.pages_set.all().delete()
    pages = {}
    for f in fss.listdir(self.get_root_path())[1]:  # [0]: dirs, [1]: files
        if f[-4:] == ".txt" and f != "%s.txt" % self.slug:
            m = RE_PAGE.match(f)
            if m:
                k = int(m.group(1))
                pages[k] = f
    mod_pags_orig = {}
    mod_pags_curr = {}
    # Process pages in numeric order; plain dict iteration would follow
    # listdir insertion order, which is not guaranteed to be page order.
    for k in sorted(pages):
        f = pages[k]
        self.page_count += 1
        tmp_txt = "%s/%s" % (self.get_root_path(), f)
        fss.write_file(tmp_txt, all_txt)
        page = Page(document=self, page=RE_PAGE.match(f).group(1), modified=ts)
        page.save()
        # Snapshot the page as its "original" (zeros-timestamped) version.
        # NOTE(review): self vs self.document is used inconsistently in
        # this method (self.get_root_path() above, self.document.* here,
        # and self.document.document.owner below) -- confirm which object
        # actually owns the files and the owner.
        filepath_orig = "%s/%s_%s-%s.txt" % (self.document.get_root_path(),
                                             self.document.slug, k, zeros)
        fss.copy_file(tmp_txt, filepath_orig)
        text_url = self.document.text_page_url
        mod_pags_orig[self.page_count] = text_url.replace(
            "%(page)s", "{}-{}".format(self.page_count, zeros))
        mod_pags_curr[self.page_count] = text_url.replace(
            "%(page)s", "{}".format(self.page_count))
    Edition.objects.create(
        document=self.document,
        author=self.document.document.owner,
        comment="Original version",
        modified_pages=mod_pags_orig,
        date_string=zeros,
    )
    Edition.objects.create(
        document=self.document,
        author=self.document.document.owner,
        comment="Current version",
        modified_pages=mod_pags_curr,
        date_string=nines,
    )
    fss.close_file(all_txt)