def test_deleting_dir(test_app, client: FlaskClient):
    """Deleting an existing directory via POST /folders/delete removes it
    from the directory listing and flashes a success message."""
    create_dir("testing")
    assert "testing" in get_dirs()
    resp = client.post(
        "/folders/delete", data={"dir_name": "testing"}, follow_redirects=True
    )
    # idiomatic membership negation (was `not "testing" in get_dirs()`)
    assert "testing" not in get_dirs()
    assert b"Folder successfully deleted." in resp.data
def create_folder():
    """Create a new directory from the submitted NewFolderForm.

    On success: creates the directory, flashes a success message, and
    redirects to the new folder's view. On validation failure: flashes an
    error and redirects back to the referrer (or the index).
    """
    form = forms.NewFolderForm()
    if form.validate_on_submit():
        # NOTE(review): plain concatenation without a separator — assumes
        # `parent_dir` already ends with "/" (or is empty); confirm in the form.
        path = form.parent_dir.data + form.new_dir.data
        # removed leftover debug `print(path)`
        data.create_dir(path)
        flash("Folder successfully created.", "success")
        return redirect(f"/?path={path}")
    flash("Could not create folder.", "error")
    return redirect(request.referrer or "/")
def test_unformat_multiple_md_file(
    test_app, cli_runner, click_cli, bookmark_fixture, note_fixture
):
    """Unformatting several markdown files at once moves each into the output dir."""
    destination = mkdtemp()
    create_dir("")
    result = cli_runner.invoke(
        cli,
        [
            "unformat",
            str(bookmark_fixture.fullpath),
            str(note_fixture.fullpath),
            destination,
        ],
    )
    # each input file should be reported as unformatted and moved
    for fixture in (bookmark_fixture, note_fixture):
        expected = (
            f"Unformatted and moved {fixture.fullpath} "
            f"to {destination}/{fixture.title}"
        )
        assert expected in result.output
def sync(force):
    """Pull bookmarks from the Pocket API into the ``pocket`` directory.

    Skips items already saved locally; unless *force* is truthy, only
    requests items newer than the most recently saved post (Pocket's
    ``since`` parameter).
    """
    with app.app_context():
        db = get_db()
        # update pocket dictionary
        pocket = db.search(Query().type == "pocket_key")[0]
        pocket_data = {
            "consumer_key": pocket["consumer_key"],
            "access_token": pocket["access_token"],
            "sort": "newest",
        }

        # get date of latest call to pocket api
        since = datetime(1970, 1, 1)
        create_dir("pocket")
        already_saved = set()
        for post in get_items(path="pocket/", structured=False):
            date = datetime.strptime(post["date"].replace("-", "/"), "%x")
            already_saved.add(post["url"])
            since = max(date, since)
        if since != datetime(1970, 1, 1) and not force:
            pocket_data["since"] = datetime.timestamp(since)

        bookmarks = requests.post(
            "https://getpocket.com/v3/get", json=pocket_data
        ).json()
        # api spec: https://getpocket.com/developer/docs/v3/retrieve
        # for some reason, if the `list` attribute is empty it returns a
        # list instead of a dict.
        if not len(bookmarks["list"]):
            click.echo("No new bookmarks.")
        else:
            for pocket_bookmark in bookmarks["list"].values():
                url = pocket_bookmark.get("resolved_url", pocket_bookmark["given_url"])
                # per the retrieve spec, status 2 marks items deleted upstream
                if int(pocket_bookmark["status"]) != 2 and url not in already_saved:
                    bookmark = DataObj(
                        url=url,
                        date=datetime.now(),
                        type="pocket_bookmark",
                        path="pocket",
                    )
                    try:
                        bookmark.process_bookmark_url()
                        click.echo(f"Saving {bookmark.title}...")
                        bookmark.insert()
                    # was a bare `except:` — narrowed so KeyboardInterrupt
                    # and SystemExit are no longer swallowed
                    except Exception:
                        click.echo(
                            f"Could not save {bookmark.url} - website may already be down."
                        )
            click.echo("Done!")
def test_unformat_directory(
    test_app, cli_runner, click_cli, bookmark_fixture, note_fixture
):
    """Unformatting a directory preserves its structure inside the output dir."""
    destination = mkdtemp()

    # place an archivy note inside a dedicated subdirectory
    note_dir = "note-dir"
    create_dir(note_dir)
    nested_note = DataObj(type="note", title="Nested note", path=note_dir)
    nested_note.insert()

    # unformat the whole directory
    result = cli_runner.invoke(
        cli, ["unformat", os.path.join(get_data_dir(), note_dir), destination]
    )
    expected = (
        f"Unformatted and moved {nested_note.fullpath} "
        f"to {destination}/{note_dir}/{nested_note.title}"
    )
    assert expected in result.output
def test_creating_bookmark_without_passing_path_saves_to_default_dir(
    test_app, client, mocked_responses
):
    """Bookmarks created without an explicit path land in DEFAULT_BOOKMARKS_DIR."""
    mocked_responses.add(responses.GET, "http://example.org", body="Example\n")
    bookmarks_dir = "bookmarks"
    test_app.config["DEFAULT_BOOKMARKS_DIR"] = bookmarks_dir
    create_dir(bookmarks_dir)
    # no `path` key in the payload — the configured default must be used
    # (the unused `resp` binding was dropped)
    client.post(
        "/api/bookmarks",
        json={
            "url": "http://example.org",
        },
    )
    bookmark = get_items(structured=False)[0]
    # verify it was saved to the default bookmark dir (use the variable,
    # not a duplicated literal, so the test tracks the config value)
    assert bookmarks_dir in bookmark["path"]
def test_move_data(test_app, note_fixture, client):
    """Moving a dataobj via /dataobj/move updates its stored directory."""
    create_dir("random")
    resp = client.post(
        "/dataobj/move/1",
        data={"path": "random", "submit": "true"},
        follow_redirects=True,
    )
    assert resp.status_code == 200
    # BUG FIX: was `assert "Data successfully moved to random."` — a bare
    # non-empty string literal is always truthy, so the flash message was
    # never actually checked. Assert against the response body instead.
    assert b"Data successfully moved to random." in resp.data
    assert get_item(1)["dir"] == "random"
def create_folder():
    """Create the directory named in the JSON body's `paths` field.

    Returns the sanitized directory name with 200, or a 401 when the
    directory already exists.
    """
    requested = request.json.get("paths")
    try:
        created = data.create_dir(requested)
    except FileExistsError:
        return "Directory already exists", 401
    return created, 200
def create_folder():
    """
    Creates new directory

    Parameter in JSON body:
    - **path** (required) - path of newdir
    """
    target = request.json.get("path")
    try:
        created = data.create_dir(target)
    except FileExistsError:
        # duplicate directory — report instead of creating
        return Response("Directory already exists", status=401)
    return Response(created, status=200)
def test_get_dataobjs(test_app, client: FlaskClient, bookmark_fixture):
    """GET /api/dataobjs returns every item, including notes in subdirectories."""
    note_dict = {
        "type": "note",
        "title": "Nested Test Note",
        "tags": ["testing", "archivy"],
        "path": "t",
    }
    create_dir("t")
    note = DataObj(**note_dict)
    note.insert()

    response: Flask.response_class = client.get("/api/dataobjs")
    # removed leftover debug `print(response.data)`
    assert response.status_code == 200
    assert isinstance(response.json, list)
    # check it correctly gets nested note
    assert len(response.json) == 2

    bookmark = response.json[0]
    assert bookmark["metadata"]["title"] == "Example"
    assert bookmark["metadata"]["id"] == 1
    assert bookmark["content"].startswith("Lorem ipsum")
def create_folder():
    """Create the directory named in the JSON body's `name` field.

    Returns 200 on success; 401 if the directory already exists
    (matching the behavior of the sibling create_folder endpoints).
    """
    directory = request.json.get("name")
    # previously an existing directory raised FileExistsError and
    # surfaced as a 500 — handle it like the other folder endpoints do
    try:
        data.create_dir(directory)
    except FileExistsError:
        return "Directory already exists", 401
    return "Successfully Created", 200
def hn_sync(save_comments, post_type, username, hn_password, force):
    """Sync a user's Hacker News activity into archivy.

    Logs into news.ycombinator.com with the given credentials, pages
    through the user's ``post_type`` listing, and saves each unseen story
    as a bookmark under ``hacker_news/<post_type>/``. Ask HN posts (no
    external URL) are stored with their text; comment trees are appended
    when *save_comments* is set or the post is an Ask HN. With *force*,
    already-seen posts are skipped individually instead of stopping.
    """
    global num_ask_hn, num_links, num_links_processed
    with app.app_context():
        session = requests.Session()
        print("\nLogging in...")
        r = session.post(
            f"{BASE_URL}/login", data={"acct": username, "pw": hn_password}
        )
        # HN sets a `user` cookie only on successful login
        if session.cookies.get("user", None) is None:
            print("Error logging in. Verify the credentials and try again.")
            sys.exit(1)
        print("Logged in successfully.\n")

        url = f"{BASE_URL}/{post_type}?id={username}&p="
        headers = {
            "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:75.0) Gecko/20100101 Firefox/75.0",
        }
        i = 1

        # create folders in archivy to store content
        create_dir("hacker_news")
        create_dir("hacker_news/" + post_type)

        # store titles of previous posts (set comprehension, was set([...]))
        seen_posts = {
            post["url"]
            for post in get_items(path=f"hacker_news/{post_type}/", structured=False)
        }
        while True:
            links_processed_prev = num_links_processed
            print(f"Getting results of page {i}")
            r = session.get(url + str(i), headers=headers)
            tree = BeautifulSoup(r.text, features="lxml")
            tree_subtext = tree.select(".subtext")
            post_links = tree.select(".titlelink")

            # Number of links on the page
            n = len(tree_subtext)
            if not n:
                print(f"Processing page {i}. No links found.")
                break

            for j in range(n):
                tree_subtext_each = tree_subtext[j].find_all("a")

                # This is to take care of situations where flag link may not be
                # present in the subtext. So number of links could be either 3
                # or 4.
                num_subtext = len(tree_subtext_each)

                # get post id by parsing link to comments
                post_id = int(
                    tree_subtext_each[num_subtext - 1]["href"]
                    .split("=")[1]
                    .split("&")[0]
                )
                post_url = post_links[j]["href"]
                hn_link = f"{BASE_URL}/item?id={post_id}"

                if (post_url in seen_posts or hn_link in seen_posts) and not force:
                    # we have already seen this upvoted story
                    # this means that all stories that follow will also be seen
                    finish()
                if (post_url in seen_posts or hn_link in seen_posts) and force:
                    print(f"{post_url} already saved.")
                    continue

                # call algolia api
                try:
                    res = requests.get(
                        f"https://hn.algolia.com/api/v1/items/{post_id}"
                    ).json()
                # was a bare `except:` — narrowed so Ctrl-C / SystemExit
                # are no longer swallowed mid-sync
                except Exception:
                    print(f"Could not save {post_url}.")
                    continue

                # might return a 404 if not indexed, so we check if we got a
                # response by calling .get()
                if res.get("type") and res["type"] == "story":
                    bookmark = DataObj(
                        path=f"hacker_news/{post_type}/",
                        date=datetime.utcfromtimestamp(res["created_at_i"]),
                        type="bookmark",
                    )
                    if res["url"]:
                        num_links += 1
                        bookmark.url = post_url
                        bookmark.process_bookmark_url()
                    else:
                        # Ask HN: no external URL, store the post text itself
                        num_ask_hn += 1
                        bookmark.url = hn_link
                        bookmark.content = (
                            res["title"].replace("<p>", "").replace("</p>", "")
                        )
                    bookmark.title = res["title"]
                    bookmark.content = f"{res['points']} points on [Hacker News]({hn_link})\n\n{bookmark.content}"

                    # save comments if user requests it through option or if
                    # story is an ASK HN
                    if save_comments or not res["url"]:
                        bookmark.content += "\n\n## Comments from Hacker News"
                        for comment in res["children"]:
                            comments = "<ul>" + build_comments(comment) + "</ul>"
                            bookmark.content += "\n\n" + html2text(
                                comments, bodywidth=0
                            ).replace("\n\n", "\n")
                    bookmark.insert()
                    num_links_processed += 1
                    print(f"Saving {res['title']}...")

            if n < 30:
                # no more links
                break
            elif links_processed_prev == num_links_processed:
                # throttling if no new links have been saved (when we're
                # running force.)
                sleep(1)
            i += 1
        finish()