async def bulk_transform_images(directory: Optional[str] = Query(
    default=None,
    title="directory",
    description="Subdirectory of remote CDN to transverse and transform images.",
    max_length=50,
)) -> JSONResponse:
    """
    Apply transformations to images uploaded within the current month.
    Optionally accepts a `directory` parameter to override image directory.

    :param Optional[str] directory: Remote directory to recursively fetch images and apply transformations.

    :returns: JSONResponse
    """
    if directory is None:
        directory = settings.GCP_BUCKET_FOLDER
    transformed_images = {
        "purged": images.purge_unwanted_images(directory),
        "retina": images.retina_transformations(directory),
        "mobile": images.mobile_transformations(directory),
        # "standard": gcs.standard_transformations(directory),
    }
    # Summarize per-category counts; a `None` result counts as zero.
    summary = [
        f"{len(results)} {category}" if results is not None else f"0 {category}"
        for category, results in transformed_images.items()
    ]
    LOGGER.success(f"Transformed {', '.join(summary)} images")
    return JSONResponse(transformed_images)
async def new_account(new_account_event: NetlifyUserEvent, db: Session = Depends(get_db)):
    """
    Create user account from Netlify identity signup.

    :param NetlifyUserEvent new_account_event: Newly created user account from Netlify.
    :param Session db: ORM Database session.

    :returns: NetlifyAccountCreationResponse
    """
    account = new_account_event.user
    # Reject the signup outright if an account already exists for this email.
    if get_account(db, account.email):
        LOGGER.warning(f"User account already exists for `{account.email}`.")
        raise HTTPException(
            status_code=400,
            detail=f"User account already exists for `{account.email}`.",
        )
    create_account(db, account)
    # Re-fetch to confirm the insert actually persisted before reporting success.
    if get_account(db, account.email):
        LOGGER.success(
            f"Account created: id={account.id} email={account.email}, name={account.user_metadata.full_name}"
        )
        return NetlifyAccountCreationResponse(
            succeeded=new_account_event,
            failed=None,
        )
    LOGGER.warning(
        f"Account not created: id={account.id} email={account.email}, name={account.user_metadata.full_name}"
    )
    return NetlifyAccountCreationResponse(
        succeeded=None,
        failed=new_account_event,
    )
def create_donation(db: Session, donation: NewDonation) -> Donation:
    """
    Create new BuyMeACoffee donation record.

    :param Session db: ORM database session.
    :param NewDonation donation: Donation schema object.

    :returns: Donation (or None if the insert failed; errors are logged, not raised).
    """
    try:
        db_item = Donation(
            coffee_id=donation.coffee_id,
            email=donation.email,
            name=donation.name,
            count=donation.count,
            message=donation.message,
            link=donation.link,
            created_at=datetime.now(),
        )
        db.add(db_item)
        db.commit()
        LOGGER.success(
            f"Successfully received donation: `{donation.count}` coffees from `{donation.name}`."
        )
        return db_item
    # BUGFIX: IntegrityError subclasses SQLAlchemyError, so it must be caught
    # first — the previous ordering made this branch unreachable.
    except IntegrityError as e:
        db.rollback()  # leave the session usable after a failed flush
        LOGGER.error(f"IntegrityError while creating donation record: {e}")
    except SQLAlchemyError as e:
        db.rollback()
        LOGGER.error(f"SQLAlchemyError while creating donation record: {e}")
    except Exception as e:
        LOGGER.error(f"Unexpected error while creating donation record: {e}")
def init_script():
    """Batch download all Snapchat memories, decoding URLs first where needed."""
    # Iterate the dict directly instead of `.keys()` (same behavior, idiomatic).
    for media_type in SNAPCHAT_MEDIA_URLS:
        if SNAPCHAT_MEDIA_URLS[media_type] is None:
            # NOTE(review): return value discarded — presumably
            # parse_and_decode_urls() populates SNAPCHAT_MEDIA_URLS in place;
            # confirm, otherwise the lookup below still yields None.
            parse_and_decode_urls(media_type)
        download_snapchat_memories(SNAPCHAT_MEDIA_URLS[media_type], media_type)
    # Plain string literal: the original f-string had no placeholders.
    LOGGER.success("Completed downloading all Snapchat memories.")
def update_post(self, post_id: str, body: dict, slug: str) -> Optional[dict]:
    """
    Update post by ID.

    :param str post_id: Ghost post ID
    :param dict body: Payload containing post updates.
    :param str slug: Human-readable unique identifier.

    :returns: Optional[dict]
    """
    try:
        resp = requests.put(
            f"{self.admin_api_url}/posts/{post_id}/",
            json=body,
            headers={
                "Authorization": self.session_token,
                "Content-Type": "application/json",
            },
        )
        # BUGFIX: success was previously logged when status_code != 200.
        if resp.status_code == 200:
            LOGGER.success(f"Successfully updated post `{slug}`")
        else:
            LOGGER.warning(
                f"Unexpected status `{resp.status_code}` while updating post `{slug}`."
            )
        return resp.json()
    except HTTPError as e:
        LOGGER.error(f"HTTPError while updating Ghost post: {e}")
    except Exception as e:
        LOGGER.error(f"Unexpected error while updating Ghost post: {e}")
def remove_comment_upvote(db: Session, user_id: str, comment_id: int):
    """
    Delete a record of a user's upvote for a given comment.

    :param Session db: ORM database session.
    :param str user_id: Primary key for account record.
    :param int comment_id: Unique ID of comment user attempted to upvote.

    :returns: None; errors are logged, not raised.
    """
    try:
        # BUGFIX: the original constructed a brand-new transient CommentUpvote
        # and passed it to db.delete(), which does not delete the persisted
        # row (SQLAlchemy rejects non-persisted instances). Fetch the existing
        # record and delete that instead.
        upvote = (
            db.query(CommentUpvote)
            .filter(
                CommentUpvote.user_id == user_id,
                CommentUpvote.comment_id == comment_id,
            )
            .first()
        )
        if upvote is None:
            LOGGER.warning(
                f"No upvote found for comment `{comment_id}` from user `{user_id}`."
            )
            return
        db.delete(upvote)
        db.commit()
        LOGGER.success(
            f"Removed upvote for comment `{comment_id}` from user `{user_id}`."
        )
    # IntegrityError subclasses SQLAlchemyError; catch it first so its branch
    # is reachable.
    except IntegrityError as e:
        db.rollback()
        LOGGER.error(
            f"IntegrityError while attempting to remove comment upvote: {e}")
    except SQLAlchemyError as e:
        db.rollback()
        LOGGER.error(
            f"SQLAlchemyError while attempting to remove comment upvote: {e}")
    except Exception as e:
        LOGGER.error(
            f"Unexpected error while attempting to remove comment upvote: {e}")
async def save_user_search_queries() -> JSONResponse:
    """
    Save top search analytics for the current week.

    :returns: JSONResponse
    :raises HTTPException: If either persistence call returns None.
    """
    weekly_searches = persist_algolia_searches(settings.ALGOLIA_TABLE_WEEKLY, 7)
    monthly_searches = persist_algolia_searches(settings.ALGOLIA_TABLE_MONTHLY, 90)
    if weekly_searches is None or monthly_searches is None:
        # BUGFIX: the exception was previously constructed but never raised,
        # letting execution continue and crash on len(None) below.
        raise HTTPException(500, "Unexpected error when saving search query data.")
    LOGGER.success(
        f"Inserted {len(weekly_searches)} rows into `{settings.ALGOLIA_TABLE_WEEKLY}`, \
{len(monthly_searches)} into `{settings.ALGOLIA_TABLE_MONTHLY}`")
    return JSONResponse({
        "7-Day": {
            "count": len(weekly_searches),
            "rows": weekly_searches,
        },
        "90-Day": {
            "count": len(monthly_searches),
            "rows": monthly_searches,
        },
    })
def new_ghost_subscription(user: NetlifyAccount) -> Optional[Dict[str, List[Dict]]]:
    """
    Create Ghost member from Netlify identity signup.

    :param NetlifyAccount user: New user account from Netlify auth.

    :returns: Optional[Dict[str, List[Dict]]]
    """
    body = {
        "accounts": [{
            "name": user.user_metadata.full_name,
            "email": user.email,
            "note": "Subscribed from Netlify",
            "subscribed": True,
            "comped": False,
            "labels": user.user_metadata.roles,
        }]
    }
    response, code = ghost.create_member(body)
    # BUGFIX: the original logged success only on non-200 responses whose
    # error was not a ValidationError, and logged nothing on actual success.
    if code == 200:
        LOGGER.success(
            f"Created new Ghost member: {user.user_metadata.full_name} <{user.email}>"
        )
    else:
        error_type = response["errors"][0]["type"]
        if error_type == "ValidationError":
            # Member already exists; treat as a no-op.
            LOGGER.info(
                f"Skipped Ghost member creation for existing user: {user.user_metadata.full_name} <{user.email}>"
            )
        else:
            LOGGER.warning(
                f"Failed to create Ghost member {user.email}: {error_type}"
            )
    return body
def _transform_mobile_image(
    self, original_image_blob: Blob, new_image_blob: Blob
) -> Optional[Blob]:
    """
    Create smaller image size to be served on mobile devices.

    :param Blob original_image_blob: Original image blob.
    :param Blob new_image_blob: New newly created Blob for mobile image.

    :returns: Optional[Blob]
    """
    img_meta = self._get_image_meta(original_image_blob)
    img_bytes = original_image_blob.download_as_bytes()
    if not img_bytes:
        # Nothing to transform when the source blob is empty.
        return None
    source_image = Image.open(BytesIO(img_bytes))
    try:
        with BytesIO() as buffer:
            # Downscale by a factor of 2 for the mobile variant.
            source_image.reduce(2).save(buffer, format=img_meta["format"])
            new_image_blob.upload_from_string(
                buffer.getvalue(), content_type=img_meta["content-type"]
            )
            LOGGER.success(f"Created mobile image `{new_image_blob.name}`")
            return new_image_blob
    except GoogleCloudError as e:
        LOGGER.error(
            f"GoogleCloudError while saving mobile image `{new_image_blob.name}`: {e}"
        )
    except Exception as e:
        LOGGER.error(
            f"Unexpected exception while saving mobile image `{new_image_blob.name}`: {e}"
        )
async def update_tags_metadata() -> JSONResponse:
    """
    Enrich tag metadata upon update.

    :returns: JSONResponse
    """
    # Run every tag-related SQL query against the `hackers_dev` database.
    results = rdbms.execute_queries(collect_sql_queries("tags"), "hackers_dev")
    LOGGER.success(f"Updated tags metadata: {results}")
    return JSONResponse(results, status_code=200)
async def get_all_posts() -> JSONResponse:
    """
    List all published Ghost posts.

    :returns: JSONResponse
    """
    posts = ghost.get_all_posts()
    LOGGER.success(f"Fetched all {len(posts)} Ghost posts: {posts}")
    return JSONResponse(posts, status_code=200)
async def authors_bulk_update_metadata() -> JSONResponse:
    """
    Bulk update author images to use CDN URLs.

    :returns: JSONResponse
    """
    # Collect and execute every author-related SQL query in one pass.
    results = rdbms.execute_queries(collect_sql_queries("users"), "hackers_dev")
    LOGGER.success(
        f"Updated author metadata for {len(results)} authors.")
    return JSONResponse(
        content={"authors": results},
        status_code=200,
    )
def decode_urls(encoded_urls: List[Dict[str, str]], media_type: str) -> List[Dict[str, str]]:
    """
    Decode video and photo URLs.

    :param encoded_urls: Resource URLs to fetch.
    :type encoded_urls: List[Dict[str, str]]
    :param media_type: Resource URLs to fetch.
    :type media_type: str
    :returns: List[Dict[str, str]]
    """
    # Drive the async decoder to completion on the current event loop.
    event_loop = asyncio.get_event_loop()
    decoded_urls = event_loop.run_until_complete(run(encoded_urls, media_type))
    LOGGER.success(f"Decoded {len(decoded_urls)} {media_type} URLs.")
    return decoded_urls
async def update_post(post_update: PostUpdate) -> JSONResponse:
    """
    Enrich post metadata upon update.

    :param PostUpdate post_update: Request to update Ghost post.

    :returns: JSONResponse
    :raises HTTPException: If the post was updated within the last 5 seconds.
    """
    import asyncio  # local import: used only for the non-blocking pause below

    previous_update = post_update.post.previous
    if previous_update:
        current_time = get_current_datetime()
        previous_update_date = datetime.strptime(
            str(previous_update.updated_at), "%Y-%m-%dT%H:%M:%S.000Z"
        )
        # Debounce: ignore webhook events fired within 5s of the previous
        # update to avoid an update->webhook->update loop.
        if previous_update_date and current_time - previous_update_date < timedelta(
            seconds=5
        ):
            LOGGER.warning("Post update ignored as post was just updated.")
            raise HTTPException(
                status_code=422, detail="Post update ignored as post was just updated."
            )
    post = post_update.post.current
    slug = post.slug
    feature_image = post.feature_image
    html = post.html
    body = {
        "posts": [
            {
                "meta_title": post.title,
                "og_title": post.title,
                "twitter_title": post.title,
                "meta_description": post.custom_excerpt,
                "twitter_description": post.custom_excerpt,
                "og_description": post.custom_excerpt,
                "updated_at": get_current_time(),
            }
        ]
    }
    if html and "http://" in html:
        body = update_html_ssl_links(html, body, slug)
    if feature_image is not None:
        body = update_metadata_images(feature_image, body, slug)
    # BUGFIX: time.sleep(1) blocked the event loop inside this async endpoint;
    # asyncio.sleep yields control to other tasks instead.
    await asyncio.sleep(1)
    # Refresh the timestamp after the pause so Ghost accepts the update.
    body["posts"][0]["updated_at"] = get_current_time()
    response, code = ghost.update_post(post.id, body, post.slug)
    LOGGER.success(f"Successfully updated post `{slug}`: {body}")
    return JSONResponse({str(code): response})
def download_snapchat_memories(decoded_memories: List[Dict[str, str]], media_type: str):
    """
    Fetch media files and save to local drive.

    :param decoded_memories: Resource URLs to fetch.
    :type decoded_memories: List[Dict[str, str]]
    :param media_type: Type of media to fetch (photo or video).
    :type media_type: str
    """
    destination = f"{MEDIA_EXPORT_FILEPATH}/{media_type}"
    # Ensure the per-media-type output directory exists before downloading.
    if not path.exists(destination):
        mkdir(destination)
    asyncio.get_event_loop().run_until_complete(run(decoded_memories, media_type))
    LOGGER.success(
        f"Completed downloading {len(decoded_memories)} {media_type}.")
def parse_urls_from_export(media_type: str) -> List[Dict[str, str]]:
    """
    Parse `Snapchat memories` URLs into convenient dictionary for a given data type.

    :param media_type: Resource URLs to fetch (either photos or videos).
    :type media_type: str
    :return: List[Dict[str, str]]
    """
    # e.g. "photos" -> "PHOTO", matching the export's `Media Type` values.
    media_type_key = media_type.upper().replace("S", "")
    encoded_urls = [
        create_url_pair(m)
        for m in SNAPCHAT_MEMORIES_JSON
        if m["Media Type"] == media_type_key
    ]
    # BUGFIX: log message previously read "Snapchat export export."
    LOGGER.success(
        f"Found {len(encoded_urls)} {media_type} from Snapchat export."
    )
    return encoded_urls
async def migrate_site_analytics():
    """Fetch top searches for weekly & monthly timeframes."""
    # NOTE(review): log says "Inserted" but this function only fetches here;
    # presumably fetch_top_visited_urls persists rows — confirm.
    weekly_traffic = fetch_top_visited_urls("7d", limit=100)
    monthly_traffic = fetch_top_visited_urls("month", limit=500)
    LOGGER.success(
        f"Inserted {len(weekly_traffic)} rows into `weekly_stats`, {len(monthly_traffic)} into `monthly_stats`."
    )
    return {
        "updated": {
            "weekly_stats": {"count": len(weekly_traffic), "rows": weekly_traffic},
            "monthly_stats": {"count": len(monthly_traffic), "rows": monthly_traffic},
        }
    }
async def bulk_organize_images(directory: Optional[str] = None) -> JSONResponse:
    """
    Sort retina and mobile images into their appropriate directories.

    :param Optional[str] directory: Remote directory to organize images into subdirectories.

    :returns: JSONResponse
    """
    # Fall back to the configured bucket folder when no directory is given.
    target_directory = directory if directory is not None else settings.GCP_BUCKET_FOLDER
    retina_images = images.organize_retina_images(target_directory)
    image_headers = images.image_headers(target_directory)
    LOGGER.success(
        f"Moved {len(retina_images)} retina images, modified {len(image_headers)} content types."
    )
    return JSONResponse(
        {
            "retina": retina_images,
            "headers": image_headers,
        }
    )
def create_member(self, body: dict) -> Tuple[str, int]:
    """
    Create new Ghost member account used to receive newsletters.

    :param dict body: Payload containing member information.

    :returns: Tuple[str, int] — (human-readable result message, HTTP status code).
        On HTTPError, returns the raw response content and its status code instead.
    """
    try:
        resp = requests.post(
            f"{self.admin_api_url}/members/",
            json=body,
            headers={"Authorization": self.session_token},
        )
        # NOTE(review): success is logged unconditionally — `requests` does not
        # raise HTTPError unless raise_for_status() is called, so non-2xx
        # responses also take this path; confirm that is intended.
        response = f'Successfully created new Ghost member `{body.get("email")}: {resp.json()}.'
        LOGGER.success(response)
        return response, resp.status_code
    except HTTPError as e:
        LOGGER.error(
            f"Failed to create Ghost member: {e.response.content}")
        return e.response.content, e.response.status_code
async def batch_update_metadata() -> JSONResponse:
    """
    Run SQL queries to sanitize metadata for all posts.

    :returns: JSONResponse
    """
    # Apply sanitization updates to existing post metadata.
    update_results = rdbms.execute_queries(
        collect_sql_queries("posts/updates"), "hackers_dev"
    )
    # Find posts missing metadata entirely and backfill them.
    posts_missing_metadata = rdbms.execute_query_from_file(
        f"{BASE_DIR}/database/queries/posts/selects/missing_all_metadata.sql",
        "hackers_dev",
    )
    insert_results = update_metadata(posts_missing_metadata)
    inserted_count = len(insert_results)
    updated_count = len(update_results.keys())
    LOGGER.success(
        f"Inserted metadata for {inserted_count} posts, updated {updated_count}."
    )
    return JSONResponse(
        {
            "inserted": {"count": inserted_count, "posts": insert_results},
            "updated": {"count": updated_count, "posts": update_results},
        }
    )
def create_comment(db: Session, comment: NewComment, user_role: Optional[str]) -> Comment:
    """
    Create new user-submitted comment.

    :param Session db: ORM database session.
    :param NewComment comment: User comment object.
    :param Optional[str] user_role: Permissions of the comment author, if any.

    :returns: Comment (or None if the insert failed; errors are logged, not raised).
    """
    try:
        LOGGER.info(
            f"Creating comment from {comment.user_email} on {comment.post_slug}..."
        )
        new_comment = Comment(
            user_id=comment.user_id,
            user_name=comment.user_name,
            user_avatar=comment.user_avatar,
            user_email=comment.user_email,
            user_role=user_role,
            body=comment.body,
            post_slug=comment.post_slug,
            post_id=comment.post_id,
            created_at=datetime.now(),
        )
        db.add(new_comment)
        db.commit()
        LOGGER.success(
            f"New comment created by user `{new_comment.user_name}` on post `{new_comment.post_slug}`"
        )
        return new_comment
    # BUGFIX: IntegrityError subclasses SQLAlchemyError, so it must be caught
    # first — the previous ordering made this branch unreachable.
    except IntegrityError as e:
        db.rollback()  # leave the session usable after a failed flush
        LOGGER.error(f"IntegrityError while creating comment: {e}")
    except SQLAlchemyError as e:
        db.rollback()
        LOGGER.error(f"SQLAlchemyError while creating comment: {e}")
    except Exception as e:
        LOGGER.error(f"Unexpected error while creating comment: {e}")
def create_retina_image(self, image_blob: Blob) -> Optional[Blob]:
    """
    Create a single retina image variant of a standard-res image.

    :param Blob image_blob: Image blob object.

    :returns: Optional[Blob]
    """
    image_folder, image_name = self._get_folder_and_filename(image_blob)
    # Insert the `@2x` suffix before the file extension.
    retina_name = image_name.replace(".jpg", "@2x.jpg").replace(".png", "@2x.png")
    retina_blob_filepath = f"{image_folder}/_retina/{retina_name}"
    if self.bucket.blob(retina_blob_filepath).exists():
        LOGGER.info(
            f"Skipping retina image `{retina_blob_filepath}`; already exists."
        )
        return None
    self.bucket.copy_blob(image_blob, self.bucket, new_name=retina_blob_filepath)
    new_retina_image_blob = self.bucket.blob(retina_blob_filepath)
    LOGGER.success(f"Created retina image `{retina_blob_filepath}`")
    return new_retina_image_blob
def create_account(db: Session, account: NetlifyAccount) -> NetlifyAccount:
    """
    Create new account record sourced from Netlify.

    :param Session db: ORM database session.
    :param NetlifyAccount account: User account registered via Netlify.

    :returns: NetlifyAccount — the input account object is echoed back on
        success (not the persisted Account row); None if the insert failed.
    """
    try:
        new_account = Account(
            id=account.id,
            full_name=account.user_metadata.full_name,
            avatar_url=account.user_metadata.avatar_url,
            email=account.email,
            role=account.role,
            provider=account.app_metadata.provider,
            created_at=datetime.now(),
            updated_at=datetime.now(),
        )
        db.add(new_account)
        db.commit()
        LOGGER.success(
            f"New Netlify account created: `{account.user_metadata.full_name}`"
        )
        return account
    # BUGFIX: IntegrityError subclasses SQLAlchemyError, so it must be caught
    # first — the previous ordering made this branch unreachable.
    except IntegrityError as e:
        db.rollback()  # leave the session usable after a failed flush
        LOGGER.error(
            f"IntegrityError while creating Netlify user account: {e}")
    except SQLAlchemyError as e:
        db.rollback()
        LOGGER.error(
            f"SQLAlchemyError while creating Netlify user account: {e}")
    except Exception as e:
        LOGGER.error(
            f"Unexpected error while creating Netlify user account: {e}")
# Application entry point: construct the FastAPI app, attach CORS middleware,
# and register all route modules.
api = FastAPI(
    title="Jamstack API",
    description="API to automate optimizations for JAMStack sites.",
    version="0.1.0",
    debug=True,
    docs_url="/",
    openapi_url="/api.json",
    openapi_tags=settings.API_TAGS,
)
api.add_middleware(
    CORSMiddleware,
    allow_origins=settings.CORS_ORIGINS,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
# Register routers in a fixed order (registration order is preserved).
for route_module in (
    analytics,
    newsletter,
    posts,
    accounts,
    authors,
    donations,
    images,
    tags,
    github,
):
    api.include_router(route_module.router)
LOGGER.success("API successfully started.")