def test_import_no_mailboxes(flask_client):
    # Create user
    user = login(flask_client)

    # Check start state
    assert len(Alias.filter_by(user_id=user.id).all()) == 1  # Onboarding alias

    # Create domain
    CustomDomain.create(
        user_id=user.id, domain="my-domain.com", ownership_verified=True
    )
    Session.commit()

    # CSV rows: alias + note, both aliases on the verified custom domain
    alias_data = [
        "alias,note",
        "ebay@my-domain.com,Used on eBay",
        'facebook@my-domain.com,"Used on Facebook, Instagram."',
    ]

    file = File.create(path="/test", commit=True)
    batch_import = BatchImport.create(user_id=user.id, file_id=file.id)

    import_from_csv(batch_import, user, alias_data)

    assert len(Alias.filter_by(user_id=user.id).all()) == 3  # +2
def batch_import_route():
    # only for users who have custom domains
    if not current_user.verified_custom_domains():
        flash("Alias batch import is only available for custom domains", "warning")

    if current_user.disable_import:
        flash(
            "you cannot use the import feature, please contact SimpleLogin team",
            "error",
        )
        return redirect(url_for("dashboard.index"))

    batch_imports = BatchImport.filter_by(user_id=current_user.id).all()

    if request.method == "POST":
        alias_file = request.files["alias-file"]

        file_path = random_string(20) + ".csv"
        file = File.create(user_id=current_user.id, path=file_path)
        s3.upload_from_bytesio(file_path, alias_file)
        Session.flush()
        LOG.d("upload file %s to s3 at %s", file, file_path)

        bi = BatchImport.create(user_id=current_user.id, file_id=file.id)
        Session.flush()
        LOG.d("Add a batch import job %s for %s", bi, current_user)

        # Schedule batch import job
        Job.create(
            name=JOB_BATCH_IMPORT,
            payload={"batch_import_id": bi.id},
            run_at=arrow.now(),
        )
        Session.commit()

        flash(
            "The file has been uploaded successfully and the import will start shortly",
            "success",
        )

        return redirect(url_for("dashboard.batch_import_route"))

    return render_template(
        "dashboard/batch_import.html", batch_imports=batch_imports
    )
def test_import(flask_client):
    # Create user
    user = login(flask_client)

    # Check start state
    assert len(Alias.filter_by(user_id=user.id).all()) == 1  # Onboarding alias

    # Create domains
    CustomDomain.create(
        user_id=user.id, domain="my-domain.com", ownership_verified=True
    )
    CustomDomain.create(
        user_id=user.id, domain="my-destination-domain.com", ownership_verified=True
    )
    Session.commit()

    # Create two verified mailboxes on the destination domain
    mailbox1 = Mailbox.create(
        user_id=user.id, email="destination@my-destination-domain.com", verified=True
    )
    mailbox2 = Mailbox.create(
        user_id=user.id, email="destination2@my-destination-domain.com", verified=True
    )
    Session.commit()

    # CSV rows: the mailboxes column is space-separated, first entry is the primary
    alias_data = [
        "alias,note,mailboxes",
        "ebay@my-domain.com,Used on eBay,destination@my-destination-domain.com",
        'facebook@my-domain.com,"Used on Facebook, Instagram.",destination@my-destination-domain.com destination2@my-destination-domain.com',
    ]

    file = File.create(path="/test", commit=True)
    batch_import = BatchImport.create(user_id=user.id, file_id=file.id)

    import_from_csv(batch_import, user, alias_data)

    aliases = Alias.filter_by(user_id=user.id).order_by(Alias.id).all()
    assert len(aliases) == 3  # +2

    # aliases[0] is the onboarding alias, skip it

    # eBay alias
    assert aliases[1].email == "ebay@my-domain.com"
    assert len(aliases[1].mailboxes) == 1
    # First one should be primary
    assert aliases[1].mailbox_id == mailbox1.id
    # Others are sorted
    assert aliases[1].mailboxes[0] == mailbox1

    # Facebook alias
    assert aliases[2].email == "facebook@my-domain.com"
    assert len(aliases[2].mailboxes) == 2
    # First one should be primary
    assert aliases[2].mailbox_id == mailbox1.id
    # Others are sorted
    assert aliases[2].mailboxes[0] == mailbox2
    assert aliases[2].mailboxes[1] == mailbox1
def handle_batch_import(batch_import: BatchImport):
    user = batch_import.user

    batch_import.processed = True
    db.session.commit()

    LOG.debug("Start batch import for %s %s", batch_import, user)
    file_url = s3.get_url(batch_import.file.path)

    LOG.d("Download file %s from %s", batch_import.file, file_url)
    r = requests.get(file_url)
    lines = [line.decode() for line in r.iter_lines()]

    reader = csv.DictReader(lines)

    for row in reader:
        try:
            full_alias = row["alias"].lower().strip().replace(" ", "")
            note = row["note"]
        except KeyError:
            LOG.warning("Cannot parse row %s", row)
            continue

        alias_domain = get_email_domain_part(full_alias)
        custom_domain = CustomDomain.get_by(domain=alias_domain)

        # the alias must belong to a verified custom domain owned by this user
        if (
            not custom_domain
            or not custom_domain.verified
            or custom_domain.user_id != user.id
        ):
            LOG.debug("domain %s can't be used %s", alias_domain, user)
            continue

        # skip aliases that already exist or were deleted before
        if (
            Alias.get_by(email=full_alias)
            or DeletedAlias.get_by(email=full_alias)
            or DomainDeletedAlias.get_by(email=full_alias)
        ):
            LOG.d("alias already used %s", full_alias)
            continue

        alias = Alias.create(
            user_id=user.id,
            email=full_alias,
            note=note,
            mailbox_id=user.default_mailbox_id,
            custom_domain_id=custom_domain.id,
            batch_import_id=batch_import.id,
        )
        db.session.commit()
        LOG.d("Create %s", alias)
def handle_batch_import(batch_import: BatchImport):
    user = batch_import.user

    batch_import.processed = True
    db.session.commit()

    LOG.debug("Start batch import for %s %s", batch_import, user)
    file_url = s3.get_url(batch_import.file.path)

    LOG.d("Download file %s from %s", batch_import.file, file_url)
    r = requests.get(file_url)
    lines = [line.decode() for line in r.iter_lines()]

    # CSV parsing and alias creation are delegated to import_from_csv,
    # which the tests call directly with (batch_import, user, lines)
    import_from_csv(batch_import, user, lines)
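# The per-row logic now lives in a separate import_from_csv helper. Below is a
# minimal sketch of what such a helper could look like, pieced together from the
# inline logic of the previous handle_batch_import and the "mailboxes" column the
# tests exercise. It is NOT the actual implementation: the import paths, the
# Mailbox.get_by lookup, and the AliasMailbox link table used to attach the
# non-primary mailboxes are assumptions.
import csv

from app.db import Session  # assumed import path
from app.email_utils import get_email_domain_part  # assumed import path
from app.log import LOG  # assumed import path
from app.models import (  # assumed import path
    Alias,
    AliasMailbox,
    CustomDomain,
    DeletedAlias,
    DomainDeletedAlias,
    Mailbox,
)


def import_from_csv(batch_import, user, lines):
    """Create aliases from CSV lines with columns: alias, note, mailboxes (optional)."""
    reader = csv.DictReader(lines)

    for row in reader:
        try:
            full_alias = row["alias"].lower().strip().replace(" ", "")
            note = row["note"]
        except KeyError:
            LOG.warning("Cannot parse row %s", row)
            continue

        # the alias must belong to a verified custom domain owned by this user
        alias_domain = get_email_domain_part(full_alias)
        custom_domain = CustomDomain.get_by(domain=alias_domain)
        if (
            not custom_domain
            or not custom_domain.verified
            or custom_domain.user_id != user.id
        ):
            LOG.d("domain %s can't be used by %s", alias_domain, user)
            continue

        # skip aliases that already exist or were deleted before
        if (
            Alias.get_by(email=full_alias)
            or DeletedAlias.get_by(email=full_alias)
            or DomainDeletedAlias.get_by(email=full_alias)
        ):
            LOG.d("alias already used %s", full_alias)
            continue

        # optional space-separated "mailboxes" column; keep only the user's verified mailboxes
        mailboxes = []
        for mailbox_email in (row.get("mailboxes") or "").split():
            mailbox = Mailbox.get_by(email=mailbox_email)
            if mailbox and mailbox.verified and mailbox.user_id == user.id:
                mailboxes.append(mailbox)

        # first mailbox in the row is the primary one; fall back to the default mailbox
        primary_mailbox_id = mailboxes[0].id if mailboxes else user.default_mailbox_id

        alias = Alias.create(
            user_id=user.id,
            email=full_alias,
            note=note,
            mailbox_id=primary_mailbox_id,
            custom_domain_id=custom_domain.id,
            batch_import_id=batch_import.id,
        )
        Session.commit()

        # attach the remaining mailboxes via the (assumed) AliasMailbox link table
        for mailbox in mailboxes[1:]:
            AliasMailbox.create(alias_id=alias.id, mailbox_id=mailbox.id)
        Session.commit()

        LOG.d("Create %s", alias)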
def test_import_no_mailboxes_no_domains(flask_client):
    # Create user
    user = login(flask_client)

    # Check start state
    assert len(Alias.filter_by(user_id=user.id).all()) == 1  # Onboarding alias

    # CSV rows reference my-domain.com, which this user has NOT registered
    alias_data = [
        "alias,note",
        "ebay@my-domain.com,Used on eBay",
        'facebook@my-domain.com,"Used on Facebook, Instagram."',
    ]

    file = File.create(path="/test", commit=True)
    batch_import = BatchImport.create(user_id=user.id, file_id=file.id, commit=True)

    import_from_csv(batch_import, user, alias_data)

    # Should have failed to import anything new because my-domain.com isn't registered
    assert len(Alias.filter_by(user_id=user.id).all()) == 1  # +0
def test_import_no_domains(flask_client):
    # Create user
    user = login(flask_client)

    # Check start state
    assert len(Alias.filter_by(user_id=user.id).all()) == 1  # Onboarding alias

    # CSV rows reference my-domain.com, which this user has NOT registered
    alias_data = [
        "alias,note,mailboxes",
        "ebay@my-domain.com,Used on eBay,destination@my-destination-domain.com",
        'facebook@my-domain.com,"Used on Facebook, Instagram.",destination@my-destination-domain.com destination2@my-destination-domain.com',
    ]

    batch_import = BatchImport.create(user_id=user.id, file_id=0)

    import_from_csv(batch_import, user, alias_data)

    # Should have failed to import anything new because my-domain.com isn't registered
    assert len(Alias.filter_by(user_id=user.id).all()) == 1  # +0
        if user and user.notification and user.activated:
            LOG.d("send onboarding mailbox email to user %s", user)
            onboarding_mailbox(user)

    elif job.name == JOB_ONBOARDING_4:
        user_id = job.payload.get("user_id")
        user = User.get(user_id)

        # user might delete their account in the meantime
        # or disable the notification
        if user and user.notification and user.activated:
            LOG.d("send onboarding pgp email to user %s", user)
            onboarding_pgp(user)

    elif job.name == JOB_BATCH_IMPORT:
        batch_import_id = job.payload.get("batch_import_id")
        batch_import = BatchImport.get(batch_import_id)
        handle_batch_import(batch_import)

    elif job.name == JOB_DELETE_ACCOUNT:
        user_id = job.payload.get("user_id")
        user = User.get(user_id)

        if not user:
            LOG.exception("No user found for %s", user_id)
            continue

        user_email = user.email
        LOG.warning("Delete user %s", user)
        User.delete(user.id)
        db.session.commit()

        send_email(