import csv

import requests

# App-level objects (s3, db, LOG, models, helpers) come from the surrounding
# application; the import paths below are assumed from context.
from app import s3
from app.extensions import db
from app.log import LOG
from app.email_utils import get_email_domain_part
from app.models import (
    Alias,
    BatchImport,
    CustomDomain,
    DeletedAlias,
    DomainDeletedAlias,
)


def import_from_csv(batch_import: BatchImport, user, lines):
    """Create aliases from CSV lines that have "alias" and "note" columns."""
    reader = csv.DictReader(lines)
    for row in reader:
        try:
            full_alias = row["alias"].lower().strip().replace(" ", "")
            note = row["note"]
        except KeyError:
            LOG.warning("Cannot parse row %s", row)
            continue

        # The alias domain must be a verified custom domain owned by the user
        alias_domain = get_email_domain_part(full_alias)
        custom_domain = CustomDomain.get_by(domain=alias_domain)
        if (
            not custom_domain
            or not custom_domain.verified
            or custom_domain.user_id != user.id
        ):
            LOG.debug("domain %s can't be used by %s", alias_domain, user)
            continue

        # Skip aliases that already exist or were deleted before
        if (
            Alias.get_by(email=full_alias)
            or DeletedAlias.get_by(email=full_alias)
            or DomainDeletedAlias.get_by(email=full_alias)
        ):
            LOG.d("alias already used %s", full_alias)
            continue

        alias = Alias.create(
            user_id=user.id,
            email=full_alias,
            note=note,
            mailbox_id=user.default_mailbox_id,
            custom_domain_id=custom_domain.id,
            batch_import_id=batch_import.id,
        )
        db.session.commit()
        LOG.d("Create %s", alias)
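# A minimal sketch of the expected CSV shape and of exercising
# import_from_csv directly (e.g. from a test or a shell); `user` and
# `batch_import` are assumed to be existing rows, not created here:
#
#     lines = [
#         "alias,note",
#         "hello@mydomain.example,my first alias",
#     ]
#     import_from_csv(batch_import, user, lines)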
def handle_batch_import(batch_import: BatchImport):
    user = batch_import.user

    # Mark the import as processed up-front so it is not picked up twice
    batch_import.processed = True
    db.session.commit()

    LOG.debug("Start batch import for %s %s", batch_import, user)

    # The uploaded CSV lives on S3; download it and split it into lines
    file_url = s3.get_url(batch_import.file.path)
    LOG.d("Download file %s from %s", batch_import.file, file_url)
    r = requests.get(file_url)
    lines = [line.decode() for line in r.iter_lines()]

    import_from_csv(batch_import, user, lines)
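# A minimal sketch of a driver loop, assuming pending BatchImport rows carry
# processed=False and that the model exposes a filter_by() query helper in
# the same style as get_by() above (both assumptions; this is illustrative,
# not the app's actual scheduler):
#
#     for batch_import in BatchImport.filter_by(processed=False):
#         handle_batch_import(batch_import)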