def _find_tag_attr(self, soup, find_kwargs, attr_name, msg): """Use BS to find a tag's attribute in the html""" tag = soup.find(**find_kwargs) if not tag: raise PortalError("Error finding tag: {args}\n" "{msg}".format(args=find_kwargs, msg=msg)) attr = tag.attrs.get(attr_name) if not attr: raise PortalError("Error finding attr: {attr}\n" "Tag: {tag}\n" "{msg}".format(attr=attr_name, tag=tag, msg=msg)) return attr
def _get_request_id(self, comm): """Get the request id for sending followup messages""" if not comm.foia: raise PortalError('Communication has no FOIA\n' 'Fetching Request ID') if not comm.foia.current_tracking_id(): raise PortalError('FOIA has no tracking ID\n' 'Fetching Request ID') pattern = re.compile('[0-9]+-([0-9]+)') match = pattern.match(comm.foia.current_tracking_id()) if not match: raise PortalError('FOIA tracking ID not in expected format\n' 'Fetching Request ID') return match.groups(1)
def _send_documents(self, comm, session, request_id):
    """Send a document along with a request or followup.

    For each file on the communication: ask the portal for a presigned
    upload URL, POST the file to that URL, and parse the stored location
    out of the S3-style XML response.  Finally register all uploaded
    documents with the portal in a single request.

    Args:
        comm: the communication whose files are being sent
        session: an authenticated requests session
        request_id: the portal request id to attach the documents to

    Raises:
        PortalError: if the upload location cannot be parsed from the
            upload response (and via _get/_post on unexpected statuses).
    """
    # pylint: disable=too-many-locals
    csrf_token = self._get_csrf_token(
        session,
        ['requests', comm.foia.current_tracking_id()],
    )
    # Hoisted out of the loop: the pattern never changes per file
    location_pattern = re.compile(r'<Location>https?:([^<]+)</Location>')
    documents = []
    for file_ in comm.files.all():
        params = {
            'filename': file_.name(),
            'request_id': request_id,
            'xhr_upload': 'true',
        }
        reply = self._get(
            session,
            furl(self.portal.url).add(path='presigned_url').url,
            'Signing a document for upload',
            params=params,
        )
        reply_json = reply.json()
        data = json.loads(reply_json['formData'])
        files = {'file': (file_.name(), file_.ffile)}
        reply = self._post(
            session,
            reply_json['url'],
            'Upload the document',
            data=data,
            files=files,
            # Upload endpoint signals success with 201 Created
            expected_status=201,
        )
        # Bug fix: was `reply.content` (bytes), which cannot be searched
        # with a str pattern under Python 3 — use the decoded text
        match = location_pattern.search(reply.text)
        if not match:
            raise PortalError('While uploading documents\n'
                              'Could not parse location from XML')
        # Undo the URL-encoding of path separators in the location
        location = match.group(1).replace('%2F', '/')
        documents.append((file_.name(), location))
    headers = {
        'X-CSRF-Token': csrf_token,
        'X-Requested-With': 'XMLHttpRequest',
    }
    data = {'request_id': request_id}
    # Rails-style nested array parameters, one group per document
    for i, (name, location) in enumerate(documents):
        data['documents[{}][description]'.format(i)] = ''
        data['documents[{}][url]'.format(i)] = location
        data['documents[{}][title]'.format(i)] = name
        data['documents[{}][filename]'.format(i)] = name
        data['documents[{}][doc_date]'.format(i)] = ''
    self._post(
        session,
        furl(self.portal.url).add(path='documents').url,
        'Saving the uploaded documents',
        data=data,
        headers=headers,
    )
def _send_documents(self, comm, session, request_id):
    """Send a document along with a request or followup.

    For each file on the communication: ask the portal for a presigned
    upload URL, POST the file to that URL, and parse the stored location
    out of the S3-style XML response.  Finally register all uploaded
    documents with the portal in a single request.

    Args:
        comm: the communication whose files are being sent
        session: an authenticated requests session
        request_id: the portal request id to attach the documents to

    Raises:
        PortalError: if the upload location cannot be parsed from the
            upload response (and via _get/_post on unexpected statuses).
    """
    # pylint: disable=too-many-locals
    csrf_token = self._get_csrf_token(
        session, ["requests", comm.foia.current_tracking_id()])
    # Hoisted out of the loop: the pattern never changes per file
    location_pattern = re.compile(r"<Location>https?:([^<]+)</Location>")
    documents = []
    for file_ in comm.files.all():
        params = {
            "filename": file_.name(),
            "request_id": request_id,
            "xhr_upload": "true",
        }
        reply = self._get(
            session,
            furl(self.portal.url).add(path="presigned_url").url,
            "Signing a document for upload",
            params=params,
        )
        reply_json = reply.json()
        data = json.loads(reply_json["formData"])
        files = {"file": (file_.name(), file_.ffile)}
        reply = self._post(
            session,
            reply_json["url"],
            "Upload the document",
            data=data,
            files=files,
            # Upload endpoint signals success with 201 Created
            expected_status=201,
        )
        # Bug fix: was `reply.content` (bytes), which cannot be searched
        # with a str pattern under Python 3 — use the decoded text
        match = location_pattern.search(reply.text)
        if not match:
            raise PortalError("While uploading documents\n"
                              "Could not parse location from XML")
        # Undo the URL-encoding of path separators in the location
        location = match.group(1).replace("%2F", "/")
        documents.append((file_.name(), location))
    headers = {
        "X-CSRF-Token": csrf_token,
        "X-Requested-With": "XMLHttpRequest"
    }
    data = {"request_id": request_id}
    # Rails-style nested array parameters, one group per document
    for i, (name, location) in enumerate(documents):
        data["documents[{}][description]".format(i)] = ""
        data["documents[{}][url]".format(i)] = location
        data["documents[{}][title]".format(i)] = name
        data["documents[{}][filename]".format(i)] = name
        data["documents[{}][doc_date]".format(i)] = ""
    self._post(
        session,
        furl(self.portal.url).add(path="documents").url,
        "Saving the uploaded documents",
        data=data,
        headers=headers,
    )
def _request(self, type_, session, url, msg, expected_status=200, **kwargs): """Make a request and check the status code""" # pylint: disable=too-many-arguments method = getattr(session, type_) reply = method(url, **kwargs) if reply.status_code != expected_status: raise PortalError( 'Error fetching: {url}\n' 'Status code: {code}\n' '{msg}'.format( url=url, code=reply.status_code, msg=msg, ) ) return reply
def _login(self, comm, session):
    """Login to the portal.

    Fetches a CSRF token, posts the sign-in form with the FOIA's
    credentials, and confirms the success notice appears in the reply.

    Args:
        comm: a communication whose FOIA holds the portal credentials
        session: the requests session to log in

    Raises:
        PortalError: if the signed-in notice is not found in the reply
            (and via _post on an unexpected status).
    """
    csrf_token = self._get_csrf_token(session, 'users/sign_in')
    data = {
        'user[email]': comm.foia.get_request_email(),
        'user[password]': comm.foia.portal_password,
        'user[remember_me]': 0,
        'utf8': '✓',
        'authenticity_token': csrf_token,
        'commit': 'Sign In',
    }
    reply = self._post(
        session,
        # Consistency fix: pass the URL string (.url), as every other
        # _get/_post call site does, rather than the furl object itself
        furl(self.portal.url).add(path='users/sign_in').url,
        'Logging in',
        data=data,
    )
    soup = BeautifulSoup(reply.content, 'lxml')
    tag = soup.find('span', class_='notice', text='Signed in successfully.')
    if tag is None:
        raise PortalError('Error logging in')
def _login(self, comm, session):
    """Login to the portal.

    Fetches a CSRF token, posts the sign-in form with the FOIA's
    credentials, and confirms the success notice appears in the reply.

    Args:
        comm: a communication whose FOIA holds the portal credentials
        session: the requests session to log in

    Raises:
        PortalError: if the signed-in notice is not found in the reply
            (and via _post on an unexpected status).
    """
    csrf_token = self._get_csrf_token(session, "users/sign_in")
    data = {
        "user[email]": comm.foia.get_request_email(),
        "user[password]": comm.foia.portal_password,
        "user[remember_me]": 0,
        "utf8": "✓",
        "authenticity_token": csrf_token,
        "commit": "Sign In",
    }
    reply = self._post(
        session,
        # Consistency fix: pass the URL string (.url), as every other
        # _get/_post call site does, rather than the furl object itself
        furl(self.portal.url).add(path="users/sign_in").url,
        "Logging in",
        data=data,
    )
    soup = BeautifulSoup(reply.content, "lxml")
    tag = soup.find("span", class_="notice", text="Signed in successfully.")
    if tag is None:
        raise PortalError("Error logging in")