def make_http_request(url, data=None, files=None, certificate=None):
    # type: (str, Optional[Envelope], Optional[Attachments], str) -> Tuple[int, bytes]
    """POST ``data`` and ``files`` to ``url`` as multipart/form-data over HTTPS.

    :param url: Full URL to POST to; only its netloc and path are used.
    :param data: Form fields to encode (defaults to an empty mapping).
    :param files: File parts to encode (defaults to an empty mapping).
    :param certificate: Optional path to a CA bundle used to verify the
        server certificate.
    :return: Tuple of (HTTP status code, raw response body bytes).
    """
    if data is None:
        data = {}
    if files is None:
        files = {}
    schema, netloc, url, params, query, fragments = urlparse(url)
    logger.debug("Sending POST request to {0}".format(url))
    body, boundary = encode_multipart_formdata(data, files)
    # Fix: use the public ssl.create_default_context() instead of the private
    # ssl._create_default_https_context attribute; both branches now build a
    # verifying default context, optionally trusting the supplied CA file.
    if certificate:
        ssl_context = ssl.create_default_context(cafile=certificate)
    else:
        ssl_context = ssl.create_default_context()
    connection = HTTPSConnection(netloc, context=ssl_context)
    try:
        connection.connect()
        connection.putrequest("POST", url)
        connection.putheader(
            "Content-Type",
            "multipart/form-data; boundary={0}".format(boundary))
        connection.putheader("Content-Length", str(len(body)))
        connection.endheaders()
        connection.send(body)
        r = connection.getresponse()
        return r.status, r.read()
    finally:
        # Fix: release the socket even when the request or response fails.
        connection.close()
def _do_post(self, query, extra_headers=None):
    """
    Do a POST to the Institution.

    :param query: Body content to POST (OFX Query)
    :type query: str
    :param extra_headers: Extra headers to send with the request, as a list
        of (Name, Value) header 2-tuples.
    :type extra_headers: list
    :return: 2-tuple of (HTTPResponse, str response body)
    :rtype: tuple
    """
    # Fix: the default used to be a shared mutable list ([]); use None as
    # the sentinel and build a fresh list for each call.
    if extra_headers is None:
        extra_headers = []
    i = self.institution
    logging.debug('posting data to %s' % i.url)
    garbage, path = splittype(i.url)
    host, selector = splithost(path)
    try:
        h = HTTPSConnection(host, timeout=60)
        h.connect()
    except ssl.SSLError as ex:
        if (ex.reason == "UNSUPPORTED_PROTOCOL"):
            # Some institutions only speak TLSv1; retry with a legacy context.
            h = HTTPSConnection(host, timeout=60,
                                context=ssl.SSLContext(ssl.PROTOCOL_TLSv1))
            h.connect()
        else:
            raise
    # Discover requires a particular ordering of headers, so send the
    # request step by step.
    h.putrequest('POST', selector, skip_host=True,
                 skip_accept_encoding=True)
    headers = [('Content-Type', 'application/x-ofx'),
               ('Host', host),
               ('Content-Length', len(query)),
               ('Connection', 'Keep-Alive')]
    if self.accept:
        headers.append(('Accept', self.accept))
    if self.user_agent:
        headers.append(('User-Agent', self.user_agent))
    for ehname, ehval in extra_headers:
        headers.append((ehname, ehval))
    logging.debug('---- request headers ----')
    for hname, hval in headers:
        logging.debug('%s: %s', hname, hval)
        h.putheader(hname, hval)
    logging.debug('---- request body (query) ----')
    logging.debug(query)
    h.endheaders(query.encode())
    res = h.getresponse()
    response = res.read().decode('ascii', 'ignore')
    logging.debug('---- response ----')
    logging.debug(res.__dict__)
    logging.debug('Headers: %s', res.getheaders())
    logging.debug(response)
    res.close()
    return res, response
def _do_post(self, query, extra_headers=None):
    """
    Do a POST to the Institution.

    :param query: Body content to POST (OFX Query)
    :type query: str
    :param extra_headers: Extra headers to send with the request, as a list
        of (Name, Value) header 2-tuples.
    :type extra_headers: list
    :return: 2-tuple of (HTTPResponse, str response body)
    :rtype: tuple
    """
    # Fix: avoid the shared mutable default argument ([]); None is the
    # sentinel and is replaced by a fresh list per call.
    if extra_headers is None:
        extra_headers = []
    i = self.institution
    logging.debug('posting data to %s' % i.url)
    garbage, path = splittype(i.url)
    host, selector = splithost(path)
    try:
        h = HTTPSConnection(host, timeout=60)
        h.connect()
    except ssl.SSLError as ex:
        if (ex.reason == "UNSUPPORTED_PROTOCOL"):
            # Some institutions only speak TLSv1; retry with a legacy context.
            h = HTTPSConnection(host, timeout=60,
                                context=ssl.SSLContext(ssl.PROTOCOL_TLSv1))
            h.connect()
        else:
            raise
    # Discover requires a particular ordering of headers, so send the
    # request step by step.
    h.putrequest('POST', selector, skip_host=True,
                 skip_accept_encoding=True)
    headers = [
        ('Content-Type', 'application/x-ofx'),
        ('Host', host),
        ('Content-Length', len(query)),
        ('Connection', 'Keep-Alive')
    ]
    if self.accept:
        headers.append(('Accept', self.accept))
    if self.user_agent:
        headers.append(('User-Agent', self.user_agent))
    for ehname, ehval in extra_headers:
        headers.append((ehname, ehval))
    logging.debug('---- request headers ----')
    for hname, hval in headers:
        logging.debug('%s: %s', hname, hval)
        h.putheader(hname, hval)
    logging.debug('---- request body (query) ----')
    logging.debug(query)
    h.endheaders(query.encode())
    res = h.getresponse()
    response = res.read().decode('ascii', 'ignore')
    logging.debug('---- response ----')
    logging.debug(res.__dict__)
    logging.debug('Headers: %s', res.getheaders())
    logging.debug(response)
    res.close()
    return res, response
def fetch_file(self, server, path):
    """Download ``path`` from ``server`` over HTTPS and return its text (UTF-8)."""
    print("downloading https://%s%s" % (server, path))
    conn = HTTPSConnection(server)
    conn.putrequest('GET', path)
    # Send the two request headers the server expects, then finish the request.
    for name, value in (('Host', server), ('Accept', 'text/svg')):
        conn.putheader(name, value)
    conn.endheaders()
    response = conn.getresponse()
    body = response.read().decode('utf-8')
    conn.close()
    return body
def fetchFile(self, server, path):
    """Download ``path`` from ``server`` over HTTPS and return its text.

    :param server: Host name to connect to.
    :param path: Absolute path of the resource to GET.
    :return: Response body decoded as UTF-8.
    """
    # Fix: the progress message claimed "http://" although the request is
    # made with HTTPSConnection; report the scheme actually used.
    print("downloading https://%s%s" % (server, path))
    req = HTTPSConnection(server)
    req.putrequest('GET', path)
    req.putheader('Host', server)
    req.putheader('Accept', 'text/svg')
    req.endheaders()
    r1 = req.getresponse()
    data = r1.read().decode('utf-8')
    req.close()
    return data
class HessianProxy(object):
    """Callable proxy for a remote Hessian RPC service.

    Attribute access returns an auto-generated remote-method wrapper, so
    ``proxy.someMethod(a, b)`` serializes a Hessian ``Call`` named
    ``someMethod`` and POSTs it to the configured service URI.

    NOTE(review): this variant targets Python 2 (see the ``sys.version_info
    < (2,6)`` check, ``strict=True`` and str-based ``b64encode``/``send``);
    several of these calls would fail on Python 3 — confirm the intended
    interpreter before porting.
    """

    def __init__(self, service_uri, credentials=None, key_file=None,
                 cert_file=None, timeout=10, buffer_size=65535,
                 error_factory=lambda x: x, overload=False):
        # service_uri: http:// or https:// URL of the Hessian endpoint.
        # credentials: optional (username, password) pair for Basic auth;
        #   may also be supplied embedded in the URL.
        # key_file / cert_file: client TLS material forwarded to
        #   HTTPSConnection.
        # timeout: socket timeout in seconds (not enforceable before 2.6).
        # buffer_size: read size used when parsing the reply stream.
        # error_factory: maps a Hessian Fault to the exception raised.
        # overload: forwarded into every serialized Call.
        self._headers = list()
        self._headers.append(('User-Agent', 'mustaine/' + __version__,))
        self._headers.append(('Content-Type', 'application/x-hessian',))

        # Before Python 2.6 HTTP(S)Connection has no timeout kwarg, so pass
        # nothing at all in that case (empty kwargs dict).
        if sys.version_info < (2,6):
            warn('HessianProxy timeout not enforceable before Python 2.6',
                 RuntimeWarning, stacklevel=2)
            timeout = {}
        else:
            timeout = {'timeout': timeout}

        self._uri = urlparse(service_uri)
        if self._uri.scheme == 'http':
            self._client = HTTPConnection(self._uri.hostname,
                                          self._uri.port or 80,
                                          strict=True, **timeout)
        elif self._uri.scheme == 'https':
            self._client = HTTPSConnection(self._uri.hostname,
                                           self._uri.port or 443,
                                           key_file=key_file,
                                           cert_file=cert_file,
                                           strict=True, **timeout)
        else:
            raise NotImplementedError("HessianProxy only supports http:// and https:// URIs")

        # autofill credentials if they were passed via url instead of kwargs
        if (self._uri.username and self._uri.password) and not credentials:
            credentials = (self._uri.username, self._uri.password)

        if credentials:
            auth = 'Basic ' + base64.b64encode(':'.join(credentials))
            self._headers.append(('Authorization', auth))

        self._buffer_size = buffer_size
        self._error_factory = error_factory
        self._overload = overload
        self._parser = Parser()

    class __RemoteMethod(object):
        # dark magic for autoloading methods
        def __init__(self, caller, method):
            self.__caller = caller
            self.__method = method
        def __call__(self, *args):
            # Delegate to HessianProxy.__call__ with the captured method name.
            return self.__caller(self.__method, args)

    def __getattr__(self, method):
        # Any unknown attribute becomes a remote-method wrapper.
        return self.__RemoteMethod(self, method)

    def __repr__(self):
        return "<mustaine.client.HessianProxy(\"%s\")>" % (self._uri.geturl(),)

    def __str__(self):
        return self.__repr__()

    def __call__(self, method, args):
        """Serialize one Hessian call, POST it and return the reply value.

        Raises ProtocolError on non-200 status or an empty reply, and
        whatever error_factory builds when the reply is a Fault.
        """
        try:
            self._client.putrequest('POST', self._uri.path)
            for header in self._headers:
                self._client.putheader(*header)

            request = encode_object(Call(method, args,
                                         overload=self._overload))
            self._client.putheader("Content-Length", str(len(request)))
            self._client.endheaders()
            self._client.send(str(request))

            response = self._client.getresponse()
            if response.status != 200:
                raise ProtocolError(self._uri.geturl(), response.status,
                                    response.reason)

            # getheader returns the header as a string, hence the '0' compare.
            length = response.getheader('Content-Length', -1)
            if length == '0':
                raise ProtocolError(self._uri.geturl(), 'FATAL:',
                                    'Server sent zero-length response')

            reply = self._parser.parse_stream(
                BufferedReader(response, buffer_size=self._buffer_size))
            self._client.close()

            if isinstance(reply.value, Fault):
                raise self._error_factory(reply.value)
            else:
                return reply.value
        except:
            # Bare except is deliberate: always close the connection on any
            # failure, then re-raise unchanged.
            self._client.close()
            raise
class HessianProxy(object):
    """Callable proxy for a remote Hessian RPC service.

    Attribute access returns an auto-generated remote-method wrapper, so
    ``proxy.someMethod(a, b)`` serializes a Hessian ``Call`` named
    ``someMethod`` and POSTs it to the configured service URI.
    """

    def __init__(
        self,
        service_uri,
        credentials=None,
        key_file=None,
        cert_file=None,
        timeout=10,
        buffer_size=65535,
        error_factory=lambda x: x,
        overload=False,
    ):
        """Build the HTTP(S) client and the fixed request headers.

        :param service_uri: http:// or https:// URL of the Hessian endpoint.
        :param credentials: Optional (username, password) pair for Basic
            auth; may also be embedded in the URL.
        :param key_file: Client TLS key forwarded to HTTPSConnection.
        :param cert_file: Client TLS certificate forwarded to HTTPSConnection.
        :param timeout: Socket timeout in seconds.
        :param buffer_size: Read size used when parsing the reply stream.
        :param error_factory: Maps a Hessian Fault to the exception raised.
        :param overload: Forwarded into every serialized Call.
        :raises NotImplementedError: for any other URI scheme.
        """
        self._headers = list()
        self._headers.append(("User-Agent", "mustaine/" + __version__))
        self._headers.append(("Content-Type", "application/x-hessian"))

        if sys.version_info < (2, 6):
            warn("HessianProxy timeout not enforceable before Python 2.6", RuntimeWarning, stacklevel=2)
            timeout = {}
        else:
            timeout = {"timeout": timeout}

        self._uri = urlparse(service_uri)
        # Fix: the `strict` keyword was removed from http.client in
        # Python 3.4 and made HTTP(S)Connection(...) raise TypeError here;
        # modern http.client is always strict, so it is simply dropped.
        if self._uri.scheme == "http":
            self._client = HTTPConnection(self._uri.hostname, self._uri.port or 80, **timeout)
        elif self._uri.scheme == "https":
            self._client = HTTPSConnection(
                self._uri.hostname,
                self._uri.port or 443,
                key_file=key_file,
                cert_file=cert_file,
                **timeout
            )
        else:
            raise NotImplementedError("HessianProxy only supports http:// and https:// URIs")

        # autofill credentials if they were passed via url instead of kwargs
        if (self._uri.username and self._uri.password) and not credentials:
            credentials = (self._uri.username, self._uri.password)

        if credentials:
            # Fix: base64.b64encode requires bytes on Python 3; encode the
            # "user:password" pair and decode the digest back to str for the
            # header value.
            token = base64.b64encode(":".join(credentials).encode("utf-8")).decode("ascii")
            self._headers.append(("Authorization", "Basic " + token))

        self._buffer_size = buffer_size
        self._error_factory = error_factory
        self._overload = overload
        self._parser = Parser()

    class __RemoteMethod(object):
        # dark magic for autoloading methods
        def __init__(self, caller, method):
            self.__caller = caller
            self.__method = method

        def __call__(self, *args):
            # Delegate to HessianProxy.__call__ with the captured method name.
            return self.__caller(self.__method, args)

    def __getattr__(self, method):
        # Any unknown attribute becomes a remote-method wrapper.
        return self.__RemoteMethod(self, method)

    def __repr__(self):
        return '<mustaine.client.HessianProxy("%s")>' % (self._uri.geturl(),)

    def __str__(self):
        return self.__repr__()

    def __call__(self, method, args):
        """Serialize one Hessian call, POST it and return the reply value.

        :raises ProtocolError: on non-200 status or a zero-length reply.
        :raises: whatever ``error_factory`` builds when the reply is a Fault.
        """
        try:
            self._client.putrequest("POST", self._uri.path)
            for header in self._headers:
                self._client.putheader(*header)

            request = encode_object(Call(method, args, overload=self._overload))
            # putheader accepts str values directly; no manual encoding needed.
            self._client.putheader("Content-Length", str(len(request)))
            self._client.endheaders()
            self._client.send(request)

            response = self._client.getresponse()
            if response.status != 200:
                raise ProtocolError(self._uri.geturl(), response.status, response.reason)

            # getheader returns the header value as a string, hence "0".
            length = response.getheader("Content-Length", -1)
            if length == "0":
                raise ProtocolError(self._uri.geturl(), "FATAL:", "Server sent zero-length response")

            reply = self._parser.parse_stream(BufferedReader(response, buffer_size=self._buffer_size))
            self._client.close()

            if isinstance(reply.value, Fault):
                raise self._error_factory(reply.value)
            else:
                return reply.value
        except:
            # Bare except is deliberate: always close the connection on any
            # failure (including BaseException), then re-raise unchanged.
            self._client.close()
            raise
class ClamAVFileUploadHandler(FileUploadHandler):
    """Upload handler that streams each uploaded file to a ClamAV REST
    service using HTTP chunked transfer encoding and records the verdict.

    Scan results are appended to ``content_type_extra["clam_av_results"]``
    so handlers later in the chain can inspect them.
    """

    chunk_size = CHUNK_SIZE
    # Set per file in new_file(); True when the extension is exempt.
    skip_av_check = False

    def new_file(self, *args, **kwargs):
        """Open the connection to ClamAV and send the request headers.

        :raises AntiVirusServiceErrorException: if connecting fails.
        """
        super().new_file(*args, **kwargs)

        extension = pathlib.Path(self.file_name).suffix
        if extension in CLAM_AV_IGNORE_EXTENSIONS:
            # Exempt extension: skip scanning entirely for this file.
            self.skip_av_check = True
            return

        if CLAM_USE_HTTP:
            self.av_conn = HTTPConnection(host=CLAM_AV_DOMAIN, )
        else:
            self.av_conn = HTTPSConnection(  # noqa S309
                host=CLAM_AV_DOMAIN,
                port=443,
            )

        credentials = b64encode(
            bytes(
                f"{CLAM_AV_USERNAME}:{CLAM_AV_PASSWORD}",
                encoding="utf8",
            )).decode("ascii")

        try:
            self.av_conn.connect()
            self.av_conn.putrequest("POST", CLAM_PATH)
            self.av_conn.putheader("Content-Type", self.content_type)
            self.av_conn.putheader("Authorization", f"Basic {credentials}")
            # Chunks are forwarded as they arrive, so the length is unknown.
            self.av_conn.putheader("Transfer-encoding", "chunked")
            self.av_conn.endheaders()
        except Exception as ex:
            logger.error("Error connecting to ClamAV service", exc_info=True)
            raise AntiVirusServiceErrorException(ex)

    def receive_data_chunk(self, raw_data, start):
        """Forward one chunk to ClamAV and pass it through unchanged."""
        if not self.skip_av_check:
            # HTTP chunked framing: hex length, CRLF, payload, CRLF.
            self.av_conn.send(hex(len(raw_data))[2:].encode("utf-8"))
            self.av_conn.send(b"\r\n")
            self.av_conn.send(raw_data)
            self.av_conn.send(b"\r\n")
        return raw_data

    def file_complete(self, file_size):
        """Terminate the chunked upload, read ClamAV's verdict, record it.

        Always returns None so the next handler in the chain also sees the
        file.

        :raises AntiVirusServiceErrorException: on a non-200 AV response.
        :raises MalformedAntiVirusResponseException: when the JSON verdict
            lacks the "malware" key.
        """
        if self.skip_av_check:
            return None

        # Zero-length terminating chunk ends the chunked request body.
        self.av_conn.send(b"0\r\n\r\n")

        resp = self.av_conn.getresponse()
        response_content = resp.read()

        scanned_file = ScannedFile()

        if resp.status != 200:
            scanned_file.av_passed = False
            scanned_file.av_reason = "Non 200 response from AV server"
            scanned_file.save()
            raise AntiVirusServiceErrorException(
                f"Non 200 response from anti virus service, content: {response_content}"
            )

        json_response = json.loads(response_content)
        if "malware" not in json_response:
            scanned_file.av_passed = False
            scanned_file.av_reason = "Malformed response from AV server"
            scanned_file.save()
            raise MalformedAntiVirusResponseException()

        if json_response["malware"]:
            scanned_file.av_passed = False
            scanned_file.av_reason = json_response["reason"]
            scanned_file.save()
            logger.error(f"Malware found in user uploaded file "
                         f"'{self.file_name}', exiting upload process")
        else:
            scanned_file.av_passed = True
            scanned_file.save()

        # We are using 'content_type_extra' as a means of making
        # the results available to following file handlers
        # TODO - put in a PR to Django project to allow file_complete
        # to return objects and not break out of file handler loop
        # Fix: content_type_extra is a dict, so membership must be tested
        # with `in`; hasattr() was always False and reset the results list
        # for every file, discarding earlier entries.
        if "clam_av_results" not in self.content_type_extra:
            self.content_type_extra["clam_av_results"] = []

        self.content_type_extra["clam_av_results"].append({
            "file_name": self.file_name,
            "av_passed": scanned_file.av_passed,
            "scanned_at": scanned_file.scanned_at,
        })

        return None
def upload_document(path, progress_tracker=None, file_id=0, post_url='/upload_file3'):
    """Send a file to the download site and return its server-side id.

    :param path: Filesystem path of the document to upload.
    :param progress_tracker: Optional progress callback; when given the
        multipart reader is wrapped so progress is reported during send.
    :param file_id: 0 to upload a new document, non-zero to replace the
        existing document with that id.
    :param post_url: URL path the multipart POST is sent to.
    :return: The file id assigned by the server.
    :raises Exception: when the server replies with a non-OK status.
    """
    if progress_tracker:
        reader = Wrap(progress_tracker)
    else:
        reader = MultiRead()

    # 0 == It's a new document, > 0 == overwrite
    reader.add_field('file_id', str(file_id))

    # We store the filename here 'cos the one encoded in the file part
    # must be ASCII (IETF, RFC 2183, section 2.3: Current [RFC 2045] grammar
    # restricts parameter values (and hence Content-Disposition filenames)
    # to US-ASCII). Arbitrary character sets in filenames are out of scope
    # for that RFC, hence this separate, encoding-safe field.
    reader.add_field('encoding_safe_filename', os.path.split(path)[-1])
    reader.add_file_part(path)
    reader.close_parts()

    host, port = extract_host_port(
        configuration.get("DownloadSite", "base_url"))
    mainlog.debug(
        u"Upload to {}:{}{} (determined from DownloadSite/base_url : {})".
        format(host, port, post_url,
               configuration.get("DownloadSite", "base_url")))

    # Pick the connection class from the configured scheme.
    if configuration.get("DownloadSite", "base_url").startswith('https'):
        connection = HTTPSConnection(host, port)
    else:
        connection = HTTPConnection(host, port)

    connection.putrequest('POST', post_url)
    connection.putheader('content-type', reader.content_type())
    connection.putheader('content-length', str(reader.total_size()))
    connection.putheader('x-filesize', str(reader.total_size()))
    connection.endheaders()

    # Stream the multipart body from the reader object.
    reader.open()
    connection.send(reader)
    reader.close()

    server_response = connection.getresponse()
    if server_response.status != OK:
        raise Exception(
            "Unable to upload, server response status was {}".format(
                server_response.status))

    server_response.getheaders(
    )  # Skip headers (is this really necessary ?)
    new_file_id = int(server_response.read())
    mainlog.debug("Successfully uploaded {} bytes".format(reader.total_size()))
    connection.close()
    return new_file_id
def post(self, query):
    """POST an OFX query to the institution and return the response text.

    The body is sent ASCII-decoded (errors ignored) back to the caller.
    """
    institution = self.institution
    logging.debug('posting data to %s' % institution.url)
    logging.debug('---- request ----')
    logging.debug(query)

    garbage, path = splittype(institution.url)
    host, selector = splithost(path)

    conn = HTTPSConnection(host, timeout=60)
    # Discover requires a particular ordering of headers, so send the
    # request step by step.
    conn.putrequest('POST', selector, skip_host=True,
                    skip_accept_encoding=True)

    header_pairs = [
        ('Content-Type', 'application/x-ofx'),
        ('Host', host),
        ('Content-Length', len(query)),
        ('Connection', 'Keep-Alive'),
    ]
    if self.accept:
        header_pairs.append(('Accept', self.accept))
    if self.user_agent:
        header_pairs.append(('User-Agent', self.user_agent))
    for name, value in header_pairs:
        conn.putheader(name, value)

    conn.endheaders(query.encode())

    res = conn.getresponse()
    response = res.read().decode('ascii', 'ignore')
    logging.debug('---- response ----')
    logging.debug(res.__dict__)
    logging.debug(response)
    res.close()
    return response