Example #1
    def _handle_stderr_event(self, fd, events):
        """Eventhandler for stderr"""

        assert fd == self.fd_stderr

        if events & self.ioloop.READ:
            # got data ready
            if not self.headers_sent:
                payload = self.process.stderr.read()

                data = 'HTTP/1.1 500 Internal Server Error\r\nDate: %s\r\nContent-Length: %d\r\n\r\n' % (
                    get_date_header(), len(payload))
                self.headers_sent = True
                data += payload
            else:
                # see stdout
                logger.error("This should not happen (stderr)")
                data = self.process.stderr.read()

            logger.debug('Sending stderr to client: %r', data)
            self.request.write(data)

        if events & self.ioloop.ERROR:
            logger.debug('Error on stderr')
            # ensure file is closed
            if not self.process.stderr.closed:
                self.process.stderr.close()
            # remove handler
            self.ioloop.remove_handler(self.fd_stderr)
            # if all fds are closed, we can finish
            return self._graceful_finish()
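
The handlers in these examples assume that the pipe file descriptors have already been registered with Tornado's IOLoop; that constructor is not part of the snippets. The following is a minimal sketch, assuming subprocess.Popen pipes and the attribute names used above (ioloop, process, fd_stdout, fd_stderr, headers_sent); it is an illustration, not the project's actual ProcessWrapper.

import subprocess
import tornado.ioloop

class ProcessWrapperSketch(object):
    """Hypothetical setup showing how the stdout/stderr pipes could be
    hooked into Tornado's IOLoop; the _handle_*_event methods are the
    ones shown in these examples."""

    def __init__(self, request, command):
        self.request = request
        self.headers_sent = False
        self.sent_chunks = False
        self.number_of_8k_chunks_sent = 0
        self.ioloop = tornado.ioloop.IOLoop.current()
        self.process = subprocess.Popen(command,
                                        stdin=subprocess.PIPE,
                                        stdout=subprocess.PIPE,
                                        stderr=subprocess.PIPE)
        self.fd_stdout = self.process.stdout.fileno()
        self.fd_stderr = self.process.stderr.fileno()
        # READ fires when data is available; Tornado maps HUP onto ERROR,
        # which is why the handlers check both flags
        events = self.ioloop.READ | self.ioloop.ERROR
        self.ioloop.add_handler(self.fd_stdout, self._handle_stdout_event, events)
        self.ioloop.add_handler(self.fd_stderr, self._handle_stderr_event, events)
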
Example #2
    def _graceful_finish(self):
        """Detect if process has closed pipes and we can finish"""

        if not self.process.stdout.closed or not self.process.stderr.closed:
            return  # stdout/stderr still open

        if not self.process.stdin.closed:
            self.process.stdin.close()

        if self.number_of_8k_chunks_sent > 0:
            logger.debug('Sent %d * 8k chunks', self.number_of_8k_chunks_sent)

        logger.debug("Finishing up. Process poll: %r", self.process.poll())

        if not self.headers_sent:
            retval = self.process.poll()
            if retval != 0:
                logger.warning("Empty response. Git return value: " +
                               str(retval))
                payload = "Did not produce any data. Errorcode: " + str(retval)
                data = 'HTTP/1.1 500 Internal Server Error\r\nDate: %s\r\nContent-Length: %d\r\n\r\n' % (
                    get_date_header(), len(payload))
                self.headers_sent = True
                data += payload
                self.request.write(data)
            else:
                data = 'HTTP/1.1 200 OK\r\nDate: %s\r\nContent-Length: 0\r\n\r\n' % get_date_header()
                self.headers_sent = True
                self.request.write(data)

        # if we are in chunked mode, send end chunk with length 0
        elif self.sent_chunks:
            logger.debug("End chunk")
            self.request.write("0\r\n")
            # we could send additional response headers or trailers here
            self.request.write("\r\n")

        self.request.finish()
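
The two writes at the end of the chunked branch ("0\r\n" followed by "\r\n") form the terminator of HTTP/1.1 chunked transfer encoding: a zero-length last chunk, an optional trailer section, and a final CRLF. For reference, a complete chunked body looks like this on the wire (illustrative values only, not taken from the project):

# size of the chunk in hex, the chunk data, then the zero-length last
# chunk and the CRLF that closes the (empty) trailer section
body = (b"5\r\n" b"hello\r\n"
        b"0\r\n"
        b"\r\n")
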
Example #3
    def __init__(self, request, filename, headers={}):
        self.request = request
        self.headers = headers.copy()

        try:
            self.file = open(filename, 'rb')
            filesize = os.path.getsize(filename)
        except (IOError, OSError):
            raise tornado.web.HTTPError(500, 'Unable to open file')

        self.headers.update({'Date': get_date_header(), 'Content-Length': str(filesize)})
        self.request.write('HTTP/1.1 200 OK\r\n' + '\r\n'.join([ k + ': ' + v for k, v in self.headers.items()]) + '\r\n\r\n')

        self.write_chunk()
Example #4
    def __init__(self, request, filename, headers={}):
        self.request = request
        self.headers = headers.copy()

        try:
            self.file = open(filename, 'rb')
            filesize = os.path.getsize(filename)
        except (IOError, OSError):
            raise tornado.web.HTTPError(500, 'Unable to open file')

        self.headers.update({
            'Date': get_date_header(),
            'Content-Length': str(filesize)
        })
        self.request.write(
            'HTTP/1.1 200 OK\r\n' +
            '\r\n'.join([k + ': ' + v
                         for k, v in self.headers.items()]) + '\r\n\r\n')

        self.write_chunk()
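
Both FileWrapper constructors end by calling self.write_chunk(), which is not included in these examples. Here is a minimal sketch of what such a method might look like, assuming the file is streamed in 8 KiB pieces and the request is finished at EOF; scheduling the next piece through the IOLoop is an assumption, not necessarily how the project does it.

    def write_chunk(self):
        # assumes tornado.ioloop is imported at module level
        chunk = self.file.read(8192)
        if chunk:
            # send this piece and schedule the next one so that a large
            # file does not block the IOLoop
            self.request.write(chunk)
            tornado.ioloop.IOLoop.current().add_callback(self.write_chunk)
        else:
            self.file.close()
            self.request.finish()
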
Example #5
    def _graceful_finish(self):
        """Detect if process has closed pipes and we can finish"""

        if not self.process.stdout.closed or not self.process.stderr.closed:
            return # stdout/stderr still open

        if not self.process.stdin.closed:
            self.process.stdin.close()

        if self.number_of_8k_chunks_sent > 0:
            logger.debug('Sent %d * 8k chunks', self.number_of_8k_chunks_sent)

        logger.debug("Finishing up. Process poll: %r", self.process.poll())

        if not self.headers_sent:
            retval = self.process.poll()
            if retval != 0:
                logger.warning("Empty response. Git return value: " + str(retval))
                payload = "Did not produce any data. Errorcode: " + str(retval)
                data = 'HTTP/1.1 500 Internal Server Error\r\nDate: %s\r\nContent-Length: %d\r\n\r\n' % (get_date_header(), len(payload))
                self.headers_sent = True
                data += payload
                self.request.write(data.encode("utf-8"))
            else:
                data = 'HTTP/1.1 200 OK\r\nDate: %s\r\nContent-Length: 0\r\n\r\n' % get_date_header()
                self.headers_sent = True
                self.request.write(data.encode("utf-8"))

        # if we are in chunked mode, send end chunk with length 0
        elif self.sent_chunks:
            logger.debug("End chunk")
            self.request.write(b"0\r\n")
            # we could send additional response headers or trailers here
            self.request.write(b"\r\n")

        self.request.finish()
Example #6
import datetime

from tornado.httpclient import AsyncHTTPClient  # used by main() below

from gaas import __version__
from gaas.utils import get_class
from gaas.handlers import BaseHandler
from gaas.handlers.repository import (
    CreateRepositoryHandler
)
from gaas.handlers.user import (
    CreateUserHandler,
    AddUserKeyHandler
)
from gaas.handlers.file import (
    ShowFileHandler
)

cache_forever = lambda: [('Expires',
                          get_date_header(datetime.datetime.now() +
                                          datetime.timedelta(days=365))),
                         ('Pragma', 'no-cache'),
                         ('Cache-Control', 'public, max-age=31556926')]

dont_cache = lambda: [('Expires', 'Fri, 01 Jan 1980 00:00:00 GMT'),
                      ('Pragma', 'no-cache'),
                      ('Cache-Control', 'no-cache, max-age=0, must-revalidate')]


def main():
    AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient")
    GaasServer.run()
    # after many bizarre tests this should be the way to go:
    # https://moocode.com/posts/6-code-your-own-multi-user-private-git-server-in-5-minutes
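
Both this module and Example #10 below build their cache headers around get_date_header(), calling it with no argument for "now" and with a datetime for an explicit expiry. Its implementation is not shown in these examples; a plausible sketch that produces an RFC 1123 HTTP date, assuming that is all it does:

import calendar
import datetime
import email.utils

def get_date_header(dt=None):
    """Format a datetime as an HTTP date, e.g. 'Sun, 06 Nov 1994 08:49:37 GMT'."""
    if dt is None:
        dt = datetime.datetime.utcnow()
    return email.utils.formatdate(calendar.timegm(dt.utctimetuple()), usegmt=True)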

Example #7
    def _handle_stdout_event(self, fd, events):
        """Eventhandler for stdout"""

        assert fd == self.fd_stdout

        if events & self.ioloop.READ:
            # got data ready to read
            data = ''

            # We have two cases: if the client supports HTTP/1.1 we can stream
            # the answer using chunked transfer encoding; with HTTP/1.0 we have
            # to send a Content-Length and therefore buffer the complete output
            if self.request.supports_http_1_1():
                if not self.headers_sent:
                    self.sent_chunks = True
                    self.headers.update({
                        'Date': get_date_header(),
                        'Transfer-Encoding': 'chunked'
                    })
                    data = 'HTTP/1.1 200 OK\r\n' + '\r\n'.join(
                        [k + ': ' + v
                         for k, v in self.headers.items()]) + '\r\n\r\n'

                    if self.output_prelude:
                        data += hex(len(
                            self.output_prelude))[2:] + "\r\n"  # cut off 0x
                        data += self.output_prelude + "\r\n"

                    self.headers_sent = True

                payload = os.read(fd, 8192)
                if events & self.ioloop.ERROR:  # there might be data remaining in the buffer if we got HUP, get it all
                    remainder = True
                    while remainder != '':  # until EOF
                        remainder = os.read(fd, 8192)
                        payload += remainder

                data += hex(len(payload))[2:] + "\r\n"  # cut off 0x
                data += payload + "\r\n"

            else:
                if not self.headers_sent:
                    # Use the over-eager blocking read that gets everything until EOF.
                    # As noted in the subprocess documentation this can deadlock, so it is
                    # somewhat dangerous. It is only a legacy path for HTTP/1.0 clients,
                    # so we might want to remove it entirely.
                    payload = self.process.stdout.read()
                    self.headers.update({
                        'Date': get_date_header(),
                        'Content-Length': str(len(payload))
                    })
                    data = 'HTTP/1.0 200 OK\r\n' + '\r\n'.join(
                        [k + ': ' + v
                         for k, v in self.headers.items()]) + '\r\n\r\n'
                    self.headers_sent = True
                    data += self.output_prelude + payload
                else:
                    # this is somewhat illegal as it messes with Content-Length, but it
                    # shouldn't happen anyway, as the read above should have read everything;
                    # the Python docs say this can happen on ttys...
                    logger.error("This should not happen")
                    data = self.process.stdout.read()

            if len(data) == 8200:  # a full 8192-byte payload plus 8 bytes of chunk framing
                self.number_of_8k_chunks_sent += 1
            else:
                if self.number_of_8k_chunks_sent > 0:
                    logger.debug('Sent %d * 8192 bytes',
                                 self.number_of_8k_chunks_sent)
                    self.number_of_8k_chunks_sent = 0

                logger.debug('Sending stdout to client %d bytes: %r',
                             len(data), data[:20])
            self.request.write(data)

        # we can also get an error at the same time, because tornado maps HUP
        # onto ERROR; therefore, no elif here!
        if events & self.ioloop.ERROR:
            logger.debug('Error on stdout')
            # ensure file is closed
            if not self.process.stdout.closed:
                self.process.stdout.close()
            # remove handler
            self.ioloop.remove_handler(self.fd_stdout)
            # if all fds are closed, we can finish
            return self._graceful_finish()
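
The chunk framing appears inline several times in this handler: the payload length in hexadecimal with the "0x" prefix cut off, CRLF, the payload, CRLF. Factored out, the frame built for a single read looks like the helper below (string version, matching the Python 2 style of this example; it is an illustration, not a function from the project):

def make_chunk(payload):
    # '<length in hex>\r\n<payload>\r\n'
    return hex(len(payload))[2:] + "\r\n" + payload + "\r\n"

make_chunk("hello")  # -> '5\r\nhello\r\n'
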
Example #8
    def _handle_stderr_event(self, fd, events):
        """Eventhandler for stderr"""

        assert fd == self.fd_stderr

        if events & self.ioloop.READ:
            # got data ready
            if not self.headers_sent:
                payload = self.process.stderr.read()

                data = 'HTTP/1.1 500 Internal Server Error\r\nDate: %s\r\nContent-Length: %d\r\n\r\n' % (get_date_header(), len(payload))
                self.headers_sent = True
                data += payload.decode("utf-8")
            else:
                # see stdout
                logger.error("This should not happen (stderr)")
                data = self.process.stderr.read().decode("utf-8")

            logger.debug('Sending stderr to client: %r', data)
            # the low-level request.write() expects bytes
            self.request.write(data.encode("utf-8"))

        if events & self.ioloop.ERROR:
            logger.debug('Error on stderr')
            # ensure file is closed
            if not self.process.stderr.closed:
                self.process.stderr.close()
            # remove handler
            self.ioloop.remove_handler(self.fd_stderr)
            # if all fds are closed, we can finish
            return self._graceful_finish()
Example #9
    def _handle_stdout_event(self, fd, events):
        """Eventhandler for stdout"""

        assert fd == self.fd_stdout

        if events & self.ioloop.READ:
            # got data ready to read
            data = ''

            # We have two cases: if the client supports HTTP/1.1 we can stream
            # the answer using chunked transfer encoding; with HTTP/1.0 we have
            # to send a Content-Length and therefore buffer the complete output
            if self.request.supports_http_1_1():
                if not self.headers_sent:
                    self.sent_chunks = True
                    self.headers.update({'Date': get_date_header(), 'Transfer-Encoding': 'chunked'})
                    data = 'HTTP/1.1 200 OK\r\n' + '\r\n'.join([ k + ': ' + v for k, v in self.headers.items()]) + '\r\n\r\n'

                    if self.output_prelude:
                        data += hex(len(self.output_prelude))[2:] + "\r\n" # cut off 0x
                        data += self.output_prelude + "\r\n"

                    self.headers_sent = True

                payload = os.read(fd, 8192)
                if events & self.ioloop.ERROR:  # there might be data remaining in the buffer if we got HUP, drain it all
                    remainder = os.read(fd, 8192)
                    while remainder != b'':  # until EOF
                        payload += remainder
                        remainder = os.read(fd, 8192)

                # os.read() returns bytes, so build the chunk header as text and
                # encode it before appending the payload
                data += hex(len(payload))[2:] + "\r\n"  # cut off 0x
                data = data.encode("utf-8") + payload + b"\r\n"

            else:
                if not self.headers_sent:
                    # Use the over-eager blocking read that gets everything until EOF.
                    # As noted in the subprocess documentation this can deadlock, so it is
                    # somewhat dangerous. It is only a legacy path for HTTP/1.0 clients,
                    # so we might want to remove it entirely.
                    payload = self.process.stdout.read()
                    self.headers.update({'Date': get_date_header(), 'Content-Length': str(len(payload))})
                    data = 'HTTP/1.0 200 OK\r\n' + '\r\n'.join([k + ': ' + v for k, v in self.headers.items()]) + '\r\n\r\n'
                    self.headers_sent = True
                    # payload is bytes here, so encode the textual part first
                    data = (data + self.output_prelude).encode("utf-8") + payload
                else:
                    # this is somewhat illegal as it messes with Content-Length, but it
                    # shouldn't happen anyway, as the read above should have read everything;
                    # the Python docs say this can happen on ttys...
                    logger.error("This should not happen")
                    data = self.process.stdout.read()

            if len(data) == 8200:  # a full 8192-byte payload plus 8 bytes of chunk framing
                self.number_of_8k_chunks_sent += 1
            else:
                if self.number_of_8k_chunks_sent > 0:
                    logger.debug('Sent %d * 8192 bytes', self.number_of_8k_chunks_sent)
                    self.number_of_8k_chunks_sent = 0

                logger.debug('Sending stdout to client %d bytes: %r', len(data), data[:20])

            self.request.write(data)

        # we can also get an error at the same time, because tornado maps HUP
        # onto ERROR; therefore, no elif here!
        if events & self.ioloop.ERROR:
            logger.debug('Error on stdout')
            # ensure file is closed
            if not self.process.stdout.closed:
                self.process.stdout.close()
            # remove handler
            self.ioloop.remove_handler(self.fd_stdout)
            # if all fds are closed, we can finish
            return self._graceful_finish()
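
Examples #8 and #9 spend most of their effort converting between text and bytes, because the headers are assembled as strings while os.read() and the low-level request.write() deal in bytes. A small helper used at the write boundary would keep that conversion in one place; this is a sketch, not part of the project:

def to_bytes(value, encoding="utf-8"):
    # request.write() only accepts bytes; headers are built as text
    return value if isinstance(value, bytes) else value.encode(encoding)
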
Example #10
import urlparse
import re
import os.path
import datetime

import tornado.web

from gittornado.iowrapper import ProcessWrapper, FileWrapper
from gittornado.util import get_date_header

import logging
logger = logging.getLogger(__name__)

cache_forever = lambda: [('Expires',
                          get_date_header(datetime.datetime.now() +
                                          datetime.timedelta(days=365))),
                         ('Pragma', 'no-cache'),
                         ('Cache-Control', 'public, max-age=31556926')]

dont_cache = lambda: [('Expires', 'Fri, 01 Jan 1980 00:00:00 GMT'),
                      ('Pragma', 'no-cache'),
                      ('Cache-Control', 'no-cache, max-age=0, must-revalidate')]


class BaseHandler(tornado.web.RequestHandler):
    auth = None
    auth_failed = None
    gitlookup = None
    gitcommand = None
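
BaseHandler only declares class-level configuration slots here; the concrete request handlers and the use of the caching lambdas are not part of this snippet. A hypothetical subclass showing how dont_cache() could be applied (handler name, headers and content type are illustrative, not taken from gittornado):

class InfoRefsHandlerSketch(BaseHandler):
    def get(self):
        # smart HTTP responses must not be cached by clients or proxies
        for name, value in dont_cache():
            self.set_header(name, value)
        self.set_header('Content-Type', 'application/x-git-upload-pack-advertisement')
        self.write(b'')  # the real payload would come from the git subprocess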