Esempio n. 1
0
    def test_file_logging_rotation_5_files(self):
        """
        Only 5 logfiles are kept.

        Seeds ``foo.log`` (pre-grown to the 100 MiB rotation threshold)
        plus ``foo.log.1`` .. ``foo.log.4``, runs the script, and checks
        that every suffix was shifted up by one, so the oldest file
        fell off the end.
        """
        logfile = FilePath(self.mktemp()).child('foo.log')
        logfile.parent().makedirs()
        # This file will become foo.log.1
        with logfile.open('w') as f:
            f.write(b'0')
            # Sparse-grow the file to the rotation size so the next write
            # triggers a rotation without actually writing 100 MiB of data.
            f.truncate(int(MiB(100).to_Byte().value))
        # These file extensions will be incremented
        for i in range(1, 5):
            sibling = logfile.sibling(logfile.basename() + u'.' + unicode(i))
            with sibling.open('w') as f:
                # Python 2 semantics: bytes(i) == str(i), i.e. '1'..'4'.
                f.write(bytes(i))

        d = self.run_script(EliotScript, options=['--logfile', logfile.path])

        def verify_logfiles(stdout_messages, logfile):
            # Read the first byte of each rotated file; after one rotation
            # each file holds the digit one less than its suffix.
            logfile_dir = logfile.parent()
            self.assertEqual(
                # The contents of the files will now be an integer one less
                # than the integer in the file name.
                map(bytes, range(0, 4)),
                list(
                    logfile_dir.child('foo.log.{}'.format(i)).open().read(1)
                    for i
                    in range(1, 5)
                )
            )
        d.addCallback(verify_logfiles, logfile=logfile)

        return d
Esempio n. 2
0
    def test_file_logging_rotation_5_files(self):
        """
        Only 5 logfiles are kept.
        """
        logfile = FilePath(self.mktemp()).child('foo.log')
        logfile.parent().makedirs()
        # Pre-grow the primary logfile to the rotation threshold; it will
        # be renamed to foo.log.1 on the next rotation.
        with logfile.open('w') as primary:
            primary.write(b'0')
            primary.truncate(int(MiB(100).to_Byte().value))
        # Seed foo.log.1 .. foo.log.4; rotation bumps every suffix by one.
        for suffix in range(1, 5):
            rotated = logfile.sibling(
                logfile.basename() + u'.' + unicode(suffix))
            with rotated.open('w') as handle:
                handle.write(bytes(suffix))

        running = self.run_script(
            EliotScript, options=['--logfile', logfile.path])

        def verify_logfiles(stdout_messages, logfile):
            directory = logfile.parent()
            # Each rotated file now holds the digit one less than the
            # integer in its file name.
            actual = [
                directory.child('foo.log.{}'.format(suffix)).open().read(1)
                for suffix in range(1, 5)
            ]
            self.assertEqual([bytes(value) for value in range(0, 4)], actual)
        running.addCallback(verify_logfiles, logfile=logfile)

        return running
Esempio n. 3
0
    def test_addPyListings(self):
        """
        L{tree.addPyListings} accepts a document with nodes with their I{class}
        attribute set to I{py-listing} and replaces those nodes with Python
        source listings from the file given by the node's I{href} attribute.
        """
        listingPath = FilePath(self.mktemp())
        listingPath.setContent('def foo():\n    pass\n')

        parent = dom.Element('div')
        listing = dom.Element('a')
        listing.setAttribute('href', listingPath.basename())
        listing.setAttribute('class', 'py-listing')
        parent.appendChild(listing)

        tree.addPyListings(parent, listingPath.dirname())

        expected = """\
<div><div class="py-listing"><pre><p class="py-linenumber">1
2
</p><span class="py-src-keyword">def</span> <span class="py-src-identifier">foo</span>():
    <span class="py-src-keyword">pass</span>
</pre><div class="caption"> - <a href="temp"><span class="filename">temp</span></a></div></div></div>"""

        self.assertEqual(parent.toxml(), expected)
Esempio n. 4
0
def _wait_for_new_device(base, size, time_limit=60):
    """
    Helper function to wait for up to 60s for new
    EBS block device (`/dev/sd*` or `/dev/xvd*`) to
    manifest in the OS.

    :param list base: List of baseline block devices
        that existed before execution of operation that expects
        to create a new block device.
    :param int size: Size of the block device we are expected
        to manifest in the OS.
    :param int time_limit: Time, in seconds, to wait for
        new device to manifest. Defaults to 60s.

    :returns: formatted string name of the new block device, or ``None``
        if no matching device appeared within ``time_limit``.
    :rtype: unicode
    """
    deadline = time.time() + time_limit
    baseline = set(base)
    while time.time() < deadline:
        # Any device listed now but not in the baseline is a candidate.
        for device in set(FilePath(b"/sys/block").children()) - baseline:
            name = FilePath.basename(device)
            if (name.startswith((b"sd", b"xvd")) and
                    _check_blockdevice_size(name, size)):
                return u'/dev/' + name.decode("ascii")
        time.sleep(0.1)
    return None
Esempio n. 5
0
    def _get_capinfos(self, username, ip_addr):
        """
        Get capinfos of user's pcap. This should be run after the account
        has expired and after executing pcapsummarizer on the user's
        directory.

        :param username (str): account username
        :param ip_addr (IPv4Address): IP address allocated for the account.

        :return: String with the content of every ``*.capinfos`` file in
            the user's pcap directory (joined with blank lines), or
            ``None`` when no such file exists.
        """
        user_dir = "{}_{}".format(username, str(ip_addr))
        user_dir = os.path.join(self.path['pcaps'], user_dir)
        user_dir_fp = FilePath(user_dir)

        capinfos = None
        for f in user_dir_fp.listdir():
            fp = FilePath(os.path.join(user_dir, f))
            # Suffix check replaces the old non-raw regex ".*\.capinfos$",
            # whose "\." is a deprecated invalid escape sequence.
            if fp.basename().endswith(".capinfos"):
                if capinfos is None:
                    capinfos = fp.getContent()
                else:
                    capinfos = "{}\n\n{}".format(capinfos, fp.getContent())

        return capinfos
Esempio n. 6
0
	def test_rewriteCss(self):
		"""
		Test that CSS processing works, and verify the header.
		"""
		clock = Clock()
		# FileCache keyed off a fake clock with a 1-second freshness window.
		fc = FileCache(lambda: clock.seconds(), 1)
		temp = FilePath(self.mktemp() + '.css')
		with temp.open('wb') as f:
			f.write("p { color: red; }\n")

		# BetterFile(temp.path) would not work because the processing happens
		# in getChild.  So, create a BetterFile for the .css file's parent dir.
		bf = BetterFile(temp.parent().path, fileCache=fc, rewriteCss=True)
		d = self._requestPostpathAndRender(bf, [temp.basename()])

		# Processed output is prefixed with a header carrying a
		# 32-hex-digit digest.
		headerRe = re.compile(r"/\* CSSResource processed ([0-9a-f]{32}?) \*/")
		# Python 2 tuple-unpacking parameter: receives (request, child).
		def assertProcessedContent((request, child)):
			out = "".join(request.written)
			lines = out.split("\n")
			# Line 0: header; line 1: the original rule; line 2: empty
			# string produced by the trailing newline.
			self.assertTrue(re.match(headerRe, lines[0]), lines[0])
			self.assertEqual("p { color: red; }", lines[1])
			self.assertEqual("", lines[2])
			self.assertEqual(3, len(lines))
		d.addCallback(assertProcessedContent)
		return d
Esempio n. 7
0
    def test_addPyListings(self):
        """
        L{tree.addPyListings} accepts a document with nodes with their I{class}
        attribute set to I{py-listing} and replaces those nodes with Python
        source listings from the file given by the node's I{href} attribute.
        """
        listingPath = FilePath(self.mktemp())
        listingPath.setContent('def foo():\n    pass\n')

        # Minimal input document: <div><a href="..." class="py-listing"/>.
        parent = dom.Element('div')
        listing = dom.Element('a')
        listing.setAttribute('href', listingPath.basename())
        listing.setAttribute('class', 'py-listing')
        parent.appendChild(listing)

        tree.addPyListings(parent, listingPath.dirname())

        # The anchor is replaced by a rendered listing: line numbers,
        # syntax-highlighted source, and a caption naming the file.
        expected = """\
<div><div class="py-listing"><pre><p class="py-linenumber">1
2
</p><span class="py-src-keyword">def</span> <span class="py-src-identifier">foo</span>():
    <span class="py-src-keyword">pass</span>
</pre><div class="caption"> - <a href="temp"><span class="filename">temp</span></a></div></div></div>"""

        self.assertEqual(parent.toxml(), expected)
Esempio n. 8
0
 def test_readme(self):
     """
     Execute the doctest examples embedded in the project README.
     """
     readme = FilePath(__file__).parent().parent().parent().child('README.rst')
     doctest_case = _doctest_parser.get_doctest(
         readme.getContent(), {}, readme.basename(), readme.path, 0)
     captured = []
     result = _doctest_runner.run(doctest_case, out=captured.append)
     if result.failed:
         self.fail('%s\n%s' % (doctest_case.name, ''.join(captured)))
Esempio n. 9
0
    def buildPDF(self, bookPath, inputDirectory, outputPath):
        """
        Build a PDF from the given a LaTeX book document.

        @type bookPath: L{FilePath}
        @param bookPath: The location of a LaTeX document defining a book.

        @type inputDirectory: L{FilePath}
        @param inputDirectory: The directory which the inputs of the book are
            relative to.

        @type outputPath: L{FilePath}
        @param outputPath: The location to which to write the resulting book.

        @raise ValueError: If C{bookPath} does not end with C{.tex}.
        """
        if not bookPath.basename().endswith(".tex"):
            raise ValueError("Book filename must end with .tex")

        # Intermediate artifacts (.dvi, .ps, .pdf) go into a scratch
        # directory so the input tree stays clean.
        workPath = FilePath(mkdtemp())
        try:
            startDir = os.getcwd()
            try:
                # latex resolves relative \input paths against the cwd.
                os.chdir(inputDirectory.path)

                texToDVI = (
                    "latex -interaction=nonstopmode "
                    "-output-directory=%s %s") % (
                    workPath.path, bookPath.path)

                # What I tell you three times is true!
                # The first two invocations of latex on the book file allows it
                # correctly create page numbers for in-text references.  Why this is
                # the case, I could not tell you. -exarkun
                for i in range(3):
                    self.run(texToDVI)

                # Derive the sibling .dvi/.ps/.pdf names from the book's stem.
                bookBaseWithoutExtension = bookPath.basename()[:-4]
                dviPath = workPath.child(bookBaseWithoutExtension + ".dvi")
                psPath = workPath.child(bookBaseWithoutExtension + ".ps")
                pdfPath = workPath.child(bookBaseWithoutExtension + ".pdf")
                # Convert DVI -> PostScript -> PDF.
                self.run(
                    "dvips -o %(postscript)s -t letter -Ppdf %(dvi)s" % {
                        'postscript': psPath.path,
                        'dvi': dviPath.path})
                self.run("ps2pdf13 %(postscript)s %(pdf)s" % {
                        'postscript': psPath.path,
                        'pdf': pdfPath.path})
                pdfPath.moveTo(outputPath)
                workPath.remove()
            finally:
                os.chdir(startDir)
        except:
            # On any failure, preserve the scratch directory next to the
            # book for post-mortem debugging, then re-raise.
            workPath.moveTo(bookPath.parent().child(workPath.basename()))
            raise
 def _parse_file(self, kind, args):
     """
     Build a file-destination factory from a destination argument.

     :param kind: The destination kind chosen on the command line
         (unused here; the caller dispatches on it).
     :param args: Either ``"-"`` for stdout, or a filesystem path for a
         size-rotated logfile.

     :return: A one-argument callable (taking the reactor) that returns
         a ``FileDestination`` wired to the requested file.
     """
     if args == "-":
         # Log straight to the already-open stdout stream.
         def get_file():
             return stdout
     else:
         path = FilePath(args)

         # Named def instead of a lambda assignment (PEP 8 / E731).
         # Rotate at 1 GiB, keeping at most 10 rotated files.
         def get_file():
             return LogFile(
                 path.basename(),
                 path.dirname(),
                 rotateLength=1024 * 1024 * 1024,
                 maxRotatedFiles=10,
             )

     def make_destination(reactor):
         return FileDestination(get_file())
     return make_destination
Esempio n. 11
0
    def loadFile(self, filename):
        """
        Load records from C(unknown).

        @param filename: file to read from
        @type filename: L{bytes}
        """
        fp = FilePath(filename)
        # Not the best way to set an origin. It can be set using $ORIGIN
        # though.
        self.origin = nativeString(fp.basename() + b'.')

        # Normalise the raw zone text before parsing: drop comments, then
        # fold parenthesised continuation lines together.
        raw_lines = fp.getContent().splitlines(True)
        self.parseLines(
            self.collapseContinuations(self.stripComments(raw_lines)))
Esempio n. 12
0
    def loadFile(self, filename):
        """
        Load records from C(unknown).

        @param filename: file to read from
        @type filename: L{bytes}
        """
        fp = FilePath(filename)
        # Not the best way to set an origin. It can be set using $ORIGIN
        # though.
        self.origin = nativeString(fp.basename() + b'.')

        # Pre-process the zone text: strip comments, then join
        # parenthesised continuation lines, before parsing the records.
        lines = fp.getContent().splitlines(True)
        lines = self.stripComments(lines)
        lines = self.collapseContinuations(lines)
        self.parseLines(lines)
Esempio n. 13
0
	def test_cssCached(self):
		"""
		The processed CSS file is cached, and updated when the underlying
		file changes.
		"""
		clock = Clock()
		# FileCache keyed off a fake clock with a 1-second freshness window.
		fc = FileCache(lambda: clock.seconds(), 1)
		temp = FilePath(self.mktemp() + '.css')
		temp.setContent("p { color: red; }\n")

		bf = BetterFile(temp.parent().path, fileCache=fc, rewriteCss=True)
		d = self._requestPostpathAndRender(bf, [temp.basename()])

		# Python 2 tuple-unpacking parameters: each callback receives
		# (request, child).  lines[0] is the processing header; skip it.
		def assertColorRed((request, child)):
			lines = "".join(request.written).split("\n")
			self.assertEqual(["p { color: red; }", ""], lines[1:])
		d.addCallback(assertColorRed)

		def modifyUnderlyingAndMakeRequest(_):
			with temp.open('wb') as f:
				f.write("p { color: green; }\n")
			d = self._requestPostpathAndRender(bf, [temp.basename()])
			return d
		d.addCallback(modifyUnderlyingAndMakeRequest)

		# Within the cache window the stale (red) content is still served.
		def assertStillColorRed((request, child)):
			lines = "".join(request.written).split("\n")
			self.assertEqual(["p { color: red; }", ""], lines[1:])
		d.addCallback(assertStillColorRed)

		def advanceClockAndMakeRequest(_):
			clock.advance(1)
			d = self._requestPostpathAndRender(bf, [temp.basename()])
			return d
		d.addCallback(advanceClockAndMakeRequest)

		# Once the cache window elapses, the new (green) content appears.
		def assertColorGreen((request, child)):
			lines = "".join(request.written).split("\n")
			self.assertEqual(["p { color: green; }", ""], lines[1:])
		d.addCallback(assertColorGreen)

		return d
Esempio n. 14
0
def do_urlextract(dest, url):
    """
    Download a tar archive from *url* and extract it at *dest* inside the
    mounted root filesystem (the process chroots into the root disk's
    mountpoint first, so *dest* is relative to that root).

    :param dest: destination directory path on the target filesystem.
    :param url: URL of the tar archive (any compression tarfile detects).
    :return: ``False`` when the root is not mounted or *dest* is not a
        directory; otherwise ``None`` — extraction errors are printed,
        not raised.
    """
    global dsklst
    dest=FilePath(dest)

    # Don't do this if not mounted!
    mntpnt=dsklst['/'].real_mountpoint()
    if not os.path.ismount(mntpnt):
        return False

    if not dest.isdir():
        return False

    try:
        uh=urllib2.urlopen(url)
        # 'r|*' streams the archive without seeking and auto-detects the
        # compression scheme.
        tf=tarfile.open(mode='r|*',fileobj=uh)
        os.chroot(mntpnt)
        # NOTE(review): dirname+basename re-joined is simply dest.path;
        # also, the process never leaves the chroot (only chdir('/')
        # below) — confirm that is intentional.
        os.chdir(os.path.join(dest.dirname(),dest.basename()))
        tf.extractall()
    except:
        traceback.print_exc()
    os.chdir('/')
Esempio n. 15
0
def _wait_for_new_device(base, size, time_limit=60):
    """
    Helper function to wait for up to 60s for new
    EBS block device (`/dev/sd*` or `/dev/xvd*`) to
    manifest in the OS.

    :param list base: List of baseline block devices
        that existed before execution of operation that expects
        to create a new block device.
    :param int size: Size of the block device we are expected
        to manifest in the OS.
    :param int time_limit: Time, in seconds, to wait for
        new device to manifest. Defaults to 60s.

    :returns: formatted string name of the new block device, or ``None``
        on timeout (after logging a diagnostic message).
    :rtype: unicode
    """
    def _new_devices():
        # Devices present now but absent from the baseline snapshot.
        return list(set(FilePath(b"/sys/block").children()) - set(base))

    start_time = time.time()
    while time.time() - start_time < time_limit:
        for device in _new_devices():
            name = FilePath.basename(device)
            if (name.startswith((b"sd", b"xvd")) and
                    _get_device_size(name) == size):
                return u'/dev/' + name.decode("ascii")
        time.sleep(0.1)

    # No device of the expected size appeared; log the sizes of every new
    # device on this compute instance, for debuggability.
    candidates = _new_devices()
    NO_NEW_DEVICE_IN_OS(new_devices=candidates,
                        new_devices_size=[
                            _get_device_size(device)
                            for device in candidates],
                        expected_size=size,
                        time_limit=time_limit).write()
    return None
Esempio n. 16
0
def _wait_for_new_device(base, size, time_limit=60):
    """
    Poll ``/sys/block`` until a new ``sd*``/``xvd*`` block device of the
    requested size appears, or until ``time_limit`` seconds elapse.

    :param list base: Block devices present before the operation that is
        expected to create a new device.
    :param int size: Expected size of the new block device.
    :param int time_limit: Seconds to keep polling (default 60).

    :returns: ``u'/dev/<name>'`` for the new device, or ``None`` on
        timeout (after logging a diagnostic message).
    :rtype: unicode
    """
    baseline = set(base)
    deadline = time.time() + time_limit
    while time.time() < deadline:
        for device in set(FilePath(b"/sys/block").children()) - baseline:
            device_name = FilePath.basename(device)
            has_prefix = device_name.startswith((b"sd", b"xvd"))
            if has_prefix and _get_device_size(device_name) == size:
                return u'/dev/' + device_name.decode("ascii")
        time.sleep(0.1)

    # Nothing of the expected size appeared; record every new device and
    # its size for debuggability.
    leftovers = list(set(FilePath(b"/sys/block").children()) - baseline)
    sizes = [_get_device_size(device) for device in leftovers]
    NO_NEW_DEVICE_IN_OS(new_devices=leftovers,
                        new_devices_size=sizes,
                        expected_size=size,
                        time_limit=time_limit).write()
    return None
Esempio n. 17
0
	def __init__(self, *argz, **kwz):
		"""
		Resolve the configured monitor globs to concrete directories and
		start the filesystem notifier over them.
		"""
		super(Logtail, self).__init__(*argz, **kwz)

		# Normalise the exclude option into a list of compiled regexps.
		self.exclude = self.conf.monitor_exclude or list()
		if isinstance(self.exclude, types.StringTypes): self.exclude = [self.exclude]
		self.exclude = map(re.compile, self.exclude)

		# paths_watch: realpath -> set of basename glob masks to monitor.
		paths_watch = self.paths_watch = dict()
		# Per-file read positions and partial-line buffers.
		self.paths_pos, self.paths_buff = dict(), dict()

		masks, paths = self.conf.monitor, list()
		if isinstance(masks, bytes): masks = [masks]
		for mask in masks:
			mask_patterns = self.glob_alter(mask)
			for mask_raw in mask_patterns:
				mask = FilePath(mask_raw)
				# All matched parent dirs - like /x/y/z for /x/*/z/file* - are watched for pattern
				# Note that watchers won't be auto-added for /x/m/z, if it'll be created later on
				paths_ext = list( (path.realpath(), mask.basename())
					for path in it.imap(FilePath, glob(mask.dirname())) )
				paths.extend(paths_ext)
				# If full pattern already match something, watch it if it's a dir - /x/dir1 for /x/dir*
				# Note that watchers won't be auto-added for /x/dir2, if it'll be created later on
				if paths_ext: # no point going deeper if parent dirs don't exist
					paths.extend( (path.realpath(), '*')
						for path in it.imap(FilePath, glob(mask_raw))
						if path.realpath().isdir() )
		# Aggregate path masks by-realpath
		for path, mask in paths:
			if not path.isdir():
				log.debug('Skipping special path: {}'.format(path))
				continue
			if path not in paths_watch:
				paths_watch[path] = {mask}
			else: paths_watch[path].add(mask)

		self.notifier_restart()
Esempio n. 18
0
 def _parse_file(self, kind, arg_text):
     """
     Parse a file-destination description into a destination factory.

     :param kind: The destination kind chosen on the command line
         (unused here; the caller dispatches on it).
     :param arg_text: ``<path>[,rotate_length=N][,max_rotated_files=N]``
         where ``<path>`` may be ``-`` for stdout.

     :return: A one-argument callable (taking the reactor) that returns
         a ``FileDestination``.

     :raise ValueError: If ``arg_text`` contains the reserved ``@``
         character.
     """
     # Reserve the possibility of an escape character in the future.  \ is
     # the standard choice but it's the path separator on Windows which
     # pretty much ruins it in this context.  Most other symbols already
     # have some shell-assigned meaning which makes them treacherous to use
     # in a CLI interface.  Eliminating all such dangerous symbols leaves
     # approximately @.
     if u"@" in arg_text:
         raise ValueError(
             u"Unsupported escape character (@) in destination text ({!r})."
             .format(arg_text), )
     arg_list = arg_text.split(u",")
     path_name = arg_list.pop(0)
     if path_name == "-":
         # Log straight to the already-open stdout stream.
         def get_file():
             return stdout
     else:
         path = FilePath(path_name)
         rotate_length = int(
             self._get_arg(
                 u"rotate_length",
                 1024 * 1024 * 1024,
                 arg_list,
             ))
         max_rotated_files = int(
             self._get_arg(
                 u"max_rotated_files",
                 10,
                 arg_list,
             ))

         # Named def instead of a lambda assignment (PEP 8 / E731).
         def get_file():
             return LogFile(
                 path.basename(),
                 path.dirname(),
                 rotateLength=rotate_length,
                 maxRotatedFiles=max_rotated_files,
             )

     def make_destination(reactor):
         return FileDestination(get_file())
     return make_destination
Esempio n. 19
0
    def test_addPyListingsSkipLines(self):
        """
        If a node with the I{py-listing} class also has a I{skipLines}
        attribute, that number of lines from the beginning of the source
        listing are omitted.
        """
        listingPath = FilePath(self.mktemp())
        listingPath.setContent('def foo():\n    pass\n')

        parent = dom.Element('div')
        listing = dom.Element('a')
        listing.setAttribute('href', listingPath.basename())
        listing.setAttribute('class', 'py-listing')
        listing.setAttribute('skipLines', 1)
        parent.appendChild(listing)

        tree.addPyListings(parent, listingPath.dirname())

        expected = """\
<div><div class="py-listing"><pre><p class="py-linenumber">1
</p>    <span class="py-src-keyword">pass</span>
</pre><div class="caption"> - <a href="temp"><span class="filename">temp</span></a></div></div></div>"""

        self.assertEqual(parent.toxml(), expected)
Esempio n. 20
0
    def test_addPyListingsSkipLines(self):
        """
        If a node with the I{py-listing} class also has a I{skipLines}
        attribute, that number of lines from the beginning of the source
        listing are omitted.
        """
        listingPath = FilePath(self.mktemp())
        listingPath.setContent('def foo():\n    pass\n')

        # skipLines=1 should drop "def foo():" from the rendered listing.
        parent = dom.Element('div')
        listing = dom.Element('a')
        listing.setAttribute('href', listingPath.basename())
        listing.setAttribute('class', 'py-listing')
        listing.setAttribute('skipLines', 1)
        parent.appendChild(listing)

        tree.addPyListings(parent, listingPath.dirname())

        # Only the second source line remains, renumbered from 1.
        expected = """\
<div><div class="py-listing"><pre><p class="py-linenumber">1
</p>    <span class="py-src-keyword">pass</span>
</pre><div class="caption"> - <a href="temp"><span class="filename">temp</span></a></div></div></div>"""

        self.assertEqual(parent.toxml(), expected)
Esempio n. 21
0
def start_worker():
    """
    Initialise this tor2web worker process: schedule the periodic list
    update, compile the URL-rewriting regexps, preload static files and
    templates into in-memory caches, build the outbound HTTP connection
    pool, and listen on the file descriptors inherited through the
    ``T2W_FDS_*`` environment variables.
    """
    global antanistaticmap
    global templates
    global pool
    global rexp
    global ports

    # Refresh the lists every 10 minutes.
    lc = LoopingCall(updateListsTask)
    lc.start(600)

    # w2t rewrites tor2web-host URLs; t2w rewrites .onion URLs; body is a
    # hook point for content injection.
    rexp = {
        'body': re.compile(r'(<body.*?\s*>)', re.I),
        'w2t': re.compile(r'(http.?:)?//([a-z0-9]{16}).' + config.basehost + '(?!:\d+)', re.I),
        't2w': re.compile(r'(http.?:)?//([a-z0-9]{16}).(?!' + config.basehost + ')onion(?!:\d+)', re.I)
    }

    ###############################################################################
    # Static Data loading
    #    Here we make a file caching to not handle I/O
    #    at run-time and achieve better performance
    ###############################################################################
    antanistaticmap = {}

    # system default static files
    sys_static_dir = os.path.join(config.sysdatadir, "static/")
    if os.path.exists(sys_static_dir):
        for root, dirs, files in os.walk(os.path.join(sys_static_dir)):
            for basename in files:
                filename = os.path.join(root, basename)
                f = FilePath(filename)
                # Key by the path relative to the static directory.
                antanistaticmap[filename.replace(sys_static_dir, "")] = f.getContent()

    # user defined static files (loaded second, so they override defaults)
    usr_static_dir = os.path.join(config.datadir, "static/")
    if usr_static_dir != sys_static_dir and os.path.exists(usr_static_dir):
        for root, dirs, files in os.walk(os.path.join(usr_static_dir)):
            for basename in files:
                filename = os.path.join(root, basename)
                f = FilePath(filename)
                antanistaticmap[filename.replace(usr_static_dir, "")] = f.getContent()
    ###############################################################################

    ###############################################################################
    # Templates loading
    #    Here we make a templates cache in order to not handle I/O
    #    at run-time and achieve better performance
    ###############################################################################
    templates = {}

    # system default templates
    sys_tpl_dir = os.path.join(config.sysdatadir, "templates/")
    if os.path.exists(sys_tpl_dir):
        files = FilePath(sys_tpl_dir).globChildren("*.tpl")
        for f in files:
            # t2w_file_path resolves to a datadir override when present.
            f = FilePath(config.t2w_file_path(os.path.join('templates', f.basename())))
            templates[f.basename()] = PageTemplate(XMLString(f.getContent()))

    # user defined templates
    usr_tpl_dir = os.path.join(config.datadir, "templates/")
    if usr_tpl_dir != sys_tpl_dir and os.path.exists(usr_tpl_dir):
        files = FilePath(usr_tpl_dir).globChildren("*.tpl")
        for f in files:
            f = FilePath(config.t2w_file_path(os.path.join('templates', f.basename())))
            templates[f.basename()] = PageTemplate(XMLString(f.getContent()))
    ###############################################################################

    pool = HTTPConnectionPool(reactor, True,
                              config.sockmaxpersistentperhost,
                              config.sockcachedconnectiontimeout,
                              config.sockretryautomatically)

    factory = T2WProxyFactory()

    # we do not want all workers to die in the same moment
    requests_countdown = config.requests_per_process / random.randint(1, 3)

    factory = T2WLimitedRequestsFactory(factory, requests_countdown)

    context_factory = T2WSSLContextFactory(os.path.join(config.datadir, "certs/tor2web-key.pem"),
                                                       os.path.join(config.datadir, "certs/tor2web-intermediate.pem"),
                                                       os.path.join(config.datadir, "certs/tor2web-dh.pem"),
                                                       config.cipher_list)

    # Pre-bound listening sockets are handed to this worker as
    # comma-separated FD numbers in the environment.
    fds_https = []
    if  'T2W_FDS_HTTPS' in os.environ:
        fds_https = filter(None, os.environ['T2W_FDS_HTTPS'].split(","))
        fds_https = [int(i) for i in fds_https]

    fds_http = []
    if  'T2W_FDS_HTTP' in os.environ:
        fds_http = filter(None, os.environ['T2W_FDS_HTTP'].split(","))
        fds_http = [int(i) for i in fds_http]


    reactor.listenTCPonExistingFD = listenTCPonExistingFD
    reactor.listenSSLonExistingFD = listenSSLonExistingFD

    for fd in fds_https:
        ports.append(reactor.listenSSLonExistingFD(reactor,
                                                   fd=fd,
                                                   factory=factory,
                                                   contextFactory=context_factory))

    for fd in fds_http:
        ports.append(reactor.listenTCPonExistingFD(reactor,
                                                   fd=fd,
                                                   factory=factory))

    # Any uncaught exception in this worker is mailed to the operator.
    def MailException(etype, value, tb):
        sendexceptionmail(config, etype, value, tb)

    sys.excepthook = MailException
Esempio n. 22
0
def start():
    """
    Initialise this worker: obtain the configuration over RPC, compile
    the URL-rewriting regexps, preload static files and templates, build
    the outbound connection pool, and listen on the inherited sockets.

    NOTE(review): the bare ``yield rpc(...)`` implies this function is
    presumably decorated with ``defer.inlineCallbacks`` — confirm at the
    definition site (decorator not visible here).
    """
    global config
    global antanistaticmap
    global templates
    global pool
    global rexp
    global fds_https
    global fds_http
    global ports

    config = yield rpc("get_config")

    # Periodic refresh every 10 minutes.
    lc = LoopingCall(updateTask)
    lc.start(600)

    # w2t rewrites tor2web-host URLs; t2w rewrites .onion URLs; body is a
    # hook point for content injection.
    rexp = {
        'body':
        re.compile(r'(<body.*?\s*>)', re.I),
        'w2t':
        re.compile(
            r'(https:)?//([a-z0-9]{16}).' + config['basehost'] + '(:443)?',
            re.I),
        't2w':
        re.compile(r'(http:)?//([a-z0-9]{16}).onion(:80)?', re.I)
    }

    ###############################################################################
    # Static files loading
    ###############################################################################
    antanistaticmap = {}

    files = FilePath('/usr/share/tor2web/static/').globChildren("*")
    for file in files:
        # t2w_file_path resolves to a datadir override when present.
        file = FilePath(
            t2w_file_path(config['datadir'],
                          os.path.join('static', file.basename())))
        antanistaticmap[file.basename()] = file.getContent()

    # we add additional files possibly written in datadir/static
    # and not already loaded by the previous lines.
    # NOTE(review): this loop re-iterates `files` (the /usr/share listing)
    # instead of globbing datadir/static, so files that exist only in the
    # datadir are never picked up — confirm whether a fresh
    # globChildren() over the datadir was intended.
    if os.path.exists(os.path.join(config['datadir'], "static/")):
        for file in files:
            if file.basename() not in antanistaticmap:
                antanistaticmap[file.basename()] = file.getContent()

    ###############################################################################

    ###############################################################################
    # Templates loading
    ###############################################################################
    templates = {}

    files = FilePath('/usr/share/tor2web/templates/').globChildren("*.tpl")
    for file in files:
        file = FilePath(
            t2w_file_path(config['datadir'],
                          os.path.join('templates', file.basename())))
        templates[file.basename()] = PageTemplate(XMLString(file.getContent()))
    ###############################################################################

    pool = HTTPConnectionPool(reactor, True,
                              config['sockmaxpersistentperhost'],
                              config['sockcachedconnectiontimeout'],
                              config['sockretryautomatically'])

    factory = T2WProxyFactory()

    # we do not want all workers to die in the same moment
    requests_countdown = config['requests_per_process'] / random.randint(1, 3)

    factory = T2WLimitedRequestsFactory(factory, requests_countdown)

    context_factory = T2WSSLContextFactory(
        os.path.join(config['datadir'], "certs/tor2web-key.pem"),
        os.path.join(config['datadir'], "certs/tor2web-intermediate.pem"),
        os.path.join(config['datadir'], "certs/tor2web-dh.pem"),
        config['cipher_list'])

    if config['debugmode'] and config['debugtostdout']:
        log.startLogging(sys.stdout)
    else:
        log.startLogging(log.NullFile)

    # args[0]/args[1] carry the comma-separated listening FDs inherited
    # from the parent process.
    fds_https = filter(None, args[0].split(","))
    fds_https = [int(i) for i in fds_https]

    fds_http = filter(None, args[1].split(","))
    fds_http = [int(i) for i in fds_http]

    reactor.listenTCPonExistingFD = listenTCPonExistingFD
    reactor.listenSSLonExistingFD = listenSSLonExistingFD

    for fd in fds_https:
        ports.append(
            reactor.listenSSLonExistingFD(reactor,
                                          fd=fd,
                                          factory=factory,
                                          contextFactory=context_factory))

    for fd in fds_http:
        ports.append(
            reactor.listenTCPonExistingFD(reactor, fd=fd, factory=factory))

    sys.excepthook = MailException
Esempio n. 23
0
t2w = Tor2web(config)

# Rewriting regexps: w2t maps tor2web-host URLs to .onion, t2w maps
# .onion URLs to the tor2web host; body is a content-injection hook.
rexp = {
    'body': re.compile(r'(<body.*?\s*>)', re.I),
    'w2t': re.compile(r'(https:)?//([a-z0-9]{16}).' + config.basehost + '(:443)?', re.I),
    't2w': re.compile(r'(http:)?//([a-z0-9]{16}).onion(:80)?', re.I)
}

###############################################################################
# Static files loading
###############################################################################
antanistaticmap = {}

files = FilePath('/usr/share/tor2web/static/').globChildren("*")
for file in files:
    # t2w_file_path resolves to a datadir override when present.
    file = FilePath(t2w_file_path(os.path.join('static', file.basename())))
    antanistaticmap[file.basename()] = file.getContent()

# we add additional files possibly written in datadir/static
# and not already loaded by the previous lines.
# NOTE(review): this loop re-iterates `files` (the /usr/share listing)
# rather than globbing datadir/static, so files that exist only in the
# datadir are never picked up — confirm whether that was intended.
if os.path.exists(os.path.join(config.datadir, "static/")):
    for file in files:
        if file.basename() not in antanistaticmap:
            antanistaticmap[file.basename()] = file.getContent()

###############################################################################

###############################################################################
# Templates loading
###############################################################################
templates = {}
Esempio n. 24
0
def start():
    """Worker entry point: load configuration and caches, then listen.

    Runs as a Twisted inlineCallbacks-style generator (note the ``yield``
    on the RPC call below).  It populates the module-level caches
    (regexes, static files, templates), builds the proxy factory and SSL
    context, initializes logging, and attaches listeners to the file
    descriptors inherited from the parent process via ``args``.
    """
    global config
    global antanistaticmap
    global templates
    global pool
    global rexp
    global fds_https
    global fds_http
    global ports

    # Fetch the runtime configuration from the parent process over RPC.
    config = yield rpc("get_config")

    # Periodic maintenance task, every 10 minutes.
    lc = LoopingCall(updateTask)
    lc.start(600)

    # URL-rewriting regexes: 'w2t' matches basehost URLs (web-to-tor),
    # 't2w' matches .onion URLs (tor-to-web); both skip explicit ports.
    rexp = {
        'body': re.compile(r'(<body.*?\s*>)', re.I),
        'w2t': re.compile(r'(http.?:)?//([a-z0-9]{16}).' + config['basehost'] + '(?!:\d+)', re.I),
        't2w': re.compile(r'(http.?:)?//([a-z0-9]{16}).(?!' + config['basehost'] + ')onion(?!:\d+)', re.I)
    }

    ###############################################################################
    # Static Data loading
    #    Here we make a file caching to not handle I/O
    #    at run-time and achieve better performance
    ###############################################################################
    antanistaticmap = {}

    # The glob is only used for basenames; the actual content is read via
    # t2w_file_path, which presumably prefers a datadir copy — TODO confirm.
    files = FilePath(static_path).globChildren("*")
    for f in files:
        f = FilePath(t2w_file_path(config['datadir'], os.path.join('static', f.basename())))
        antanistaticmap[f.basename()] = f.getContent()

    # we add also user defined data allowing also the user to override tor2web defaults.
    userstaticdir = os.path.join(config['datadir'], "static/")
    if os.path.exists(userstaticdir):
        for root, dirs, files in os.walk(os.path.join(userstaticdir)):
            for basename in files:
                filename = os.path.join(root, basename)
                f = FilePath(filename)
                # Key by path relative to the user static dir so nested
                # files keep their sub-paths.
                antanistaticmap[filename.replace(userstaticdir, "")] = f.getContent()

    ###############################################################################

    ###############################################################################
    # Templates loading
    #    Here we make a templates caching to not handle I/O
    #    at run-time and achieve better performance
    ###############################################################################
    templates = {}

    files = FilePath(templates_path).globChildren("*.tpl")
    for f in files:
        f = FilePath(t2w_file_path(config['datadir'], os.path.join('templates', f.basename())))
        templates[f.basename()] = PageTemplate(XMLString(f.getContent()))
    ###############################################################################

    # Shared outbound connection pool with persistent-connection tuning.
    pool = HTTPConnectionPool(reactor, True,
                              config['sockmaxpersistentperhost'],
                              config['sockcachedconnectiontimeout'],
                              config['sockretryautomatically'])

    factory = T2WProxyFactory()

    # we do not want all workers to die in the same moment
    requests_countdown = config['requests_per_process'] / random.randint(1, 3)

    # Wrap the proxy factory so the worker stops accepting after the
    # (jittered) request budget is exhausted.
    factory = T2WLimitedRequestsFactory(factory, requests_countdown)

    context_factory = T2WSSLContextFactory(os.path.join(config['datadir'], "certs/tor2web-key.pem"),
                                                       os.path.join(config['datadir'], "certs/tor2web-intermediate.pem"),
                                                       os.path.join(config['datadir'], "certs/tor2web-dh.pem"),
                                                       config['cipher_list'])

    # Log to stdout only in debug mode; otherwise discard log output.
    if config['debugmode'] and config['debugtostdout']:
        log.startLogging(sys.stdout)
    else:
        log.startLogging(log.NullFile)

    # File descriptors inherited from the parent process, passed as
    # comma-separated lists: args[0] -> HTTPS, args[1] -> HTTP.
    fds_https = filter(None, args[0].split(","))
    fds_https = [int(i) for i in fds_https]

    fds_http = filter(None, args[1].split(","))
    fds_http = [int(i) for i in fds_http]

    reactor.listenTCPonExistingFD = listenTCPonExistingFD
    reactor.listenSSLonExistingFD = listenSSLonExistingFD

    for fd in fds_https:
        ports.append(reactor.listenSSLonExistingFD(reactor,
                                                   fd=fd,
                                                   factory=factory,
                                                   contextFactory=context_factory))

    for fd in fds_http:
        ports.append(reactor.listenTCPonExistingFD(reactor,
                                                   fd=fd,
                                                   factory=factory))

    # Deliver uncaught exceptions by mail.
    sys.excepthook = MailException
Esempio n. 25
0
def start():
    """Worker entry point: load configuration and caches, then listen.

    Runs as a Twisted inlineCallbacks-style generator (note the ``yield``
    on the RPC call below).  It populates the module-level caches
    (regexes, static files, templates), builds the proxy factory and SSL
    context, initializes logging, and attaches listeners to the file
    descriptors inherited from the parent process via ``args``.
    """
    global config
    global antanistaticmap
    global templates
    global pool
    global rexp
    global fds_https
    global fds_http
    global ports
    global requests_countdown

    # Fetch the runtime configuration from the parent process over RPC.
    config = yield rpc("get_config")

    # URL-rewriting regexes: 'w2t' rewrites web-side URLs (basehost) to
    # tor-side, 't2w' the reverse.
    rexp = {
        'body': re.compile(r'(<body.*?\s*>)', re.I),
        'w2t': re.compile(r'(https:)?//([a-z0-9]{16}).' + config['basehost'] + '(:443)?', re.I),
        't2w': re.compile(r'(http:)?//([a-z0-9]{16}).onion(:80)?', re.I)
    }

    ###############################################################################
    # Static data loading
    #    Files are cached in memory at startup to avoid per-request I/O.
    ###############################################################################
    antanistaticmap = {}

    files = FilePath('/usr/share/tor2web/static/').globChildren("*")
    for static_file in files:
        static_file = FilePath(t2w_file_path(config['datadir'], os.path.join('static', static_file.basename())))
        antanistaticmap[static_file.basename()] = static_file.getContent()

    # We add additional files eventually written in datadir/static and not
    # already loaded by the previous lines.
    # BUGFIX: the original loop re-iterated the /usr/share glob result, so
    # files present only in datadir/static were never actually loaded; glob
    # the datadir directory instead.
    userstaticdir = os.path.join(config['datadir'], "static/")
    if os.path.exists(userstaticdir):
        for static_file in FilePath(userstaticdir).globChildren("*"):
            if static_file.basename() not in antanistaticmap:
                antanistaticmap[static_file.basename()] = static_file.getContent()

    ###############################################################################

    ###############################################################################
    # Templates loading
    ###############################################################################
    templates = {}

    files = FilePath('/usr/share/tor2web/templates/').globChildren("*.tpl")
    for template_file in files:
        template_file = FilePath(t2w_file_path(config['datadir'], os.path.join('templates', template_file.basename())))
        templates[template_file.basename()] = PageTemplate(XMLString(template_file.getContent()))
    ###############################################################################

    # Shared outbound connection pool with persistent-connection tuning.
    pool = HTTPConnectionPool(reactor, True,
                              config['sockmaxpersistentperhost'],
                              config['sockcachedconnectiontimeout'],
                              config['sockretryautomatically'])

    factory = T2WProxyFactory()
    factory.requestCountdown = config['requests_per_process']

    context_factory = T2WSSLContextFactory(os.path.join(config['datadir'], "certs/tor2web-key.pem"),
                                           os.path.join(config['datadir'], "certs/tor2web-intermediate.pem"),
                                           os.path.join(config['datadir'], "certs/tor2web-dh.pem"),
                                           config['cipher_list'])

    # BUGFIX: the original only started logging when BOTH debugmode and
    # debugtostdout were set; with debugmode on and debugtostdout off the
    # logging system was never initialized at all.  Fall back to NullFile
    # in every non-stdout case, matching the other worker variants.
    if config['debugmode'] and config['debugtostdout']:
        log.startLogging(sys.stdout)
    else:
        log.startLogging(log.NullFile)

    # File descriptors inherited from the parent process, passed as
    # comma-separated lists: args[0] -> HTTPS, args[1] -> HTTP.
    fds_https = filter(None, args[0].split(","))
    fds_https = [int(i) for i in fds_https]

    fds_http = filter(None, args[1].split(","))
    fds_http = [int(i) for i in fds_http]

    reactor.listenTCPonExistingFD = listenTCPonExistingFD
    reactor.listenSSLonExistingFD = listenSSLonExistingFD

    for fd in fds_https:
        ports.append(reactor.listenSSLonExistingFD(reactor,
                                                   fd=fd,
                                                   factory=factory,
                                                   contextFactory=context_factory))

    for fd in fds_http:
        ports.append(reactor.listenTCPonExistingFD(reactor,
                                                   fd=fd,
                                                   factory=factory))

    # we do not want all workers to die in the same moment
    requests_countdown = config['requests_per_process'] / random.randint(3, 5)

    # Deliver uncaught exceptions by mail.
    sys.excepthook = MailException
Esempio n. 26
0
def start():
    """Worker entry point: load configuration and caches, then listen.

    Runs as a Twisted inlineCallbacks-style generator (note the ``yield``
    on the RPC call below).  It populates the module-level caches
    (regexes, static files, templates), builds the proxy factory and SSL
    context, initializes logging, and attaches listeners to the file
    descriptors inherited from the parent process via ``args``.
    """
    global config
    global antanistaticmap
    global templates
    global pool
    global rexp
    global fds_https
    global fds_http
    global ports

    # Fetch the runtime configuration from the parent process over RPC.
    config = yield rpc("get_config")

    # Periodic maintenance task, every 10 minutes.
    lc = LoopingCall(updateTask)
    lc.start(600)

    # URL-rewriting regexes: 'w2t' matches basehost URLs (web-to-tor),
    # 't2w' matches .onion URLs (tor-to-web); both skip explicit ports.
    rexp = {
        'body':
        re.compile(r'(<body.*?\s*>)', re.I),
        'w2t':
        re.compile(
            r'(http.?:)?//([a-z0-9]{16}).' + config['basehost'] + '(?!:\d+)',
            re.I),
        't2w':
        re.compile(
            r'(http.?:)?//([a-z0-9]{16}).(?!' + config['basehost'] +
            ')onion(?!:\d+)', re.I)
    }

    ###############################################################################
    # Static Data loading
    #    Here we make a file caching to not handle I/O
    #    at run-time and achieve better performance
    ###############################################################################
    antanistaticmap = {}

    # The glob is only used for basenames; the actual content is read via
    # t2w_file_path, which presumably prefers a datadir copy — TODO confirm.
    files = FilePath('/usr/share/tor2web/static/').globChildren("*")
    for f in files:
        f = FilePath(
            t2w_file_path(config['datadir'],
                          os.path.join('static', f.basename())))
        antanistaticmap[f.basename()] = f.getContent()

    # we add also user defined data allowing also the user to override tor2web defaults.
    userstaticdir = os.path.join(config['datadir'], "static/")
    if os.path.exists(userstaticdir):
        for root, dirs, files in os.walk(os.path.join(userstaticdir)):
            for basename in files:
                filename = os.path.join(root, basename)
                f = FilePath(filename)
                # Key by path relative to the user static dir so nested
                # files keep their sub-paths.
                antanistaticmap[filename.replace(userstaticdir,
                                                 "")] = f.getContent()

    ###############################################################################

    ###############################################################################
    # Templates loading
    #    Here we make a templates caching to not handle I/O
    #    at run-time and achieve better performance
    ###############################################################################
    templates = {}

    files = FilePath('/usr/share/tor2web/templates/').globChildren("*.tpl")
    for f in files:
        f = FilePath(
            t2w_file_path(config['datadir'],
                          os.path.join('templates', f.basename())))
        templates[f.basename()] = PageTemplate(XMLString(f.getContent()))
    ###############################################################################

    # Shared outbound connection pool with persistent-connection tuning.
    pool = HTTPConnectionPool(reactor, True,
                              config['sockmaxpersistentperhost'],
                              config['sockcachedconnectiontimeout'],
                              config['sockretryautomatically'])

    factory = T2WProxyFactory()

    # we do not want all workers to die in the same moment
    requests_countdown = config['requests_per_process'] / random.randint(1, 3)

    # Wrap the proxy factory so the worker stops accepting after the
    # (jittered) request budget is exhausted.
    factory = T2WLimitedRequestsFactory(factory, requests_countdown)

    context_factory = T2WSSLContextFactory(
        os.path.join(config['datadir'], "certs/tor2web-key.pem"),
        os.path.join(config['datadir'], "certs/tor2web-intermediate.pem"),
        os.path.join(config['datadir'], "certs/tor2web-dh.pem"),
        config['cipher_list'])

    # Log to stdout only in debug mode; otherwise discard log output.
    if config['debugmode'] and config['debugtostdout']:
        log.startLogging(sys.stdout)
    else:
        log.startLogging(log.NullFile)

    # File descriptors inherited from the parent process, passed as
    # comma-separated lists: args[0] -> HTTPS, args[1] -> HTTP.
    fds_https = filter(None, args[0].split(","))
    fds_https = [int(i) for i in fds_https]

    fds_http = filter(None, args[1].split(","))
    fds_http = [int(i) for i in fds_http]

    reactor.listenTCPonExistingFD = listenTCPonExistingFD
    reactor.listenSSLonExistingFD = listenSSLonExistingFD

    for fd in fds_https:
        ports.append(
            reactor.listenSSLonExistingFD(reactor,
                                          fd=fd,
                                          factory=factory,
                                          contextFactory=context_factory))

    for fd in fds_http:
        ports.append(
            reactor.listenTCPonExistingFD(reactor, fd=fd, factory=factory))

    # Deliver uncaught exceptions by mail.
    sys.excepthook = MailException
Esempio n. 27
0
class FSItem(BackendItem):
    """A backend item representing a file or directory on the local
    filesystem, exposed to UPnP/DLNA clients.

    Builds the matching DIDL-Lite item (via ``UPnPClass``) with its
    ``Resource`` entries (an ``internal:`` and an ``http-get:`` URI),
    attaches cover art / thumbnails where available, and tracks children
    for container items.
    """

    logCategory = 'fs_item'

    def __init__(self, object_id, parent, path, mimetype, urlbase, UPnPClass, update=False):
        """Create the item and register it with its parent.

        :param object_id: backend id of this item
        :param parent: parent FSItem, or None for the root container
        :param path: filesystem path; may be None for 'item' placeholders
        :param mimetype: mimetype string, or the pseudo-types
            'root', 'directory' and 'item'
        :param urlbase: base URL under which this item is served
        :param UPnPClass: DIDL-Lite class used to build ``self.item``
        :param update: when True, bump the parent's update_id
        """
        self.id = object_id
        self.parent = parent
        if parent:
            parent.add_child(self, update=update)
        if mimetype == 'root':
            # The root keeps a plain (unicode) path string, not a FilePath.
            self.location = unicode(path)
        else:
            if mimetype == 'item' and path is None:
                # Placeholder item: derive a path below the parent.
                path = os.path.join(parent.get_path(), unicode(self.id))
            self.location = FilePath(path)
        self.mimetype = mimetype
        if urlbase[-1] != '/':
            urlbase += '/'
        self.url = urlbase + str(self.id)

        if parent is None:
            parent_id = -1
        else:
            parent_id = parent.get_id()

        self.item = UPnPClass(object_id, parent_id, self.get_name())
        if isinstance(self.item, Container):
            self.item.childCount = 0
        self.child_count = 0
        self.children = []

        if mimetype in ['directory', 'root']:
            self.update_id = 0
            # Containers have a URL but no single filesystem payload.
            self.get_url = lambda: self.url
            self.get_path = lambda: None
            if (isinstance(self.location, FilePath) and
                    self.location.isdir() == True):
                self.check_for_cover_art()
                if hasattr(self, 'cover'):
                    _, ext = os.path.splitext(self.cover)
                    # add the cover image extension to help clients not
                    # reacting on the mimetype
                    self.item.albumArtURI = ''.join((urlbase, str(self.id), '?cover', ext))
        else:
            self.get_url = lambda: self.url

            if self.mimetype.startswith('audio/'):
                if hasattr(parent, 'cover'):
                    _, ext = os.path.splitext(parent.cover)
                    # add the cover image extension to help clients not
                    # reacting on the mimetype
                    self.item.albumArtURI = ''.join((urlbase, str(self.id), '?cover', ext))

            # Extract the host part of the URL base for the internal: URI.
            _, host_port, _, _, _ = urlsplit(urlbase)
            if host_port.find(':') != -1:
                host, port = tuple(host_port.split(':'))
            else:
                host = host_port

            # Best effort: placeholder items may not exist on disk yet.
            try:
                size = self.location.getsize()
            except Exception:
                size = 0

            if mimetype != 'item':
                res = Resource('file://' + urllib.quote(self.get_path()),
                               'internal:%s:%s:*' % (host, self.mimetype))
                res.size = size
                self.item.res.append(res)

            if mimetype != 'item':
                res = Resource(self.url, 'http-get:*:%s:*' % self.mimetype)
            else:
                # Placeholder items advertise a wildcard mimetype.
                res = Resource(self.url, 'http-get:*:*:*')

            res.size = size
            self.item.res.append(res)

            # If this item is an image and we want to add a thumbnail for
            # it we have to follow these rules:
            #
            #   create a new Resource object, at least a 'http-get' and
            #   maybe an 'internal' one too
            #
            #   for a JPG:
            #     res = Resource(url_for_thumbnail,
            #         'http-get:*:image/jpg:%s' % ';'.join(
            #             simple_dlna_tags + ('DLNA.ORG_PN=JPEG_TN',)))
            #     res.size = size_of_thumbnail
            #     self.item.res.append(res)
            #
            #   for a PNG use 'DLNA.ORG_PN=PNG_TN' instead, and register
            #   the file in self.item.attachments[key].

            if self.mimetype in ('image/jpeg', 'image/png'):
                path = self.get_path()
                # Thumbnails live next to the image in a '.thumbs' dir.
                thumbnail = os.path.join(os.path.dirname(path), '.thumbs', os.path.basename(path))
                if os.path.exists(thumbnail):
                    mimetype, _ = mimetypes.guess_type(thumbnail, strict=False)
                    if mimetype in ('image/jpeg', 'image/png'):
                        if mimetype == 'image/jpeg':
                            dlna_pn = 'DLNA.ORG_PN=JPEG_TN'
                        else:
                            dlna_pn = 'DLNA.ORG_PN=PNG_TN'

                        # id() of the path string serves as an opaque
                        # attachment key for the '?attachment=' URL.
                        hash_from_path = str(id(thumbnail))
                        new_res = Resource(self.url + '?attachment=' + hash_from_path,
                            'http-get:*:%s:%s' % (mimetype, ';'.join(simple_dlna_tags + (dlna_pn,))))
                        new_res.size = os.path.getsize(thumbnail)
                        self.item.res.append(new_res)
                        if not hasattr(self.item, 'attachments'):
                            self.item.attachments = {}
                        self.item.attachments[hash_from_path] = utils.StaticFile(urllib.quote(thumbnail))

        try:
            # FIXME: getmtime is deprecated in Twisted 2.6
            self.item.date = datetime.fromtimestamp(self.location.getmtime())
        except Exception:
            self.item.date = None

    def rebuild(self, urlbase):
        """Re-detect the mimetype of a placeholder 'item' and rebuild its
        DIDL-Lite item and resources; bumps the parent's update_id."""
        if self.mimetype != 'item':
            return
        mimetype, _ = mimetypes.guess_type(self.get_path(), strict=False)
        if mimetype is None:
            return
        self.mimetype = mimetype
        UPnPClass = classChooser(self.mimetype)
        self.item = UPnPClass(self.id, self.parent.id, self.get_name())
        if hasattr(self.parent, 'cover'):
            _, ext = os.path.splitext(self.parent.cover)
            # add the cover image extension to help clients not reacting
            # on the mimetype
            self.item.albumArtURI = ''.join((urlbase, str(self.id), '?cover', ext))

        _, host_port, _, _, _ = urlsplit(urlbase)
        if host_port.find(':') != -1:
            host, port = tuple(host_port.split(':'))
        else:
            host = host_port

        res = Resource('file://' + urllib.quote(self.get_path()),
                       'internal:%s:%s:*' % (host, self.mimetype))
        try:
            res.size = self.location.getsize()
        except Exception:
            res.size = 0
        self.item.res.append(res)
        res = Resource(self.url, 'http-get:*:%s:*' % self.mimetype)

        try:
            res.size = self.location.getsize()
        except Exception:
            res.size = 0
        self.item.res.append(res)

        try:
            # FIXME: getmtime is deprecated in Twisted 2.6
            self.item.date = datetime.fromtimestamp(self.location.getmtime())
        except Exception:
            self.item.date = None

        self.parent.update_id += 1

    def check_for_cover_art(self):
        """ let's try to find in the current directory some jpg file,
            or png if the jpg search fails, and take the first one
            that comes around
        """
        try:
            jpgs = [i.path for i in self.location.children() if i.splitext()[1] in ('.jpg', '.JPG')]
            try:
                self.cover = jpgs[0]
            except IndexError:
                pngs = [i.path for i in self.location.children() if i.splitext()[1] in ('.png', '.PNG')]
                try:
                    self.cover = pngs[0]
                except IndexError:
                    return
        except UnicodeDecodeError:
            self.warning("UnicodeDecodeError - there is something wrong with a file located in %r", self.location.path)

    def remove(self):
        """Detach this item from its parent and drop its DIDL-Lite item."""
        if self.parent:
            self.parent.remove_child(self)
        del self.item

    def add_child(self, child, update=False):
        """Append *child*, keeping child counts (and optionally update_id)
        in sync."""
        self.children.append(child)
        self.child_count += 1
        if isinstance(self.item, Container):
            self.item.childCount += 1
        if update == True:
            self.update_id += 1

    def remove_child(self, child):
        """Remove *child* if present, keeping counts and update_id in sync."""
        if child in self.children:
            self.child_count -= 1
            if isinstance(self.item, Container):
                self.item.childCount -= 1
            self.children.remove(child)
            self.update_id += 1

    def get_children(self, start=0, request_count=0):
        """Return children from *start*; request_count of 0 means 'all'.

        NOTE(review): a non-zero request_count is used as an absolute end
        index, not a count (looks like it should be start+request_count) —
        kept as-is since callers may depend on it; TODO confirm.
        """
        if request_count == 0:
            return self.children[start:]
        else:
            return self.children[start:request_count]

    def get_child_count(self):
        """Return the number of direct children."""
        return self.child_count

    def get_id(self):
        """Return the backend object id."""
        return self.id

    def get_update_id(self):
        """Return the container update id, or None for non-containers."""
        if hasattr(self, 'update_id'):
            return self.update_id
        else:
            return None

    def get_path(self):
        """Return the filesystem path, or the raw location string when the
        location is not a FilePath."""
        if isinstance(self.location, FilePath):
            return self.location.path
        else:
            # BUGFIX: the original evaluated self.location without
            # returning it, so non-FilePath locations yielded None.
            return self.location

    def set_path(self, path=None, extension=None):
        """Replace the location path, optionally swapping its extension."""
        if path is None:
            path = self.get_path()
        if extension is not None:
            path, old_ext = os.path.splitext(path)
            path = ''.join((path, extension))
        if isinstance(self.location, FilePath):
            self.location = FilePath(path)
        else:
            self.location = path

    def get_name(self):
        """Return the display name (basename), decoded as UTF-8 with
        replacement for undecodable bytes."""
        if isinstance(self.location, FilePath):
            name = self.location.basename().decode("utf-8", "replace")
        else:
            name = self.location.decode("utf-8", "replace")
        return name

    def get_cover(self):
        """Return this item's cover path, falling back to the parent's,
        or '' when neither exists (best effort, never raises)."""
        try:
            return self.cover
        except Exception:
            try:
                return self.parent.cover
            except Exception:
                return ''

    def get_parent(self):
        """Return the parent FSItem (or None)."""
        return self.parent

    def get_item(self):
        """Return the DIDL-Lite item."""
        return self.item

    def get_xml(self):
        """Return the DIDL-Lite item serialized to XML."""
        return self.item.toString()

    def __repr__(self):
        return 'id: ' + str(self.id) + ' @ ' + self.get_name().encode('ascii', 'xmlcharrefreplace')
Esempio n. 28
0
class FSItem(BackendItem):
    logCategory = 'fs_item'

    def __init__(
        self,
        object_id,
        parent,
        path,
        mimetype,
        urlbase,
        UPnPClass,
        update=False,
        store=None,
    ):
        BackendItem.__init__(self)
        self.id = object_id
        self.parent = parent
        if parent:
            parent.add_child(self, update=update)
        if mimetype == 'root':
            self.location = str(path)
        else:
            if mimetype == 'item' and path is None:
                path = os.path.join(parent.get_realpath(), str(self.id))
            # self.location = FilePath(unicode(path))
            self.location = FilePath(path)
        self.mimetype = mimetype
        if urlbase[-1] != '/':
            urlbase += '/'
        self.url = urlbase + str(self.id)

        self.store = store

        if parent is None:
            parent_id = -1
        else:
            parent_id = parent.get_id()

        self.item = UPnPClass(object_id, parent_id, self.get_name())
        if isinstance(self.item, Container):
            self.item.childCount = 0
        self.child_count = 0
        self.children = []
        self.sorted = False
        self.caption = None

        if mimetype in ['directory', 'root']:
            self.update_id = 0
            self.get_url = lambda: self.url
            # self.item.searchable = True
            # self.item.searchClass = 'object'
            if (isinstance(self.location, FilePath)
                    and self.location.isdir() is True):
                self.check_for_cover_art()
                if getattr(self, 'cover', None):
                    _, ext = os.path.splitext(self.cover)
                    ''' add the cover image extension to help clients
                        not reacting on the mimetype '''
                    self.item.albumArtURI = ''.join(
                        (urlbase, str(self.id), '?cover', str(ext)))
        else:
            self.get_url = lambda: self.url

            if self.mimetype.startswith('audio/'):
                if getattr(parent, 'cover', None):
                    _, ext = os.path.splitext(parent.cover)
                    ''' add the cover image extension to help clients
                        not reacting on the mimetype '''
                    self.item.albumArtURI = ''.join(
                        (urlbase, str(self.id), '?cover', ext))

            _, host_port, _, _, _ = urlsplit(urlbase)
            if host_port.find(':') != -1:
                host, port = tuple(host_port.split(':'))
            else:
                host = host_port

            try:
                size = self.location.getsize()
            except Exception:
                size = 0

            if (self.store.server and self.store.server.coherence.config.get(
                    'transcoding', 'no') == 'yes'):
                if self.mimetype in (
                        'application/ogg',
                        'audio/ogg',
                        'audio/x-wav',
                        'audio/x-m4a',
                        'application/x-flac',
                ):
                    new_res = Resource(
                        self.url + '/transcoded.mp3',
                        f'http-get:*:{"audio/mpeg"}:*',
                    )
                    new_res.size = None
                    # self.item.res.append(new_res)

            if mimetype != 'item':
                res = Resource(
                    'file://' + quote(self.get_path(), encoding='utf-8'),
                    f'internal:{host}:{self.mimetype}:*',
                )
                res.size = size
                self.item.res.append(res)

            if mimetype != 'item':
                res = Resource(self.url, f'http-get:*:{self.mimetype}:*')
            else:
                res = Resource(self.url, 'http-get:*:*:*')

            res.size = size
            self.item.res.append(res)
            ''' if this item is of type audio and we want to add a transcoding
                rule for it, this is the way to do it:

                create a new Resource object, at least a 'http-get'
                and maybe an 'internal' one too

                for transcoding to wav this looks like that

                res = Resource(
                    url_for_transcoded audio,
                    'http-get:*:audio/x-wav:%s'% ';'.join(
                        ['DLNA.ORG_PN=JPEG_TN']+simple_dlna_tags))
                res.size = None
                self.item.res.append(res)
            '''

            if (self.store.server and self.store.server.coherence.config.get(
                    'transcoding', 'no') == 'yes'):
                if self.mimetype in (
                        'audio/mpeg',
                        'application/ogg',
                        'audio/ogg',
                        'audio/x-wav',
                        'audio/x-m4a',
                        'audio/flac',
                        'application/x-flac',
                ):
                    dlna_pn = 'DLNA.ORG_PN=LPCM'
                    dlna_tags = simple_dlna_tags[:]
                    # dlna_tags[1] = 'DLNA.ORG_OP=00'
                    dlna_tags[2] = 'DLNA.ORG_CI=1'
                    new_res = Resource(
                        self.url + '?transcoded=lpcm',
                        f'http-get:*:{"audio/L16;rate=44100;channels=2"}:'
                        f'{";".join([dlna_pn] + dlna_tags)}',
                    )
                    new_res.size = None
                    # self.item.res.append(new_res)

                    if self.mimetype != 'audio/mpeg':
                        new_res = Resource(
                            self.url + '?transcoded=mp3',
                            f'http-get:*:{"audio/mpeg"}:*',
                        )
                        new_res.size = None
                        # self.item.res.append(new_res)
            ''' if this item is an image and we want to add a thumbnail for it
                we have to follow these rules:

                create a new Resource object, at least a 'http-get'
                and maybe an 'internal' one too

                for an JPG this looks like that

                res = Resource(url_for_thumbnail,
                        'http-get:*:image/jpg:%s'% ';'.join(
                        ['DLNA.ORG_PN=JPEG_TN']+simple_dlna_tags))
                res.size = size_of_thumbnail
                self.item.res.append(res)

                and for a PNG the Resource creation is like that

                res = Resource(url_for_thumbnail,
                        'http-get:*:image/png:%s'% ';'.join(
                        simple_dlna_tags+['DLNA.ORG_PN=PNG_TN']))

                if not hasattr(self.item, 'attachments'):
                    self.item.attachments = {}
                self.item.attachments[key] = utils.StaticFile(
                filename_of_thumbnail)
            '''

            if (self.mimetype in ('image/jpeg', 'image/png')
                    or self.mimetype.startswith('video/')):
                try:
                    filename, mimetype, dlna_pn = _find_thumbnail(
                        self.get_path())
                except NoThumbnailFound:
                    pass
                except Exception:
                    self.warning(traceback.format_exc())
                else:
                    dlna_tags = simple_dlna_tags[:]
                    dlna_tags[
                        3] = 'DLNA.ORG_FLAGS=00f00000000000000000000000000000'

                    hash_from_path = str(id(filename))
                    new_res = Resource(
                        self.url + '?attachment=' + hash_from_path,
                        f'http-get:*:{mimetype}:'
                        f'{";".join([dlna_pn] + dlna_tags)}',
                    )
                    new_res.size = os.path.getsize(filename)
                    self.item.res.append(new_res)
                    if not hasattr(self.item, 'attachments'):
                        self.item.attachments = {}
                    self.item.attachments[hash_from_path] = utils.StaticFile(
                        filename)

            if self.mimetype.startswith('video/'):
                # check for a subtitles file
                caption, _ = os.path.splitext(self.get_path())
                caption = caption + '.srt'
                if os.path.exists(caption):
                    hash_from_path = str(id(caption))
                    mimetype = 'smi/caption'
                    new_res = Resource(
                        self.url + '?attachment=' + hash_from_path,
                        f'http-get:*:{mimetype}:{"*"}',
                    )
                    new_res.size = os.path.getsize(caption)
                    self.caption = new_res.data
                    self.item.res.append(new_res)
                    if not hasattr(self.item, 'attachments'):
                        self.item.attachments = {}
                    self.item.attachments[hash_from_path] = utils.StaticFile(
                        caption,
                        defaultType=mimetype,
                    )

            try:
                # FIXME: getmtime is deprecated in Twisted 2.6
                self.item.date = datetime.fromtimestamp(
                    self.location.getmtime())
            except Exception:
                self.item.date = None

    def rebuild(self, urlbase):
        """Re-detect this item's mimetype from its path and rebuild the
        backing UPnP item together with its two resources (an internal
        ``file://`` resource and an external ``http-get`` one).

        ``urlbase`` is the server's base URL; its host part is embedded in
        the internal resource's protocol info.
        """
        if self.mimetype != 'item':
            return
        guessed, _ = mimetypes.guess_type(self.get_path(), strict=False)
        if guessed is None:
            return
        self.mimetype = guessed

        UPnPClass = classChooser(self.mimetype)
        self.item = UPnPClass(self.id, self.parent.id, self.get_name())
        if getattr(self.parent, 'cover', None):
            # Append the cover's file extension to help clients that do
            # not react on the mimetype alone.
            _, ext = os.path.splitext(self.parent.cover)
            self.item.albumArtURI = ''.join(
                (urlbase, str(self.id), '?cover', ext))

        _, host_port, _, _, _ = urlsplit(urlbase)
        if ':' in host_port:
            host, _port = host_port.split(':')
        else:
            host = host_port

        def _size_of_location():
            # Best-effort size; unreadable/missing files report 0.
            try:
                return self.location.getsize()
            except Exception:
                return 0

        internal = Resource(
            'file://' + quote(self.get_path()),
            f'internal:{host}:{self.mimetype}:*',
        )
        internal.size = _size_of_location()
        self.item.res.append(internal)

        external = Resource(self.url, f'http-get:*:{self.mimetype}:*')
        external.size = _size_of_location()
        self.item.res.append(external)

        try:
            # FIXME: getmtime is deprecated in Twisted 2.6
            self.item.date = datetime.fromtimestamp(self.location.getmtime())
        except Exception:
            self.item.date = None

        self.parent.update_id += 1

    def check_for_cover_art(self):
        """Look for cover art among this directory's children.

        Takes the first ``.jpg``/``.JPG`` sibling found; if there is none,
        falls back to the first ``.png``/``.PNG``. Leaves ``self.cover``
        untouched when neither exists.
        """
        try:
            for extensions in (('.jpg', '.JPG'), ('.png', '.PNG')):
                matches = [
                    child.path for child in self.location.children()
                    if child.splitext()[1] in extensions
                ]
                if matches:
                    self.cover = matches[0]
                    return
        except UnicodeDecodeError:
            self.warning(
                f'UnicodeDecodeError - there is something wrong with a ' +
                f'file located in {self.location.path}')

    def remove(self):
        """Detach this item from its parent (if any) and drop its UPnP item."""
        parent = self.parent
        if parent:
            parent.remove_child(self)
        del self.item

    def add_child(self, child, update=False):
        """Append ``child`` to this container and keep counters in sync.

        When ``update`` is true the container's ``update_id`` is bumped so
        clients can notice the change. The child list is re-sorted lazily
        on the next ``get_children()`` call.
        """
        # Order is now stale; get_children() will sort again on demand.
        self.sorted = False
        self.children.append(child)
        self.child_count += 1
        if isinstance(self.item, Container):
            self.item.childCount += 1
        if update:
            self.update_id += 1

    def remove_child(self, child):
        """Remove ``child`` if present, updating counters and ``update_id``."""
        if child in self.children:
            self.children.remove(child)
            self.child_count -= 1
            if isinstance(self.item, Container):
                self.item.childCount -= 1
            self.update_id += 1
        # Mark the child ordering as stale either way.
        self.sorted = False

    def get_children(self, start=0, request_count=0):
        """Return children starting at index ``start``.

        ``request_count`` is the number of children wanted (UPnP Browse
        ``RequestedCount`` semantics); ``0`` means "all remaining".
        Sorts the child list lazily on first access after a change.
        """
        if not self.sorted:
            self.children.sort(key=_natural_key)
            self.sorted = True
        if request_count == 0:
            return self.children[start:]
        # BUG FIX: request_count is a count, not an absolute end index.
        # Slicing to [start:request_count] returned too few (or zero)
        # items whenever start > 0.
        return self.children[start:start + request_count]

    def get_child_count(self):
        """Number of direct children of this item."""
        return self.child_count

    def get_id(self):
        """Identifier assigned to this item."""
        return self.id

    def get_update_id(self):
        """Return this container's update counter, or None if it has none.

        Idiom fix: ``getattr`` with a default replaces the hasattr/else
        branch (same behavior, single lookup).
        """
        return getattr(self, 'update_id', None)

    def get_path(self):
        """Filesystem path of this item; None for directory/root containers."""
        if self.mimetype in ('directory', 'root'):
            return None
        location = self.location
        # location may be a Twisted FilePath or a plain string.
        return location.path if isinstance(location, FilePath) else location

    def get_realpath(self):
        """Underlying path: FilePaths are unwrapped, strings returned as-is."""
        location = self.location
        if isinstance(location, FilePath):
            return location.path
        return location

    def set_path(self, path=None, extension=None):
        """Update this item's location.

        Defaults to the current path; when ``extension`` is given it
        replaces the existing file extension. The location keeps its
        original type (FilePath stays FilePath, string stays string).
        """
        if path is None:
            path = self.get_path()
        if extension is not None:
            stem, _ = os.path.splitext(path)
            path = stem + extension
        if isinstance(self.location, FilePath):
            self.location = FilePath(path)
        else:
            self.location = path

    def get_name(self):
        """Display name: the basename for FilePaths, the raw location otherwise."""
        location = self.location
        if isinstance(location, FilePath):
            return location.basename()
        return location

    def get_cover(self):
        """Cover image path: this item's own, else the parent's, else None."""
        own = self.cover
        if own:
            return own
        parent = getattr(self, 'parent', None)
        # Mirrors the original AttributeError fallback: a missing parent
        # or a parent without a cover yields None.
        return getattr(parent, 'cover', None)

    def get_parent(self):
        """Return this item's parent."""
        return self.parent

    def get_item(self):
        """Return the backing UPnP item object."""
        return self.item

    def get_xml(self):
        """Serialise the backing item via its ``toString()`` method."""
        return self.item.toString()

    def __repr__(self):
        # Encode the name so non-ASCII characters become XML char refs
        # instead of raising in ASCII-only contexts.
        name = self.get_name().encode('ascii', 'xmlcharrefreplace')
        return f'id: {self.id} @ {name}'
Esempio n. 29
0
def start_worker():
    """Initialise one tor2web worker process.

    Starts the periodic list-update task, caches static files and
    templates in memory, builds the proxy factory chain, binds it to the
    listening sockets inherited from the parent process (file descriptor
    numbers passed via the T2W_FDS_HTTP / T2W_FDS_HTTPS environment
    variables) and installs a mail-on-exception hook.
    """
    global antanistaticmap
    global templates
    global pool
    global rexp
    global ports

    # Run updateListsTask every 600 seconds (10 minutes).
    lc = LoopingCall(updateListsTask)
    lc.start(600)

    # Pre-compiled patterns used elsewhere for URL rewriting:
    #   head: the opening <head ...> tag
    #   w2t:  16-char onion labels under the public basehost
    #   t2w:  16-char .onion hostnames not under the basehost
    # NOTE(review): the '(?!:\d+)' fragments are non-raw strings and the
    # dots before basehost/onion are unescaped (match any char) — likely
    # meant as r'...' with '\.'; confirm before changing.
    rexp = {
        'head': re.compile(r'(<head.*?\s*>)', re.I),
        'w2t': re.compile(r'(http.?:)?//([a-z0-9]{16}).' + config.basehost + '(?!:\d+)', re.I),
        't2w': re.compile(r'(http.?:)?//([a-z0-9]{16}).(?!' + config.basehost + ')onion(?!:\d+)', re.I)
    }

    ###############################################################################
    # Static Data loading
    #    Here we make a file caching to not handle I/O
    #    at run-time and achieve better performance
    ###############################################################################
    # Maps a path relative to the static dir -> file contents (bytes).
    antanistaticmap = {}

    # system default static files
    sys_static_dir = os.path.join(config.sysdatadir, "static/")
    if os.path.exists(sys_static_dir):
        for root, dirs, files in os.walk(os.path.join(sys_static_dir)):
            for basename in files:
                filename = os.path.join(root, basename)
                f = FilePath(filename)
                antanistaticmap[filename.replace(sys_static_dir, "")] = f.getContent()

    # user defined static files
    # Loaded after the system ones, so a user file with the same relative
    # path overrides the system entry.
    usr_static_dir = os.path.join(config.datadir, "static/")
    if usr_static_dir != sys_static_dir and os.path.exists(usr_static_dir):
        for root, dirs, files in os.walk(os.path.join(usr_static_dir)):
            for basename in files:
                filename = os.path.join(root, basename)
                f = FilePath(filename)
                antanistaticmap[filename.replace(usr_static_dir, "")] = f.getContent()
    ###############################################################################

    ###############################################################################
    # Templates loading
    #    Here we make a templates cache in order to not handle I/O
    #    at run-time and achieve better performance
    ###############################################################################
    # Maps template basename -> compiled twisted.web.template PageTemplate.
    # User templates (loaded second) override system ones with the same name.
    templates = {}

    # system default templates
    sys_tpl_dir = os.path.join(config.sysdatadir, "templates/")
    if os.path.exists(sys_tpl_dir):
        files = FilePath(sys_tpl_dir).globChildren("*.tpl")
        for f in files:
            # NOTE(review): t2w_file_path presumably resolves the user copy
            # over the system copy for a given relative path — confirm.
            f = FilePath(config.t2w_file_path(os.path.join('templates', f.basename())))
            templates[f.basename()] = PageTemplate(XMLString(f.getContent()))

    # user defined templates
    usr_tpl_dir = os.path.join(config.datadir, "templates/")
    if usr_tpl_dir != sys_tpl_dir and os.path.exists(usr_tpl_dir):
        files = FilePath(usr_tpl_dir).globChildren("*.tpl")
        for f in files:
            f = FilePath(config.t2w_file_path(os.path.join('templates', f.basename())))
            templates[f.basename()] = PageTemplate(XMLString(f.getContent()))
    ###############################################################################

    # Persistent outbound HTTP connection pool shared by the proxy.
    pool = HTTPConnectionPool(reactor, True,
                              config.sockmaxpersistentperhost,
                              config.sockcachedconnectiontimeout,
                              config.sockretryautomatically)

    factory = T2WProxyFactory()

    # we do not want all workers to die in the same moment
    # NOTE(review): under Python 3 this is true division and yields a
    # float; confirm the limiting factory expects/handles a non-integer.
    requests_countdown = config.requests_per_process / random.randint(1, 3)

    factory = T2WLimitedRequestsFactory(factory, requests_countdown)

    context_factory = T2WSSLContextFactory(os.path.join(config.datadir, "certs/tor2web-key.pem"),
                                                       os.path.join(config.datadir, "certs/tor2web-intermediate.pem"),
                                                       os.path.join(config.datadir, "certs/tor2web-dh.pem"),
                                                       config.cipher_list)

    # Comma-separated lists of already-open socket FDs handed down by the
    # parent process; empty entries are discarded.
    fds_https = []
    if  'T2W_FDS_HTTPS' in os.environ:
        fds_https = filter(None, os.environ['T2W_FDS_HTTPS'].split(","))
        fds_https = [int(i) for i in fds_https]

    fds_http = []
    if  'T2W_FDS_HTTP' in os.environ:
        fds_http = filter(None, os.environ['T2W_FDS_HTTP'].split(","))
        fds_http = [int(i) for i in fds_http]


    # Monkey-patch the reactor with helpers that listen on the inherited
    # file descriptors instead of opening new sockets.
    reactor.listenTCPonExistingFD = listenTCPonExistingFD
    reactor.listenSSLonExistingFD = listenSSLonExistingFD

    for fd in fds_https:
        ports.append(reactor.listenSSLonExistingFD(reactor,
                                                   fd=fd,
                                                   factory=factory,
                                                   contextFactory=context_factory))

    for fd in fds_http:
        ports.append(reactor.listenTCPonExistingFD(reactor,
                                                   fd=fd,
                                                   factory=factory))

    # Report any uncaught exception by mail instead of just printing it.
    def MailException(etype, value, tb):
        sendexceptionmail(config, etype, value, tb)

    sys.excepthook = MailException