Ejemplo n.º 1
0
def fakeWSGIERRApp(envir, responder):
    """Fake WSGI application used for testing error responses.

    Status code and body for the requested path are looked up in the
    CODE_DICT / BODY_DICT mappings carried in the WSGI environ and
    rendered through HTTP.
    """
    codedict = envir.get('CODE_DICT', {})
    # NOTE(review): the original referenced an undefined global `bodydict`
    # (NameError on this path); read it from the environ, mirroring how
    # CODE_DICT is obtained -- confirm against the test harness callers.
    bodydict = envir.get('BODY_DICT', {})
    path = envir.get('PATH_INFO')
    code = codedict.get(path, 200)
    if path not in codedict:
        body = bodydict.get(path)
    else:
        body = 'Complete Utter Failure'
    response = HTTP(code, body)
    retval = response.to(responder)
    return retval
Ejemplo n.º 2
0
	def render_GET(self, req):
		"""Handle a dataselect GET request.

		Returns an error page when the SDS archive directory is missing or
		when the GET parameters fail validation.
		"""
		# No archive no service
		if not os.path.isdir(self._sdsPath):
			msg = "SDS archive not found: %s" % self._sdsPath
			# fixed: 'request' was an undefined name here; the parameter is 'req'
			return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg)

		# Parse and validate GET parameters
		try:
			ro = _DataSelectRequestOptions(req.args)
			ro.parse()
		except ValueError as e:
			Logging.warning(str(e))
			return HTTP.renderErrorPage(req, http.BAD_REQUEST, str(e))
Ejemplo n.º 3
0
	def resumeProducing(self):
		"""Write the next miniSEED record to the client (Twisted producer).

		Before the first byte is written, sends a NO_CONTENT error page and
		finishes the request if no record exists, otherwise sets the
		miniSEED response headers. Once the input is exhausted the access
		log entry is written and the request is finished.
		"""
		rec = None

		# modernized Py2-only 'except Exception, e' syntax
		try:
			rec = self.rsInput.next()
		except Exception as e:
			Logging.warning("%s" % str(e))

		if self.written == 0:
			# read first record to test if any data exists at all
			if not rec:
				msg = "no waveform data found"
				data = HTTP.renderErrorPage(self.req, http.NO_CONTENT, msg, self.ro)
				if data:
					self.req.write(data)
				self.req.unregisterProducer()
				self.req.finish()
				return

			self.req.setHeader('Content-Type', 'application/vnd.fdsn.mseed')
			self.req.setHeader('Content-Disposition', "attachment; " \
			                   "filename=%s" % self.fileName)

		if not rec:
			self.req.unregisterProducer()
			Logging.debug("%s: returned %i bytes of mseed data" % (
			               self.ro.service, self.written))
			utils.accessLog(self.req, self.ro, http.OK, self.written, None)
			self.req.finish()
			return

		data = rec.raw().str()
		self.req.write(data)
		self.written += len(data)
Ejemplo n.º 4
0
	def _processRequest(self, req, ro, dbq, exp):
		"""Collect the requested events and render them.

		Events are either fetched by their public IDs or searched via
		_findEvents. Delegates to the exporter or the text formatter;
		returns False when the client disconnected or nothing matched.
		"""
		if req._disconnected:
			return False

		DataModel.PublicObject.SetRegistrationEnabled(False)

		# query event(s)
		ep = DataModel.EventParameters()
		if not ro.eventIDs:
			self._findEvents(ep, ro, dbq)
		else:
			for publicID in ro.eventIDs:
				evt = DataModel.Event.Cast(dbq.getEventByPublicID(publicID))
				if evt:
					ep.add(evt)

		if ep.eventCount() == 0:
			errorPage = HTTP.renderErrorPage(req, http.NO_CONTENT,
			                                 "no matching events found", ro)
			utils.writeTS(req, errorPage)
			return False

		Logging.debug("events found: %i" % ep.eventCount())

		if exp and ro.format != 'csv':
			req.setHeader('Content-Type', 'application/xml')
		else:
			req.setHeader('Content-Type', 'text/plain')

		if exp:
			return self._processRequestExp(req, ro, dbq, exp, ep)
		return self._processRequestText(req, ro, dbq, ep)
Ejemplo n.º 5
0
    def open_epub(cls, url, content=None):
        """Cracks open an EPUB to expose its contents

        :param url: A url representing the EPUB, only used for errors and in
            the absence of the `content` parameter
        :param content: A string representing the compressed EPUB

        :yield: A tuple containing a ZipFile of the EPUB and the path to its
            package document. NOTE(review): despite the original "return"
            wording this is a generator (it yields), presumably wrapped by a
            context-manager decorator outside this view -- confirm.
        """
        if not (url or content):
            raise ValueError("Cannot open epub without url or content")
        if url and not content:
            # Get the epub from the url if no content has been made available.
            content = HTTP.get_with_timeout(url).content
        content = StringIO(content)

        with ZipFile(content) as zip_file:
            # idiom fix: 'not in' instead of 'not ... in ...'
            if cls.CONTAINER_FILE not in zip_file.namelist():
                raise ValueError("Invalid EPUB file, not modifying: %s" % url)

            with zip_file.open(cls.CONTAINER_FILE) as container_file:
                container = container_file.read()
                # the container XML points at the package document(s)
                rootfiles_element = etree.fromstring(container).find("{urn:oasis:names:tc:opendocument:xmlns:container}rootfiles")

                if rootfiles_element is None:
                    raise ValueError("Invalid EPUB file, not modifying: %s" % url)

                rootfile_element = rootfiles_element.find("{urn:oasis:names:tc:opendocument:xmlns:container}rootfile")
                if rootfile_element is None:
                    raise ValueError("Invalid EPUB file, not modifying: %s" % url)

                # 'full-path' is the package document's path inside the zip
                package_document_path = rootfile_element.get('full-path')
            yield zip_file, package_document_path
Ejemplo n.º 6
0
 def __init__(self, vulnerability, threads=0):
     # vulnerability: mapping with at least a 'host' entry -- a URL-like
     #   object exposing a .host attribute (see self.host.host below).
     # threads: worker-thread count for downloads; 0 presumably means
     #   sequential -- confirm against the download method.
     self.vulnerability = vulnerability
     self.host = vulnerability['host']
     self.session = HTTP()
     # all downloaded files land under output/<hostname>/
     self.output_folder = "output/{}/".format(self.host.host)
     self.output_git = self.output_folder + ".git/"
     self.threads = threads
Ejemplo n.º 7
0
	def resumeProducing(self):
		"""Write the next miniSEED record to the client (Twisted producer).

		On the first call, sends a NOT_FOUND error page and finishes the
		request if no record exists; otherwise sets the miniSEED response
		headers. Finishes the request when the input is exhausted.
		"""
		rec = None

		# modernized Py2-only 'except Exception, e' syntax;
		# e.g. ArchiveException
		try:
			rec = self.rsInput.next()
		except Exception as e:
			Logging.warning("%s" % str(e))

		if not self.initialized:
			self.initialized = True
			# read first record to test if any data exists at all

			if not rec:
				msg = "No waveform data found"
				self.req.write(HTTP.renderErrorPage(self.req, http.NOT_FOUND, msg))
				self.req.unregisterProducer()
				self.req.finish()
				return

			self.req.setHeader("Content-Type", "application/vnd.fdsn.mseed")
			self.req.setHeader("Content-Disposition", "attachment; filename=%s" % \
			                   self.fileName)

		if not rec:
			self.req.unregisterProducer()
			self.req.finish()
			return

		self.req.write(rec.raw().str())
Ejemplo n.º 8
0
	def render_GET(self, req):
		"""Parse and validate the event-service GET parameters.

		Returns a BAD_REQUEST error page on invalid input.
		"""
		ro = _EventRequestOptions(req.args)
		try:
			ro.parse()
		except ValueError as e:
			Logging.warning(str(e))
			return HTTP.renderErrorPage(req, http.BAD_REQUEST, str(e), ro)
Ejemplo n.º 9
0
	def render_POST(self, req):
		"""Parse and validate the station-service POST parameters.

		Returns a BAD_REQUEST error page on invalid input.
		"""
		ro = _StationRequestOptions()
		try:
			ro.parsePOST(req.content)
			ro.parse()
		except ValueError as e:
			Logging.warning(str(e))
			return HTTP.renderErrorPage(req, http.BAD_REQUEST, str(e), ro)
Ejemplo n.º 10
0
def checkObjects(req, objCount, maxObj):
	"""Return True when objCount stays within maxObj; otherwise send a
	REQUEST_ENTITY_TOO_LARGE error page to the client and return False."""
	if objCount <= maxObj:
		return True

	msg = "The result set of your request exceeds the configured maximum " \
	      "number of objects (%i). Refine your request parameters." % maxObj
	errorPage = HTTP.renderErrorPage(req, http.REQUEST_ENTITY_TOO_LARGE, msg)
	writeTS(req, errorPage)
	return False
Ejemplo n.º 11
0
	def _processRequest(self, req, ro, dbq, exp):
		"""Fetch events by ID or search, apply the service-level filters
		and render the result.

		Returns False when the client disconnected, True after an empty
		NO_CONTENT page, otherwise delegates to the exporter or the text
		formatter.
		"""
		if req._disconnected:
			return False

		DataModel.PublicObject.SetRegistrationEnabled(False)

		# query event(s)
		ep = DataModel.EventParameters()
		if ro.eventIDs:
			for eID in ro.eventIDs:
				obj = dbq.getEventByPublicID(eID)
				e = DataModel.Event.Cast(obj)
				if not e:
					continue

				# optional filter on configured event type white/black lists
				if self._eventTypeWhitelist or self._eventTypeBlacklist:
					eType = None
					# NOTE(review): e.type() presumably raises ValueException
					# when the type is unset, leaving eType = None -- confirm
					try: eType = DataModel.EEventTypeNames_name(e.type())
					except ValueException: pass
					if self._eventTypeWhitelist and \
					   not eType in self._eventTypeWhitelist: continue
					if self._eventTypeBlacklist and \
					   eType in self._eventTypeBlacklist: continue

				# optional filter on the evaluation mode of the preferred
				# origin; events whose origin is missing or mismatched are
				# skipped
				if self._evaluationMode is not None:
					obj = dbq.getObject(DataModel.Origin.TypeInfo(),
					                    e.preferredOriginID())
					o = DataModel.Origin.Cast(obj)
					try:
						if o is None or \
						   o.evaluationMode() != self._evaluationMode:
							continue
					except ValueException:
						continue

				ep.add(e)
		else:
			self._findEvents(ep, ro, dbq)

		if ep.eventCount() == 0:
			msg = "no matching events found"
			data = HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro)
			if data:
				utils.writeTS(req, data)
			return True

		Logging.debug("events found: %i" % ep.eventCount())

		# CSV and plain-text output share the text/plain content type
		if ro.format == 'csv' or not exp:
			req.setHeader('Content-Type', 'text/plain')
		else:
			req.setHeader('Content-Type', 'application/xml')

		if exp:
			return self._processRequestExp(req, ro, dbq, exp, ep)

		return self._processRequestText(req, ro, dbq, ep)
Ejemplo n.º 12
0
	def render_GET(self, req):
		"""Parse and validate the station-service GET parameters.

		Returns a BAD_REQUEST error page on invalid input.
		"""
		ro = _StationRequestOptions(req.args)
		try:
			ro.parse()
			# the GET operation supports exactly one stream filter
			ro.streams.append(ro)
		except ValueError as e:
			Logging.warning(str(e))
			return HTTP.renderErrorPage(req, http.BAD_REQUEST, str(e), ro)
Ejemplo n.º 13
0
	def render_POST(self, req):
		"""Parse and validate the dataselect POST parameters.

		Returns a BAD_REQUEST error page on invalid input.
		"""
		ro = _DataSelectRequestOptions()
		ro.userName = self.__user and self.__user.get('mail')
		try:
			ro.parsePOST(req.content)
			ro.parse()
		except ValueError as e:
			Logging.warning(str(e))
			return HTTP.renderErrorPage(req, http.BAD_REQUEST, str(e), ro)
Ejemplo n.º 14
0
	def render_GET(self, req):
		"""Parse and validate the dataselect GET parameters.

		(The original comment said "POST" -- this handler serves GET.)
		Returns a BAD_REQUEST error page on invalid input.
		"""
		ro = _DataSelectRequestOptions(req.args)
		ro.userName = self.__user and self.__user.get('mail')
		try:
			ro.parse()
			# the GET operation supports exactly one stream filter
			ro.streams.append(ro)
		except ValueError as e:
			Logging.warning(str(e))
			return HTTP.renderErrorPage(req, http.BAD_REQUEST, str(e), ro)
Ejemplo n.º 15
0
	def _finish(self):
		"""Finalize the request: log and track the outcome, then close the
		producer and the request."""
		if self.written:
			Logging.debug("%s: returned %i bytes of mseed data" % (
			              self.ro.service, self.written))
			utils.accessLog(self.req, self.ro, http.OK, self.written, None)

			if self.tracker:
				self.tracker.volume_status("fdsnws", "OK", self.written, "")
				self.tracker.request_status("END", "")
		else:
			# nothing was sent: report the empty result to the client
			HTTP.renderErrorPage(self.req, http.NO_CONTENT,
			                     "no waveform data found", self.ro)

			if self.tracker:
				self.tracker.volume_status("fdsnws", "NODATA", 0, "")
				self.tracker.request_status("END", "")

		self.req.unregisterProducer()
		self.req.finish()
Ejemplo n.º 16
0
	def _prepareRequest(self, req, ro):
		"""Validate the request options, select the exporter and process the
		request in a worker thread.

		Returns an error page for unsupported options, otherwise
		server.NOT_DONE_YET while the deferred handles the request.
		"""
		if ro.availability:
			msg = "including of availability information not supported"
			return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

		if ro.updatedAfter:
			msg = "filtering based on update time not supported"
			return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

		if ro.matchTimeSeries:
			msg = "filtering based on available time series not supported"
			return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

		# Exporter, 'None' is used for text output
		if ro.format in ro.VText:
			if ro.includeRes:
				msg = "response level output not available in text format"
				return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE,
				                            msg, ro)
			req.setHeader('Content-Type', 'text/plain')
			d = deferToThread(self._processRequestText, req, ro)
		else:
			exp = Exporter.Create(ro.Exporters[ro.format])
			if exp is None:
				# fixed typo in the client-visible message ("no available")
				msg = "output format '%s' not available, export module '%s' " \
				      "could not be loaded." % (
				      ro.format, ro.Exporters[ro.format])
				return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE,
				                            msg, ro)

			req.setHeader('Content-Type', 'application/xml')
			exp.setFormattedOutput(bool(ro.formatted))
			d = deferToThread(self._processRequestExp, req, ro, exp)

		# Process request in separate thread
		d.addCallback(utils.onRequestServed, req)
		d.addErrback(utils.onRequestError, req)

		# The request is handled by the deferred object
		return server.NOT_DONE_YET
Ejemplo n.º 17
0
class RipperSVN(object):
    def __init__(self, vulnerability, threads=0):
        self.vulnerability = vulnerability
        self.host = vulnerability['host']
        self.session = HTTP()
        self.output_folder = "output/{}/".format(self.host.host)
        self.threads = threads


    def parse_wc(self):
        self.session.get_file("", self.output_folder + "wc.db", with_data=self.vulnerability['data'])
        print "Wrote wc.db to disk"

        conn = sqlite3.connect(self.output_folder + 'wc.db')
        c = conn.cursor()
        files = []

        for row in c.execute('select local_relpath, checksum from NODES'):
            try:
                path = ".svn/pristine/" + row[1][6:8] + "/" + row[1][6:] + ".svn-base"
                url = self.host.replace(path=path)
                filename = row[0]

                if not os.path.exists(self.output_folder + filename):
                    files.append((url, self.output_folder + filename))

            except:
                pass


        if self.threads:
            with concurrent.futures.ThreadPoolExecutor(max_workers=self.threads) as executor:
                for file in files:
                    executor.submit(self.session.get_file, file[0], file[1])
        else:
            self.session.get_file(file[0], file[1])
Ejemplo n.º 18
0
	def _prepareRequest(self, req, ro):
		"""Validate the request options, load data availability if requested,
		select the exporter and process the request in a worker thread.

		Returns an error page for unsupported or unsatisfiable options,
		otherwise server.NOT_DONE_YET while the deferred handles the request.
		"""
		if ro.availability and not self._daEnabled:
			msg = "including of availability information not supported"
			return HTTP.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

		if ro.updatedAfter:
			msg = "filtering based on update time not supported"
			return HTTP.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

		if ro.matchTimeSeries and not self._daEnabled:
			msg = "filtering based on available time series not supported"
			return HTTP.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

		# load data availability if requested
		dac = None
		if ro.availability or ro.matchTimeSeries:
			dac = Application.Instance().getDACache()
			if dac is None or len(dac.extents()) == 0:
				# fixed typo in the client-visible message ("availabiltiy")
				msg = "no data availability extent information found"
				return HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro)

		# Exporter, 'None' is used for text output
		if ro.format in ro.VText:
			if ro.includeRes:
				msg = "response level output not available in text format"
				return HTTP.renderErrorPage(req, http.BAD_REQUEST, msg, ro)
			req.setHeader('Content-Type', 'text/plain')
			d = deferToThread(self._processRequestText, req, ro, dac)
		else:
			exp = Exporter.Create(ro.Exporters[ro.format])
			if exp is None:
				# fixed typo in the client-visible message ("no available")
				msg = "output format '%s' not available, export module '%s' " \
				      "could not be loaded." % (
				      ro.format, ro.Exporters[ro.format])
				return HTTP.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

			req.setHeader('Content-Type', 'application/xml')
			exp.setFormattedOutput(bool(ro.formatted))
			d = deferToThread(self._processRequestExp, req, ro, exp, dac)

		req.notifyFinish().addErrback(utils.onCancel, d)
		d.addBoth(utils.onFinish, req)

		# The request is handled by the deferred object
		return server.NOT_DONE_YET
Ejemplo n.º 19
0
def run_controller_in(controller, function, environment):
    """
    Runs controller.function() (for the app specified by the current
    folder) in the given environment.

    A precompiled controllers_<controller>_<function>.pyc is preferred
    when a compiled/ folder exists. The special function name '_TEST'
    appends TEST_CODE to the controller source instead of calling a
    single function. Raises HTTP(400) for unknown controllers or
    functions not exposed by the controller.
    """

    # if compiled should run compiled!

    folder = environment['request'].folder
    path = os.path.join(folder, 'compiled/')
    if os.path.exists(path):
        filename = os.path.join(
            path, 'controllers_%s_%s.pyc' % (controller, function))
        if not os.path.exists(filename):
            raise HTTP(400,
                       error_message_custom % 'invalid function',
                       web2py_error='invalid function')
        restricted(read_pyc(filename), environment, layer=filename)
    elif function == '_TEST':
        filename = os.path.join(folder, 'controllers/%s.py' % controller)
        if not os.path.exists(filename):
            raise HTTP(400,
                       error_message_custom % 'invalid controller',
                       web2py_error='invalid controller')
        environment['__symbols__'] = environment.keys()
        # read via context manager so the file handle is closed promptly
        # (the original leaked it via open(...).read())
        with open(filename, 'r') as source:
            code = source.read()
        code += TEST_CODE
        restricted(code, environment, layer=filename)
    else:
        filename = os.path.join(folder, 'controllers/%s.py' % controller)
        if not os.path.exists(filename):
            raise HTTP(400,
                       error_message_custom % 'invalid controller',
                       web2py_error='invalid controller')
        with open(filename, 'r') as source:
            code = source.read()
        exposed = regex_expose.findall(code)
        if function not in exposed:
            raise HTTP(400,
                       error_message_custom % 'invalid function',
                       web2py_error='invalid function')
        code = '''%s

response._vars=response._caller(%s)
''' % (code, function)
        if is_gae:
            layer = filename + ':' + function
            code = getcfs(layer, filename, lambda : \
                          compile(code.replace('\r\n', '\n'), layer,
                          'exec'))
        restricted(code, environment, filename)
    response = environment['response']
    # run registered post-processors over the controller's result
    if response.postprocessing:
        for p in response.postprocessing:
            response._vars = p(response._vars)
    # strings and generators are streamed as-is; any other non-dict result
    # is stringified (dicts are left for the view to render)
    if type(response._vars) == types.StringType:
        response.body = response._vars
    elif type(response._vars) == types.GeneratorType:
        response.body = response._vars
    elif type(response._vars) != types.DictType:
        response.body = str(response._vars)
Ejemplo n.º 20
0
    def _processRequest(self, req, ro):
        """Resolve the requested streams against the inventory and start an
        asynchronous waveform producer.

        Unsupported request options yield an error page immediately. Per
        stream, access restrictions and the configured maximum sample count
        are enforced before the stream is added to the record stream.
        Returns server.NOT_DONE_YET once the producer is registered.
        """

        if ro.quality != 'B' and ro.quality != 'M':
            msg = "quality other than 'B' or 'M' not supported"
            return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

        if ro.minimumLength:
            msg = "enforcing of minimum record length not supported"
            return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

        if ro.longestOnly:
            msg = "limitation to longest segment not supported"
            return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

        app = Application.Instance()
        ro._checkTimes(app._realtimeGap)

        maxSamples = None
        if app._samplesM is not None:
            maxSamples = app._samplesM * 1000000
            samples = 0

        # note: the original redundantly re-fetched Application.Instance()
        # here; the singleton obtained above is reused instead
        if app._trackdbEnabled:
            userid = ro.userName or app._trackdbDefaultUser
            reqid = 'ws' + str(int(round(time.time() * 1000) - 1420070400000))
            xff = req.requestHeaders.getRawHeaders("x-forwarded-for")
            if xff:
                userIP = xff[0].split(",")[0].strip()
            else:
                userIP = req.getClientIP()

            tracker = RequestTrackerDB("fdsnws", app.connection(), reqid,
                                       "WAVEFORM", userid,
                                       "REQUEST WAVEFORM " + reqid, "fdsnws",
                                       userIP, req.getClientIP())

        else:
            tracker = None

        # Open record stream
        rs = _MyRecordStream(self._rsURL, tracker, self.__bufferSize)

        # Add request streams
        # iterate over inventory networks
        for s in ro.streams:
            for net in self._networkIter(s):
                for sta in self._stationIter(net, s):
                    for loc in self._locationIter(sta, s):
                        for cha in self._streamIter(loc, s):
                            # clamp the requested window to the channel
                            # epoch; cha.start()/cha.end() presumably raise
                            # when the epoch bound is unset -- fall back to
                            # the requested time
                            try:
                                start_time = max(cha.start(), s.time.start)

                            except Exception:
                                start_time = s.time.start

                            try:
                                end_time = min(cha.end(), s.time.end)

                            except Exception:
                                end_time = s.time.end

                            # skip restricted channels the user may not
                            # access
                            if utils.isRestricted(cha) and \
                                (not self.__user or (self.__access and
                                 not self.__access.authorize(self.__user,
                                     net.code(), sta.code(), loc.code(),
                                     cha.code(), start_time, end_time))):
                                continue

                            # enforce maximum sample per request restriction
                            if maxSamples is not None:
                                try:
                                    n = cha.sampleRateNumerator()
                                    d = cha.sampleRateDenominator()
                                except ValueError:
                                    msg = "skipping stream without sampling " \
                                          "rate definition: %s.%s.%s.%s" % (
                                          net.code(), sta.code(), loc.code(),
                                          cha.code())
                                    Logging.warning(msg)
                                    continue

                                # calculate number of samples for requested
                                # time window
                                diffSec = (end_time - start_time).length()
                                samples += int(diffSec * n / d)
                                if samples > maxSamples:
                                    msg = "maximum number of %sM samples " \
                                          "exceeded" % str(app._samplesM)
                                    return HTTP.renderErrorPage(
                                        req, http.REQUEST_ENTITY_TOO_LARGE,
                                        msg, ro)

                            Logging.debug("adding stream: %s.%s.%s.%s %s - %s" \
                                          % (net.code(), sta.code(), loc.code(),
                                             cha.code(), start_time.iso(),
                                             end_time.iso()))
                            rs.addStream(net.code(), sta.code(), loc.code(),
                                         cha.code(), start_time, end_time,
                                         utils.isRestricted(cha),
                                         sta.archiveNetworkCode())

        # Build output filename
        fileName = Application.Instance()._fileNamePrefix.replace(
            "%time", time.strftime('%Y-%m-%dT%H:%M:%S')) + '.mseed'

        # Create producer for async IO
        prod = _WaveformProducer(req, ro, rs, fileName, tracker)
        req.registerProducer(prod, True)
        prod.resumeProducing()

        # The request is handled by the deferred object
        return server.NOT_DONE_YET
Ejemplo n.º 21
0
 def search_by_keyword(cls, keyword, count=15, start=0):
     """Run a keyword search: format cls.keyword_url with the keyword,
     result count and start offset, and return the HTTP.get result."""
     query_url = cls.keyword_url.format(keyword, count, start)
     return HTTP.get(query_url)
Ejemplo n.º 22
0
def run_view_in(environment):
    """
    Executes the view for the requested action.
    The view is the one specified in `response.view` or determined by the url
    or `view/generic.extension`
    It tries the pre-compiled views_controller_function.pyc before compiling it.
    """

    request = environment['request']
    response = environment['response']
    folder = request.folder
    path = os.path.join(folder, 'compiled')
    badv = 'invalid view (%s)' % response.view
    # a generic fallback view is only allowed when the action matches one
    # of the configured generic_patterns
    patterns = response.generic_patterns or []
    regex = re.compile('|'.join(fnmatch.translate(r) for r in patterns))
    short_action = '%(controller)s/%(function)s.%(extension)s' % request
    allow_generic = patterns and regex.search(short_action)
    if not isinstance(response.view, str):
        # non-string views are parsed directly (layer tagged 'file stream')
        ccode = parse_template(response.view,
                               os.path.join(folder, 'views'),
                               context=environment)
        restricted(ccode, environment, 'file stream')
    elif os.path.exists(path):
        # app ships precompiled views: try the specific one first, then the
        # generic fallback(s); first existing candidate wins
        x = response.view.replace('/', '_')
        files = ['views_%s.pyc' % x]
        if allow_generic:
            files.append('views_generic.%s.pyc' % request.extension)
        # for backward compatibility
        if request.extension == 'html':
            files.append('views_%s.pyc' % x[:-5])
            if allow_generic:
                files.append('views_generic.pyc')
        # end backward compatibility code
        for f in files:
            filename = os.path.join(path, f)
            if os.path.exists(filename):
                code = read_pyc(filename)
                restricted(code, environment, layer=filename)
                return
        raise HTTP(404,
                   rewrite.thread.routes.error_message % badv,
                   web2py_error=badv)
    else:
        # compile the view template from source, falling back to
        # generic.<extension> when permitted
        filename = os.path.join(folder, 'views', response.view)
        if not os.path.exists(filename) and allow_generic:
            response.view = 'generic.' + request.extension
            filename = os.path.join(folder, 'views', response.view)
        if not os.path.exists(filename):
            raise HTTP(404,
                       rewrite.thread.routes.error_message % badv,
                       web2py_error=badv)
        layer = filename
        if is_gae:
            # on GAE the compiled template is cached via getcfs
            ccode = getcfs(
                layer, filename, lambda: compile2(
                    parse_template(response.view,
                                   os.path.join(folder, 'views'),
                                   context=environment), layer))
        else:
            ccode = parse_template(response.view,
                                   os.path.join(folder, 'views'),
                                   context=environment)
        restricted(ccode, environment, layer)
Ejemplo n.º 23
0
def fakeWSGIOutApp(envir, responder):
    """Fake WSGI application that maps PATH_INFO segments onto URL()
    components and renders them through HTTP for testing."""
    segments = envir.get('PATH_INFO').split('/')
    status = envir.get('ERROR_CODE', 200)
    page = URL(a=segments[1], c=segments[2], f=segments[3], args=segments[4:])
    return HTTP(status, page).to(responder)
Ejemplo n.º 24
0
def regex_url_in(request, environ):
    """Rewrite and parse the incoming URL.

    Fills request.env and the application/controller/function/extension/
    args attributes of *request* from PATH_INFO (after any routes_in
    rewriting). Returns (static_file, environ) when the path addresses a
    static file, otherwise (None, environ); raises HTTP(400) for paths
    that cannot be parsed.
    """

    # ##################################################
    # select application
    # rewrite URL if routes_in is defined
    # update request.env
    # ##################################################

    regex_select(env=environ, request=request)

    if thread.routes.routes_in:
        environ = regex_filter_in(environ)

    # mirror the WSGI environ into request.env with normalized key names
    # (lowercase, dots replaced by underscores)
    for (key, value) in environ.items():
        request.env[key.lower().replace('.', '_')] = value

    path = request.env.path_info.replace('\\', '/')

    # ##################################################
    # serve if a static file
    # ##################################################

    match = regex_static.match(regex_space.sub('_', path))
    if match and match.group('x'):
        static_file = os.path.join(request.env.applications_parent,
                                   'applications', match.group('b'), 'static',
                                   match.group('x'))
        return (static_file, environ)

    # ##################################################
    # parse application, controller and function
    # ##################################################

    path = re.sub('%20', ' ', path)
    match = regex_url.match(path)
    if not match or match.group('c') == 'static':
        raise HTTP(400,
                   thread.routes.error_message % 'invalid request',
                   web2py_error='invalid path')

    # fall back to the routes' defaults for omitted URL components
    request.application = \
        regex_space.sub('_', match.group('a') or thread.routes.default_application)
    request.controller = \
        regex_space.sub('_', match.group('c') or thread.routes.default_controller)
    request.function = \
        regex_space.sub('_', match.group('f') or thread.routes.default_function)
    group_e = match.group('e')
    request.raw_extension = group_e and regex_space.sub('_', group_e) or None
    request.extension = request.raw_extension or 'html'
    request.raw_args = match.group('r')
    request.args = List([])
    if request.application in thread.routes.routes_apps_raw:
        # application is responsible for parsing args
        request.args = None
    elif request.raw_args:
        match = regex_args.match(request.raw_args.replace(' ', '_'))
        if match:
            group_s = match.group('s')
            request.args = \
                List((group_s and group_s.split('/')) or [])
            if request.args and request.args[-1] == '':
                request.args.pop()  # adjust for trailing empty arg
        else:
            raise HTTP(400,
                       thread.routes.error_message % 'invalid request',
                       web2py_error='invalid path (args)')
    return (None, environ)
Ejemplo n.º 25
0
def parse_get_post_vars(request, environ):
    """Populate request.get_vars, request.post_vars and request.vars.

    Query-string pairs always go to ``request.get_vars`` (repeated keys
    are collected into lists).  On POST/PUT/BOTH the body is parsed into
    ``request.post_vars``.  Both are merged into ``request.vars``.
    """

    # always parse variables in URL for GET, POST, PUT, DELETE, etc. in get_vars
    env = request.env
    dget = cgi.parse_qsl(env.query_string or '', keep_blank_values=1)
    for (key, value) in dget:
        if key in request.get_vars:
            # repeated query-string key: accumulate values into a list
            if isinstance(request.get_vars[key], list):
                request.get_vars[key] += [value]
            else:
                request.get_vars[key] = [request.get_vars[key]] + [value]
        else:
            request.get_vars[key] = value
        request.vars[key] = request.get_vars[key]

    # parse POST variables on POST, PUT, BOTH only in post_vars
    try:
        request.body = body = copystream_progress(request)
    except IOError:
        raise HTTP(400, "Bad Request - HTTP body is incomplete")
    if (body and env.request_method in ('POST', 'PUT', 'BOTH')):
        dpost = cgi.FieldStorage(fp=body, environ=environ, keep_blank_values=1)
        # The same detection used by FieldStorage to detect multipart POSTs
        is_multipart = dpost.type[:10] == 'multipart/'
        # rewind so the body can be re-read by the application if needed
        body.seek(0)
        # Python <= 2.5 FieldStorage merges GET values differently (see below)
        isle25 = sys.version_info[1] <= 5

        def listify(a):
            # wrap scalars in a list; leave lists untouched
            return (not isinstance(a, list) and [a]) or a
        try:
            keys = sorted(dpost)
        except TypeError:
            keys = []
        for key in keys:
            if key is None:
                continue  # not sure why cgi.FieldStorage returns None key
            dpk = dpost[key]
            # if an element is not a file replace it with its value else leave it alone
            if isinstance(dpk, list):
                value = []
                for _dpk in dpk:
                    if not _dpk.filename:
                        value.append(_dpk.value)
                    else:
                        value.append(_dpk)
            elif not dpk.filename:
                value = dpk.value
            else:
                value = dpk
            pvalue = listify(value)
            if key in request.vars:
                # key present in both GET and POST: strip the GET copies that
                # FieldStorage merged in; their position depends on the Python
                # version and on multipart vs urlencoded bodies
                gvalue = listify(request.vars[key])
                if isle25:
                    value = pvalue + gvalue
                elif is_multipart:
                    pvalue = pvalue[len(gvalue):]
                else:
                    pvalue = pvalue[:-len(gvalue)]
            request.vars[key] = value
            if len(pvalue):
                request.post_vars[key] = (len(pvalue) >
                                          1 and pvalue) or pvalue[0]
Ejemplo n.º 26
0
	def _processRequest(self, req, ro):
		"""Validate a dataselect request, resolve it against the inventory
		and stream the matching miniSEED data asynchronously.

		Returns an error page for unsupported or denied requests,
		otherwise server.NOT_DONE_YET while a _WaveformProducer delivers
		the records to the client.
		"""

		if ro.quality != 'B' and ro.quality != 'M':
			msg = "quality other than 'B' or 'M' not supported"
			return HTTP.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

		if ro.minimumLength:
			msg = "enforcing of minimum record length not supported"
			return HTTP.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

		if ro.longestOnly:
			msg = "limitation to longest segment not supported"
			return HTTP.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

		app = Application.Instance()
		ro._checkTimes(app._realtimeGap)

		# optional cap on the total number of samples per request
		maxSamples = None
		if app._samplesM is not None:
			maxSamples = app._samplesM * 1000000
			samples = 0

		# request tracking (the redundant second Application.Instance()
		# fetch that used to be here was removed; 'app' is reused)
		if app._trackdbEnabled:
			userid = ro.userName or app._trackdbDefaultUser
			reqid = 'ws' + str(int(round(time.time() * 1000) - 1420070400000))
			xff = req.requestHeaders.getRawHeaders("x-forwarded-for")
			if xff:
				userIP = xff[0].split(",")[0].strip()
			else:
				userIP = req.getClientIP()

			tracker = RequestTrackerDB("fdsnws", app.connection(), reqid,
			                           "WAVEFORM", userid,
			                           "REQUEST WAVEFORM " + reqid,
			                           "fdsnws", userIP, req.getClientIP())

		else:
			tracker = None

		# Open record stream
		rs = _MyRecordStream(self._rsURL, tracker, self.__bufferSize)

		# None: no stream matched yet; True: only denied streams seen so
		# far; False: at least one accessible stream was added
		forbidden = None

		# Add request streams
		# iterate over inventory networks
		for s in ro.streams:
			for net in self._networkIter(s):
				for sta in self._stationIter(net, s):
					for loc in self._locationIter(sta, s):
						for cha in self._streamIter(loc, s):
							# clip the requested window to the channel epoch;
							# channel start/end may be unset and raise
							try:
								start_time = max(cha.start(), s.time.start)

							except Exception:
								start_time = s.time.start

							try:
								end_time = min(cha.end(), s.time.end)

							except Exception:
								end_time = s.time.end

							if utils.isRestricted(cha) and \
							    (not self.__user or (self.__access and
							     not self.__access.authorize(self.__user,
							         net.code(), sta.code(), loc.code(),
							         cha.code(), start_time, end_time))):

								if tracker:
									net_class = 't' if net.code()[0] in "0123456789XYZ" else 'p'
									tracker.line_status(start_time, end_time,
									    net.code(), sta.code(), cha.code(), loc.code(),
									    True, net_class, True, [],
									    "fdsnws", "DENIED", 0, "")

								# becomes True on the first denial but never
								# overrides a previously granted stream
								forbidden = forbidden or (forbidden is None)
								continue

							forbidden = False

							# enforce maximum sample per request restriction
							if maxSamples is not None:
								try:
									n = cha.sampleRateNumerator()
									d = cha.sampleRateDenominator()
								except ValueError:
									msg = "skipping stream without sampling " \
									      "rate definition: %s.%s.%s.%s" % (
									      net.code(), sta.code(), loc.code(),
									      cha.code())
									Logging.warning(msg)
									continue

								# calculate number of samples for requested
								# time window
								diffSec = (end_time - start_time).length()
								samples += int(diffSec * n / d)
								if samples > maxSamples:
									msg = "maximum number of %sM samples " \
									      "exceeded" % str(app._samplesM)
									return HTTP.renderErrorPage(req,
									       http.REQUEST_ENTITY_TOO_LARGE, msg,
									       ro)

							Logging.debug("adding stream: %s.%s.%s.%s %s - %s" \
							              % (net.code(), sta.code(), loc.code(),
							                 cha.code(), start_time.iso(),
							                 end_time.iso()))
							rs.addStream(net.code(), sta.code(), loc.code(),
							             cha.code(), start_time, end_time,
							             utils.isRestricted(cha),
							             sta.archiveNetworkCode())

		if forbidden:
			if tracker:
				tracker.volume_status("fdsnws", "NODATA", 0, "")
				tracker.request_status("END", "")

			msg = "access denied"
			return HTTP.renderErrorPage(req, http.FORBIDDEN, msg, ro)

		elif forbidden is None:
			if tracker:
				tracker.volume_status("fdsnws", "NODATA", 0, "")
				tracker.request_status("END", "")

			msg = "no metadata found"
			return HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro)

		# Build output filename
		fileName = app._fileNamePrefix.replace("%time",
		           time.strftime('%Y-%m-%dT%H:%M:%S')) + '.mseed'

		# Create producer for async IO
		prod = _WaveformProducer(req, ro, rs, fileName, tracker)
		req.registerProducer(prod, True)
		prod.resumeProducing()

		# The request is handled by the deferred object
		return server.NOT_DONE_YET
Ejemplo n.º 27
0
def run_controller_in(controller, function, environment):
    """
    Runs the controller.function() (for the app specified by
    the current folder).
    It tries pre-compiled controller_function.pyc first before compiling it.

    Raises HTTP(404) if the controller file or the exposed function does
    not exist.  Returns the (post-processed) response._vars produced by
    the controller.
    """

    # if compiled should run compiled!

    folder = environment['request'].folder
    path = os.path.join(folder, 'compiled')
    badc = 'invalid controller (%s/%s)' % (controller, function)
    badf = 'invalid function (%s/%s)' % (controller, function)
    if os.path.exists(path):
        # app is byte-code compiled: run the pre-compiled controller/function
        filename = os.path.join(
            path, 'controllers_%s_%s.pyc' % (controller, function))
        if not os.path.exists(filename):
            raise HTTP(404,
                       rewrite.thread.routes.error_message % badf,
                       web2py_error=badf)
        restricted(read_pyc(filename), environment, layer=filename)
    elif function == '_TEST':
        # special pseudo-function: run the controller's doctests
        filename = os.path.join(folder, 'controllers/%s.py' % controller)
        if not os.path.exists(filename):
            raise HTTP(404,
                       rewrite.thread.routes.error_message % badc,
                       web2py_error=badc)
        environment['__symbols__'] = environment.keys()
        # 'with' guarantees the handle is closed even if read() raises
        with open(filename, 'r') as fp:
            code = fp.read()
        code += TEST_CODE
        restricted(code, environment, layer=filename)
    else:
        filename = os.path.join(folder, 'controllers/%s.py' % controller)
        if not os.path.exists(filename):
            raise HTTP(404,
                       rewrite.thread.routes.error_message % badc,
                       web2py_error=badc)
        with open(filename, 'r') as fp:
            code = fp.read()
        exposed = regex_expose.findall(code)
        if function not in exposed:
            raise HTTP(404,
                       rewrite.thread.routes.error_message % badf,
                       web2py_error=badf)
        code = "%s\nresponse._vars=response._caller(%s)\n" % (code, function)
        if is_gae:
            layer = filename + ':' + function
            code = getcfs(layer, filename, lambda: compile2(code, layer))
        restricted(code, environment, filename)
    response = environment['response']
    # renamed from 'vars' to avoid shadowing the builtin
    result = response._vars
    if response.postprocessing:
        for p in response.postprocessing:
            result = p(result)
    if isinstance(result, unicode):
        result = result.encode('utf8')
    if hasattr(result, 'xml'):
        result = result.xml()
    return result
Ejemplo n.º 28
0
def run_view_in(environment):
    """
    Render the view for the requested action.

    The view is the one named by `response.view` (or an open stream),
    falling back to `view/generic.extension`; pre-compiled
    views_controller_function.pyc files are preferred when present.
    Raises HTTP(404) when no usable view can be located.
    """

    request = environment['request']
    response = environment['response']
    folder = request.folder
    compiled_dir = os.path.join(folder, 'compiled')
    bad_view = 'invalid view (%s)' % response.view
    if not isinstance(response.view, str):
        # response.view is an open stream: compile and run it directly
        restricted(parse_template(response.view,
                                  os.path.join(folder, 'views'),
                                  context=environment),
                   environment, 'file stream')
    elif os.path.exists(compiled_dir):
        # app is byte-code compiled: pick the first matching .pyc view
        flat = response.view.replace('/', '_')
        if request.extension == 'html':
            # for backward compatibility
            candidates = [
                os.path.join(compiled_dir, 'views_%s.pyc' % flat),
                os.path.join(compiled_dir, 'views_%s.pyc' % flat[:-5]),
                os.path.join(compiled_dir, 'views_generic.html.pyc'),
                os.path.join(compiled_dir, 'views_generic.pyc'),
            ]
        else:
            candidates = [
                os.path.join(compiled_dir, 'views_%s.pyc' % flat),
                os.path.join(compiled_dir,
                             'views_generic.%s.pyc' % request.extension),
            ]
        chosen = next((c for c in candidates if os.path.exists(c)), None)
        if chosen is None:
            raise HTTP(404,
                       rewrite.thread.routes.error_message % bad_view,
                       web2py_error=bad_view)
        restricted(read_pyc(chosen), environment, layer=chosen)
    else:
        view_path = os.path.join(folder, 'views', response.view)
        if not os.path.exists(view_path):
            # fall back to the generic view for this extension
            response.view = 'generic.' + request.extension
            view_path = os.path.join(folder, 'views', response.view)
        if not os.path.exists(view_path):
            raise HTTP(404,
                       rewrite.thread.routes.error_message % bad_view,
                       web2py_error=bad_view)
        if is_gae:
            ccode = getcfs(
                view_path, view_path, lambda: compile2(
                    parse_template(response.view,
                                   os.path.join(folder, 'views'),
                                   context=environment), view_path))
        else:
            ccode = parse_template(response.view,
                                   os.path.join(folder, 'views'),
                                   context=environment)
        restricted(ccode, environment, view_path)
Ejemplo n.º 29
0
class ConnectionManager(object):
    """Discovers Jellyfin servers, connects to them and manages
    authentication state, persisting known servers via Credentials."""

    min_server_version = "10.1.0"
    server_version = min_server_version
    # NOTE(review): mutable class attribute shared by all instances until an
    # instance rebinds it (clear_data sets self.user = None) -- confirm
    # this sharing is intentional.
    user = {}
    server_id = None
    timeout = 10  # default request timeout in seconds

    def __init__(self, client):

        LOG.debug("ConnectionManager initializing...")

        self.client = client
        self.config = client.config
        self.credentials = Credentials()

        self.http = HTTP(client)

    def clear_data(self):
        """Forget the signed-in user and every stored server."""

        LOG.info("connection manager clearing data")

        self.user = None
        credentials = self.credentials.get_credentials()
        credentials['Servers'] = list()
        # get_credentials(arg) doubles as the setter/persister
        self.credentials.get_credentials(credentials)

        self.config.auth(None, None)

    def revoke_token(self):
        """Drop the stored access token (local state only)."""

        LOG.info("revoking token")

        # NOTE(review): item access on self is not defined in this class;
        # presumably __getitem__/__setitem__ are provided elsewhere -- confirm.
        self['server']['AccessToken'] = None
        self.credentials.get_credentials(self.credentials.get_credentials())

        self.config.data['auth.token'] = None

    def get_available_servers(self):
        """Return known plus freshly discovered servers, most recently
        accessed first, and persist the merged list."""

        LOG.info("Begin getAvailableServers")

        # Clone the credentials
        credentials = self.credentials.get_credentials()
        found_servers = self._find_servers(self._server_discovery())

        if not found_servers and not credentials['Servers']:  # back out right away, no point in continuing
            LOG.info("Found no servers")
            return list()

        servers = list(credentials['Servers'])
        self._merge_servers(servers, found_servers)

        try:
            servers.sort(key=lambda x: datetime.strptime(x['DateLastAccessed'], "%Y-%m-%dT%H:%M:%SZ"), reverse=True)
        except TypeError:
            # datetime.strptime can raise TypeError in threaded use;
            # fall back to time.strptime
            servers.sort(key=lambda x: datetime(*(time.strptime(x['DateLastAccessed'], "%Y-%m-%dT%H:%M:%SZ")[0:6])), reverse=True)

        credentials['Servers'] = servers
        self.credentials.get_credentials(credentials)

        return servers

    def login(self, server, username, password=None, clear=True, options=None):
        """Authenticate *username*/*password* against *server*.

        ``clear`` is retained for backwards compatibility and unused.
        Returns the authentication result dict, or False on failure.
        """
        # None default avoids the shared-mutable-default-argument pitfall
        options = options or {}

        if not username:
            raise AttributeError("username cannot be empty")

        if not server:
            raise AttributeError("server cannot be empty")

        try:
            request = {
                'type': "POST",
                'url': self.get_jellyfin_url(server, "Users/AuthenticateByName"),
                'json': {
                    'Username': username,
                    'Pw': password or ""
                }
            }

            result = self._request_url(request, False)
        except Exception as error:  # Failed to login
            LOG.exception(error)
            return False
        else:
            self._on_authenticated(result, options)

        return result

    def connect_to_address(self, address, options=None):
        """Probe *address* and, if reachable, connect to that server."""
        options = options or {}

        if not address:
            return False

        address = self._normalize_address(address)

        try:
            public_info = self._try_connect(address, options=options)
            LOG.info("connectToAddress %s succeeded", address)
            server = {
                'address': address,
            }
            self._update_server_info(server, public_info)
            server = self.connect_to_server(server, options)
            if server is False:
                LOG.error("connectToAddress %s failed", address)
                return {'State': CONNECTION_STATE['Unavailable']}

            return server
        except Exception as error:
            LOG.exception(error)
            LOG.error("connectToAddress %s failed", address)
            return {'State': CONNECTION_STATE['Unavailable']}

    def connect_to_server(self, server, options=None):
        """Connect to a known server dict and validate any stored token."""
        options = options or {}

        LOG.info("begin connectToServer")
        timeout = self.timeout

        try:
            result = self._try_connect(server['address'], timeout, options)
            LOG.info("calling onSuccessfulConnection with server %s", server.get('Name'))
            credentials = self.credentials.get_credentials()
            return self._after_connect_validated(server, credentials, result, True, options)

        except Exception as e:
            LOG.info("Failing server connection. ERROR msg: {}".format(e))
            return {'State': CONNECTION_STATE['Unavailable']}

    def connect(self, options=None):
        """Connect to the best available server."""
        LOG.info("Begin connect")
        return self._connect_to_servers(self.get_available_servers(), options or {})

    def jellyfin_user_id(self):
        return self.get_server_info(self.server_id)['UserId']

    def jellyfin_token(self):
        return self.get_server_info(self.server_id)['AccessToken']

    def get_server_info(self, server_id):
        """Return the stored server dict for *server_id*; {} when the id
        is None, implicitly None when the id is unknown."""

        if server_id is None:
            LOG.info("server_id is empty")
            return {}

        servers = self.credentials.get_credentials()['Servers']

        for server in servers:
            if server['Id'] == server_id:
                return server

    def get_public_users(self):
        return self.client.jellyfin.get_public_users()

    def get_jellyfin_url(self, base, handler):
        """Build a server API URL from a base address and handler path."""
        return "%s/emby/%s" % (base, handler)

    def _request_url(self, request, headers=True):
        """Execute a HTTP request dict, applying the default timeout and
        (optionally) the default headers."""

        request['timeout'] = request.get('timeout') or self.timeout
        if headers:
            self._get_headers(request)

        try:
            return self.http.request(request)
        except Exception as error:
            LOG.exception(error)
            raise

    def _add_app_info(self):
        return "%s/%s" % (self.config.data['app.name'], self.config.data['app.version'])

    def _get_headers(self, request):
        """Fill default headers into the request dict in place."""

        headers = request.setdefault('headers', {})

        if request.get('dataType') == "json":
            headers['Accept'] = "application/json"
            request.pop('dataType')

        headers['X-Application'] = self._add_app_info()
        headers['Content-type'] = request.get(
            'contentType',
            'application/x-www-form-urlencoded; charset=UTF-8'
        )

    def _connect_to_servers(self, servers, options):
        """Try the given servers, preferring the most recently used one;
        otherwise ask the caller to pick a server."""

        LOG.info("Begin connectToServers, with %s servers", len(servers))
        result = {}

        if len(servers) == 1:
            result = self.connect_to_server(servers[0], options)
            LOG.debug("resolving connectToServers with result['State']: %s", result)

            return result

        first_server = self._get_last_used_server()
        # See if we have any saved credentials and can auto sign in
        if first_server is not None and first_server['DateLastAccessed'] != "2001-01-01T00:00:00Z":
            result = self.connect_to_server(first_server, options)

            if result['State'] in (CONNECTION_STATE['SignedIn'], CONNECTION_STATE['Unavailable']):
                return result

        return {
            'Servers': servers,
            'State': result.get('State') or CONNECTION_STATE['ServerSelection'],
        }

    def _try_connect(self, url, timeout=None, options=None):
        """GET system/info/public from *url*; raises on failure."""
        options = options or {}

        url = self.get_jellyfin_url(url, "system/info/public")
        LOG.info("tryConnect url: %s", url)

        return self._request_url({
            'type': "GET",
            'url': url,
            'dataType': "json",
            'timeout': timeout,
            'verify': options.get('ssl'),
            'retry': False
        })

    def _server_discovery(self):
        """Broadcast a UDP discovery probe and collect server replies
        until the socket times out."""

        MULTI_GROUP = ("<broadcast>", 7359)
        # bytes, not str: socket.sendto() requires bytes on Python 3
        MESSAGE = b"who is JellyfinServer?"

        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        sock.settimeout(1.0)  # This controls the socket.timeout exception

        sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 20)
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
        sock.setsockopt(socket.SOL_IP, socket.IP_MULTICAST_LOOP, 1)
        # SO_REUSEADDR is a socket-level option; the previous IPPROTO_IP
        # level was wrong and could raise OSError
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)

        LOG.debug("MultiGroup      : %s", str(MULTI_GROUP))
        LOG.debug("Sending UDP Data: %s", MESSAGE)

        servers = []

        try:
            sock.sendto(MESSAGE, MULTI_GROUP)
        except Exception as error:
            LOG.exception(error)
            return servers

        while True:
            try:
                data, addr = sock.recvfrom(1024)  # buffer size
                servers.append(json.loads(data))

            except socket.timeout:
                LOG.info("Found Servers: %s", servers)
                return servers

            except Exception as e:
                LOG.exception("Error trying to find servers: %s", e)
                return servers

    def _get_last_used_server(self):
        """Return the most recently accessed stored server, or None."""

        servers = self.credentials.get_credentials()['Servers']

        if not len(servers):
            return

        try:
            servers.sort(key=lambda x: datetime.strptime(x['DateLastAccessed'], "%Y-%m-%dT%H:%M:%SZ"), reverse=True)
        except TypeError:
            servers.sort(key=lambda x: datetime(*(time.strptime(x['DateLastAccessed'], "%Y-%m-%dT%H:%M:%SZ")[0:6])), reverse=True)

        return servers[0]

    def _merge_servers(self, list1, list2):
        """Merge the entries of list2 into list1 in place; returns list1."""

        for candidate in list2:
            try:
                self.credentials.add_update_server(list1, candidate)
            except KeyError:
                continue

        return list1

    def _find_servers(self, found_servers):
        """Normalize raw discovery replies into server dicts."""

        servers = []

        for found_server in found_servers:

            server = self._convert_endpoint_address_to_manual_address(found_server)

            servers.append({
                'Id': found_server['Id'],
                'address': server or found_server['Address'],
                'Name': found_server['Name']
            })

        return servers

    # TODO: Make IPv6 compatible
    def _convert_endpoint_address_to_manual_address(self, info):
        """Combine the reply's endpoint host with the advertised port.

        Returns a normalized address, or None when the reply lacks the
        needed fields or the port is not numeric.
        """

        if info.get('Address') and info.get('EndpointAddress'):
            address = info['EndpointAddress'].split(':')[0]

            # Determine the port, if any
            parts = info['Address'].split(':')
            if len(parts) > 1:
                port_string = parts[len(parts) - 1]

                try:
                    address += ":%s" % int(port_string)
                    return self._normalize_address(address)
                except ValueError:
                    pass

        return None

    def _normalize_address(self, address):
        """Ensure the address has a scheme and strip default ports."""
        # TODO: Try HTTPS first, then HTTP if that fails.
        if '://' not in address:
            address = 'http://' + address

        # Attempt to correct bad input
        url = urllib3.util.parse_url(address.strip())

        if url.scheme is None:
            url = url._replace(scheme='http')

        if url.scheme == 'http' and url.port == 80:
            url = url._replace(port=None)

        if url.scheme == 'https' and url.port == 443:
            url = url._replace(port=None)

        return url.url

    def _save_user_info_into_credentials(self, server, user):
        """Persist a minimal offline record of *user* for *server*."""

        info = {
            'Id': user['Id'],
            'IsSignedInOffline': True
        }
        self.credentials.add_update_user(server, info)

    def _after_connect_validated(self, server, credentials, system_info, verify_authentication, options):
        """Finalize a successful connection: validate any stored token,
        persist the server entry and update config; returns a result dict
        carrying the connection State."""
        if options.get('enableAutoLogin') is False:

            self.config.data['auth.user_id'] = server.pop('UserId', None)
            self.config.data['auth.token'] = server.pop('AccessToken', None)

        elif verify_authentication and server.get('AccessToken'):
            if self._validate_authentication(server, options) is not False:

                self.config.data['auth.user_id'] = server['UserId']
                self.config.data['auth.token'] = server['AccessToken']
                return self._after_connect_validated(server, credentials, system_info, False, options)

            return {'State': CONNECTION_STATE['Unavailable']}

        self._update_server_info(server, system_info)
        self.server_version = system_info['Version']

        if options.get('updateDateLastAccessed') is not False:
            server['DateLastAccessed'] = datetime.now().strftime('%Y-%m-%dT%H:%M:%SZ')

        self.credentials.add_update_server(credentials['Servers'], server)
        self.credentials.get_credentials(credentials)
        self.server_id = server['Id']

        # Update configs
        self.config.data['auth.server'] = server['address']
        self.config.data['auth.server-name'] = server['Name']
        # fixed key: was the typo'd 'auth.server=id'
        self.config.data['auth.server-id'] = server['Id']
        self.config.data['auth.ssl'] = options.get('ssl', self.config.data['auth.ssl'])

        result = {
            'Servers': [server]
        }

        result['State'] = CONNECTION_STATE['SignedIn'] if server.get('AccessToken') else CONNECTION_STATE['ServerSignIn']
        # Connected
        return result

    def _validate_authentication(self, server, options=None):
        """Check the stored token against System/Info; on failure clear
        the server's token/user and return False."""
        options = options or {}

        try:
            system_info = self._request_url({
                'type': "GET",
                'url': self.get_jellyfin_url(server['address'], "System/Info"),
                'verify': options.get('ssl'),
                'dataType': "json",
                'headers': {
                    'X-MediaBrowser-Token': server['AccessToken']
                }
            })
            self._update_server_info(server, system_info)
        except Exception as error:
            LOG.exception(error)

            server['UserId'] = None
            server['AccessToken'] = None

            return False

    def _update_server_info(self, server, system_info):
        """Copy name/id (and address if present) from a system-info reply
        into the *server* dict in place; no-op when either is None."""

        if server is None or system_info is None:
            return

        server['Name'] = system_info['ServerName']
        server['Id'] = system_info['Id']

        if system_info.get('address'):
            server['address'] = system_info['address']

    def _on_authenticated(self, result, options=None):
        """Persist tokens and user info after a successful login."""
        options = options or {}

        credentials = self.credentials.get_credentials()

        self.config.data['auth.user_id'] = result['User']['Id']
        self.config.data['auth.token'] = result['AccessToken']

        for server in credentials['Servers']:
            if server['Id'] == result['ServerId']:
                found_server = server
                break
        else:
            return  # No server found

        if options.get('updateDateLastAccessed') is not False:
            found_server['DateLastAccessed'] = datetime.now().strftime('%Y-%m-%dT%H:%M:%SZ')

        found_server['UserId'] = result['User']['Id']
        found_server['AccessToken'] = result['AccessToken']

        self.credentials.add_update_server(credentials['Servers'], found_server)
        self._save_user_info_into_credentials(found_server, result['User'])
        self.credentials.get_credentials(credentials)
Ejemplo n.º 30
0
	def _processRequestExp(self, req, ro, dbq, exp, ep):
		"""Load the events in *ep* together with their origins, magnitudes,
		focal mechanisms and (optionally) picks, then export *ep* to the
		client via *exp*.

		Enforces the configured maximum object count throughout and aborts
		when the client disconnects. Returns True on success.
		"""
		objCount = ep.eventCount()
		maxObj = Application.Instance()._queryObjects

		if not HTTP.checkObjects(req, objCount, maxObj):
			return False

		pickIDs = set()
		if ro.picks is None:
			ro.picks = True

		# add related information
		for iEvent in xrange(ep.eventCount()):
			if req._disconnected:
				return False
			e = ep.event(iEvent)
			if self._hideAuthor:
				self._removeAuthor(e)

			originIDs = set()
			magIDs = set()
			magIDs.add(e.preferredMagnitudeID())

			# eventDescriptions and comments
			objCount += dbq.loadEventDescriptions(e)
			if ro.comments:
				objCount += self._loadComments(dbq, e)
			if not HTTP.checkObjects(req, objCount, maxObj):
				return False

			# origin references: either all or preferred only
			dbIter = dbq.getObjects(e, DataModel.OriginReference.TypeInfo())
			for obj in dbIter:
				oRef = DataModel.OriginReference.Cast(obj)
				if oRef is None:
					continue
				if ro.allOrigins:
					e.add(oRef)
					originIDs.add(oRef.originID())
				elif oRef.originID() == e.preferredOriginID():
					e.add(oRef)
					originIDs.add(oRef.originID())
					dbIter.close()

			objCount += e.originReferenceCount()

			# focalMechanism references: either none, preferred only or all
			if ro.fm or ro.allFMs:
				dbIter = dbq.getObjects(e, DataModel.FocalMechanismReference.TypeInfo())
				for obj in dbIter:
					fmRef = DataModel.FocalMechanismReference.Cast(obj)
					if fmRef is None:
						continue
					if ro.allFMs:
						e.add(fmRef)
					elif fmRef.focalMechanismID() == e.preferredFocalMechanismID():
						e.add(fmRef)
						dbIter.close()

			objCount += e.focalMechanismReferenceCount()

			if not HTTP.checkObjects(req, objCount, maxObj):
				return False

			# focal mechanisms: process before origins to add derived origin to
			# originID list since it may be missing from origin reference list
			for iFMRef in xrange(e.focalMechanismReferenceCount()):
				if req._disconnected:
					return False
				fmID = e.focalMechanismReference(iFMRef).focalMechanismID()
				obj = dbq.getObject(DataModel.FocalMechanism.TypeInfo(), fmID)
				fm = DataModel.FocalMechanism.Cast(obj)
				if fm is None:
					continue

				ep.add(fm)
				objCount += 1
				if self._hideAuthor:
					self._removeAuthor(fm)

				# comments
				if ro.comments:
					objCount += self._loadComments(dbq, fm)

				# momentTensors
				objCount += dbq.loadMomentTensors(fm)

				if not HTTP.checkObjects(req, objCount, maxObj):
					return False

				for iMT in xrange(fm.momentTensorCount()):
					mt = fm.momentTensor(iMT)

					originIDs.add(mt.derivedOriginID())
					magIDs.add(mt.momentMagnitudeID())

					if self._hideAuthor:
						self._removeAuthor(mt)

					if ro.comments:
						# fixed: the previous inner loop re-used the loop
						# variable iMT and loaded this tensor's comments once
						# per moment tensor instead of once
						objCount += self._loadComments(dbq, mt)

					objCount += dbq.loadDataUseds(mt)
					objCount += dbq.loadMomentTensorPhaseSettings(mt)
					if ro.staMTs:
						objCount += dbq.loadMomentTensorStationContributions(mt)
						for iStaMT in xrange(mt.momentTensorStationContributionCount()):
							objCount += dbq.load(mt.momentTensorStationContribution(iStaMT))

					if not HTTP.checkObjects(req, objCount, maxObj):
						return False

			# find ID of origin containing preferred Magnitude
			if e.preferredMagnitudeID():
				obj = dbq.getObject(DataModel.Magnitude.TypeInfo(),
				                    e.preferredMagnitudeID())
				m = DataModel.Magnitude.Cast(obj)
				if m is not None:
					oID = dbq.parentPublicID(m)
					if oID:
						originIDs.add(oID)

			# origins
			for oID in originIDs:
				if req._disconnected:
					return False
				obj = dbq.getObject(DataModel.Origin.TypeInfo(), oID)
				o = DataModel.Origin.Cast(obj)
				if o is None:
					continue

				ep.add(o)
				objCount += 1
				if self._hideAuthor:
					self._removeAuthor(o)

				# comments
				if ro.comments:
					objCount += self._loadComments(dbq, o)
				if not HTTP.checkObjects(req, objCount, maxObj):
					return False

				# magnitudes
				dbIter = dbq.getObjects(oID, DataModel.Magnitude.TypeInfo())
				for obj in dbIter:
					mag = DataModel.Magnitude.Cast(obj)
					if mag is None:
						continue
					if ro.allMags:
						o.add(mag)
					elif mag.publicID() in magIDs:
						o.add(mag)
						dbIter.close()

					if self._hideAuthor:
						self._removeAuthor(mag)

				objCount += o.magnitudeCount()
				if ro.comments:
					for iMag in xrange(o.magnitudeCount()):
						objCount += self._loadComments(dbq, o.magnitude(iMag))
				if not HTTP.checkObjects(req, objCount, maxObj):
					return False

				# TODO station magnitudes, amplitudes
				# - added pick id for each pick referenced by amplitude

				# arrivals
				if ro.arrivals:
					objCount += dbq.loadArrivals(o)
					# fixed: previously tested the always-truthy bound method
					# self._removeAuthor instead of the _hideAuthor flag, so
					# authors were stripped from arrivals unconditionally
					if self._hideAuthor:
						for iArrival in xrange(o.arrivalCount()):
							self._removeAuthor(o.arrival(iArrival))

					# collect pick IDs if requested
					if ro.picks:
						for iArrival in xrange(o.arrivalCount()):
							pickIDs.add(o.arrival(iArrival).pickID())

				if not HTTP.checkObjects(req, objCount, maxObj):
					return False

		# picks
		if pickIDs:
			objCount += len(pickIDs)
			if not HTTP.checkObjects(req, objCount, maxObj):
				return False

			for pickID in pickIDs:
				obj = dbq.getObject(DataModel.Pick.TypeInfo(), pickID)
				pick = DataModel.Pick.Cast(obj)
				if pick is not None:
					if self._hideAuthor:
						self._removeAuthor(pick)
					if ro.comments:
						objCount += self._loadComments(dbq, pick)
					ep.add(pick)
				if not HTTP.checkObjects(req, objCount, maxObj):
					return False

		# write response
		sink = utils.Sink(req)
		if not exp.write(sink, ep):
			return False
		Logging.debug("%s: returned %i events and %i origins (total " \
		               "objects/bytes: %i/%i)" % (ro.service, ep.eventCount(),
		               ep.originCount(), objCount, sink.written))
		utils.accessLog(req, ro, http.OK, sink.written, None)
		return True
Ejemplo n.º 31
0
class FDSNEvent(resource.Resource):
	isLeaf = True

	#---------------------------------------------------------------------------
	def __init__(self, hideAuthor = False, evaluationMode = None,
	             eventTypeWhitelist = None, eventTypeBlacklist = None,
	             formatList = None):
		self._hideAuthor = hideAuthor
		self._evaluationMode = evaluationMode
		self._eventTypeWhitelist = eventTypeWhitelist
		self._eventTypeBlacklist = eventTypeBlacklist
		self._formatList = formatList


	#---------------------------------------------------------------------------
	def render_OPTIONS(self, req):
		req.setHeader('Access-Control-Allow-Origin', '*')
		req.setHeader('Access-Control-Allow-Methods', 'GET, POST, OPTIONS')
		req.setHeader('Access-Control-Allow-Headers',
                      'Accept, Content-Type, X-Requested-With, Origin')
		req.setHeader('Content-Type', 'text/plain')
		return ""


	#---------------------------------------------------------------------------
	def render_GET(self, req):
		# Parse and validate GET parameters
		ro = _EventRequestOptions(req.args)
		try:
			ro.parse()
		except ValueError, e:
			Logging.warning(str(e))
			return HTTP.renderErrorPage(req, http.BAD_REQUEST, str(e), ro)

		# Catalog filter is not supported
		if ro.catalogs:
			msg = "catalog filter not supported"
			return HTTP.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

		# updateafter not implemented
		if ro.updatedAfter:
			msg = "filtering based on update time not supported"
			return HTTP.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

		if self._formatList is not None and ro.format not in self._formatList:
			msg = "output format '%s' not available" % ro.format
			return HTTP.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

		# Exporter, 'None' is used for text output
		if ro.format in ro.VText:
			exp = None
		else:
			exp = Exporter.Create(ro.Exporters[ro.format])
			if exp:
				exp.setFormattedOutput(bool(ro.formatted))
			else:
				msg = "output format '%s' not available, export module '%s' could " \
				      "not be loaded." % (ro.format, ro.Exporters[ro.format])
				return HTTP.renderErrorPage(req, http.BAD_REQUEST, msg, ro)

		# Create database query
		db = DatabaseInterface.Open(Application.Instance().databaseURI())
		if db is None:
			msg = "could not connect to database: %s" % dbq.errorMsg()
			return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

		dbq = DataModel.DatabaseQuery(db)

		# Process request in separate thread
		d = deferToThread(self._processRequest, req, ro, dbq, exp)
		req.notifyFinish().addErrback(utils.onCancel, d)
		d.addBoth(utils.onFinish, req)

		# The request is handled by the deferred object
		return server.NOT_DONE_YET
Ejemplo n.º 32
0
def parse_get_post_vars(request, environ):
    """Fill request.get_vars, request.post_vars and request.vars from the
    query string and the request body (form-encoded, multipart or JSON).

    NOTE(review): mutates *request* in place; no return value.
    """

    # always parse variables in URL for GET, POST, PUT, DELETE, etc. in get_vars
    env = request.env
    dget = cgi.parse_qsl(env.query_string or '', keep_blank_values=1)
    for (key, value) in dget:
        # repeated query-string keys accumulate into a list
        if key in request.get_vars:
            if isinstance(request.get_vars[key], list):
                request.get_vars[key] += [value]
            else:
                request.get_vars[key] = [request.get_vars[key]] + [value]
        else:
            request.get_vars[key] = value
        request.vars[key] = request.get_vars[key]

    try:
        # copystream_progress wraps the input stream (presumably so upload
        # progress can be tracked); may raise IOError on a truncated body
        request.body = body = copystream_progress(request)
    except IOError:
        raise HTTP(400, "Bad Request - HTTP body is incomplete")

    #if content-type is application/json, we must read the body
    is_json = env.get('http_content_type', '')[:16] == 'application/json'

    if is_json:
        try:
            json_vars = sj.load(body)
            body.seek(0)
        except:
            # incoherent request bodies can still be parsed "ad-hoc"
            json_vars = {}
            pass
        # update vars and get_vars with what was posted as json
        request.get_vars.update(json_vars)
        request.vars.update(json_vars)

    # parse POST variables on POST, PUT, BOTH only in post_vars
    if (body and env.request_method in ('POST', 'PUT', 'DELETE', 'BOTH')):
        dpost = cgi.FieldStorage(fp=body, environ=environ, keep_blank_values=1)
        # The same detection used by FieldStorage to detect multipart POSTs
        is_multipart = dpost.type[:10] == 'multipart/'
        body.seek(0)

        def listify(a):
            # wrap scalars in a list; lists pass through unchanged
            return (not isinstance(a, list) and [a]) or a

        try:
            keys = sorted(dpost)
        except TypeError:
            keys = []
        for key in keys:
            if key is None:
                continue  # not sure why cgi.FieldStorage returns None key
            dpk = dpost[key]
            # if en element is not a file replace it with its value else leave it alone
            if isinstance(dpk, list):
                value = []
                for _dpk in dpk:
                    if not _dpk.filename:
                        value.append(_dpk.value)
                    else:
                        value.append(_dpk)
            elif not dpk.filename:
                value = dpk.value
            else:
                value = dpk
            pvalue = listify(value)
            if key in request.vars:
                # key also appeared in the query string; depending on the
                # cgi module version (ISLE25 flag) FieldStorage may have
                # merged GET values into the POST result, so separate them
                gvalue = listify(request.vars[key])
                if ISLE25:
                    value = pvalue + gvalue
                elif is_multipart:
                    pvalue = pvalue[len(gvalue):]
                else:
                    pvalue = pvalue[:-len(gvalue)]
            request.vars[key] = value
            if len(pvalue):
                request.post_vars[key] = (len(pvalue) > 1
                                          and pvalue) or pvalue[0]
        if is_json:
            # update post_vars with what was posted as json
            request.post_vars.update(json_vars)
Ejemplo n.º 33
0
# _*_ coding: utf-8 _*_
__author__ = '54h50m'
__date__ = '2018/7/12 23:16'

from http import HTTP

class YuShuBook:
    """Thin client for the t.yushu.im book API."""

    # endpoint templates: single book by ISBN, and keyword search
    isbn_url = 'http://t.yushu.im/v2/book/isbn/{}'
    keyword_url = 'http://t.yushu.im/v2/book/search?q={}&count={}&start={}'

    def search_by_isbn(self, isbn):
        """Query a single book by ISBN; returns the decoded result (dict)."""
        url = YuShuBook.isbn_url.format(isbn)
        result = HTTP.get(url)
        # dict
        return result

    def search_by_keyword(self, url, count=15, start=0):
        """Query books by keyword with paging; returns the decoded result.

        BUG FIX: this was an unimplemented stub ('pass') although
        keyword_url was already defined.  The first parameter keeps its
        historical (misleading) name 'url' for backward compatibility;
        it is actually the search keyword.
        """
        result = HTTP.get(YuShuBook.keyword_url.format(url, count, start))
        return result
Ejemplo n.º 34
0
 def search_by_keyword(cls, keyword):
     """Return the API response for a keyword search."""
     return HTTP.get(cls.KEYWORD_URL.format(keyword))
Ejemplo n.º 35
0
	def _processRequest(self, req, ro):
		"""Process a validated dataselect request.

		Adds every matching inventory stream to a record stream and
		registers an asynchronous producer that writes the waveform data
		to the client.

		req -- Twisted HTTP request
		ro  -- parsed data select request options
		"""

		# only quality codes 'B' and 'M' are handled by this service
		if ro.quality != 'B' and ro.quality != 'M':
			msg = "quality other than 'B' or 'M' not supported"
			return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

		if ro.minimumLength:
			msg = "enforcing of minimum record length not supported"
			return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

		if ro.longestOnly:
			msg = "limitation to longest segment not supported"
			return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

		app = Application.Instance()
		# presumably clamps the request time window using the configured
		# real-time gap -- confirm against _DataSelectRequestOptions
		ro._checkTimes(app._realtimeGap)

		# Open record stream
		rs = RecordStream.Open(self._rsURL)
		if rs is None:
			msg = "could not open record stream"
			return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

		# optional global cap on the total number of samples per request
		maxSamples = None
		if app._samplesM is not None:
			maxSamples = app._samplesM * 1000000
			samples = 0

		# Add request streams
		# iterate over inventory networks
		for s in ro.streams:
			for net in self._networkIter(s):
				# restricted objects are only served to authenticated users
				if ro.userName is None and utils.isRestricted(net):
					continue
				for sta in self._stationIter(net, s):
					if ro.userName is None and utils.isRestricted(sta):
						continue
					for loc in self._locationIter(sta, s):
						for cha in self._streamIter(loc, s):
							# enforce maximum sample per request restriction
							if maxSamples is not None:
								try:
									n = cha.sampleRateNumerator()
									d = cha.sampleRateDenominator()
								except ValueException:
									# NOTE(review): ValueException is presumably
									# the SeisComP Core exception imported
									# elsewhere in this file -- confirm it is
									# in scope, otherwise this raises NameError
									msg = "skipping stream without sampling " \
									      "rate definition: %s.%s.%s.%s" % (
									      net.code(), sta.code(), loc.code(),
									      cha.code())
									Logging.warning(msg)
									continue

								# calculate number of samples for requested
								# time window
								diffSec = (s.time.end - s.time.start).length()
								samples += int(diffSec * n / d)
								if samples > maxSamples:
									msg = "maximum number of %sM samples " \
									      "exceeded" % str(app._samplesM)
									return HTTP.renderErrorPage(req,
									       http.REQUEST_ENTITY_TOO_LARGE, msg,
									       ro)

							Logging.debug("adding stream: %s.%s.%s.%s %s - %s" \
							              % (net.code(), sta.code(), loc.code(),
							                 cha.code(), s.time.start.iso(),
							                 s.time.end.iso()))
							rs.addStream(net.code(), sta.code(), loc.code(),
							             cha.code(), s.time.start, s.time.end)

		# Build output filename
		fileName = Application.Instance()._fileNamePrefix+'.mseed'

		# Create producer for async IO
		req.registerProducer(_WaveformProducer(req, ro, rs, fileName), False)

		# The request is handled by the deferred object
		return server.NOT_DONE_YET
Ejemplo n.º 36
0
	def _processRequest(self, req, ro, dbq, exp):
		"""Query events matching the request options and export them.

		Loads events together with their descriptions, origin references,
		origins, magnitudes, arrivals and (optionally) picks while
		enforcing the configured maximum object count, then writes the
		result through the exporter *exp*.

		Returns True if a response was written, False on error/abort.
		"""
		if req._disconnected:
			return False

		# read-only access: object registration is not needed
		DataModel.PublicObject.SetRegistrationEnabled(False)
		# upper bound for the number of objects returned by one query
		maxObj = Application.Instance()._queryObjects

		# query event(s)
		ep = DataModel.EventParameters()
		if ro.eventIDs:
			# explicit list of event IDs requested
			for eID in ro.eventIDs:
				event = dbq.getEventByPublicID(eID)
				event = DataModel.Event.Cast(event)
				if event:
					ep.add(event)
		else:
			# search events by the remaining request options
			self._findEvents(ep, ro, dbq)

		if ep.eventCount() == 0:
			msg = "No matching events found"
			utils.writeTS(req,
			              HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro))
			return False

		objCount = ep.eventCount()
		Logging.debug("events found: %i" % objCount)
		if not HTTP.checkObjects(req, objCount, maxObj): return False

		pickIDs = set()

		# add related information
		for iEvent in xrange(ep.eventCount()):
			e = ep.event(iEvent)
			eID = e.publicID()  # NOTE(review): unused

			# eventDescriptions and comments
			objCount += dbq.loadEventDescriptions(e)
			if ro.comments:
				objCount += dbq.loadComments(e)
			if not HTTP.checkObjects(req, objCount, maxObj): return False

			# origin references: either all or preferred only
			dbIter = dbq.getObjects(e, DataModel.OriginReference.TypeInfo())
			for obj in dbIter:
				oRef = DataModel.OriginReference.Cast(obj)
				if oRef is None:
					continue
				if ro.allOrigins:
					e.add(oRef)
				elif oRef.originID() == e.preferredOriginID():
					# preferred origin found, stop iterating
					e.add(oRef)
					dbIter.close()
			objCount += e.originReferenceCount()
			# TODO: load FocalMechanismReferences???
			if not HTTP.checkObjects(req, objCount, maxObj): return False

			# origins
			for iORef in xrange(e.originReferenceCount()):
				oID = e.originReference(iORef).originID()
				obj = dbq.getObject(DataModel.Origin.TypeInfo(), oID)
				o = DataModel.Origin.Cast(obj)
				if o is None:
					continue

				ep.add(o)
				objCount += 1

				# comments
				if ro.comments:
					objCount += dbq.loadComments(o)
				if not HTTP.checkObjects(req, objCount, maxObj): return False

				# magnitudes: either all or the event's preferred one only
				dbIter = dbq.getObjects(oID, DataModel.Magnitude.TypeInfo())
				for obj in dbIter:
					mag = DataModel.Magnitude.Cast(obj)
					if mag is None:
						continue
					if ro.allMags:
						o.add(mag)
					elif mag.publicID() == e.preferredMagnitudeID():
						o.add(mag)
						dbIter.close()
				objCount += o.magnitudeCount()
				if ro.comments:
					for iMag in xrange(o.magnitudeCount()):
						objCount += dbq.loadComments(o.magnitude(iMag))
				if not HTTP.checkObjects(req, objCount, maxObj): return False

				# arrivals
				if ro.arrivals:
					objCount += dbq.loadArrivals(o)

					# collect pick IDs if requested
					if ro.picks:
						for iArrival in xrange(o.arrivalCount()):
							pickIDs.add(o.arrival(iArrival).pickID())

				if not HTTP.checkObjects(req, objCount, maxObj): return False

		# picks referenced by the collected arrivals
		if pickIDs:
			objCount += len(pickIDs)
			if not HTTP.checkObjects(req, objCount, maxObj): return False
			for pickID in pickIDs:
				obj = dbq.getObject(DataModel.Pick.TypeInfo(), pickID)
				pick = DataModel.Pick.Cast(obj)
				if pick is not None:
					ep.add(pick)


		if ro.output == "csv":
			req.setHeader("Content-Type", "text/plain")
		else:
			req.setHeader("Content-Type", "application/xml")
		sink = utils.Sink(req)
		if not exp.write(sink, ep):
			return False

		Logging.notice("%s: returned %i events and %i origins (total " \
		               "objects/bytes: %i/%i)" % (ro.service, ep.eventCount(),
		               ep.originCount(), objCount, sink.written))
		utils.accessLog(req, ro, http.OK, sink.written, None)
		return True
Ejemplo n.º 37
0
    def _processRequest(self, req, ro, dbq, exp):
        """Entry point for event queries: collect the matching events,
        then delegate rendering to the exporter or the text backend."""
        if req._disconnected:
            return False

        # read-only access: object registration is not needed
        DataModel.PublicObject.SetRegistrationEnabled(False)

        ep = DataModel.EventParameters()

        if not ro.eventIDs:
            # no explicit IDs: search by the remaining request options
            self._findEvents(ep, ro, dbq)
        else:
            for publicID in ro.eventIDs:
                evt = DataModel.Event.Cast(dbq.getEventByPublicID(publicID))
                if not evt:
                    continue

                # apply event type white/black list filtering
                if self._eventTypeWhitelist or self._eventTypeBlacklist:
                    typeName = None
                    try:
                        typeName = DataModel.EEventTypeNames_name(evt.type())
                    except ValueError:
                        pass
                    if self._eventTypeWhitelist and \
                       typeName not in self._eventTypeWhitelist:
                        continue
                    if self._eventTypeBlacklist and \
                       typeName in self._eventTypeBlacklist:
                        continue

                # restrict to events whose preferred origin has the
                # requested evaluation mode
                if self._evaluationMode is not None:
                    org = DataModel.Origin.Cast(dbq.getObject(
                        DataModel.Origin.TypeInfo(),
                        evt.preferredOriginID()))
                    try:
                        if org is None or \
                           org.evaluationMode() != self._evaluationMode:
                            continue
                    except ValueError:
                        continue

                ep.add(evt)

        if ep.eventCount() == 0:
            msg = "no matching events found"
            page = HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro)
            if page:
                utils.writeTS(req, page)
            return True

        Logging.debug("events found: %i" % ep.eventCount())

        if ro.format == 'csv' or not exp:
            req.setHeader('Content-Type', 'text/plain')
        else:
            req.setHeader('Content-Type', 'application/xml')

        if exp:
            return self._processRequestExp(req, ro, dbq, exp, ep)
        return self._processRequestText(req, ro, dbq, ep)
Ejemplo n.º 38
0
class RipperGIT(object):
    """Downloads an exposed .git directory from a vulnerable host and
    reconstructs the working tree files from the fetched loose objects."""

    # well-known git metadata files fetched unconditionally
    META_FILES = ['HEAD', 'FETCH_HEAD', 'COMMIT_EDITMSG', 'ORIG_HEAD', 'config', 'packed-refs', 'objects/info/packs']

    def __init__(self, vulnerability, threads=0):
        # vulnerability: dict with at least 'host' (URL-like object) and
        # 'data' (the index content captured during detection)
        # threads: >0 enables threaded object downloads
        self.vulnerability = vulnerability
        self.host = vulnerability['host']
        self.session = HTTP()
        self.output_folder = "output/{}/".format(self.host.host)
        self.output_git = self.output_folder + ".git/"
        self.threads = threads

    def get_meta_files(self):
        """Fetch the well-known git metadata files, write the index and
        parse it into self.index_files."""
        for meta_file in self.META_FILES:
            url = self.vulnerability['host'].replace(path = ".git/" + meta_file)
            destination = self.output_git + meta_file

            if self.session.get_file(url, destination):
                logging.debug("Fetched {}".format(url))

        # the index content was already captured during detection
        self.session.get_file("", self.output_git + "index", with_data = self.vulnerability['data'])
        self.index_files = parse_index(self.output_git + "index")

        logging.debug("Writing index")

    def get_objects(self):
        """Download every loose object referenced by the parsed index."""
        targets = []

        for entry in self.index_files:
            git_file_path = ".git/objects/" + entry['sha1'][0:2] + "/" + entry['sha1'][2:]
            path = self.output_folder + git_file_path
            url = self.vulnerability['host'].replace(path = git_file_path)

            targets.append((url, path))

        if self.threads:
            with concurrent.futures.ThreadPoolExecutor(max_workers=self.threads) as executor:
                for url, path in targets:
                    executor.submit(self.session.get_file, url, path)
        else:
            for url, path in targets:
                self.session.get_file(url, path)

    def get_pack_files(self):
        """Download pack/idx files listed in objects/info/packs, if any."""
        if os.path.exists(self.output_git + "objects/info/packs"):
            # BUG FIX: close the packs file instead of leaking the handle
            with open(self.output_git + "objects/info/packs") as packs_file:
                f = packs_file.read()

            for pack in f.split("\n"):
                if not len(pack):
                    continue

                pack_url = self.vulnerability['host'].replace(path = ".git/objects/pack/" + pack[2:])
                pack_dest = self.output_git + "objects/pack/" + pack[2:]

                idx_url = str(pack_url).replace(".pack", ".idx")
                idx_dest = pack_dest.replace(".pack", ".idx")

                # BUG FIX: get_file returns truthy on success (see
                # get_meta_files), so these messages wrongly reported
                # "Failed" for successful downloads
                if self.session.get_file(pack_url, pack_dest):
                    logging.debug("Fetched {}".format(pack_url))
                if self.session.get_file(idx_url, idx_dest):
                    logging.debug("Fetched {}".format(idx_url))

    def unpack_objects(self):
        """Inflate each downloaded loose object into the output tree."""
        for entry in self.index_files:
            object_path = self.output_git + "objects/" + entry['sha1'][0:2] + "/" + entry['sha1'][2:]
            file_path = self.output_folder + entry['name']

            # BUG FIX: loose objects are zlib-compressed binary data; open
            # in binary mode so decompression is not corrupted by newline
            # translation on platforms where text mode differs
            with open(object_path, "rb") as f:
                object_data = f.read()

            if not os.path.exists(os.path.dirname(file_path)):
                os.makedirs(os.path.dirname(file_path))

            unpacked_object = zlib.decompress(object_data)

            # strip the "<type> <size>\0" header before writing the blob
            with open(file_path, "wb") as f:
                f.write(unpacked_object.split('\x00', 1)[1])

    def extract_pack_file(self):
        raise NotImplementedError("TODO")
Ejemplo n.º 39
0
                                 __/ |                
                                |___/  
          _)                              (_
         _) \ /\%/\  /\%/\  /\%/\  /\%/\ / (_
        _)  \\(0 0)  (0 0)  (0 0)  (0 0)//  (_
        )_ -- \(oo)   (oo)   (oo)  (oo)/-- _(
         )_ / /  \_,__/ \__,__/ \_,__/  \ \ _(
          )_ /   --;-- --;- --;-- --;--  \ _( 
       *.   (      ))   ()   ((     ()    )    .*
         '...(___)z z(__)z(__)z(__)z z(__)....'  

"""

# Maps each supported service name to its handler instance.
exploits = {
    'ssh': SSH(),
    'http': HTTP(),
    'postgres': postgresql(),
    'ldap': LDAP()
}

def read_lines(filename):
    """Return the list of lines (newlines kept) read from *filename*.

    BUG FIX: file objects have no 'read_lines' method -- the original
    call raised AttributeError.  Use readlines() instead.
    """
    with open(filename, 'r') as f:
        lines = f.readlines()
    return lines

def main():
    import argparse
    parser = argparse.ArgumentParser(description="Automate Hydra attacks against a specified network range.")
    parser.add_argument("-i", "--host_ips", metavar="<i>", type=str,
                        help="IP address range to search", dest='host_ips')
    parser.add_argument("-u", "--usernames", metavar="<u>", type=str,
Ejemplo n.º 40
0
 def search_by_isbn(cls, isbn):
     """Return the API response for an ISBN lookup."""
     return HTTP.get(cls.isbn_url.format(isbn))
Ejemplo n.º 41
0
    def _processRequestExp(self, req, ro, dbq, exp, ep):
        """Load all related objects for the events in *ep* and write the
        result using the exporter *exp*.

        Enforces the configured maximum object count after each loading
        step.  Returns True on success, False on error or client abort.
        """
        objCount = ep.eventCount()
        maxObj = Application.Instance()._queryObjects

        if not HTTP.checkObjects(req, objCount, maxObj):
            return False

        pickIDs = set()
        if ro.picks is None:
            ro.picks = True

        # add related information
        for iEvent in xrange(ep.eventCount()):
            if req._disconnected:
                return False
            e = ep.event(iEvent)
            if self._hideAuthor:
                self._removeAuthor(e)

            # eventDescriptions and comments
            objCount += dbq.loadEventDescriptions(e)
            if ro.comments:
                objCount += self._loadComments(dbq, e)
            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

            # origin references: either all or preferred only
            dbIter = dbq.getObjects(e, DataModel.OriginReference.TypeInfo())
            for obj in dbIter:
                oRef = DataModel.OriginReference.Cast(obj)
                if oRef is None:
                    continue
                if ro.allOrigins:
                    e.add(oRef)
                elif oRef.originID() == e.preferredOriginID():
                    e.add(oRef)
                    dbIter.close()
                # TODO: if focal mechanisms are added make sure derived
                # origin is loaded

            objCount += e.originReferenceCount()

            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

            # TODO: add focal mechanisms

            # origins
            for iORef in xrange(e.originReferenceCount()):
                if req._disconnected:
                    return False
                oID = e.originReference(iORef).originID()
                obj = dbq.getObject(DataModel.Origin.TypeInfo(), oID)
                o = DataModel.Origin.Cast(obj)
                if o is None:
                    continue

                ep.add(o)
                objCount += 1
                if self._hideAuthor:
                    self._removeAuthor(o)

                # comments
                if ro.comments:
                    objCount += self._loadComments(dbq, o)
                if not HTTP.checkObjects(req, objCount, maxObj):
                    return False

                # magnitudes: either all or the event's preferred one only
                dbIter = dbq.getObjects(oID, DataModel.Magnitude.TypeInfo())
                for obj in dbIter:
                    mag = DataModel.Magnitude.Cast(obj)
                    if mag is None:
                        continue
                    if ro.allMags:
                        o.add(mag)
                    elif mag.publicID() == e.preferredMagnitudeID():
                        o.add(mag)
                        dbIter.close()

                    if self._hideAuthor:
                        self._removeAuthor(mag)

                objCount += o.magnitudeCount()
                if ro.comments:
                    for iMag in xrange(o.magnitudeCount()):
                        objCount += self._loadComments(dbq, o.magnitude(iMag))
                if not HTTP.checkObjects(req, objCount, maxObj):
                    return False

                # TODO station magnitudes, amplitudes
                # - added pick id for each pick referenced by amplitude

                # arrivals
                if ro.arrivals:
                    objCount += dbq.loadArrivals(o)
                    # BUG FIX: this tested the bound method
                    # 'self._removeAuthor' (always truthy) instead of the
                    # '_hideAuthor' flag used everywhere else, so author
                    # data was stripped from arrivals unconditionally
                    if self._hideAuthor:
                        for iArrival in xrange(o.arrivalCount()):
                            self._removeAuthor(o.arrival(iArrival))

                    # collect pick IDs if requested
                    if ro.picks:
                        for iArrival in xrange(o.arrivalCount()):
                            pickIDs.add(o.arrival(iArrival).pickID())

                if not HTTP.checkObjects(req, objCount, maxObj):
                    return False

        # picks referenced by the collected arrivals
        if pickIDs:
            objCount += len(pickIDs)
            if not HTTP.checkObjects(req, objCount, maxObj):
                return False

            for pickID in pickIDs:
                obj = dbq.getObject(DataModel.Pick.TypeInfo(), pickID)
                pick = DataModel.Pick.Cast(obj)
                if pick is not None:
                    if self._hideAuthor:
                        self._removeAuthor(pick)
                    if ro.comments:
                        objCount += self._loadComments(dbq, pick)
                    ep.add(pick)
                if not HTTP.checkObjects(req, objCount, maxObj):
                    return False

        # write response
        sink = utils.Sink(req)
        if not exp.write(sink, ep):
            return False
        Logging.debug("%s: returned %i events and %i origins (total " \
                       "objects/bytes: %i/%i)" % (ro.service, ep.eventCount(),
                       ep.originCount(), objCount, sink.written))
        utils.accessLog(req, ro, http.OK, sink.written, None)
        return True
Ejemplo n.º 42
0
def run_controller_in(controller, function, environment):
    """
    Runs the controller.function() (for the app specified by
    the current folder).
    It tries pre-compiled controller_function.pyc first before compiling it.

    Raises HTTP(404) when the controller file or the exposed function
    does not exist.  Returns the post-processed response variables.
    """

    # if compiled should run compiled!

    folder = environment['request'].folder
    path = os.path.join(folder, 'compiled')
    badc = 'invalid controller (%s/%s)' % (controller, function)
    badf = 'invalid function (%s/%s)' % (controller, function)
    if os.path.exists(path):
        # app is byte-compiled: run the pre-compiled controller directly
        filename = os.path.join(
            path, 'controllers_%s_%s.pyc' % (controller, function))
        if not os.path.exists(filename):
            raise HTTP(404,
                       rewrite.thread.routes.error_message % badf,
                       web2py_error=badf)
        restricted(read_pyc(filename), environment, layer=filename)
    elif function == '_TEST':
        # TESTING: adjust the path to include site packages
        from settings import global_settings
        from admin import abspath, add_path_first
        paths = (global_settings.gluon_parent,
                 abspath('site-packages',
                         gluon=True), abspath('gluon', gluon=True), '')
        # IDIOM FIX: use a plain loop for the side effect instead of a
        # throwaway list comprehension (which also shadowed 'path')
        for p in paths:
            add_path_first(p)
        # TESTING END

        filename = os.path.join(folder, 'controllers/%s.py' % controller)
        if not os.path.exists(filename):
            raise HTTP(404,
                       rewrite.thread.routes.error_message % badc,
                       web2py_error=badc)
        environment['__symbols__'] = environment.keys()
        code = read_file(filename)
        code += TEST_CODE
        restricted(code, environment, layer=filename)
    else:
        filename = os.path.join(folder, 'controllers/%s.py' % controller)
        if not os.path.exists(filename):
            raise HTTP(404,
                       rewrite.thread.routes.error_message % badc,
                       web2py_error=badc)
        code = read_file(filename)
        exposed = regex_expose.findall(code)
        if function not in exposed:
            raise HTTP(404,
                       rewrite.thread.routes.error_message % badf,
                       web2py_error=badf)
        # append the call that stores the function result on the response
        code = "%s\nresponse._vars=response._caller(%s)\n" % (code, function)
        if is_gae:
            # cache the compiled code object keyed by file and function
            layer = filename + ':' + function
            code = getcfs(layer, filename, lambda: compile2(code, layer))
        restricted(code, environment, filename)
    response = environment['response']
    vars = response._vars
    # run registered post-processors over the result, in order
    if response.postprocessing:
        for p in response.postprocessing:
            vars = p(vars)
    if isinstance(vars, unicode):
        vars = vars.encode('utf8')
    # objects exposing .xml() are serialized to their markup
    if hasattr(vars, 'xml'):
        vars = vars.xml()
    return vars
Ejemplo n.º 43
0
							              "%s|%s|%s|%s|%s|%s\n" % (
							                  net.code(), sta.code(),
							                  loc.code(), stream.code(), lat,
							                  lon, elev, depth, azi, dip, desc,
							                  scale, scaleFreq, scaleUnit, sr,
							                  start, end)))

		# sort lines and append to final data string
		lines.sort(key = lambda line: line[0])
		for line in lines:
			data += line[1]

		# Return 204 if no matching inventory was found
		if len(lines) == 0:
			msg = "no matching inventory found"
			data = HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro)
			if data:
				utils.writeTS(req, data)
			return False

		utils.writeTS(req, data)
		Logging.debug("%s: returned %i lines (total bytes: %i)" % (
		              ro.service, len(lines), len(data)))
		utils.accessLog(req, ro, http.OK, len(data), None)
		return True


	#---------------------------------------------------------------------------
	# Checks if at least one location and channel combination matches the
	# request options
	@staticmethod
Ejemplo n.º 44
0
def fakeWSGIInApp(envir, responder):
    """Minimal fake WSGI app returning a canned body per request path.

    The status code is taken from the ``ERROR_CODE`` environ key
    (defaulting to 200) and the body is looked up in the module-level
    ``bodydict`` by ``PATH_INFO``, falling back to ``"404'd"`` for
    unknown paths.
    """
    status = envir.get('ERROR_CODE', 200)
    body = bodydict.get(envir.get('PATH_INFO'), "404'd")
    return HTTP(status, body).to(responder)
Ejemplo n.º 45
0
	def _processRequestExp(self, req, ro, dbq, exp, ep):
		"""Load all requested events with their related objects and export.

		Fills the EventParameters instance *ep* via the database query
		interface *dbq* according to the request options *ro*: origin and
		focal mechanism references (all or preferred only), origins,
		magnitudes, moment tensors, arrivals, picks and comments. Author
		information is stripped when the service is configured to hide it.
		The result is serialized through the exporter *exp* into the
		request *req*.

		Returns True on success, False if the client disconnected or the
		configured maximum object count was exceeded.
		"""
		objCount = ep.eventCount()
		maxObj = Application.Instance()._queryObjects

		if not HTTP.checkObjects(req, objCount, maxObj):
			return False

		pickIDs = set()
		# picks are included by default unless explicitly disabled
		if ro.picks is None:
			ro.picks = True

		# add related information
		for iEvent in xrange(ep.eventCount()):
			if req._disconnected:
				return False
			e = ep.event(iEvent)
			if self._hideAuthor:
				self._removeAuthor(e)

			originIDs = set()
			magIDs = set()
			magIDs.add(e.preferredMagnitudeID())

			# eventDescriptions and comments
			objCount += dbq.loadEventDescriptions(e)
			if ro.comments:
				objCount += self._loadComments(dbq, e)
			if not HTTP.checkObjects(req, objCount, maxObj):
				return False

			# origin references: either all or preferred only
			dbIter = dbq.getObjects(e, DataModel.OriginReference.TypeInfo())
			for obj in dbIter:
				oRef = DataModel.OriginReference.Cast(obj)
				if oRef is None:
					continue
				if ro.allOrigins:
					e.add(oRef)
					originIDs.add(oRef.originID())
				elif oRef.originID() == e.preferredOriginID():
					e.add(oRef)
					originIDs.add(oRef.originID())
					dbIter.close()

			objCount += e.originReferenceCount()

			# focalMechanism references: either none, preferred only or all
			if ro.fm or ro.allFMs:
				dbIter = dbq.getObjects(e, DataModel.FocalMechanismReference.TypeInfo())
				for obj in dbIter:
					fmRef = DataModel.FocalMechanismReference.Cast(obj)
					if fmRef is None:
						continue
					if ro.allFMs:
						e.add(fmRef)
					elif fmRef.focalMechanismID() == e.preferredFocalMechanismID():
						e.add(fmRef)
						dbIter.close()

			objCount += e.focalMechanismReferenceCount()

			if not HTTP.checkObjects(req, objCount, maxObj):
				return False

			# focal mechanisms: process before origins to add derived origin to
			# originID list since it may be missing from origin reference list
			for iFMRef in xrange(e.focalMechanismReferenceCount()):
				if req._disconnected:
					return False
				fmID = e.focalMechanismReference(iFMRef).focalMechanismID()
				obj = dbq.getObject(DataModel.FocalMechanism.TypeInfo(), fmID)
				fm = DataModel.FocalMechanism.Cast(obj)
				if fm is None:
					continue

				ep.add(fm)
				objCount += 1
				if self._hideAuthor:
					self._removeAuthor(fm)

				# comments
				if ro.comments:
					objCount += self._loadComments(dbq, fm)

				# momentTensors
				objCount += dbq.loadMomentTensors(fm)

				if not HTTP.checkObjects(req, objCount, maxObj):
					return False

				for iMT in xrange(fm.momentTensorCount()):
					mt = fm.momentTensor(iMT)

					originIDs.add(mt.derivedOriginID())
					magIDs.add(mt.momentMagnitudeID())

					if self._hideAuthor:
						self._removeAuthor(mt)

					# BUG FIX: comments were previously loaded inside a
					# nested loop that re-used the outer loop variable iMT,
					# loading the same moment tensor's comments
					# momentTensorCount() times; load them exactly once
					if ro.comments:
						objCount += self._loadComments(dbq, mt)

					objCount += dbq.loadDataUseds(mt)
					objCount += dbq.loadMomentTensorPhaseSettings(mt)
					if ro.staMTs:
						objCount += dbq.loadMomentTensorStationContributions(mt)
						for iStaMT in xrange(mt.momentTensorStationContributionCount()):
							objCount += dbq.load(mt.momentTensorStationContribution(iStaMT))

					if not HTTP.checkObjects(req, objCount, maxObj):
						return False

			# find ID of origin containing preferred Magnitude
			if e.preferredMagnitudeID():
				obj = dbq.getObject(DataModel.Magnitude.TypeInfo(),
				                    e.preferredMagnitudeID())
				m = DataModel.Magnitude.Cast(obj)
				if m is not None:
					oID = dbq.parentPublicID(m)
					if oID:
						originIDs.add(oID)

			# origins
			for oID in originIDs:
				if req._disconnected:
					return False
				obj = dbq.getObject(DataModel.Origin.TypeInfo(), oID)
				o = DataModel.Origin.Cast(obj)
				if o is None:
					continue

				ep.add(o)
				objCount += 1
				if self._hideAuthor:
					self._removeAuthor(o)

				# comments
				if ro.comments:
					objCount += self._loadComments(dbq, o)
				if not HTTP.checkObjects(req, objCount, maxObj):
					return False

				# magnitudes
				dbIter = dbq.getObjects(oID, DataModel.Magnitude.TypeInfo())
				for obj in dbIter:
					mag = DataModel.Magnitude.Cast(obj)
					if mag is None:
						continue
					if ro.allMags:
						o.add(mag)
					elif mag.publicID() in magIDs:
						o.add(mag)
						dbIter.close()

					if self._hideAuthor:
						self._removeAuthor(mag)

				objCount += o.magnitudeCount()
				if ro.comments:
					for iMag in xrange(o.magnitudeCount()):
						objCount += self._loadComments(dbq, o.magnitude(iMag))
				if not HTTP.checkObjects(req, objCount, maxObj):
					return False

				# TODO station magnitudes, amplitudes
				# - added pick id for each pick referenced by amplitude

				# arrivals
				if ro.arrivals:
					objCount += dbq.loadArrivals(o)
					# BUG FIX: previously tested the (always truthy) bound
					# method self._removeAuthor instead of the _hideAuthor
					# flag, stripping arrival authors unconditionally
					if self._hideAuthor:
						for iArrival in xrange(o.arrivalCount()):
							self._removeAuthor(o.arrival(iArrival))

					# collect pick IDs if requested
					if ro.picks:
						for iArrival in xrange(o.arrivalCount()):
							pickIDs.add(o.arrival(iArrival).pickID())

				if not HTTP.checkObjects(req, objCount, maxObj):
					return False

		# picks
		if pickIDs:
			objCount += len(pickIDs)
			if not HTTP.checkObjects(req, objCount, maxObj):
				return False

			for pickID in pickIDs:
				obj = dbq.getObject(DataModel.Pick.TypeInfo(), pickID)
				pick = DataModel.Pick.Cast(obj)
				if pick is not None:
					if self._hideAuthor:
						self._removeAuthor(pick)
					if ro.comments:
						objCount += self._loadComments(dbq, pick)
					ep.add(pick)
				if not HTTP.checkObjects(req, objCount, maxObj):
					return False

		# write response
		sink = utils.Sink(req)
		if not exp.write(sink, ep):
			return False
		Logging.debug("%s: returned %i events and %i origins (total " \
		               "objects/bytes: %i/%i)" % (ro.service, ep.eventCount(),
		               ep.originCount(), objCount, sink.written))
		utils.accessLog(req, ro, http.OK, sink.written, None)
		return True
Ejemplo n.º 46
0
            raise HTTP(403, error_message, web2py_error='inaccessible file')
        else:
            raise HTTP(404, error_message, web2py_error='invalid file')
    else:
        fp.close()
    stat_file = os.stat(static_file)
    fsize = stat_file[stat.ST_SIZE]
    mtime = time.strftime('%a, %d %b %Y %H:%M:%S GMT',
                          time.gmtime(stat_file[stat.ST_MTIME]))
    headers['Content-Type'] = contenttype(static_file)
    headers['Last-Modified'] = mtime
    headers['Pragma'] = 'cache'
    headers['Cache-Control'] = 'private'

    if request and request.env.http_if_modified_since == mtime:
        raise HTTP(304, **{'Content-Type': headers['Content-Type']})

    elif request and request.env.http_range:
        start_items = regex_start_range.findall(request.env.http_range)
        if not start_items:
            start_items = [0]
        stop_items = regex_stop_range.findall(request.env.http_range)
        if not stop_items or int(stop_items[0]) > fsize - 1:
            stop_items = [fsize - 1]
        part = (int(start_items[0]), int(stop_items[0]), fsize)
        bytes = part[1] - part[0] + 1
        try:
            stream = open(static_file, 'rb')
        except IOError, e:
            if e[0] in (errno.EISDIR, errno.EACCES):
                raise HTTP(403)
Ejemplo n.º 47
0
            print(TAB_2 + 'Source Port: {}, Destination Port: {}'.format(
                tcp.src_port, tcp.dest_port))
            print(TAB_2 + 'Sequence: {}, Acknowledgment: {}'.format(
                tcp.sequence, tcp.acknowledgment))
            print(TAB_2 + 'Flags:')
            print(TAB_3 + 'URG: {}, ACK: {}, PSH: {}'.format(
                tcp.flag_urg, tcp.flag_ack, tcp.flag_psh))
            print(TAB_3 + 'RST: {}, SYN: {}, FIN:{}'.format(
                tcp.flag_rst, tcp.flag_syn, tcp.flag_fin))

            if len(tcp.data) > 0:
                # HTTP
                if tcp.src_port == 80 or tcp.dest_port == 80:
                    print(TAB_2 + 'HTTP Data:')
                    try:
                        http = HTTP(tcp.data)
                        http_info = str(http.data).split('\n')
                        for line in http_info:
                            print(DATA_TAB_3 + str(line))
                    except:
                        print(format_multi_line(DATA_TAB_3, tcp.data))
                else:
                    print(TAB_2 + 'TCP Data:')
                    print(format_multi_line(DATA_TAB_3, tcp.data))
        elif ipv4.proto == 17:
            udp = UDP(ipv4.data)
            print(TAB_1 + 'UDP Segment:')
            print(TAB_2 +
                  'Source Port: {}, Destination Port: {}, Length: {}'.format(
                      udp.src_port, udp.dest_port, udp.size))
Ejemplo n.º 48
0
            # ##################################################
            # on application error, rollback database
            # ##################################################

            try:
                if response._custom_rollback:
                    response._custom_rollback()
                else:
                    BaseAdapter.close_all_instances('rollback')
            except:
                pass
            e = RestrictedError('Framework', '', '', locals())
            ticket = e.log(request) or 'unrecoverable'
            http_response = \
                HTTP(500, rwthread.routes.error_message_ticket
                     % dict(ticket=ticket),
                     web2py_error='ticket %s' % ticket)

    finally:
        if response and hasattr(response, 'session_file') \
                and response.session_file:
            response.session_file.close()

    session._unlock(response)
    http_response, new_environ = try_rewrite_on_error(http_response, request,
                                                      environ, ticket)
    if not http_response:
        return wsgibase(new_environ, responder)
    if global_settings.web2py_crontype == 'soft':
        newcron.softcron(global_settings.applications_parent).start()
    return http_response.to(responder, env=env)
Ejemplo n.º 49
0
	def _processRequest(self, req, ro):
		"""Handle a dataselect waveform request.

		Validates the request options *ro*, opens a record stream, adds
		every matching (and authorized) channel time window, enforces the
		optional sample limit and finally hands the stream to an
		asynchronous _WaveformProducer.

		Returns an error page string on failure, otherwise
		server.NOT_DONE_YET since the response is produced asynchronously.
		"""

		if ro.quality != 'B' and ro.quality != 'M':
			msg = "quality other than 'B' or 'M' not supported"
			return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

		if ro.minimumLength:
			msg = "enforcing of minimum record length not supported"
			return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

		if ro.longestOnly:
			msg = "limitation to longest segment not supported"
			return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

		app = Application.Instance()
		ro._checkTimes(app._realtimeGap)

		# Open record stream
		rs = RecordStream.Open(self._rsURL)
		if rs is None:
			msg = "could not open record stream"
			return HTTP.renderErrorPage(req, http.SERVICE_UNAVAILABLE, msg, ro)

		maxSamples = None
		if app._samplesM is not None:
			maxSamples = app._samplesM * 1000000
			samples = 0

		# create request tracker if DB tracking is enabled
		# (FIX: removed a redundant second Application.Instance() call)
		if app._trackdbEnabled:
			userid = ro.userName or app._trackdbDefaultUser
			# request id: milliseconds since 2015-01-01 epoch offset
			reqid = 'ws' + str(int(round(time.time() * 1000) - 1420070400000))
			tracker = RequestTrackerDB("fdsnws", app.connection(), reqid,
			                           "WAVEFORM", userid,
			                           "REQUEST WAVEFORM " + reqid,
			                           "fdsnws", req.getClientIP(),
			                           req.getClientIP())
		else:
			tracker = None

		# Add request streams
		# iterate over inventory networks
		for s in ro.streams:
			for net in self._networkIter(s):
				for sta in self._stationIter(net, s):
					for loc in self._locationIter(sta, s):
						for cha in self._streamIter(loc, s):
							# skip restricted channels the user may not access
							if utils.isRestricted(cha) and (self.__user is None or \
								not self.__access.authorize(self.__user,
											    net.code(),
											    sta.code(),
											    loc.code(),
											    cha.code(),
											    s.time.start,
											    s.time.end)):
								continue

							# enforce maximum sample per request restriction
							if maxSamples is not None:
								try:
									n = cha.sampleRateNumerator()
									d = cha.sampleRateDenominator()
								except ValueException:
									# sample rate attribute not set for this
									# channel: cannot estimate sample count
									msg = "skipping stream without sampling " \
									      "rate definition: %s.%s.%s.%s" % (
									      net.code(), sta.code(), loc.code(),
									      cha.code())
									Logging.warning(msg)
									continue

								# calculate number of samples for requested
								# time window
								diffSec = (s.time.end - s.time.start).length()
								samples += int(diffSec * n / d)
								if samples > maxSamples:
									msg = "maximum number of %sM samples " \
									      "exceeded" % str(app._samplesM)
									return HTTP.renderErrorPage(req,
									       http.REQUEST_ENTITY_TOO_LARGE, msg,
									       ro)

							Logging.debug("adding stream: %s.%s.%s.%s %s - %s" \
							              % (net.code(), sta.code(), loc.code(),
							                 cha.code(), s.time.start.iso(),
							                 s.time.end.iso()))
							rs.addStream(net.code(), sta.code(), loc.code(),
							             cha.code(), s.time.start, s.time.end)

							if tracker:
								tracker.line_status(s.time.start, s.time.end,
								    net.code(), sta.code(), cha.code(), loc.code(),
								    False, "", True, [], "fdsnws", "OK", 0, "")

		# Build output filename, substituting the current time stamp
		fileName = app._fileNamePrefix.replace(
		    "%time", time.strftime('%Y-%m-%dT%H:%M:%S')) + '.mseed'

		# Create producer for async IO
		req.registerProducer(_WaveformProducer(req, ro, rs, fileName, tracker), False)

		# The request is handled by the deferred object
		return server.NOT_DONE_YET
Ejemplo n.º 50
0
def wsgibase(environ, responder):
    """
    this is the gluon wsgi application. the first function called when a page
    is requested (static or dynamic). it can be called by paste.httpserver
    or by apache mod_wsgi.

      - fills request with info
      - the environment variables, replacing '.' with '_'
      - adds web2py path and version info
      - compensates for fcgi missing path_info and query_string
      - validates the path in url

    The url path must be either:

    1. for static pages:

      - /<application>/static/<file>

    2. for dynamic pages:

      - /<application>[/<controller>[/<function>[/<sub>]]][.<extension>]
      - (sub may go several levels deep, currently 3 levels are supported:
         sub1/sub2/sub3)

    The naming conventions are:

      - application, controller, function and extension may only contain
        [a-zA-Z0-9_]
      - file and sub may also contain '-', '=', '.' and '/'
    """

    current.__dict__.clear()
    request = Request()
    response = Response()
    session = Session()
    env = request.env
    env.web2py_path = global_settings.applications_parent
    env.web2py_version = web2py_version
    env.update(global_settings)
    static_file = False
    try:
        try:
            try:
                # ##################################################
                # handle fcgi missing path_info and query_string
                # select rewrite parameters
                # rewrite incoming URL
                # parse rewritten header variables
                # parse rewritten URL
                # serve file if static
                # ##################################################

                fixup_missing_path_info(environ)
                (static_file, version, environ) = url_in(request, environ)
                response.status = env.web2py_status_code or response.status

                if static_file:
                    if environ.get('QUERY_STRING',
                                   '').startswith('attachment'):
                        response.headers['Content-Disposition'] \
                            = 'attachment'
                    if version:
                        response.headers['Cache-Control'] = 'max-age=315360000'
                        response.headers[
                            'Expires'] = 'Thu, 31 Dec 2037 23:59:59 GMT'
                    response.stream(static_file, request=request)

                # ##################################################
                # fill in request items
                # ##################################################
                app = request.application  # must go after url_in!

                if not global_settings.local_hosts:
                    local_hosts = set(['127.0.0.1', '::ffff:127.0.0.1', '::1'])
                    if not global_settings.web2py_runtime_gae:
                        try:
                            fqdn = socket.getfqdn()
                            local_hosts.add(socket.gethostname())
                            local_hosts.add(fqdn)
                            local_hosts.update([
                                addrinfo[4][0]
                                for addrinfo in getipaddrinfo(fqdn)
                            ])
                            if env.server_name:
                                local_hosts.add(env.server_name)
                                local_hosts.update([
                                    addrinfo[4][0] for addrinfo in
                                    getipaddrinfo(env.server_name)
                                ])
                        except (socket.gaierror, TypeError):
                            pass
                    global_settings.local_hosts = list(local_hosts)
                else:
                    local_hosts = global_settings.local_hosts
                client = get_client(env)
                x_req_with = str(env.http_x_requested_with).lower()

                request.update(
                    client = client,
                    folder = abspath('applications', app) + os.sep,
                    ajax = x_req_with == 'xmlhttprequest',
                    cid = env.http_web2py_component_element,
                    is_local = env.remote_addr in local_hosts,
                    is_https = env.wsgi_url_scheme in HTTPS_SCHEMES or \
                        request.env.http_x_forwarded_proto in HTTPS_SCHEMES \
                        or env.https == 'on')
                request.compute_uuid()  # requires client
                request.url = environ['PATH_INFO']

                # ##################################################
                # access the requested application
                # ##################################################

                if not exists(request.folder):
                    if app == rwthread.routes.default_application \
                            and app != 'welcome':
                        redirect(URL('welcome', 'default', 'index'))
                    elif rwthread.routes.error_handler:
                        _handler = rwthread.routes.error_handler
                        redirect(
                            URL(_handler['application'],
                                _handler['controller'],
                                _handler['function'],
                                args=app))
                    else:
                        raise HTTP(404,
                                   rwthread.routes.error_message %
                                   'invalid request',
                                   web2py_error='invalid application')
                elif not request.is_local and \
                        exists(pjoin(request.folder, 'DISABLED')):
                    raise HTTP(
                        503,
                        "<html><body><h1>Temporarily down for maintenance</h1></body></html>"
                    )

                # ##################################################
                # build missing folders
                # ##################################################

                create_missing_app_folders(request)

                # ##################################################
                # get the GET and POST data
                # ##################################################

                parse_get_post_vars(request, environ)

                # ##################################################
                # expose wsgi hooks for convenience
                # ##################################################

                request.wsgi.environ = environ_aux(environ, request)
                request.wsgi.start_response = \
                    lambda status='200', headers=[], \
                    exec_info=None, response=response: \
                    start_response_aux(status, headers, exec_info, response)
                request.wsgi.middleware = \
                    lambda *a: middleware_aux(request, response, *a)

                # ##################################################
                # load cookies
                # ##################################################

                if env.http_cookie:
                    try:
                        request.cookies.load(env.http_cookie)
                    except Cookie.CookieError, e:
                        pass  # invalid cookies

                # ##################################################
                # try load session or create new session file
                # ##################################################

                if not env.web2py_disable_session:
                    session.connect(request, response)

                # ##################################################
                # run controller
                # ##################################################

                if global_settings.debugging and app != "admin":
                    import gluon.debug
                    # activate the debugger
                    gluon.debug.dbg.do_debug(mainpyfile=request.folder)

                serve_controller(request, response, session)

            except HTTP, http_response:

                if static_file:
                    return http_response.to(responder, env=env)

                if request.body:
                    request.body.close()

                # ##################################################
                # on success, try store session in database
                # ##################################################
                session._try_store_in_db(request, response)

                # ##################################################
                # on success, commit database
                # ##################################################

                if response.do_not_commit is True:
                    BaseAdapter.close_all_instances(None)
                # elif response._custom_commit:
                #     response._custom_commit()
                elif response.custom_commit:
                    BaseAdapter.close_all_instances(response.custom_commit)
                else:
                    BaseAdapter.close_all_instances('commit')

                # ##################################################
                # if session not in db try store session on filesystem
                # this must be done after trying to commit database!
                # ##################################################

                session._try_store_in_cookie_or_file(request, response)

                if request.cid:
                    if response.flash:
                        http_response.headers['web2py-component-flash'] = \
                            urllib2.quote(xmlescape(response.flash)\
                                              .replace('\n',''))
                    if response.js:
                        http_response.headers['web2py-component-command'] = \
                            urllib2.quote(response.js.replace('\n',''))

                # ##################################################
                # store cookies in headers
                # ##################################################

                rcookies = response.cookies
                if session._forget and response.session_id_name in rcookies:
                    del rcookies[response.session_id_name]
                elif session._secure:
                    rcookies[response.session_id_name]['secure'] = True
                http_response.cookies2headers(rcookies)
                ticket = None

            except RestrictedError, e:

                if request.body:
                    request.body.close()

                # ##################################################
                # on application error, rollback database
                # ##################################################

                ticket = e.log(request) or 'unknown'
                if response._custom_rollback:
                    response._custom_rollback()
                else:
                    BaseAdapter.close_all_instances('rollback')

                http_response = \
                    HTTP(500, rwthread.routes.error_message_ticket %
                         dict(ticket=ticket),
                         web2py_error='ticket %s' % ticket)
Ejemplo n.º 51
0
            # ##################################################
            # on application error, rollback database
            # ##################################################

            try:
                if response._custom_rollback:
                    response._custom_rollback()
                else:
                    BaseAdapter.close_all_instances('rollback')
            except:
                pass
            e = RestrictedError('Framework', '', '', locals())
            ticket = e.log(request) or 'unrecoverable'
            http_response = \
                HTTP(500, rewrite.thread.routes.error_message_ticket \
                         % dict(ticket=ticket),
                     web2py_error='ticket %s' % ticket)

    finally:
        if response and hasattr(response, 'session_file') \
                and response.session_file:
            response.session_file.close()

    session._unlock(response)
    http_response, new_environ = rewrite.try_rewrite_on_error(
        http_response, request, environ, ticket)
    if not http_response:
        return wsgibase(new_environ,responder)
    if global_settings.web2py_crontype == 'soft':
        newcron.softcron(global_settings.applications_parent).start()
    return http_response.to(responder)
Ejemplo n.º 52
0
	def _processRequestExp(self, req, ro, exp, dac):
		"""Filter the station inventory by the request options and export it.

		Builds a new Inventory containing only the matching networks,
		stations and (optionally) channels, copies referenced dataloggers,
		sensors and responses, attaches data extents from *dac* when
		collected, and serializes the result through the exporter *exp*.

		Returns True when a response was written (including the 204 "no
		matching inventory" page), False if the client disconnected or the
		configured object limit was exceeded.
		"""
		if req._disconnected: return False

		staCount, locCount, chaCount, extCount, objCount = 0, 0, 0, 0, 0

		# objects are copied, no need to register them as PublicObjects
		DataModel.PublicObject.SetRegistrationEnabled(False)
		newInv = DataModel.Inventory()
		dataloggers, sensors, extents = set(), set(), set()

		skipRestricted = not self._allowRestricted or \
		                 (ro.restricted is not None and not ro.restricted)
		# requested detail level: network only, station, or channel
		levelNet = not ro.includeSta
		levelSta = ro.includeSta and not ro.includeCha

		# iterate over inventory networks
		for net in ro.networkIter(self._inv, levelNet):
			if req._disconnected: return False
			if skipRestricted and utils.isRestricted(net): continue
			newNet = DataModel.Network(net)

			# Copy comments
			for i in xrange(net.commentCount()):
				newNet.add(DataModel.Comment(net.comment(i)))

			# iterate over inventory stations of current network
			for sta in ro.stationIter(net, levelSta):
				if req._disconnected:
					return False
				if skipRestricted and utils.isRestricted(sta):
					continue
				if not HTTP.checkObjects(req, objCount, self._maxObj):
					return False

				if ro.includeCha:
					numCha, numLoc, d, s, e = \
						self._processStation(newNet, net, sta, ro, dac,
						                     skipRestricted)
					if numCha > 0:
						locCount += numLoc
						chaCount += numCha
						extCount += len(e)
						# NOTE(review): this adds the *accumulated* extCount
						# (not len(e)) for every matching station, so objCount
						# over-counts extents across stations -- confirm intent
						objCount += numLoc + numCha + extCount
						if not HTTP.checkObjects(req, objCount, self._maxObj):
							return False
						dataloggers |= d
						sensors |= s
						extents |= e
				elif self._matchStation(net, sta, ro, dac):
					if ro.includeSta:
						newSta = DataModel.Station(sta)
						# Copy comments
						for i in xrange(sta.commentCount()):
							newSta.add(DataModel.Comment(sta.comment(i)))
						newNet.add(newSta)
					else:
						# no station output requested: one matching station
						# is sufficient to include the network
						newInv.add(newNet)
						objCount += 1
						break

			if newNet.stationCount() > 0:
				newInv.add(newNet)
				staCount += newNet.stationCount()
				# NOTE(review): adds the running staCount total per network
				# instead of newNet.stationCount(), inflating objCount --
				# confirm intent
				objCount += staCount + 1

		# Return 204 if no matching inventory was found
		if newInv.networkCount() == 0:
			msg = "no matching inventory found"
			data = HTTP.renderErrorPage(req, http.NO_CONTENT, msg, ro)
			if data:
				utils.writeTS(req, data)
			return True

		# Copy references (dataloggers, responses, sensors)
		decCount, resCount = 0, 0
		if ro.includeCha:
			decCount = self._copyReferences(newInv, req, objCount, self._inv,
			           ro, dataloggers, sensors, self._maxObj)
			# _copyReferences returns None when the object limit was hit
			if decCount is None:
				return False
			else:
				resCount = newInv.responsePAZCount() + \
				           newInv.responseFIRCount() + \
				           newInv.responsePolynomialCount() + \
				           newInv.responseFAPCount() + \
				           newInv.responseIIRCount()
				objCount += resCount + decCount + newInv.dataloggerCount() + \
				            newInv.sensorCount()

		# Copy data extents
		objOut = newInv
		if len(extents) > 0:
			objCount += 1
			da = DataModel.DataAvailability()
			for e in extents:
				da.add(DataModel.DataExtent(e))
			# export inventory and availability together
			objOut = ExportObjectList()
			objOut.append(newInv)
			objOut.append(da)

		sink = utils.Sink(req)
		if not exp.write(sink, objOut):
			return False

		Logging.debug("%s: returned %iNet, %iSta, %iLoc, %iCha, " \
		               "%iDL, %iDec, %iSen, %iRes, %iDAExt (total objects/" \
		               "bytes: %i/%i) " % (ro.service, newInv.networkCount(),
		               staCount, locCount, chaCount, newInv.dataloggerCount(),
		               decCount, newInv.sensorCount(), resCount, extCount,
		               objCount, sink.written))
		utils.accessLog(req, ro, http.OK, sink.written, None)
		return True
Ejemplo n.º 53
0
Archivo: main.py Proyecto: w3fs/web2py
def wsgibase(environ, responder):
    """
    this is the gluon wsgi application. the first function called when a page
    is requested (static or dynamic). it can be called by paste.httpserver
    or by apache mod_wsgi.

      - fills request with info
      - the environment variables, replacing '.' with '_'
      - adds web2py path and version info
      - compensates for fcgi missing path_info and query_string
      - validates the path in url

    The url path must be either:

    1. for static pages:

      - /<application>/static/<file>

    2. for dynamic pages:

      - /<application>[/<controller>[/<function>[/<sub>]]][.<extension>]
      - (sub may go several levels deep, currently 3 levels are supported:
         sub1/sub2/sub3)

    The naming conventions are:

      - application, controller, function and extension may only contain
        [a-zA-Z0-9_]
      - file and sub may also contain '-', '=', '.' and '/'
    """

    current.__dict__.clear()
    request = Request()
    response = Response()
    session = Session()
    request.env.web2py_path = global_settings.applications_parent
    request.env.web2py_version = web2py_version
    request.env.update(global_settings)
    static_file = False
    try:
        try:
            try:
                # ##################################################
                # handle fcgi missing path_info and query_string
                # select rewrite parameters
                # rewrite incoming URL
                # parse rewritten header variables
                # parse rewritten URL
                # serve file if static
                # ##################################################

                if not environ.get('PATH_INFO',None) and \
                        environ.get('REQUEST_URI',None):
                    # for fcgi, get path_info and query_string from request_uri
                    items = environ['REQUEST_URI'].split('?')
                    environ['PATH_INFO'] = items[0]
                    if len(items) > 1:
                        environ['QUERY_STRING'] = items[1]
                    else:
                        environ['QUERY_STRING'] = ''
                if not environ.get('HTTP_HOST',None):
                    environ['HTTP_HOST'] = '%s:%s' % (environ.get('SERVER_NAME'),
                                                      environ.get('SERVER_PORT'))

                (static_file, environ) = rewrite.url_in(request, environ)
                if static_file:
                    if request.env.get('query_string', '')[:10] == 'attachment':
                        response.headers['Content-Disposition'] = 'attachment'
                    response.stream(static_file, request=request)

                # ##################################################
                # fill in request items
                # ##################################################

                http_host = request.env.http_host.split(':',1)[0]

                local_hosts = [http_host,'::1','127.0.0.1','::ffff:127.0.0.1']
                if not global_settings.web2py_runtime_gae:
                    local_hosts += [socket.gethostname(),
                                    socket.gethostbyname(http_host)]
                request.client = get_client(request.env)
                request.folder = abspath('applications',
                                         request.application) + os.sep
                x_req_with = str(request.env.http_x_requested_with).lower()
                request.ajax = x_req_with == 'xmlhttprequest'
                request.cid = request.env.http_web2py_component_element
                request.is_local = request.env.remote_addr in local_hosts
                request.is_https = request.env.wsgi_url_scheme \
                    in ['https', 'HTTPS'] or request.env.https == 'on'

                # ##################################################
                # compute a request.uuid to be used for tickets and toolbar
                # ##################################################

                response.uuid = request.compute_uuid()

                # ##################################################
                # access the requested application
                # ##################################################

                if not os.path.exists(request.folder):
                    if request.application == \
                            rewrite.thread.routes.default_application \
                            and request.application != 'welcome':
                        request.application = 'welcome'
                        redirect(Url(r=request))
                    elif rewrite.thread.routes.error_handler:
                        _handler = rewrite.thread.routes.error_handler
                        redirect(Url(_handler['application'],
                                     _handler['controller'],
                                     _handler['function'],
                                     args=request.application))
                    else:
                        raise HTTP(404, rewrite.thread.routes.error_message \
                                       % 'invalid request',
                                   web2py_error='invalid application')
                elif not request.is_local and \
                        os.path.exists(os.path.join(request.folder,'DISABLED')):
                    raise HTTP(200, "<html><body><h1>Down for maintenance</h1></body></html>")
                request.url = Url(r=request, args=request.args,
                                       extension=request.raw_extension)

                # ##################################################
                # build missing folders
                # ##################################################

                create_missing_app_folders(request)

                # ##################################################
                # get the GET and POST data
                # ##################################################

                parse_get_post_vars(request, environ)

                # ##################################################
                # expose wsgi hooks for convenience
                # ##################################################

                request.wsgi.environ = environ_aux(environ,request)
                request.wsgi.start_response = \
                    lambda status='200', headers=[], \
                    exec_info=None, response=response: \
                    start_response_aux(status, headers, exec_info, response)
                request.wsgi.middleware = \
                    lambda *a: middleware_aux(request,response,*a)

                # ##################################################
                # load cookies
                # ##################################################

                if request.env.http_cookie:
                    try:
                        request.cookies.load(request.env.http_cookie)
                    except Cookie.CookieError, e:
                        pass # invalid cookies

                # ##################################################
                # try load session or create new session file
                # ##################################################

                session.connect(request, response)

                # ##################################################
                # set no-cache headers
                # ##################################################

                response.headers['Content-Type'] = \
                    contenttype('.'+request.extension)
                response.headers['Cache-Control'] = \
                    'no-store, no-cache, must-revalidate, post-check=0, pre-check=0'
                response.headers['Expires'] = \
                    time.strftime('%a, %d %b %Y %H:%M:%S GMT', time.gmtime())
                response.headers['Pragma'] = 'no-cache'

                # ##################################################
                # run controller
                # ##################################################

                serve_controller(request, response, session)

            except HTTP, http_response:
                if static_file:
                    return http_response.to(responder)

                if request.body:
                    request.body.close()

                # ##################################################
                # on success, try store session in database
                # ##################################################
                session._try_store_in_db(request, response)

                # ##################################################
                # on success, commit database
                # ##################################################

                if response.do_not_commit is True:
                    BaseAdapter.close_all_instances(None)
                elif response._custom_commit:
                    response._custom_commit()
                else:
                    BaseAdapter.close_all_instances('commit')

                # ##################################################
                # if session not in db try store session on filesystem
                # this must be done after trying to commit database!
                # ##################################################

                session._try_store_on_disk(request, response)

                # ##################################################
                # store cookies in headers
                # ##################################################

                if request.cid:

                    if response.flash and not 'web2py-component-flash' in http_response.headers:
                        http_response.headers['web2py-component-flash'] = \
                            str(response.flash).replace('\n','')
                    if response.js and not 'web2py-component-command' in http_response.headers:
                        http_response.headers['web2py-component-command'] = \
                            response.js.replace('\n','')
                if session._forget and \
                        response.session_id_name in response.cookies:
                    del response.cookies[response.session_id_name]
                elif session._secure:
                    response.cookies[response.session_id_name]['secure'] = True
                if len(response.cookies)>0:
                    http_response.headers['Set-Cookie'] = \
                        [str(cookie)[11:] for cookie in response.cookies.values()]
                ticket=None

            except RestrictedError, e:

                if request.body:
                    request.body.close()

                # ##################################################
                # on application error, rollback database
                # ##################################################

                ticket = e.log(request) or 'unknown'
                if response._custom_rollback:
                    response._custom_rollback()
                else:
                    BaseAdapter.close_all_instances('rollback')

                http_response = \
                    HTTP(500, rewrite.thread.routes.error_message_ticket % \
                             dict(ticket=ticket),
                         web2py_error='ticket %s' % ticket)
Ejemplo n.º 54
0
            # ##################################################
            # on application error, rollback database
            # ##################################################

            try:
                if response._custom_rollback:
                    response._custom_rollback()
                else:
                    BaseAdapter.close_all_instances('rollback')
            except:
                pass
            e = RestrictedError('Framework', '', '', locals())
            ticket = e.log(request) or 'unrecoverable'
            http_response = \
                HTTP(500, rwthread.routes.error_message_ticket
                     % dict(ticket=ticket),
                     web2py_error='ticket %s' % ticket)

    finally:
        if response and hasattr(response, 'session_file') \
                and response.session_file:
            response.session_file.close()

    session._unlock(response)
    http_response, new_environ = try_rewrite_on_error(
        http_response, request, environ, ticket)
    if not http_response:
        return wsgibase(new_environ, responder)
    if global_settings.web2py_crontype == 'soft':
        newcron.softcron(global_settings.applications_parent).start()
    return http_response.to(responder, env=env)
Ejemplo n.º 55
0
	def _copyReferences(newInv, req, objCount, inv, ro, dataloggers, sensors,
	                    maxObj):
		"""Copy datalogger, sensor and response references into *newInv*.

		Only dataloggers/sensors whose publicID appears in the *dataloggers*
		and *sensors* sets are copied; response objects are copied only when
		``ro.includeRes`` is set. Returns the number of copied decimations,
		or None when the client disconnected or the object limit *maxObj*
		was exceeded.

		NOTE(review): no ``self`` parameter -- presumably declared as a
		@staticmethod above this view; confirm against the full file.
		"""
		responses = set()
		decCount = 0

		# datalogger
		for i in xrange(inv.dataloggerCount()):
			if req._disconnected: return None
			logger = inv.datalogger(i)
			if logger.publicID() not in dataloggers:
				continue
			newLogger = DataModel.Datalogger(logger)
			newInv.add(newLogger)
			# decimations are only needed for responses
			if ro.includeRes:
				for j in xrange(logger.decimationCount()):
					decimation = logger.decimation(j)
					newLogger.add(DataModel.Decimation(decimation))

					# collect response ids
					filterStr = ""
					try: filterStr = decimation.analogueFilterChain().content() + " "
					except ValueError: pass
					try: filterStr += decimation.digitalFilterChain().content()
					except ValueError: pass
					for resp in filterStr.split():
						responses.add(resp)
				decCount += newLogger.decimationCount()

		# enforce the object limit before copying the (potentially large)
		# response set
		objCount += newInv.dataloggerCount() + decCount
		resCount = len(responses)
		if not HTTP.checkObjects(req, objCount + resCount, maxObj):
			return None

		# sensor
		for i in xrange(inv.sensorCount()):
			if req._disconnected: return None
			sensor = inv.sensor(i)
			if sensor.publicID() not in sensors:
				continue
			newSensor = DataModel.Sensor(sensor)
			newInv.add(newSensor)
			resp = newSensor.response()
			if resp:
				if ro.includeRes:
					responses.add(resp)
				else:
					# no responses: remove response reference to avoid missing
					# response warning of exporter
					newSensor.setResponse("")

		objCount += newInv.sensorCount()
		resCount = len(responses)
		if not HTTP.checkObjects(req, objCount + resCount, maxObj):
			return None

		# responses
		if ro.includeRes:
			if req._disconnected: return None
			for i in xrange(inv.responsePAZCount()):
				resp = inv.responsePAZ(i)
				if resp.publicID() in responses:
					newInv.add(DataModel.ResponsePAZ(resp))
			if req._disconnected: return None
			for i in xrange(inv.responseFIRCount()):
				resp = inv.responseFIR(i)
				if resp.publicID() in responses:
					newInv.add(DataModel.ResponseFIR(resp))
			if req._disconnected: return None
			for i in xrange(inv.responsePolynomialCount()):
				resp = inv.responsePolynomial(i)
				if resp.publicID() in responses:
					newInv.add(DataModel.ResponsePolynomial(resp))
			if req._disconnected: return None
			for i in xrange(inv.responseFAPCount()):
				resp = inv.responseFAP(i)
				if resp.publicID() in responses:
					newInv.add(DataModel.ResponseFAP(resp))
			if req._disconnected: return None
			for i in xrange(inv.responseIIRCount()):
				resp = inv.responseIIR(i)
				if resp.publicID() in responses:
					newInv.add(DataModel.ResponseIIR(resp))

		return decCount
Ejemplo n.º 56
0
class ConnectionManager(object):
    """Tracks Emby servers/credentials and negotiates connections for a client."""

    # Minimum server version this client supports (compared via version compare).
    min_server_version = "3.0.5930"
    # Version reported by the connected server; starts at the minimum.
    server_version = min_server_version
    # Cached Emby Connect user info; cleared to None on clear_data().
    user = {}
    # Id of the currently selected server, if any.
    server_id = None
    # Default HTTP request timeout, in seconds.
    timeout = 10

    def __init__(self, client):
        """Bind the manager to *client* and set up credential and HTTP helpers."""
        LOG.debug("ConnectionManager initializing...")

        self.client = client
        self.config = client.config
        self.http = HTTP(client)
        self.credentials = Credentials()

    def __shortcuts__(self, key):

        if key == "clear":
            return self.clear_data
        elif key == "servers":
            return self.get_available_servers()
        elif key in ("reconnect", "refresh"):
            return self.connect
        elif key == "login":
            return self.login
        elif key == "login-connect":
            return self.login_to_connect
        elif key == "connect-user":
            return self.connect_user()
        elif key == "connect-token":
            return self.connect_token()
        elif key == "connect-user-id":
            return self.connect_user_id()
        elif key == "server":
            return self.get_server_info(self.server_id)
        elif key == "server-id":
            return self.server_id
        elif key == "server-version":
            return self.server_version
        elif key == "user-id":
            return self.emby_user_id()
        elif key == "public-users":
            return self.get_public_users()
        elif key == "token":
            return self.emby_token()
        elif key == "manual-server":
            return self.connect_to_address
        elif key == "connect-to-server":
            return self.connect_to_server
        elif key == "server-address":
            server = self.get_server_info(self.server_id)
            return get_server_address(server, server['LastConnectionMode'])
        elif key == "revoke-token":
            return self.revoke_token()
        elif key == "server-mode":
            server = self.get_server_info(self.server_id)
            return server['LastConnectionMode']

        return

    def __getitem__(self, key):
        return self.__shortcuts__(key)

    def clear_data(self):
        """Drop the signed-in user, all saved servers and Connect auth state."""
        LOG.info("connection manager clearing data")

        self.user = None
        credentials = self.credentials.get_credentials()
        credentials.update({
            'ConnectAccessToken': None,
            'ConnectUserId': None,
            'Servers': [],
        })
        # calling get_credentials with an argument persists the blob
        self.credentials.get_credentials(credentials)

        self.config.auth(None, None)

    def revoke_token(self):
        """Forget the current server's access token and persist the change."""
        LOG.info("revoking token")

        self['server']['AccessToken'] = None
        # re-saving the credentials blob persists the cleared token
        self.credentials.get_credentials(self.credentials.get_credentials())
        self.config['auth.token'] = None

    def get_available_servers(self):
        """Collect all known servers: saved, LAN-discovered and Emby Connect.

        Merges the three sources into the saved credentials, sorts them by
        last access (newest first) and persists the merged list. Returns an
        empty list when no source yields a server.
        """
        LOG.info("Begin getAvailableServers")

        # Clone the credentials
        credentials = self.credentials.get_credentials()
        connect_servers = self._get_connect_servers(credentials)
        found_servers = self._find_servers(self._server_discovery())

        if not connect_servers and not found_servers and not credentials['Servers']: # back out right away, no point in continuing
            LOG.info("Found no servers")

            return list()

        servers = list(credentials['Servers'])
        self._merge_servers(servers, found_servers)
        self._merge_servers(servers, connect_servers)

        servers = self._filter_servers(servers, connect_servers)

        # primary sort uses datetime.strptime; the fallback covers
        # environments where strptime on datetime raises TypeError
        try:
            servers.sort(key=lambda x: datetime.strptime(x['DateLastAccessed'], "%Y-%m-%dT%H:%M:%SZ"), reverse=True)
        except TypeError:
            servers.sort(key=lambda x: datetime(*(time.strptime(x['DateLastAccessed'], "%Y-%m-%dT%H:%M:%SZ")[0:6])), reverse=True)

        credentials['Servers'] = servers
        self.credentials.get_credentials(credentials)

        return servers

    def login_to_connect(self, username, password):

        if not username:
            raise AttributeError("username cannot be empty")

        if not password:
            raise AttributeError("password cannot be empty")

        try:
            result = self._request_url({
                'type': "POST",
                'url': self.get_connect_url("user/authenticate"),
                'data': {
                    'nameOrEmail': username,
                    'password': self._get_connect_password_hash(password)
                },
                'dataType': "json"
            })
        except Exception as error: # Failed to login
            LOG.error(error)
            return False
        else:
            credentials = self.credentials.get_credentials()
            credentials['ConnectAccessToken'] = result['AccessToken']
            credentials['ConnectUserId'] = result['User']['Id']
            credentials['ConnectUser'] = result['User']['DisplayName']
            self.credentials.get_credentials(credentials)
            # Signed in
            self._on_connect_user_signin(result['User'])

        return result

    def login(self, server, username, password=None, clear=True, options={}):

        if not username:
            raise AttributeError("username cannot be empty")

        if not server:
            raise AttributeError("server cannot be empty")

        try:
            request = {
                'type': "POST",
                'url': self.get_emby_url(server, "Users/AuthenticateByName"),
                'json': {
                    'username': username,
                    'password': hashlib.sha1(password or "").hexdigest(),
                }
            }
            if clear:
                request['json']['pw'] = password or ""

            result = self._request_url(request, False)
        except Exception as error: # Failed to login
            LOG.error(error)
            return False
        else:
            self._on_authenticated(result, options)
        
        return result

    def connect_to_address(self, address, options=None):
        """Resolve and test a manually entered server *address*.

        Fix: ``options`` no longer defaults to a shared mutable dict.

        Returns the connected server dict on success, False for an empty
        address, or the failure state when the address cannot be reached.
        """
        if not address:
            return False

        options = options or {}
        address = self._normalize_address(address)

        def _on_fail():
            LOG.error("connectToAddress %s failed", address)
            return self._resolve_failure()

        try:
            public_info = self._try_connect(address, options=options)
        except Exception:
            return _on_fail()

        LOG.info("connectToAddress %s succeeded", address)
        server = {
            'ManualAddress': address,
            'LastConnectionMode': CONNECTION_MODE['Manual']
        }
        self._update_server_info(server, public_info)
        server = self.connect_to_server(server, options)
        if server is False:
            return _on_fail()

        return server

    def connect_to_server(self, server, options=None):
        """Try every known connection mode for *server* in priority order.

        Fix: ``options`` no longer defaults to a shared mutable dict; the
        repeated "append if missing" chain is collapsed into one loop.

        The mode that worked last time (when an access token is still held)
        is tried first, then Manual, Local and Remote. Returns the result of
        the first successful mode test, or the failure state.
        """
        LOG.info("begin connectToServer")

        options = options or {}
        tests = []

        # prefer the mode that worked last time, when we still hold a token
        if server.get('LastConnectionMode') is not None and server.get('AccessToken'):
            tests.append(server['LastConnectionMode'])

        for mode in (CONNECTION_MODE['Manual'], CONNECTION_MODE['Local'],
                     CONNECTION_MODE['Remote']):
            if mode not in tests:
                tests.append(mode)

        # TODO: begin to wake server

        LOG.info("beginning connection tests")
        return self._test_next_connection_mode(tests, 0, server, options)

    def connect(self, options=None):
        """Connect to the best available server.

        Fix: ``options`` no longer defaults to a shared mutable dict.
        """
        LOG.info("Begin connect")
        return self._connect_to_servers(self.get_available_servers(), options or {})

    def connect_user(self):
        return self.user

    def connect_user_id(self):
        return self.credentials.get_credentials().get('ConnectUserId')

    def connect_token(self):
        return self.credentials.get_credentials().get('ConnectAccessToken')

    def emby_user_id(self):
        return self.get_server_info(self.server_id)['UserId']

    def emby_token(self):
        return self.get_server_info(self.server_id)['AccessToken']

    def get_server_info(self, server_id):

        if server_id is None:
            LOG.info("server_id is empty")
            return {}

        servers = self.credentials.get_credentials()['Servers']
        
        for server in servers:
            if server['Id'] == server_id:
                return server

    def get_public_users(self):
        return self.client.emby.get_public_users()

    def get_connect_url(self, handler):
        return "https://connect.emby.media/service/%s" % handler

    def get_emby_url(self, base, handler):
        return "%s/emby/%s" % (base, handler)

    def _request_url(self, request, headers=True):

        request['timeout'] = request.get('timeout') or self.timeout
        if headers:
            self._get_headers(request)

        try:
            return self.http.request(request)
        except Exception as error:
            LOG.error(error)
            raise

    def _add_app_info(self):
        return "%s/%s" % (self.config['app.name'], self.config['app.version'])

    def _get_headers(self, request):
        
        headers = request.setdefault('headers', {})

        if request.get('dataType') == "json":
            headers['Accept'] = "application/json"
            request.pop('dataType')

        headers['X-Application'] = self._add_app_info()
        headers['Content-type'] = request.get('contentType',
            'application/x-www-form-urlencoded; charset=UTF-8')

    def _connect_to_servers(self, servers, options):
        """Pick and connect to one of *servers*, falling back to selection.

        With exactly one server its connection result is returned directly;
        otherwise the most recently used server is tried first. When nothing
        signs in, a state dict is returned so the caller can prompt for
        Connect sign-in or server selection.
        """
        LOG.info("Begin connectToServers, with %s servers", len(servers))
        result = {}

        if len(servers) == 1:
            result = self.connect_to_server(servers[0], options)

            """
            if result['State'] == CONNECTION_STATE['Unavailable']:
                result['State'] = CONNECTION_STATE['ConnectSignIn'] if result['ConnectUser'] is None else CONNECTION_STATE['ServerSelection']
            """

            LOG.debug("resolving connectToServers with result['State']: %s", result)

            return result

        first_server = self._get_last_used_server()
        # See if we have any saved credentials and can auto sign in
        # (the epoch-like sentinel date marks a server never accessed)
        if first_server is not None and first_server['DateLastAccessed'] != "2001-01-01T00:00:00Z":
            result = self.connect_to_server(first_server, options)

            if result['State'] in (CONNECTION_STATE['SignedIn'], CONNECTION_STATE['Unavailable']):
                return result

        # Return loaded credentials if exists
        credentials = self.credentials.get_credentials()
        self._ensure_connect_user(credentials)

        return {
            'Servers': servers,
            'State': CONNECTION_STATE['ConnectSignIn'] if (not len(servers) and not self.connect_user()) else (result.get('State') or CONNECTION_STATE['ServerSelection']),
            'ConnectUser': self.connect_user()
        }

    def _try_connect(self, url, timeout=None, options=None):
        """GET the server's public system info as a connectivity probe.

        Fix: ``options`` no longer defaults to a shared mutable dict.

        Returns the parsed public-info payload; any transport error
        propagates from ``_request_url``.
        """
        url = self.get_emby_url(url, "system/info/public")
        LOG.info("tryConnect url: %s", url)

        return self._request_url({
            'type': "GET",
            'url': url,
            'dataType': "json",
            'timeout': timeout,
            'verify': (options or {}).get('ssl'),
            'retry': False
        })

    def _test_next_connection_mode(self, tests, index, server, options):
        """Recursively probe *server* over each mode listed in *tests*.

        Skips redundant addresses, applies a shorter timeout on local-ish
        modes, and on success validates the server version before finishing
        the connection. Returns a state dict when every mode fails or the
        server is too old.
        """
        if index >= len(tests):
            LOG.info("Tested all connection modes. Failing server connection.")
            return self._resolve_failure()

        mode = tests[index]
        address = get_server_address(server, mode)
        enable_retry = False
        skip_test = False
        timeout = self.timeout

        LOG.info("testing connection mode %s with server %s", mode, server.get('Name'))

        if mode == CONNECTION_MODE['Local']:
            enable_retry = True
            timeout = 8

            if self._string_equals_ignore_case(address, server.get('ManualAddress')):
                LOG.info("skipping LocalAddress test because it is the same as ManualAddress")
                skip_test = True

        elif mode == CONNECTION_MODE['Manual']:
            if self._string_equals_ignore_case(address, server.get('LocalAddress')):
                enable_retry = True
                timeout = 8

        if skip_test or not address:
            LOG.info("skipping test at index: %s", index)
            return self._test_next_connection_mode(tests, index + 1, server, options)

        try:
            result = self._try_connect(address, timeout, options)
        
        except Exception:
            LOG.error("test failed for connection mode %s with server %s", mode, server.get('Name'))

            # NOTE(review): both branches below are currently identical --
            # the retry path is a placeholder until wake-on-lan is added
            if enable_retry:
                # TODO: wake on lan and retry
                return self._test_next_connection_mode(tests, index + 1, server, options)
            else:
                return self._test_next_connection_mode(tests, index + 1, server, options)
        else:
            if self._compare_versions(self._get_min_server_version(), result['Version']) == 1:
                LOG.warn("minServerVersion requirement not met. Server version: %s", result['Version'])
                return {
                    'State': CONNECTION_STATE['ServerUpdateNeeded'],
                    'Servers': [server]
                }
            else:
                LOG.info("calling onSuccessfulConnection with connection mode %s with server %s", mode, server.get('Name'))
                return self._on_successful_connection(server, result, mode, options)

    def _on_successful_connection(self, server, system_info, connection_mode, options):

        credentials = self.credentials.get_credentials()

        if credentials.get('ConnectAccessToken') and options.get('enableAutoLogin') is not False:
            
            if self._ensure_connect_user(credentials) is not False:

                if server.get('ExchangeToken'):
                    self._add_authentication_info_from_connect(server, connection_mode, credentials, options)

        return self._after_connect_validated(server, credentials, system_info, connection_mode, True, options)

    def _resolve_failure(self):
        """Standard failure payload: unavailable state plus current Connect user."""
        failure = {
            'State': CONNECTION_STATE['Unavailable'],
            'ConnectUser': self.connect_user()
        }
        return failure

    def _get_min_server_version(self, val=None):

        if val is not None:
            self.min_server_version = val

        return self.min_server_version

    def _compare_versions(self, a, b):
        """Three-way compare of two version strings.

        Returns -1 when a is smaller, 1 when a is larger, 0 when equal.
        """
        left = LooseVersion(a)
        right = LooseVersion(b)

        if left == right:
            return 0
        return -1 if left < right else 1

    def _string_equals_ignore_case(self, str1, str2):
        return (str1 or "").lower() == (str2 or "").lower()

    def _get_connect_user(self, user_id, access_token):

        if not user_id:
            raise AttributeError("null userId")

        if not access_token:
            raise AttributeError("null accessToken")

        return self._request_url({
            'type': "GET",
            'url': self.get_connect_url('user?id=%s' % user_id),
            'dataType': "json",
            'headers': {
                'X-Connect-UserToken': access_token
            }
        })

    def _server_discovery(self):
        """Discover Emby servers on the LAN via UDP broadcast.

        Sends a discovery probe to port 7359 and collects JSON replies until
        the 1-second socket timeout fires. Returns a (possibly empty) list
        of server-info dicts.

        NOTE(review): MESSAGE is sent as a str -- Python 2 semantics; a
        Python 3 port would need bytes here. Confirm the file's target
        interpreter.
        """
        MULTI_GROUP = ("<broadcast>", 7359)
        MESSAGE = "who is EmbyServer?"
        
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        sock.settimeout(1.0) # This controls the socket.timeout exception

        sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 20)
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
        sock.setsockopt(socket.SOL_IP, socket.IP_MULTICAST_LOOP, 1)
        # NOTE(review): SO_REUSEADDR is normally set at SOL_SOCKET level,
        # not IPPROTO_IP -- verify this option actually takes effect
        sock.setsockopt(socket.IPPROTO_IP, socket.SO_REUSEADDR, 1)
        
        LOG.debug("MultiGroup      : %s", str(MULTI_GROUP))
        LOG.debug("Sending UDP Data: %s", MESSAGE)

        servers = []

        try:
            sock.sendto(MESSAGE, MULTI_GROUP)
        except Exception as error:
            LOG.error(error)
            return servers

        while True:
            try:
                data, addr = sock.recvfrom(1024) # buffer size
                servers.append(json.loads(data))
            
            except socket.timeout:
                LOG.info("Found Servers: %s", servers)
                return servers
            
            except Exception as e:
                LOG.error("Error trying to find servers: %s", e)
                return servers

    def _get_connect_servers(self, credentials):
        """Return the servers linked to the given Emby Connect account.

        Yields an empty list when the credentials carry no connect user
        id or access token.
        """
        LOG.info("Begin getConnectServers")

        servers = []

        token = credentials.get('ConnectAccessToken')
        user_id = credentials.get('ConnectUserId')
        if not token or not user_id:
            return servers

        response = self._request_url({
            'type': "GET",
            'url': self.get_connect_url("servers?userId=%s" % user_id),
            'dataType': "json",
            'headers': {
                'X-Connect-UserToken': token
            }
        })
        for entry in response:
            servers.append({
                'ExchangeToken': entry['AccessKey'],
                'ConnectServerId': entry['Id'],
                'Id': entry['SystemId'],
                'Name': entry['Name'],
                'RemoteAddress': entry['Url'],
                'LocalAddress': entry['LocalAddress'],
                'UserLinkType': "Guest" if entry['UserType'].lower() == "guest" else "LinkedUser",
            })

        return servers

    def _get_last_used_server(self):

        servers = self.credentials.get_credentials()['Servers']

        if not len(servers):
            return

        try:
            servers.sort(key=lambda x: datetime.strptime(x['DateLastAccessed'], "%Y-%m-%dT%H:%M:%SZ"), reverse=True)
        except TypeError:
            servers.sort(key=lambda x: datetime(*(time.strptime(x['DateLastAccessed'], "%Y-%m-%dT%H:%M:%SZ")[0:6])), reverse=True)

        return servers[0]

    def _merge_servers(self, list1, list2):

        for i in range(0, len(list2), 1):
            try:
                self.credentials.add_update_server(list1, list2[i])
            except KeyError:
                continue

        return list1

    def _find_servers(self, found_servers):
        """Convert raw UDP discovery payloads into server info dicts."""
        servers = []

        for found in found_servers:
            manual = self._convert_endpoint_address_to_manual_address(found)

            info = {
                'Id': found['Id'],
                'LocalAddress': manual or found['Address'],
                'Name': found['Name']
            } #TODO
            # NOTE(review): 'ManualAddress' is never stored in info above,
            # so this always selects the Local mode — confirm intent.
            if info.get('ManualAddress'):
                info['LastConnectionMode'] = CONNECTION_MODE['Manual']
            else:
                info['LastConnectionMode'] = CONNECTION_MODE['Local']

            servers.append(info)

        return servers

    def _filter_servers(self, servers, connect_servers):
        
        filtered = list()
        for server in servers:
            if server.get('ExchangeToken') is None:
                # It's not a connect server, so assume it's still valid
                filtered.append(server)
                continue

            for connect_server in connect_servers:
                if server['Id'] == connect_server['Id']:
                    filtered.append(server)
                    break

        return filtered

    def _convert_endpoint_address_to_manual_address(self, info):
        
        if info.get('Address') and info.get('EndpointAddress'):
            address = info['EndpointAddress'].split(':')[0]

            # Determine the port, if any
            parts = info['Address'].split(':')
            if len(parts) > 1:
                port_string = parts[len(parts)-1]

                try:
                    address += ":%s" % int(port_string)
                    return self._normalize_address(address)
                except ValueError:
                    pass

        return None

    def _normalize_address(self, address):
        # Attempt to correct bad input
        address = address.strip()
        address = address.lower()

        if 'http' not in address:
            address = "http://%s" % address

        return address

    def _get_connect_password_hash(self, password):

        password = self._clean_connect_password(password)
        return hashlib.md5(password).hexdigest()

    def _clean_connect_password(self, password):

        password = password or ""

        password = password.replace("&", '&amp;')
        password = password.replace("/", '&#092;')
        password = password.replace("!", '&#33;')
        password = password.replace("$", '&#036;')
        password = password.replace("\"", '&quot;')
        password = password.replace("<", '&lt;')
        password = password.replace(">", '&gt;')
        password = password.replace("'", '&#39;')

        return password

    def _ensure_connect_user(self, credentials):
        """Refresh self.user so it matches the connect user in *credentials*.

        Returns False when fetching the connect profile fails; otherwise
        returns None.
        """
        if self.user and self.user['Id'] == credentials['ConnectUserId']:
            return

        if credentials.get('ConnectUserId') and credentials.get('ConnectAccessToken'):
            self.user = None

            try:
                profile = self._get_connect_user(credentials['ConnectUserId'],
                                                 credentials['ConnectAccessToken'])
                self._on_connect_user_signin(profile)
            except Exception:
                return False

	def _on_connect_user_signin(self, user):
		"""Remember the signed-in Emby Connect user profile and log it."""
		self.user = user
		LOG.info("connectusersignedin %s", user)

    def _save_user_info_into_credentials(self, server, user):

        info = {
            'Id': user['Id'],
            'IsSignedInOffline': True
        }
        self.credentials.add_update_user(server, info)

    def _add_authentication_info_from_connect(self, server, connection_mode, credentials, options=None):
        """Exchange the Connect token for a local server access token.

        On success the server dict gains 'UserId'/'AccessToken' and the
        exchange response is returned; on failure both are cleared and
        False is returned.

        Raises KeyError when the exchange token or connect user id is
        missing.
        """
        # avoid the mutable default argument anti-pattern
        options = options or {}

        if not server.get('ExchangeToken'):
            raise KeyError("server['ExchangeToken'] cannot be null")

        if not credentials.get('ConnectUserId'):
            raise KeyError("credentials['ConnectUserId'] cannot be null")

        # Client identification required by the Emby authorization scheme.
        # (Renamed from `auth`, which the original reused for the response.)
        auth_header =  "MediaBrowser "
        auth_header += "Client=%s, " % self.config['app.name']
        auth_header += "Device=%s, " % self.config['app.device_name']
        auth_header += "DeviceId=%s, " % self.config['app.device_id']
        auth_header += "Version=%s " % self.config['app.version']

        try:
            response = self._request_url({
                'url': self.get_emby_url(get_server_address(server, connection_mode), "Connect/Exchange"),
                'type': "GET",
                'dataType': "json",
                'verify': options.get('ssl'),
                'params': {
                    'ConnectUserId': credentials['ConnectUserId']
                },
                'headers': {
                    'X-MediaBrowser-Token': server['ExchangeToken'],
                    'X-Emby-Authorization': auth_header
                }
            })
        except Exception:
            # Token exchange failed: clear any stale auth data.
            server['UserId'] = None
            server['AccessToken'] = None
            return False
        else:
            server['UserId'] = response['LocalUserId']
            server['AccessToken'] = response['AccessToken']
            return response

    def _after_connect_validated(self, server, connection_mode, credentials=None, system_info=None, verify_authentication=None, options=None):
        """Placeholder signature guard — see body below."""

    def _validate_authentication(self, server, connection_mode, options={}):
        """Verify the stored access token against System/Info.

        On failure the server's 'UserId'/'AccessToken' are cleared and
        False is returned; on success the cached server info is refreshed
        and None is returned (callers test ``is not False``).
        """
        try:
            info = self._request_url({
                'type': "GET",
                'url': self.get_emby_url(get_server_address(server, connection_mode), "System/Info"),
                'verify': options.get('ssl'),
                'dataType': "json",
                'headers': {
                    'X-MediaBrowser-Token': server['AccessToken']
                }
            })
            self._update_server_info(server, info)
        except Exception:
            # Token rejected or server unreachable: drop stale auth data.
            server['UserId'] = None
            server['AccessToken'] = None
            return False

    def _update_server_info(self, server, system_info):

        if server is None or system_info is None:
            return

        server['Name'] = system_info['ServerName']
        server['Id'] = system_info['Id']

        if system_info.get('LocalAddress'):
            server['LocalAddress'] = system_info['LocalAddress']
        if system_info.get('WanAddress'):
            server['RemoteAddress'] = system_info['WanAddress']
        if 'MacAddress' in system_info:
            server['WakeOnLanInfos'] = [{'MacAddress': system_info['MacAddress']}]

    def _on_authenticated(self, result, options={}):

        credentials = self.credentials.get_credentials()

        self.config['auth.user_id'] = result['User']['Id']
        self.config['auth.token'] = result['AccessToken']

        for server in credentials['Servers']:
            if server['Id'] == result['ServerId']:
                found_server = server
                break
        else: return # No server found

        if options.get('updateDateLastAccessed') is not False:
            found_server['DateLastAccessed'] = datetime.now().strftime('%Y-%m-%dT%H:%M:%SZ')

        found_server['UserId'] = result['User']['Id']
        found_server['AccessToken'] = result['AccessToken']

        self.credentials.add_update_server(credentials['Servers'], found_server)
        self._save_user_info_into_credentials(found_server, result['User'])
        self.credentials.get_credentials(credentials)
# Ejemplo n.º 57
# 0
	def _processRequestExp(self, req, ro, dbq, exp, ep):
		"""Write the event parameters *ep* to the request via exporter *exp*.

		Loads related objects (descriptions, comments, origins,
		magnitudes, arrivals, picks) from the database query *dbq*
		according to the request options *ro*, enforcing the configured
		maximum object count.

		Returns True on success, False when the client disconnected or
		the object limit was exceeded.
		"""
		objCount = ep.eventCount()
		maxObj = Application.Instance()._queryObjects

		if not HTTP.checkObjects(req, objCount, maxObj):
			return False

		pickIDs = set()
		if ro.picks is None:
			ro.picks = True

		# add related information
		for iEvent in xrange(ep.eventCount()):
			if req._disconnected:
				return False
			e = ep.event(iEvent)
			if self._hideAuthor:
				self._removeAuthor(e)

			# eventDescriptions and comments
			objCount += dbq.loadEventDescriptions(e)
			if ro.comments:
				# fixed typo: was self._loadComment, inconsistent with the
				# _loadComments helper used at every other call site
				objCount += self._loadComments(dbq, e)
			if not HTTP.checkObjects(req, objCount, maxObj):
				return False

			# origin references: either all or preferred only
			dbIter = dbq.getObjects(e, DataModel.OriginReference.TypeInfo())
			for obj in dbIter:
				oRef = DataModel.OriginReference.Cast(obj)
				if oRef is None:
					continue
				if ro.allOrigins:
					e.add(oRef)
				elif oRef.originID() == e.preferredOriginID():
					e.add(oRef)
					dbIter.close()
				# TODO: if focal mechanisms are added make sure derived
				# origin is loaded

			objCount += e.originReferenceCount()

			if not HTTP.checkObjects(req, objCount, maxObj):
				return False

			# TODO: add focal mechanisms

			# origins
			for iORef in xrange(e.originReferenceCount()):
				if req._disconnected:
					return False
				oID = e.originReference(iORef).originID()
				obj = dbq.getObject(DataModel.Origin.TypeInfo(), oID)
				o = DataModel.Origin.Cast(obj)
				if o is None:
					continue

				ep.add(o)
				objCount += 1
				if self._hideAuthor:
					self._removeAuthor(o)

				# comments
				if ro.comments:
					objCount += self._loadComments(dbq, o)
				if not HTTP.checkObjects(req, objCount, maxObj):
					return False

				# magnitudes
				dbIter = dbq.getObjects(oID, DataModel.Magnitude.TypeInfo())
				for obj in dbIter:
					mag = DataModel.Magnitude.Cast(obj)
					if mag is None:
						continue
					if ro.allMags:
						o.add(mag)
					elif mag.publicID() == e.preferredMagnitudeID():
						o.add(mag)
						dbIter.close()

					if self._hideAuthor:
						self._removeAuthor(mag)

				objCount += o.magnitudeCount()
				if ro.comments:
					for iMag in xrange(o.magnitudeCount()):
						objCount += self._loadComments(dbq, o.magnitude(iMag))
				if not HTTP.checkObjects(req, objCount, maxObj):
					return False

				# TODO station magnitudes, amplitudes
				# - added pick id for each pick referenced by amplitude

				# arrivals
				if ro.arrivals:
					objCount += dbq.loadArrivals(o)
					# fixed: test the _hideAuthor flag; the original tested
					# the _removeAuthor bound method, which is always truthy
					if self._hideAuthor:
						for iArrival in xrange(o.arrivalCount()):
							self._removeAuthor(o.arrival(iArrival))

					# collect pick IDs if requested
					if ro.picks:
						for iArrival in xrange(o.arrivalCount()):
							pickIDs.add(o.arrival(iArrival).pickID())

				if not HTTP.checkObjects(req, objCount, maxObj):
					return False

		# picks
		if pickIDs:
			objCount += len(pickIDs)
			if not HTTP.checkObjects(req, objCount, maxObj):
				return False

			for pickID in pickIDs:
				obj = dbq.getObject(DataModel.Pick.TypeInfo(), pickID)
				pick = DataModel.Pick.Cast(obj)
				if pick is not None:
					if self._hideAuthor:
						self._removeAuthor(pick)
					if ro.comments:
						objCount += self._loadComments(dbq, pick)
					ep.add(pick)
				if not HTTP.checkObjects(req, objCount, maxObj):
					return False

		# write response
		sink = utils.Sink(req)
		if not exp.write(sink, ep):
			return False
		Logging.debug("%s: returned %i events and %i origins (total " \
		               "objects/bytes: %i/%i)" % (ro.service, ep.eventCount(),
		               ep.originCount(), objCount, sink.written))
		utils.accessLog(req, ro, http.OK, sink.written, None)
		return True