Example #1
def hitcount_image(request):
    size = (250, 50)
    im = Image.new('RGB', size, (25, 25, 25))
    draw = ImageDraw.Draw(im)   # create a drawing object that is
                                # used to draw on the new image
    all_hits_count = str(Hits.objects.count())

    a_day_ago = django.utils.timezone.now() - timedelta(days=1)
    today_hits_count = str(Hits.objects.filter(time__gt=a_day_ago).count())

    address = request.META['REMOTE_ADDR']
    user_agent = request.META['HTTP_USER_AGENT']
    last_visit = Hits.objects.filter(ip=address, user_agent=user_agent).latest()

    (os, browser) = httpagentparser.simple_detect(user_agent)

    lines_to_render = ["All: " + all_hits_count,
                       "Today: " + today_hits_count,
                       "Your last visit: " + formats.date_format(timezone.localtime(last_visit.time), "SHORT_DATETIME_FORMAT"),
                       "Your browser: " + browser]
    white_color = (255, 255, 255)
    for lineIdx in range(0, len(lines_to_render)):
        text_pos = (0, 12 * lineIdx)
        draw.text(text_pos, lines_to_render[lineIdx], white_color)
    del draw

    response = HttpResponse(content_type="image/png")
    im.save(response, 'PNG')
    return response
Example #2
def worker(line):
    global count, line_count, reader_queue, writer_queue

    # line = reader_queue.get()
    # if not line:
    # time.sleep(0.1)
    # continue
    line_count = line_count + 1
    # > 	line = myfile.readline()
    parts = line.split()
    if not (parts[3] == "4069"):
        return
        # output.write(os.linesep)

    count += 1
    agent = " ".join(parts[14:-1])
    # parsed_ua = uas_parser.parse(agent,entire_url='ua_icon,os_icon')
    # parsed_ua = parsed_ua.get('os_family',"")
    parsed_ua = "-".join(httpagentparser.simple_detect(agent))
    # print parsed_ua
    # exit()
    hashed_agent = hashlib.md5(agent).hexdigest()
    # agents[hashed_agent] = agent
    other = parts[:14]
    # writer_queue.put(" ".join( [parts[0], parsed_ua ,parts[2], hashed_agent,os.linesep] ))
    output.write(" ".join([parts[0], parsed_ua, parts[2], hashed_agent, os.linesep]))
    return
Example #3
	def do_authhead(self):
		"""
			Send authentication headers to end user.
		"""
		# Load the configuration file.
		self.config.read("theatre.config")

		stats_db_location = self.config.get("stats", "location")
		stats_db = dbm.open("%s/theatre_stats" % stats_db_location, "c")

		#auth_key = self.config.get("auth", "auth_key")

		auth_msg = self.config.get("auth", "auth_box_message")

		self.send_response(401)

		#self.send_header("WWW-Authenticate", "Basic %s" % auth_key)
		self.send_header(r"WWW-Authenticate", "Basic realm=\"%s\"" % auth_msg)
		self.send_header("Content-type", "text/html")

		# http://b.leppoc.net/2010/02/12/simple-webserver-in-python/
		headers = self.headers.get_all("User-Agent")
		# print(libuasparser.browser_search(headers))
		# http://shon.github.io/httpagentparser/
		simpleheaders = httpagentparser.simple_detect(headers[0])
		# print(simpleheaders)

		os_header = str(simpleheaders[0].replace(" ", "_"))
		browser_header = str(simpleheaders[1].replace(" ", "_"))

		try:
			stats_db["os_%s" % os_header] = str(int(stats_db["os_%s" % os_header]) + 1)
		except KeyError:
			stats_db["os_%s" % os_header] = "1"

		try:
			stats_db["browser_%s" % browser_header] = \
				str(int(stats_db["browser_%s" % browser_header]) + 1)
		except KeyError:
			stats_db["browser_%s" % browser_header] = "1"

		__clientinfo__ = self.config.get("setup", "clientInfo")

		# Check to see if the user wants detailed logging.
		if __clientinfo__ == "True":
			if __plat__ == "win32":
				c_info = "	[CL] %s, %s" % \
						(os_header.replace("_", " "), browser_header.replace("_", " "))
			else:
				c_info = "	\033[0;36;49m[RQ]\033[0m %s, %s" % (
					os_header.replace("_", " "), browser_header.replace("_", " "))
			print(c_info)
			logging.info("	[CL] %s, %s", os_header.replace("_", " "),
						 browser_header.replace("_", " "))

		# End the headers.
		self.end_headers()
Example #4
def process(files, cnum, delimiter):
    #Iterate over the lines of all files listed in sys.argv[1:], defaulting to sys.stdin if the list is empty.
    #If a filename is '-', it is also replaced by sys.stdin.

    for record in fileinput.input(files):
        record = str(record).strip().split(delimiter)
        print delimiter.join(record[:cnum] +
                             list(httpagentparser.simple_detect(record[cnum])) +
                             record[cnum + 1:])
Example #5
def http_login(request):
    """
    Called after successful basic HTTP authentication; checks whether the user
    has filled in his profile.
    """
    logger.debug('Request full path: %s', request.get_full_path())
    redirection = ""
    response = None

    # Should we redirect after login ?
    if request.GET.has_key('next'):
        qr = request.GET.copy()
        next = qr.pop('next')[0]
        remains = qr.urlencode()
        redirection = '{0}?{1}'.format(next, remains)
        logger.debug('Should redirect to: %s', redirection)

    # Find user
    if settings.DEBUG:
        user = User.objects.get(username='******')
        userauth = authenticate(username=user.username, password='******')
        login(request, userauth)
    else:
        user = User.objects.get(username=request.META['REMOTE_USER'])

    operating_system, browser = httpagentparser.simple_detect(request.META.get('HTTP_USER_AGENT'))
    logger.info('%s logged in using browser %s on %s.', user.username, browser, operating_system)

    # Validation
    if not user.is_active or user.username.startswith('0') or not user.username.endswith('@corp'):
        # Be sure that the login will not be used anymore
        user.is_active = False
        user.save()
        logger.info('User name %s is mal-formed !', user.username)
        return HttpResponse('Invalid account. Please use your <strong>normal</strong> user account and append <em>@corp</em>.')

    # Auto fill profile (if possible)
    user.first_name = request.META.get('AUTHENTICATE_GIVENNAME', '')
    user.last_name = request.META.get('AUTHENTICATE_SN', '')
    user.email = request.META.get('AUTHENTICATE_MAIL', '')
    user.save()

    if (user.first_name or user.last_name) and user.email:
        if redirection:
            response = redirect(redirection)
        else:
            response = redirect('index')
    else:
        # Profile is not completed
        logger.info('User %s has not filled his profile.', user.username)
        if redirection:
            response = redirect('%s?redirect=%s' % (reverse('user_profile'), redirection))
        else:
            response = redirect('user_profile')

    return response
Example #6
    def do_head(self):
        """
            Send headers to end user.
        """
        # Load the configuration file.
        self.config.read("theatre.config")

        stats_db_location = self.config.get("stats", "location")
        stats_db = anydbm.open("%s/theatre_stats.db" % stats_db_location, "c")

        self.send_response(200)

        # Set the content to html.
        self.send_header("Content-type", "text/html; charset=utf-8")

        # http://b.leppoc.net/2010/02/12/simple-webserver-in-python/
        headers = self.headers.getheader("User-Agent")
        # print(libuasparser.browser_search(headers))
        # http://shon.github.io/httpagentparser/
        simpleheaders = httpagentparser.simple_detect(headers)
        # print(simpleheaders)

        os_header = str(simpleheaders[0].replace(" ", "_"))
        browser_header = str(simpleheaders[1].replace(" ", "_"))

        try:
            stats_db["os_%s" % os_header] = str(int(stats_db["os_%s" % os_header]) + 1)
        except KeyError:
            stats_db["os_%s" % os_header] = "1"

        try:
            stats_db["browser_%s" % browser_header] = \
                str(int(stats_db["browser_%s" % browser_header]) + 1)
        except KeyError:
            stats_db["browser_%s" % browser_header] = "1"

        __clientinfo__ = self.config.get("setup", "clientInfo")

        # Check to see if the user wants detailed logging.
        if __clientinfo__ == "True":
            if __plat__ == "win32":
                c_info = "	[CL] %s, %s" % \
                        (os_header.replace("_", " "), browser_header.replace("_", " "))
            else:
                c_info = "	\033[0;36;49m[CL]\033[0m %s, %s" % (
                    os_header.replace("_", " "), browser_header.replace("_", " "))
            print(c_info)
            logging.info("	[CL] %s, %s",
                         os_header.replace("_", " "), browser_header.replace("_", " "))

        self.write_get()
        # End the headers.
        self.end_headers()
Example #7
	def __reduce__(self,parts):
		self.eligible_count += 1
		agent = " ".join(parts[14:-1])
		hashed_agent = hashlib.md5(agent).hexdigest()

		if self.__agent_cache.has_key(hashed_agent):
			parsed_ua = self.__agent_cache.get(hashed_agent,"")
		else:
			parsed_ua = "-".join(httpagentparser.simple_detect(agent))
			self.__agent_cache[hashed_agent] = parsed_ua

		output_line = " ".join([str(self.line_cursor), parts[0], parsed_ua, parts[2], hashed_agent, os.linesep])
		self.__output_queue.put(output_line)
Example #8
    def initial(self, request, *args, **kwargs):
        request_method = request.method
        resource_path = request.path.replace('/api', '')
        user_agent_string = request.META.get('HTTP_USER_AGENT', None)

        # do the logging
        data = request._data
        if user_agent_string:
            logger.info("{0} {1} {2}".format(request_method, resource_path, httpagentparser.simple_detect(user_agent_string)[1]))
        else:
            logger.info("{0} {1} Unknown browser".format(request_method, resource_path))

        super(LoggingMixin, self).initial(request, *args, **kwargs)
Example #9
def enhance_flow(flowDF, ftu):
    """
      Add some useful columns to an HTTP log dataframe.

      Parameters
      ----------
      flowDF : dataframe
          The HTTP log dataframe to enhance
      ftu : collection
          Names of the feature columns to compute (e.g. 'browser_string', 'URIparams')

      Returns
      -------
      flowDF: the dataframe with some columns added

    """

    #create some useful pre-features

    #stringify the port. probably no longer needed since we defensively stringify things elsewhere.
    #flowDF['resp_p_str'] = flowDF['resp_p'].apply(str)

    #extract the browser string from the user agent.
    if 'browser_string' in ftu:
      flowDF['browser_string'] = flowDF['user_agent'].apply(lambda agent: httpagentparser.simple_detect(agent)[1])
    
    def paramsSSV(uri):
        fullUri = 'http://bogus.com/'+uri
        parseResult = parse_qs(urlparse(fullUri).query)
        return ' '.join(parseResult.keys())

    #create a SSV of the URI parameter keys
    if 'URIparams' in ftu:
      flowDF['URIparams'] = flowDF['uri'].apply(paramsSSV)
    
    def tokensSSV(uri):
        fullUri = 'http://bogus.com/'+uri
        parseResult = parse_qs(urlparse(fullUri).query)
        return ' '.join([" ".join(vals) for vals in parseResult.values()])

    #create a SSV of the URI parameter values
    if 'URItokens' in ftu:
      flowDF['URItokens'] = flowDF['uri'].apply(tokensSSV)

    #extract the subdomain from the host
    if 'subdomain' in ftu:
      flowDF['subdomain'] = flowDF['host'].apply(lambda host: tldextract.extract(host)[0])

    #extract the TLD from the host
    if 'tld' in ftu:
      flowDF['tld'] = flowDF['host'].apply(lambda host: tldextract.extract(host)[1])

    return flowDF
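
For context, a minimal usage sketch of enhance_flow as defined above, assuming pandas, httpagentparser and tldextract are importable in the surrounding module; the sample row below is made up:

import pandas as pd

# Hypothetical slice of an HTTP log with the columns enhance_flow expects.
df = pd.DataFrame({
    'user_agent': ['Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 '
                   '(KHTML, like Gecko) Chrome/96.0.4664.110 Safari/537.36'],
    'uri': ['/search?q=test&lang=en'],
    'host': ['www.example.com'],
})

ftu = {'browser_string', 'URIparams', 'URItokens', 'subdomain', 'tld'}
enhanced = enhance_flow(df, ftu)
print(enhanced[['browser_string', 'URIparams', 'subdomain', 'tld']].iloc[0])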
Example #10
    def build_request_data(self, request):
        '''
        Only gets the request IP address.
        If you want to add more properties from the request, override this method.
        @param request:
        '''
        if request.META.has_key('HTTP_X_REAL_IP'):
            ip = request.META['HTTP_X_REAL_IP']
        elif request.META.has_key('REMOTE_ADDR'):
            ip = request.META['REMOTE_ADDR']
        else:
            ip = '127.0.0.1'

        user_agent = request.META['HTTP_USER_AGENT']
        return {'ip': ip, 'user-agent':' '.join(httpagentparser.simple_detect(user_agent))}
Example #11
def create_database_browsers(cur, browsers):
    print "[+] Build `browsers` table"
    cur.execute("DROP TABLE IF EXISTS browsers")
    cur.execute("CREATE TABLE browsers (ip TEXT, ua TEXT, name TEXT, os TEXT)")
    for row in gzip.open(browsers):
        row = row.strip()
        ip, ua = row.split(";", 1)
        if not httpagentparser:
            cur.execute("INSERT INTO browsers (ip, ua) VALUES (?,?)", (ip, ua))
        else:
            os, name = httpagentparser.simple_detect(ua)
            cur.execute("INSERT INTO browsers (ip, ua, name, os) "
                        "   VALUES (?,?,?,?)", (ip, ua, name, os))
    # Remove IPs that used several browsers
    cur.execute("DELETE FROM browsers WHERE ip IN "
                "  (SELECT ip FROM (SELECT COUNT(ua) AS uas, ip FROM browsers GROUP BY ip) WHERE uas > 1);")
Example #12
 def last_login(self):
   info = self.info.get('history')
   if not info:
     return None
   elif len(info) == 1:
     info = info[-1]
   else:
     info = info[-2]
     
   if not info.get('user_agent'):
     return None
   os, browser = simple_detect(info.get('user_agent'))
   return {'timestamp': info.get('timestamp'),
           'os': os, 
           'browser': browser,
           'ip': info.get('remote_addr')}   
Example #13
def index(request):
    p = IP()

    p.addr = request.META['REMOTE_ADDR']
    s = httpagentparser.simple_detect(request.META['HTTP_USER_AGENT'])
    p.os = s[0]       # simple_detect returns (os, browser)
    p.browser = s[1]
    
    context = {
        'addr': p.addr,
        'os':p.os,
        'browser': p.browser
    }

    p.save()

    return render(request, 'basics/index.html', context)
Example #14
def parse(ua: str):
  dresult = httpagentparser.detect(ua)
  platform = 'unknown'
  if 'platform' in dresult and dresult['platform']['name'] is not None:
    platform = dresult['platform']['name']
  elif 'os' in dresult and dresult['os']['name'] is not None:
    platform = dresult['os']['name']
  icon = _PLATFORM_ICON_MAP.get(platform.strip().lower(), 'unknown')
  os, browser = httpagentparser.simple_detect(ua)
  if (ua.lower().find('amd64') >= 0 or ua.lower().find('x86_64') >= 0 or
      ua.lower().find('x64') >= 0):
    os += ' x64'
  return {
    'str': ua,
    'icon': icon,
    'os': os,
    'browser': browser,
  }
Example #15
    def process_request(self, request):
        address = request.META['REMOTE_ADDR']
        user_agent = request.META['HTTP_USER_AGENT']
        # print >> sys.stderr,
        (os, browser) = httpagentparser.simple_detect(user_agent)
        d = timezone.localtime(timezone.now()) - timedelta(days=1)
        recent_hit = Hits.objects.filter(time__gt=d, ip=address, user_agent=user_agent)
        if not recent_hit:
            newhit = Hits(time=timezone.localtime(timezone.now()),
                          ip=address,
                          user_agent=user_agent,
                          os=os,
                          browser=browser)
            newhit.save()
        # else:
        #     print >>sys.stderr, "RECENT" + str(recent_hit[0])

        return None
Example #16
def parse_line(line):

    v = line.split(' - ')
    c = ' - '.join(v[3:])
    j = json.loads(c)
    # print repr(j)
    # sys.exit()

    stat = j[2]

    if 'sid' not in stat or 'ti' not in stat:
        print >> sys.stderr, "skip line: %s" % (line,)
        counters['skip'] += 1
        return

    load_key = '%s-%s' % (stat['sid'], stat['ti'])

    if stat.get('w') == 'load0' :

        cc = gi.country_code_by_addr(stat.get('ip'))
        ua_ = httpagentparser.simple_detect(stat.get('ua'))

        tmp_stats[load_key] = {
            'fp' : stat.get('fp'),
            'tr' : stat.get('tr'),
            'fv' : stat.get('fv'),
            'pt' : stat.get('pt'),
            'cc' : cc,
            'ua' : ua_[1],
            'os' : ua_[0],
            'pc' : -20,
            'dr' : 0,
            'er' : None,
        }

    elif stat.get('w') == 'load1' :
        if load_key in tmp_stats:

            percent = stat.get('p')
            if isinstance(percent, basestring) :
                percent = int(percent)
            if percent > 100:
                percent = 100

            seq = stat.get('seq')
            if isinstance(seq, basestring):
                seq = int(seq)

            if percent >= tmp_stats[load_key]['pc']:
                tmp_stats[load_key]['pc'] = percent
                tmp_stats[load_key]['dr'] = seq * SEQ_INTERVAL

            # loading status (d)etail
            err = stat.get('d')
            if type(err) is dict:
                hasError = False
                er = []
                for k, v in err.iteritems():
                    if 'error' in v:
                        hasError = True
                        e_name = get_str(v, 'name')
                        e_error = get_str(v, 'error')
                        er.append((e_name, v.get('loaded'), v.get('total'), e_error))

                if hasError:
                    tmp_stats[load_key]['er'] = tuple(er)

        else:
            counters['ignore'] += 1

    elif stat.get('w') == 'startLogin' :
        if load_key in tmp_stats:

            seq = stat.get('seq')
            if isinstance(seq, basestring) :
                seq = int(seq)

            tmp_stats[load_key]['pc'] = 200

            t = stat.get('t')
            if type(t) is int:
                tmp_stats[load_key]['dr'] = t
            else:
                tmp_stats[load_key]['dr'] = seq * SEQ_INTERVAL
        else:
            counters['ignore'] += 1

    elif stat.get('w') == 'endLogin' :
        
        if load_key in tmp_stats:
            
            seq = stat.get('seq')
            if isinstance(seq, basestring) :
                seq = int(seq)

            tmp_stats[load_key]['pc'] = 300

            t = stat.get('t')
            if type(t) is int:
                tmp_stats[load_key]['dr'] = t
            else:
                tmp_stats[load_key]['dr'] = seq * SEQ_INTERVAL
            
            sum_ok(tmp_stats[load_key])
            # print json.dumps(tmp_stats[load_key])
            del tmp_stats[load_key]
            counters['finish'] += 1

        else:
            counters['ignore'] += 1


    else:
        counters['ignore'] += 1
Example #17
def http_login(request):
    """
    Called after successful basic HTTP authentication; checks whether the user
    has filled in his profile.
    """
    logger.debug('Request full path: %s', request.get_full_path())
    redirection = ""

    # Should we redirect after login ?
    if "next" in request.GET:
        qr = request.GET.copy()
        next_url = qr.pop('next')[0]
        remains = qr.urlencode()
        redirection = '{0}?{1}'.format(next_url, remains)
        logger.debug('Should redirect to: %s', redirection)

    # Find user
    if settings.DEBUG:
        user = User.objects.get(username='******')
        userauth = authenticate(username=user.username, password='******')
        login(request, userauth)
    else:
        user = User.objects.get(username=request.META['REMOTE_USER'])

    operating_system, browser = httpagentparser.simple_detect(
        request.META.get('HTTP_USER_AGENT'))
    logger.info('%s logged in using browser %s on %s.', user.username, browser,
                operating_system)

    # Validation
    if not user.is_active \
       or user.username.startswith('0') \
       or not user.username.endswith('@corp'):
        # Be sure that the login will not be used anymore
        user.is_active = False
        user.save()
        logger.info('User name %s is mal-formed !', user.username)
        return HttpResponse('Invalid account. Please use your '
                            '<strong>normal</strong> user account and append '
                            '<em>@corp</em>.')

    # Auto fill profile (if possible)
    user.first_name = request.META.get('AUTHENTICATE_GIVENNAME', '')
    user.last_name = request.META.get('AUTHENTICATE_SN', '')
    user.email = request.META.get('AUTHENTICATE_MAIL', '')
    user.save()

    if (user.first_name or user.last_name) and user.email:
        if redirection:
            response = redirect(redirection)
        else:
            response = redirect('index')
    else:
        # Profile is not completed
        logger.info('User %s has not filled his profile.', user.username)
        if redirection:
            response = redirect('%s?redirect=%s' %
                                (reverse('user_profile'), redirection))
        else:
            response = redirect('user_profile')

    return response
Example #18
import httpagentparser as hap

class JakartaHTTPClient(hap.Browser):
    name = 'Jakarta Commons-HttpClient'
    look_for = name
    version_splitters = ['/']

class PythonRequests(hap.Browser):
    name = 'Python Requests'
    look_for = 'python-requests'

# Registering new UAs

hap.detectorshub.register(JakartaHTTPClient())
hap.detectorshub.register(PythonRequests())

# Tests

if __name__ == '__main__':

    s = 'Jakarta Commons-HttpClient/3.1'

    print(hap.detect(s))
    print(hap.simple_detect(s))

    s = 'python-requests/1.2.3 CPython/2.7.4 Linux/3.8.0-29-generic'

    print(hap.detect(s))
    print(hap.simple_detect(s))
Example #19
 def os(self):
     try:
         return httpagentparser.simple_detect(self.ua)[0]
     except:
         return "Unknown"
Example #20
    def get_datatables_user_login(self, user_id=None, kwargs=None):
        default_return = {'recordsFiltered': 0,
                          'recordsTotal': 0,
                          'draw': 0,
                          'data': 'null',
                          'error': 'Unable to execute database query.'}

        if not session.allow_session_user(user_id):
            return default_return

        data_tables = datatables.DataTables()

        if session.get_session_user_id():
            custom_where = [['user_login.user_id', session.get_session_user_id()]]
        else:
            custom_where = [['user_login.user_id', user_id]] if user_id else []

        columns = ['user_login.user_id',
                   'user_login.user',
                   'user_login.user_group',
                   'user_login.ip_address',
                   'user_login.host',
                   'user_login.user_agent',
                   'user_login.timestamp',
                   '(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" \
                    THEN users.username ELSE users.friendly_name END) AS friendly_name'
                   ]

        try:
            query = data_tables.ssp_query(table_name='user_login',
                                          columns=columns,
                                          custom_where=custom_where,
                                          group_by=[],
                                          join_types=['LEFT OUTER JOIN'],
                                          join_tables=['users'],
                                          join_evals=[['user_login.user_id', 'users.user_id']],
                                          kwargs=kwargs)
        except Exception as e:
            logger.warn(u"PlexPy Users :: Unable to execute database query for get_datatables_user_login: %s." % e)
            return default_return

        results = query['result']

        rows = []
        for item in results:
            (os, browser) = httpagentparser.simple_detect(item['user_agent'])

            row = {'user_id': item['user_id'],
                   'user_group': item['user_group'],
                   'ip_address': item['ip_address'],
                   'host': item['host'],
                   'user_agent': item['user_agent'],
                   'os': os,
                   'browser': browser,
                   'timestamp': item['timestamp'],
                   'friendly_name': item['friendly_name'] or item['user']
                   }

            rows.append(row)

        dict = {'recordsFiltered': query['filteredCount'],
                'recordsTotal': query['totalCount'],
                'data': session.friendly_name_to_username(rows),
                'draw': query['draw']
                }

        return dict
Example #21
#extract sample data by name of the key , ex : "visitorid"
#print data[0]["visitorid"]
#sample=data[0]["useragent"]
#print type(s)

#print httpagentparser.simple_detect(s)

#parse all browser data from useragent
#print "before"
#print data[0]["useragent"]

#convert useragent into simple browser version

l = len(data)
for i in range(0, l):
    s = httpagentparser.simple_detect(data[i]["useragent"])
    data[i]["useragent"] = s

#print "after"
#print data[0]["useragent"]
    
# count unique users
num_visitorid = []
for i in range(0, l):
    num_visitorid.append(data[i]["visitorid"])
num_visitorid = list(set(num_visitorid))

print "number of unique visitorid: %d" % len(num_visitorid)

# get list of event per_visitorid
gpby_vid={}
Example #22
    def get_datatables_user_login(self,
                                  user_id=None,
                                  jwt_token=None,
                                  kwargs=None):
        default_return = {
            'recordsFiltered': 0,
            'recordsTotal': 0,
            'draw': 0,
            'data': []
        }

        if not session.allow_session_user(user_id):
            return default_return

        data_tables = datatables.DataTables()

        if session.get_session_user_id():
            custom_where = [[
                'user_login.user_id',
                session.get_session_user_id()
            ]]
        else:
            custom_where = [['user_login.user_id', user_id]] if user_id else []

        columns = [
            'user_login.id AS row_id', 'user_login.timestamp',
            'user_login.user_id', 'user_login.user', 'user_login.user_group',
            'user_login.ip_address', 'user_login.host',
            'user_login.user_agent', 'user_login.success', 'user_login.expiry',
            'user_login.jwt_token',
            '(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" \
                    THEN users.username ELSE users.friendly_name END) AS friendly_name'
        ]

        try:
            query = data_tables.ssp_query(
                table_name='user_login',
                columns=columns,
                custom_where=custom_where,
                group_by=[],
                join_types=['LEFT OUTER JOIN'],
                join_tables=['users'],
                join_evals=[['user_login.user_id', 'users.user_id']],
                kwargs=kwargs)
        except Exception as e:
            logger.warn(
                "Tautulli Users :: Unable to execute database query for get_datatables_user_login: %s."
                % e)
            return default_return

        results = query['result']

        rows = []
        for item in results:
            (os, browser) = httpagentparser.simple_detect(item['user_agent'])

            expiry = None
            current = False
            if item['jwt_token'] and item['expiry']:
                _expiry = helpers.iso_to_datetime(item['expiry'])
                if _expiry > arrow.now():
                    expiry = _expiry.strftime('%Y-%m-%d %H:%M:%S')
                current = (item['jwt_token'] == jwt_token)

            row = {
                'row_id': item['row_id'],
                'timestamp': item['timestamp'],
                'user_id': item['user_id'],
                'user_group': item['user_group'],
                'ip_address': item['ip_address'],
                'host': item['host'],
                'user_agent': item['user_agent'],
                'os': os,
                'browser': browser,
                'success': item['success'],
                'expiry': expiry,
                'current': current,
                'friendly_name': item['friendly_name'] or item['user']
            }

            rows.append(row)

        dict = {
            'recordsFiltered': query['filteredCount'],
            'recordsTotal': query['totalCount'],
            'data': session.friendly_name_to_username(rows),
            'draw': query['draw']
        }

        return dict
Example #23
 def os(self):
     try:
         return httpagentparser.simple_detect(self.ua)[0]
     except:
         return "Unknown"
Example #24
async def handle_request(request, exception):
    start_time = time.time()
    format = 'html'
    url = request.path
    headers = dict()
    if url.startswith('/http'):
        url = url[1:]
    elif url.startswith('/html/http'):
        url = url[6:]
    elif url.startswith('/mhtml/http'):
        format = 'mhtml'
        url = url[7:]
    elif url.startswith('/pdf/http'):
        format = 'pdf'
        url = url[5:]
    elif url.startswith('/jpeg/http'):
        format = 'jpeg'
        url = url[6:]
    elif url.startswith('/png/http'):
        format = 'png'
        url = url[5:]
    if request.query_string:
        url = url + '?' + request.query_string
    parsed_url = urlparse(url)
    proxy = request.headers.get('X-Prerender-Proxy', '')

    if not parsed_url.hostname:
        return response.text('Bad Request', status=400)

    if ALLOWED_DOMAINS:
        if parsed_url.hostname not in ALLOWED_DOMAINS:
            return response.text('Forbidden', status=403)

    skip_cache = request.method == 'POST'
    if not skip_cache:
        try:
            data = await cache.get(url, format)
            modified_since = await cache.modified_since(url) or time.time()
            headers['Last-Modified'] = formatdate(modified_since, usegmt=True)

            try:
                if_modified_since = parsedate(request.headers.get('If-Modified-Since'))
                if_modified_since = time.mktime(if_modified_since)
            except TypeError:
                if_modified_since = 0

            if modified_since and if_modified_since >= modified_since:
                logger.info('Got 304 for %s in cache in %dms',
                            url,
                            int((time.time() - start_time) * 1000))
                return response.text('', status=304, headers=headers)

            if data is not None:
                headers['X-Prerender-Cache'] = 'hit'
                logger.info('Got 200 for %s in cache in %dms',
                            url,
                            int((time.time() - start_time) * 1000))
                if format == 'html':
                    return response.html(
                        apply_filters(data.decode('utf-8'), HTML_FILTERS),
                        headers=headers
                    )
                return response.raw(data, headers=headers)
        except Exception:
            logger.exception('Error reading cache')
            if sentry:
                sentry.captureException()

    if CONCURRENCY <= 0:
        # Read from cache only
        logger.warning('Got 502 for %s in %dms, prerender unavailable',
                       url,
                       int((time.time() - start_time) * 1000))
        return response.text('Bad Gateway', status=502)

    try:
        if _ENABLE_CB:
            user_agent = request.headers.get('user-agent', '')
            _os, browser = httpagentparser.simple_detect(user_agent)
            breaker = _BREAKERS[browser]
            data, status_code = await breaker.run(lambda: _render(request.app.prerender, url, format, proxy))
        else:
            data, status_code = await _render(request.app.prerender, url, format, proxy)
        headers.update({'X-Prerender-Cache': 'miss', 'Last-Modified': formatdate(usegmt=True)})
        logger.info('Got %d for %s in %dms',
                    status_code,
                    url,
                    int((time.time() - start_time) * 1000))
        if format == 'html':
            if 200 <= status_code < 300:
                executor.submit(_save_to_cache, url, data.encode('utf-8'), format)
            return response.html(
                apply_filters(data, HTML_FILTERS),
                headers=headers,
                status=status_code
            )
        if 200 <= status_code < 300:
            executor.submit(_save_to_cache, url, data, format)
        return response.raw(data, headers=headers, status=status_code)
    except (asyncio.TimeoutError, asyncio.CancelledError, TemporaryBrowserFailure, RetriesExhausted):
        logger.warning('Got 504 for %s in %dms',
                       url,
                       int((time.time() - start_time) * 1000))
        return response.text('Gateway timeout', status=504)
    except TooManyResponseError:
        logger.warning('Too many response error for %s in %dms',
                       url,
                       int((time.time() - start_time) * 1000))
        return response.text('Service unavailable', status=503)
    except CircuitOpen:
        logger.warning('Circuit breaker open for %s', browser)
        return response.text('Service unavailable', status=503)
    except Exception:
        logger.exception('Internal Server Error for %s in %dms',
                         url,
                         int((time.time() - start_time) * 1000))
        if sentry:
            sentry.captureException()
        return response.text('Internal Server Error', status=500)
Example #25
 def browser(self):
     try:
         return httpagentparser.simple_detect(self.ua)[1]
     except:
         return "Unknown"
Example #26
def browser_info():
    s = request.environ.get('HTTP_USER_AGENT')
    return httpagentparser.simple_detect(s)
Example #27
    def get_datatables_user_login(self, user_id=None, kwargs=None):
        default_return = {
            'recordsFiltered': 0,
            'recordsTotal': 0,
            'draw': 0,
            'data': 'null',
            'error': 'Unable to execute database query.'
        }

        if not session.allow_session_user(user_id):
            return default_return

        data_tables = datatables.DataTables()

        if session.get_session_user_id():
            custom_where = [[
                'user_login.user_id',
                session.get_session_user_id()
            ]]
        else:
            custom_where = [['user_login.user_id', user_id]] if user_id else []

        columns = [
            'user_login.user_id', 'user_login.user_group',
            'user_login.ip_address', 'user_login.host',
            'user_login.user_agent', 'user_login.timestamp',
            '(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" \
                    THEN users.username ELSE users.friendly_name END) AS friendly_name'
        ]

        try:
            query = data_tables.ssp_query(
                table_name='user_login',
                columns=columns,
                custom_where=custom_where,
                group_by=[],
                join_types=['LEFT OUTER JOIN'],
                join_tables=['users'],
                join_evals=[['user_login.user_id', 'users.user_id']],
                kwargs=kwargs)
        except Exception as e:
            logger.warn(
                u"PlexPy Users :: Unable to execute database query for get_datatables_user_login: %s."
                % e)
            return default_return

        results = query['result']

        rows = []
        for item in results:
            (os, browser) = httpagentparser.simple_detect(item['user_agent'])

            row = {
                'user_id': item['user_id'],
                'user_group': item['user_group'],
                'ip_address': item['ip_address'],
                'host': item['host'],
                'user_agent': item['user_agent'],
                'os': os,
                'browser': browser,
                'timestamp': item['timestamp'],
                'friendly_name': item['friendly_name']
            }

            rows.append(row)

        dict = {
            'recordsFiltered': query['filteredCount'],
            'recordsTotal': query['totalCount'],
            'data': session.friendly_name_to_username(rows),
            'draw': query['draw']
        }

        return dict
Example #28
def simple_user_agent(request):
    """
    Utility function to show the user agent in a human-readable form. Uses the
    simple parser from httpagentparser and joins the resulting (OS, browser)
    pair into a comma-separated string.
    """
    # Default to '' so simple_detect always receives a string.
    return ', '.join(httpagentparser.simple_detect(request.META.get('HTTP_USER_AGENT', '')))
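
As a rough illustration of what the helper above returns: simple_detect yields an (OS, browser) pair of strings, so the joined result reads like "Linux, Chrome 96.0.4664.110" (exact values depend on the httpagentparser version; the user-agent string below is hypothetical):

import httpagentparser

ua = ('Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 '
      '(KHTML, like Gecko) Chrome/96.0.4664.110 Safari/537.36')

os_name, browser = httpagentparser.simple_detect(ua)
print(', '.join((os_name, browser)))  # same shape as simple_user_agent() produces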
Example #29
import httpagentparser
filepath = 'gistfile1.txt'
list_browser = []
dict_browser = {}
with open(filepath) as fp:
    line = fp.readlines()
    for i in line:
        i = i.split('"')
        a = httpagentparser.simple_detect(i[5])
        b = a[1].split(' ')
        list_browser.append(b[0])
    c = list(set(list_browser))
    for j in c:
        count_browser = float(list_browser.count(j))
        sum_browser = len(list_browser)
        scale = (count_browser / sum_browser) * 100
        dict_browser[j] = str(round(scale, 2)) + " %"
        print j + ':' + ' ' + str(round(scale, 2)) + " %"
    # print dict_browser
Example #30
def set_agent_os(raw_event):
    """ Parses the HTTP Agent header taken from the 'agent' field 
    of raw event, and sets 'agent' and 'os' fields."""
    os_and_agent = httpagentparser.simple_detect(raw_event['agent'])
    raw_event['os'] = os_and_agent[0]
    raw_event['agent'] = os_and_agent[1]
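
A minimal usage sketch for set_agent_os as defined above; the event dict and user-agent value are hypothetical:

import httpagentparser

raw_event = {'agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 '
                      '(KHTML, like Gecko) Chrome/96.0.4664.110 Safari/537.36'}
set_agent_os(raw_event)
print(raw_event['os'], '|', raw_event['agent'])  # e.g. something like "Windows 10 | Chrome 96.0.4664.110"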
Example #31
def get_pretty_useragent(ua):
    """
    Given a full user agent string, return either "IE", "Firefox",
    "Chrome"... something abbreviated and pretty.
    """
    return httpagentparser.simple_detect(ua)[1]
Example #32
                    dateMatch = mp3DatePattern.search(mp3File)
                    year = dateMatch.group(1)
                    month = dateMatch.group(2)
                    day = dateMatch.group(3)

                    podcastName = year + "-" + month + "-" + day
                else:
                    podcastName = mp3File

                if re.search("itunes", useragent, re.I):
                    storeDownload(podcastName, "iTunes")

                elif re.search(".*?bot", useragent, re.I) or re.search("Crawler", useragent, re.I):
                    storeDownload(podcastName, "bot")
                else:
                    agent = httpagentparser.simple_detect(useragent)
                    os = agent[0]
                    browser = agent[1]

                    if re.search("unknown", os, re.I):
                        simplestring = browser
                    else:
                        simplestring = os
                        if re.search("^ip", simplestring, re.I) or re.search("Windows Phone", simplestring, re.I):
                            simplestring = "Mobile OS"
                        elif re.search("Windows", simplestring, re.I) or re.search("MacOS", simplestring, re.I):
                            simplestring = "Desktop Browser"
                    storeDownload(podcastName, simplestring)

totalDownloads = 0
Example #33
def views(log, N):
	
	try:
		# Find all occurrences of IP address, HTTP status code, requested URL, and user agent
		total = re.findall(r'(\d+\.\d+\.\d+\.\d+)\s-\s-\s\[(.+)\]\s\"GET\s(.+)\s\w+/.+\"\s(\d+)\s\d+\s\"(.+)\"\s\"(.+)\"', log.read())
	except:
		raise TypeError("The file format is different.")
		
	# Create all the valid visits which have HTTP status 200
	# "view" will hold all the entries with HTTP status 200
	view = [y for y in total if y[3] == '200']
	entries_in_view = len(view)
	
	# Collect all URLs
	totalurl = []
	
	# Keep only unique URLs
	uni_url = []
	
	# Collect all the urls requested by the client
	for x in xrange(entries_in_view):
		totalurl.append(view[x][2])
	
	# Keep unique urls
	uni_url = list(set(totalurl))
	
	# Collect all the entries for each URL
	count_list = []
	
	# Collect entries of URLs which have unique views
	unique_urls = []
	dic = dict()
	print '\n--------------------Requested URLs and Page Views--------------------\n'
	
	# Check for every unique URL
	for x in uni_url:
		count_list = []
		
		# Check entry with status 200
		for y in view:
			
			# Check whether this entry is for the URL 'x'
			if y[2].rstrip() == x.rstrip():
				# If it matches, store the entry
				count_list.append(y)
		
		# Size of all entries of HTTP 200 with 'x' URL
		size_list = len(count_list)
		
		check = 0
		url_count = 0
		first_count = 0
		
		# Check size_list has more than 1 entry
		if size_list > 1:
			
			for k in count_list[1:]:
				
				# Store first entry as unique_url view list
				if first_count == 0:
					unique_urls.append(count_list[0])
					first_count = 1
				
				# Store next entry
				ipfield_next = k[0]
				
				# Check current entry and next entry IP addresses
				if unique_urls[url_count][0] == ipfield_next:
					
					# Current entry user agent
					user_agent1 = httpagentparser.simple_detect(unique_urls[url_count][5])
					
					# Next entry user agent
					user_agent2 = httpagentparser.simple_detect(k[5])
					
					# Check current and next user agent values
					if user_agent1[0] == user_agent2[0] and user_agent1[1] == user_agent2[1]:
						
						try:
							# Find the time between the two requests
							first_time = re.search(r'(\d+/\w+/\d+:\d+:\d+:\d+)\s', unique_urls[url_count][1])
							second_time = re.search(r'(\d+/\w+/\d+:\d+:\d+:\d+)\s', k[1])
						except:
							raise TypeError("The file day and date format is different.")	
							
						FMT = '%d/%b/%Y:%H:%M:%S'
						tdelta = datetime.strptime(second_time.group(1), FMT) - datetime.strptime(first_time.group(1), FMT)
						total_sec = tdelta.total_seconds()
						
						# If the time between the two requests is greater than the input time, consider it a unique visit
						if total_sec > N:
							
							# Store it as a unique view
							unique_urls.append(k)
							url_count = url_count + 1
						
						# If the time is less than the given input, discard that view and continue
						else:
							continue
					
					# If user agents are different
					# Store it as a unique view
					else:
						unique_urls.append(k)
						url_count = url_count + 1
						
				# If IP addresses are different
				# Store it as a unique view
				else:
					unique_urls.append(k)
					url_count = url_count + 1
			
			# Display URL's and their unique visits
			print unique_urls[0][2], ' :', str(len(unique_urls))
			dic[unique_urls[0][2]] = len(unique_urls)
			unique_urls = []
			count_list = []
			
		# Only 1 entry with HTTP 200 status for this URL
		# Consider it a unique visit
		else:
			print count_list[0][2], " : ", str(1)
			dic[count_list[0][2]] = 1

	return dic
Example #34
 def browser(self):
     try:
         return httpagentparser.simple_detect(self.ua)[1]
     except:
         return "Unknown"
Example #35
	def onClientConnection(self, host, headers, client, path, postData, url):
		ua = "user-agent"
		os, browser = httpagentparser.simple_detect(headers[ua])
		S1Plugin.setOS(client, os)
		S1Plugin.setBrowser(client, browser)
		return host, headers, client, path, postData, url
Example #36
 def __init__(self, user_agent):
   self.user_agent = user_agent
   self.info = simple_detect(user_agent)
Example #37
	def get_dashboard_data(self, count):

		popular_pages_list = []
		popular_pages_dict = {}

		referring_tags_list = []
		referring_tags_dict = {}

		dashboard_data = []
		dashboard_dict = {}

		post_tag_dict = {}
		post_tag_list = []

		ip_address_dict = {}
		ip_address_list = []

		browser_dict = {}
		browser_list = []

		try:
			dx = 0
			for obj in self.analytics_dashboard_data:

				title_key = self.utility.remove_blog_title(obj.page_title)

				# ----- Popular Pages ------
				if title_key in popular_pages_dict:
					num = popular_pages_dict[title_key]
					num += 1
					popular_pages_dict[title_key] = num
				else:
					popular_pages_dict[title_key] = 1


				# ----- Referring Tags ------
				if obj.url_referrer != "na":
					referrer_key = obj.url_referrer + " -- " + title_key

					if referrer_key in referring_tags_dict:
						num = referring_tags_dict[referrer_key]
						num += 1
						referring_tags_dict[referrer_key] = num
					else:
						referring_tags_dict[referrer_key] = 1

				# ----- Page Tag ------
				if len(obj.page_tags) > 0:
					for tag in obj.page_tags:
						if tag in post_tag_dict:
							num = post_tag_dict[tag]
							num += 1
							post_tag_dict[tag] = num
						else:
							post_tag_dict[tag] = 1

				# ----- IP Addresses ------
				if obj.ip != "na":
					if obj.ip in ip_address_dict:
						num = ip_address_dict[obj.ip]
						num += 1
						ip_address_dict[obj.ip] = num
					else:
						ip_address_dict[obj.ip] = 1

				# -----Browser ------
				if obj.user_agent != "na":

					browser_tuple = httpagentparser.simple_detect(obj.user_agent)
					browser_key = browser_tuple[0] + " : " + browser_tuple[1] 

					if browser_key in browser_dict:
						num = browser_dict[browser_key]
						num += 1
						browser_dict[browser_key] = num
					else:
						browser_dict[browser_key] = 1

				# ----- General Records ------
				
				if dx < 1000:
					# Start reformatting the dashboard data
					dashboard = Dashboard_Generic(obj)
					duplication_key = dashboard.ip + "__" + \
					                  dashboard.event + "__" + \
					                  dashboard.date + "__" + \
					                  dashboard.title

					# Build a key to detect grouping of events
					event_key = dashboard.ip + "__" + \
							    dashboard.event + "__" + \
							    dashboard.title

					if event_key in dashboard_dict:
						index = 0
						flag = False
						for dashboard_obj in dashboard_dict[event_key]:
							index += 1
							if dashboard_obj.duplication_key == duplication_key:
								flag = True
							
							if index == len(dashboard_dict[event_key]) and flag == False:
								dashboard_dict[event_key].append(dashboard)
								dx += 1
							
					else:
						dashboard_dict[event_key] = [dashboard]
						dx += 1
				


			# Sort from highest to lowest for referring tags
			referring_key_list = sorted(referring_tags_dict.iterkeys(), 
									  key=lambda k: referring_tags_dict[k], 
									  reverse=True)

			# Sort from highest to lowest for popular pages
			popular_page_key_list = sorted(popular_pages_dict.iterkeys(), 
									  key=lambda k: popular_pages_dict[k], 
									  reverse=True)


			# Sort from highest to lowest for post tags
			post_tag_key_list = sorted(post_tag_dict.iterkeys(), 
										  key=lambda k: post_tag_dict[k], 
										  reverse=True)

			# Sort from highest to lowest for ip addresses
			ip_address_key_list = sorted(ip_address_dict.iterkeys(), 
										  key=lambda k: ip_address_dict[k], 
										  reverse=True)

			# Sort from highest to lowest for browsers
			browser_key_list = sorted(browser_dict.iterkeys(), 
										  key=lambda k: browser_dict[k], 
										  reverse=True)


			# Build the return dataset from highest to lowest for popular pages
			i = 0
			for key in popular_page_key_list:
				print(str(popular_pages_dict[key]) + "  " + key, file=sys.stderr)
				if i <= count:
					popular_page_tuple = (popular_pages_dict[key], key)
					popular_pages_list.append(popular_page_tuple)
				else:
					break
				i += 1

			# Build the return dataset from highest to lowest for referring tags
			e = 0
			for key in referring_key_list:
				print(str(referring_tags_dict[key]) + "  " + key, file=sys.stderr)
				if e <= count:
					referrer_tuple = (referring_tags_dict[key], key)
					referring_tags_list.append(referrer_tuple)
				else:
					break
				e += 1

			# Build the return dataset from highest to lowest for post tags
			j = 0
			for key in post_tag_key_list:
				print(str(post_tag_dict[key]) + "  " + key, file=sys.stderr)
				if j <= count:
					post_tag_tuple = (post_tag_dict[key], key)
					post_tag_list.append(post_tag_tuple)
				else:
					break
				j += 1

			# Build the return dataset from highest to lowest for ip addresses
			k = 0
			for key in ip_address_key_list:
				print(str(ip_address_dict[key]) + "  " + key, file=sys.stderr)
				if k <= count:
					ip_address_tuple = (ip_address_dict[key], key)
					ip_address_list.append(ip_address_tuple)
				else:
					break
				k += 1

			# Build the return dataset from highest to lowest for browser
			dx = 0
			for key in browser_key_list:
				print(str(browser_dict[key]) + "  " + key, file=sys.stderr)
				if dx <= count:
					browser_tuple = (browser_dict[key], key)
					browser_list.append(browser_tuple)
				else:
					break
				dx += 1

			return [popular_pages_list, 
					referring_tags_list, 
					post_tag_list,
					ip_address_list,
					browser_list,
					dashboard_dict]

		except Exception, e:
			# Fail issue when getting dashboard data
			self.write_log("get_dashboard_data - Unexpected error: " + str(e), 
				           "**Failure**")
			# Output an error
			print("get_dashboard_data - Unexpected error: " + str(e), 
				  file=sys.stderr)
Example #38
    def get_song(self, songid=None, format=False, bitrate=False):
        logger.debug("%s (%s)\tget_song(songid=%s, format=%s, bitrate=%s)\tHeaders: %s" % (utils.find_originating_host(cherrypy.request.headers), cherrypy.request.login, songid, format, bitrate, cherrypy.request.headers))
        log_message = "%s (%s) is listening to " % (cherrypy.request.login, utils.find_originating_host(cherrypy.request.headers))
        last = self.multi_requests.get(songid, None)
        show_log = False
        if not last or (last and time.time() > (last + 30)):
            show_log = True
        self.multi_requests[songid] = time.time()
        try:
            range_request = cherrypy.request.headers['Range']
        except:
            range_request = "bytes=0-"
        try:
            song = self.library.db[songid]
            path = song['location']
        except:
            log_message += "a song ID which could not be found: %s" % str(songid)
            logger.error(log_message)
            raise cherrypy.HTTPError(404)
        log_message += '"%s" by %s from %s ' % (song['title'].encode(cfg['ENCODING']), song['artist'].encode(cfg['ENCODING']), song['album'].encode(cfg['ENCODING']))
        try:
            client_os, client_browser = httpagentparser.simple_detect(cherrypy.request.headers['User-Agent'])
        #    b = self.bc(cherrypy.request.headers['User-Agent'])
        #    if b:
        #        browser = "%s %s.%s on %s" % (b.name(), b.version()[0], b.version()[1], b.get("platform"))
        #    else:
        #        browser = cherrypy.request.headers['User-Agent']
        #    log_message += "using %s." % browser
        except:
            client_os = 'an OS'
            client_browser = 'a browser'
        try:
            if bitrate:
                bitrate = str(bitrate)
            force_transcode = False
            if bitrate and \
               (int(bitrate) in [8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112,
                                          128, 160, 192, 224, 256, 320]) and \
               (song['bitrate'] / 1024 > int(bitrate)):
                force_transcode = True
        except:
            pass
        try:
            song_format = [song['mimetype'].split('/')[1],
                            os.path.splitext(path)[1].lower()[1:]]
        except:
            song_format = [os.path.splitext(path)[1].lower()[1:]]
        if True in [True for x in song_format if x in ['mp3']]:
            song_mime = 'audio/mpeg'
            song_format = ['mp3']
        elif True in [True for x in song_format if x in ['ogg', 'vorbis', 'oga']]:
            song_mime = 'audio/ogg'
            song_format = ['ogg', 'vorbis', 'oga']
        elif True in [True for x in song_format if x in ['m4a', 'aac', 'mp4']]:
            song_mime = 'audio/x-m4a'
            song_format = ['m4a', 'aac', 'mp4']
        else:
            song_mime = 'application/octet-stream'
        if not (format or bitrate):
            #log_message += " The client did not request any specific format or bitrate so the file is being sent as-is (%s kbps %s)." % (str(song['bitrate'] / 1000), str(song_format))
            log_message += "(%skbps %s)" % (str(song['bitrate']), song_format[0])
            if client_os and client_browser:
                log_message += " using %s on %s." % (client_browser, client_os)
            else:
                log_message += "."
            logger.info(log_message)
            if not os.name == 'nt':
                path = path.encode(cfg['ENCODING'])
            return serve_file(path, song_mime,
                                "inline", os.path.split(path)[1])
        if format:
            format = str(format).split(',')
        else:
            format = song_format
        logger.debug("The client wants %s and the file is %s" % (format, song_format))
        if True in [True for x in format if x in song_format] and not force_transcode:
            #if bitrate:
            #    log_message += " The client requested %s kbps %s, but the file is already %s kbps %s, so the file is being sent as-is." % (bitrate, format, str(song['bitrate'] / 1000), str(song_format))
            #else:
            #    log_message += " The client requested %s, but the file is already %s, so the file is being sent as-is." % (format, str(song_format))
            log_message += "(%skbps %s)" % (str(song['bitrate'] / 1000), song_format[0])
            if client_os and client_browser:
                log_message += " using %s on %s." % (client_browser, client_os)
            else:
                log_message += "."
            if show_log:
                logger.info(log_message)
            if not os.name == 'nt':
                path = path.encode(cfg['ENCODING'])
            return serve_file(path, song_mime,
                                "inline", os.path.split(path)[1])
        else:
            #if bitrate:
            #    log_message = " The client requested %s kbps %s, but the file is %s kbps %s, so we're transcoding the file for them." % (bitrate, format, str(song['bitrate'] / 1000), str(song_format))
            #else:
            #    log_message += " The client requested %s, but the file %s, so we're transcoding the file for them." % (format, str(song_format))
            log_message += "(transcoded from %skbps %s to %skbps %s)" % (str(song['bitrate'] / 1000), song_format[0], str(bitrate), format[0])
            if client_os and client_browser:
                log_message += " using %s on %s." % (client_browser, client_os)
            else:
                log_message += "."
            if show_log:
                logger.info(log_message)
        # If we're transcoding audio and the client is trying to make range
        # requests, we have to throw an error 416. This sucks because it breaks
        # <audio> in all the WebKit browsers I've tried, but at least it stops
        # them from spawning a zillion transcoder threads (I'm looking at you,
        # Chromium).
        if True in [True for x in format if x in ['mp3']]:
#            cherrypy.response.headers['Content-Length'] = '-1'
            if range_request != 'bytes=0-':
                logger.debug("Got a range request for a file that needs transcoded: %s" % range_request)
                raise cherrypy.HTTPError(416)
            else:
                cherrypy.response.headers['Content-Type'] = 'audio/mpeg'
                try:
                    if cherrypy.request.headers['Referer'].lower().endswith('jplayer.swf'):
                        cherrypy.response.headers['Content-Type'] = 'audio/mp3'
                except:
                    pass
                #cherrypy.response.headers['Content-Type'] = 'application/octet-stream'
                return self.transcoder.transcode(path, 'mp3', bitrate)
        elif True in [True for x in format if x in ['ogg', 'vorbis', 'oga']]:
#            cherrypy.response.headers['Content-Length'] = '-1'
            if range_request != 'bytes=0-':
                logger.debug("Got a range request for a file that needs transcoded: %s" % range_request)
                raise cherrypy.HTTPError(416)
            else:
                cherrypy.response.headers['Content-Type'] = 'audio/ogg'
                #cherrypy.response.headers['Content-Type'] = 'application/octet-stream'
                return self.transcoder.transcode(path, 'ogg', bitrate)
        elif True in [True for x in format if x in ['m4a', 'aac', 'mp4']]:
#            cherrypy.response.headers['Content-Length'] = '-1'
            if range_request != 'bytes=0-':
                logger.debug("Got a range request for a file that needs transcoded: %s" % range_request)
                raise cherrypy.HTTPError(416)
            else:
                cherrypy.response.headers['Content-Type'] = 'audio/x-m4a'
                #cherrypy.response.headers['Content-Type'] = 'application/octet-stream'
                return self.transcoder.transcode(path, 'm4a', bitrate)
        else:
            raise cherrypy.HTTPError(501)
Example #39
 def view_post(self):
     email = self.email.text
     password = self.password.text
     blog_number = self.blog_number.text
     con = sql.connect(blog_db)
     cur = con.cursor()
     if blog_number != "":
         post = cur.execute("SELECT * from content WHERE rowid = ?",
                            (blog_number, )).fetchone()
         user_data = cur.execute("SELECT * FROM users WHERE email = ?",
                                 (post[5], )).fetchone()
         post_privacy = post[6]
         if post_privacy == 1:
             if email != "" or password != "":
                 if email == user_data[0] and password == base64.b64decode(
                         user_data[1]).decode("utf-8"):
                     self.ids.post_data.add_widget(ViewLabel(text="Blog #"))
                     self.ids.post_data.add_widget(
                         MDLabel(text=str(post[0])))
                     self.ids.post_data.add_widget(ViewLabel(text="Title"))
                     self.ids.post_data.add_widget(MDLabel(text=post[2]))
                     self.ids.post_data.add_widget(
                         ViewLabel(text="Content"))
                     self.ids.post_data.add_widget(
                         MDLabel(text=(
                             base64.b64decode(post[3])).decode("utf-8")))
                     if post[4] != "NULL":
                         write_data(post[4],
                                    "Blog" + str(post[0]) + "Attachment")
                     location_raw = geocoder.ip('me')
                     ip = location_raw.ip
                     location = location_raw.city + ", " + location_raw.state + ", " + location_raw.country
                     device = str(httpagentparser.simple_detect(ua))
                     cur.execute(insert_log_query,
                                 ("read_blog", email, ip, location, device))
                     con.commit()
                 else:
                     toast("Invalid login details!")
             else:
                 toast(
                     "This is a private post, please enter login details!",
                     5)
         else:
             self.ids.post_data.add_widget(ViewLabel(text="Blog #"))
             self.ids.post_data.add_widget(MDLabel(text=str(post[0])))
             self.ids.post_data.add_widget(ViewLabel(text="Title"))
             self.ids.post_data.add_widget(MDLabel(text=post[2]))
             self.ids.post_data.add_widget(ViewLabel(text="Content"))
             self.ids.post_data.add_widget(MDLabel(text=post[3]))
             if post[4] != "NULL":
                 write_data(post[4], "Blog" + str(post[0]) + "Attachment")
             location_raw = geocoder.ip('me')
             ip = location_raw.ip
             location = location_raw.city + ", " + location_raw.state + ", " + location_raw.country
             device = str(httpagentparser.simple_detect(ua))
             cur.execute(insert_log_query,
                         ("read_blog", "guest", ip, location, device))
             con.commit()
     else:
         toast("Invalid blog number!")
     con.close()