def diagnosis(self):
    """Run cloud-connectivity diagnostics.

    Logs NTP pool DNS resolution, the ntp service status, and — when the
    clock is clearly unset — attempts a time re-sync via ntp-wait.
    Every step is best-effort: failures are logged, never raised.
    """
    from fluxmonitor.misc._process import Process
    from time import time as epoch

    logger.info("Diagnosis...")

    try:
        pool_0 = Process.call_with_output('getent', 'hosts', '0.debian.pool.ntp.org')
        pool_1 = Process.call_with_output('getent', 'hosts', '1.debian.pool.ntp.org')
        logger.info('ntp dns testing...\n%s\n%s', pool_0, pool_1)
    except Exception:
        logger.exception('ntp dns testing failed')

    try:
        status = Process.call_with_output('service', 'ntp', 'status')
        logger.info('ntp service status...\n%s', status)
    except Exception:
        logger.exception('fetch ntp service status failed')

    try:
        # A timestamp before this constant (2017-09-18) means the clock
        # was never set; try to sync it, then re-check.
        if epoch() < 1505720271:
            sync_output = Process.call_with_output('ntp-wait', '-s', '1', '-n', '1')
            logger.error("System time error, fixing...\n%s", sync_output)
            if epoch() < 1505720271:
                logger.error("System time sync failed")
    except Exception:
        logger.exception("cloud diagnosis error")
async def test_cache_multithread():
    """Hammer the cache from many threads and bound the total wall time.

    Spawns THREADS workers that each fetch one of MODULUS distinct
    entries; with working caching, callers block at most once per
    distinct cache write.
    """
    global cache_test_done
    THREADS = 64
    MODULUS = 5
    started_at = epoch()

    workers = [
        Thread(target=get,
               args=(i % MODULUS, _ser_no2did(i % MODULUS)))
        for i in range(THREADS)
    ]
    progress = Thread(target=_dot)
    progress.start()
    shuffle(workers)
    for worker in workers:
        # print('Starting thread {}'.format(workers.index(worker)))
        worker.start()
    for worker in workers:
        worker.join()

    elapsed = ceil(epoch() - started_at)
    # shouldn't get caught waiting more than once per cache write
    assert elapsed < 2 * MODULUS * DELAY
    cache_test_done = True
    progress.join()
def make_session():
    """Return a valid visitor session object"""
    return {
        'session_start': epoch(),
        'session_end': epoch(),
        'session_duration': 100,
        'request_address': "http://example.com",
        'user_agent': "FooBar",
        'screen_color': 256,
        'screen_width': 1000,
        'screen_height': 1000,
        'pageviews': [make_pageview()],
        'pageview_count': 1,
    }
def new_session(payload):
    """Return a new session from payload data"""
    session = {
        'session_start': payload.get('timestamp', epoch()),
        'session_end': payload.get('timestamp', epoch()),
        'session_duration': 0,
        # TODO country, city etc from ip address
        'request_address': payload.get('request_address', None),
        'user_agent': payload.get('user_agent', None),
        'screen_color': payload.get('screen_color', None),
        'screen_width': payload.get('screen_width', None),
        'screen_height': payload.get('screen_height', None),
        'pageviews': [new_pageview(payload)],
        'pageview_count': 1,
    }
    return session
def me_show():
    """Mock "me" endpoint: check the static XREST Authorization header and
    return a CSP user record, with extra date fields for the date/nulldate
    test users."""
    xrest = [
        "XREST dGVzdDp0ZXN0",          # test/test
        "XREST ZGF0ZTp0ZXN0",          # date/test
        "XREST bnVsbGRhdGU6dGVzdA=="   # nulldate/test
    ]
    auth = request.headers.get('Authorization')
    if auth not in xrest:
        # Mimic the JBoss error page the real server produces.
        return make_response(
            "<html><head><title>JBoss - Error report</head></html>", '401')
    user = {
        'firstName': 'csproot',
        'enterpriseName': 'CSP',
        'APIKey': '02a99c64-a09a-46d7',
        'APIKeyExpiry': (int(epoch()) + 100) * 1000,
        'enterpriseID': 'fc3a351e-87dc-46a4-bcf5-8c4bb204bd46',
    }
    if auth == "XREST ZGF0ZTp0ZXN0":
        user['DateDecodeDate'] = '1469448000000'
        user['DateNotDecode'] = '1469448000000'
        user['ExpiryDecodeExpiry'] = '1469448000000'
        user['DateNoneDate'] = None
    if auth == "XREST bnVsbGRhdGU6dGVzdA==":
        user['DateDecodeDate'] = 'null'
    return json.dumps([user])
def me_show():
    """Mock "me" endpoint guarded by a static XREST Authorization header.

    Returns the csproot user record as JSON; the date/nulldate test users
    additionally get canned date fields."""
    accepted = [
        "XREST dGVzdDp0ZXN0",           # test/test
        "XREST ZGF0ZTp0ZXN0",           # date/test
        "XREST bnVsbGRhdGU6dGVzdA==",   # nulldate/test
    ]
    auth = request.headers.get("Authorization")
    if auth not in accepted:
        # Mimic the JBoss error page the real server produces.
        return make_response("<html><head><title>JBoss - Error report</head></html>", "401")
    user = {
        "firstName": "csproot",
        "enterpriseName": "CSP",
        "APIKey": "02a99c64-a09a-46d7",
        "APIKeyExpiry": (int(epoch()) + 100) * 1000,
        "enterpriseID": "fc3a351e-87dc-46a4-bcf5-8c4bb204bd46",
    }
    if auth == "XREST ZGF0ZTp0ZXN0":
        user["DateDecodeDate"] = "1469448000000"
        user["DateNotDecode"] = "1469448000000"
        user["ExpiryDecodeExpiry"] = "1469448000000"
    if auth == "XREST bnVsbGRhdGU6dGVzdA==":
        user["DateDecodeDate"] = "null"
    return json.dumps([user])
def verify_token(token, secret):
    """Check that *token* came from make_token() with *secret* and is fresh.

    :param token: "name,timestamp,hexdigest" string produced by make_token().
    :param secret: shared HMAC key (bytes).
    :returns: True when the token is younger than MAX_TOKEN_AGE and its
        digest matches; False when expired or forged.
    :raises ValueError: if the token does not have exactly three fields.
    """
    name, timestamp, token_hash = token.split(",")
    # The timestamp field is a string; convert before comparing with the
    # clock (the original subtracted a str from a float -> TypeError).
    if epoch() - int(timestamp) > MAX_TOKEN_AGE:
        return False
    # Recompute the digest exactly as make_token does: the message is
    # name + timestamp and the digest is sha256. The original passed the
    # timestamp where hmac.new's digestmod belongs and signed only the
    # name, so verification could never succeed.
    hmac_obj = hmac.new(secret, (name + timestamp).encode(), sha256)
    correct_token = ",".join([name, timestamp, hmac_obj.hexdigest()])
    # Constant-time comparison to avoid timing side channels.
    return hmac.compare_digest(token, correct_token)
async def create_tag(self, ctx: commands.Context, name: str, value: str):
    """Persist a new tag for this guild under the tags:<guild>:<name>:* keys."""
    key_base = f'tags:{ctx.guild.id}:{name}'
    redis = self.bot.redis
    await redis.set(f'{key_base}:value', value)
    await redis.set(f'{key_base}:creator', ctx.author.id)
    await redis.set(f'{key_base}:created_at', epoch())
    await redis.set(f'{key_base}:uses', 0)
def test_store_payload(self):
    """store_payload on a fresh visitor should return a dict of visitor data."""
    account = Account(name="Foo 4")
    visitor = Visitor()
    account.visitors.append(visitor)
    db.session.add(account)
    db.session.commit()

    args = {
        'user_agent': "Foo Bar",                       # Full UserAgent string
        'platform_name': "linux",                      # Platform name from UA
        'browser_name': "chrome",                      # Browser name from UA
        'browser_version': "1.2.3",                    # Browser version from UA
        'request_address': "0.0.0.0",                  # Request IP address
        'visitor_id': "12345",                         # The APE cookie visitor_id
        'debug': False,                                # Debug switch
        'page_url': "http://example.com/foo/bar",      # Page URL
        'referrer_url': "http://example.com/foo/baz",  # Referrer URL if set
        'page_title': "Foo Bar Baz",                   # Page title
        'event': "",                                   # Event
        'account_id': "67890",                         # The customer account ID
        'timestamp': epoch(),                          # Epoch timestamp
        'language': "en-GB",                           # Browser language
        'placeholders': [],                            # Placeholder ids on this page
        'prefix': "ape",                               # Placeholder class prefix
        'screen_colour': 256,                          # Screen colour depth
        'screen_height': 1000,                         # Screen height
        'screen_width': 1000,                          # Screen width
        'script_version': "0.0.0",                     # Version number of this script
    }
    visitor_data = visitor.store_payload(args)
    self.assertIsInstance(visitor_data, dict)
def test_append_payload(self):
    """Close pageviews extend a session; a 31-minute gap opens a new one."""
    now = epoch()
    payload_1 = dict(self.payload, request_address="Foo", user_agent="Foo",
                     timestamp=now)
    payload_2 = dict(self.payload, request_address="Foo", user_agent="Foo",
                     timestamp=now + 1)
    payload_3 = dict(self.payload, request_address="Foo", user_agent="Foo",
                     timestamp=now + 31 * 60)

    # Data with first session and first pageview
    data_1 = VDM.append_payload(payload_1, dict())
    self.assertIsInstance(data_1['visitor_id'], str)
    self.assertIsInstance(data_1['account_id'], str)
    self.assertIsInstance(data_1['sessions'], dict)
    self.assertEqual(data_1['session_count'], 1)
    self.assertEqual(len(data_1['sessions']), 1)
    self.assertEqual(len(data_1['sessions'][0]['pageviews']), 1)

    # Add new pageview to existing session
    data_2 = VDM.append_payload(payload_2, data_1).copy()
    self.assertEqual(data_2['visitor_id'], data_1['visitor_id'])
    self.assertEqual(data_2['account_id'], data_1['account_id'])
    self.assertEqual(len(data_2['sessions']), 1)
    self.assertEqual(data_2['session_count'], 1)
    self.assertEqual(len(data_2['sessions'][0]['pageviews']), 2)

    # Add new session with new pageview
    data_3 = VDM.append_payload(payload_3, data_2).copy()
    self.assertEqual(data_3['visitor_id'], data_1['visitor_id'])
    self.assertEqual(data_3['account_id'], data_1['account_id'])
    self.assertEqual(len(data_3['sessions']), 2)
    self.assertEqual(data_3['session_count'], 2)
    self.assertEqual(len(data_3['sessions'][0]['pageviews']), 2)
    self.assertEqual(len(data_3['sessions'][1]['pageviews']), 1)
def update_session(payload, session):
    """Update the session with the payload data"""
    # Deep copy so the caller's session object is never mutated.
    updated = deepcopy(session)
    updated['session_end'] = payload.get('timestamp', epoch())
    updated['session_duration'] = (
        updated['session_end'] - updated['session_start'])
    updated['pageviews'].append(new_pageview(payload))
    updated['pageview_count'] = len(updated['pageviews'])
    return updated
def __init__(self, unique_id=None, application_id=None, timestamp=None):
    """
    :param unique_id: unique identifier; a fresh uuid4 is generated per
        instance when omitted. (The original used ``unique_id=uuid4()``,
        which is evaluated once at definition time, so every
        default-constructed instance shared the same id — likewise
        ``timestamp=epoch()`` froze one timestamp for all instances.)
    :param application_id: optional application identifier.
    :param timestamp: creation time; defaults to the current epoch time.
    """
    self.unique_id = str(unique_id) if unique_id is not None else str(uuid4())
    self.application_id = application_id
    self.timestamp = timestamp if timestamp is not None else epoch()
def issue(self, lazy_computation):
    """Launch the computation's script asynchronously, logging to a file."""
    command = self.serializer.serialize_and_script(lazy_computation)
    storage = lazy_computation.storage
    suffix = "-{}".format(str(epoch()))
    log_file = storage.get_log_prefix(lazy_computation.comp_name, suffix)
    # Dirty hack: since we don't want to wait for the computation to end
    # for closing the file, we leave it as is.
    # For sequential computation `InSituEnvironment` is much cleaner
    handle = open(log_file, "w")
    subprocess.Popen(command, stderr=handle, stdout=handle)
def assert_hmac(msg, lookup):
    """Validate a signed envelope produced by hmac_wrap.

    Checks that the required fields are present, the handle is registered
    in *lookup*, the message has not outlived its max age, and the digest
    matches. Returns True on success; raises AssertionError otherwise."""
    for field in ('hmac', 'msg', 'handle', 'timestamp'):
        assert field in msg, "{} field missing".format(field)
    handle = msg['handle']
    assert handle in lookup, "Handle unknown"
    entry = lookup[handle]
    alg_name = entry['type']
    key = entry['key']
    max_age_secs = entry['max_age_secs']
    assert epoch() < msg['timestamp'] + max_age_secs, "Message expired"
    signed_text = '\n'.join([handle, str(msg['timestamp']), msg['msg']])
    assert _hmac(signed_text, alg_name, key) == msg['hmac'], "Signature invalid"
    return True
def make_pageview():
    """Return a valid visitor pageview object"""
    return {
        'page_url': "http://example.com",
        'referrer_url': "http://example.com",
        'page_title': "Foo Bar",
        'timestamp': epoch(),
        'language': "en-GB",
        'event': "pageload",
        'placeholders': ['a', 'b', 'c`'],
        'prefix': "ape",
        'script_version': "0.0.0",
    }
def issue(self, lazy_computation):
    """Run the computation synchronously.

    When ``self.stdout`` is set, output goes to the current streams;
    otherwise stdout/stderr are redirected into a per-computation log
    file for the duration of the call and always restored afterwards."""
    if self.stdout:
        lazy_computation()
        return

    storage = lazy_computation.storage
    log_file = storage.get_log_prefix(lazy_computation.comp_name,
                                      ".{}".format(str(epoch())))
    saved_streams = (sys.stdout, sys.stderr)
    try:
        with open(log_file, "w") as log_handle:
            sys.stdout = sys.stderr = log_handle
            lazy_computation()
    finally:
        sys.stdout, sys.stderr = saved_streams
def test_payload_is_part_of_session(self):
    """A payload joins a session only when address, UA and time window match."""
    now = epoch()
    same = dict(self.payload, request_address="Foo", user_agent="Foo",
                timestamp=now)
    other_agent = dict(self.payload, request_address="Foo", user_agent="Bar",
                       timestamp=now)
    other_address = dict(self.payload, request_address="Bar", user_agent="Foo",
                         timestamp=now)
    too_late = dict(self.payload, request_address="Foo", user_agent="Foo",
                    timestamp=now + 31 * 60)

    session = VDM.new_session(same)
    self.assertTrue(VDM.payload_is_part_of_session(same, session))
    self.assertFalse(VDM.payload_is_part_of_session(other_agent, session))
    self.assertFalse(VDM.payload_is_part_of_session(other_address, session))
    self.assertFalse(VDM.payload_is_part_of_session(too_late, session))
def test_update_session(self):
    """update_session refreshes end/duration/pageviews but not identity fields."""
    payload_1 = self.payload
    payload_2 = dict(payload_1, timestamp=epoch())

    session_1 = VDM.new_session(payload_1)
    session_2 = VDM.update_session(payload_2, session_1)

    # Values not updated
    for key in ('session_start', 'request_address', 'user_agent',
                'screen_color', 'screen_width', 'screen_height'):
        self.assertEqual(session_1[key], session_2[key])

    # Values updated
    self.assertEqual(session_2['session_end'], payload_2['timestamp'])
    self.assertEqual(session_2['session_duration'],
                     session_2['session_end'] - session_2['session_start'])
    self.assertEqual(len(session_2['pageviews']),
                     len(session_1['pageviews']) + 1)
    self.assertEqual(session_2['pageview_count'],
                     len(session_2['pageviews']))
def setUp(self):
    """Build a complete, valid beacon payload for the tests to copy."""
    self.payload = {
        'user_agent': "Foo Bar",                       # Full UserAgent string
        'platform_name': "linux",                      # Platform name from UA
        'browser_name': "chrome",                      # Browser name from UA
        'browser_version': "1.2.3",                    # Browser version from UA
        'request_address': "0.0.0.0",                  # Request IP address
        'visitor_id': "12345",                         # The APE cookie visitor_id
        'debug': False,                                # Debug switch
        'page_url': "http://example.com/foo/bar",      # Page URL
        'referrer_url': "http://example.com/foo/baz",  # Referrer URL if set
        'page_title': "Foo Bar Baz",                   # Page title
        'event': "",                                   # Event
        'account_id': "67890",                         # The customer account ID
        'timestamp': epoch(),                          # Epoch timestamp
        'language': "en-GB",                           # Browser language
        'placeholders': [],                            # Placeholder ids on this page
        'prefix': "ape",                               # Placeholder class prefix
        'screen_color': 256,                           # Screen colour depth
        'screen_height': 1000,                         # Screen height
        'screen_width': 1000,                          # Screen width
        'script_version': "0.0.0",                     # Version number of this script
    }
def save():
    """Handle the profile-edit form.

    Validates the submitted name and description, optionally stores a new
    profile picture, persists the changes to MySQL, and refreshes the
    cached JSON files. Redirects depending on auth state.
    """
    user_cookie = request.get_cookie("user")  # , secret="SuckMyTCP/IPv4"
    if user_cookie is not None:
        if int(user_cookie) in users["users"]:
            user = users["users"][int(user_cookie)]
            name = request.forms.get("name")
            if name in [x.name() for x in list(users["users"].values())] and name != user.name():
                # If the user tries to update his name to an already existing name
                return "<h1>Username already exists<br><a href=\"/useredit\">TRY AGAIN</a></h1>"
            user.name(name)
            desc = request.forms.get("desc")
            if not len(re.findall("^[^#*<>\"'{}\[\];]+$", desc)) and len(desc) != 0:  # anti xss attack regex
                desc = sanitize(desc)
                return desc + " ? Did you really think that was going to work?"
            user.descr(desc)
            img = request.files.get("imageFile")
            if img is not None:  # The user is updating their profile picture
                if user.profile() != "/static/android-icon-192x192.png":
                    # Tries to delete the previous profile picture if it is not the default
                    try:
                        os.remove("." + user.profile())
                    except OSError:
                        # Missing file is fine; anything else was silently
                        # ignored before as well.
                        pass
                # Changes filename to seconds from epoch (keeps the extension)
                img.filename = str(epoch()) + img.filename[indexOfNth(img.filename, ".", "last"):]
                user.profile("/static/" + img.filename)  # Updates the user
                img.save("./static")  # Saves the picture
            with conn.cursor() as cur:  # Updates the database
                # Parameterized query: the previous version interpolated
                # user-controlled values directly into the SQL string,
                # which allowed SQL injection.
                cur.execute(
                    "UPDATE users SET name = %s, PPicFile = %s, descr = %s WHERE ID = %s;",
                    (user.name(), user.profile(), user.descr(), user_cookie),
                )
            conn.commit()
            updateUsers()  # Updates the JSON files
            updateTop()    # ----------||----------
            redirect("/u")
        else:
            redirect("/process")
    else:
        redirect("/")
def hmac_wrap(msg, secret):
    """Wrap *msg* in a signed envelope: {msg, hmac, handle, timestamp}.

    The digest covers handle, timestamp and message joined by newlines,
    so assert_hmac can recompute and compare it."""
    assert not secret['accept_only'], "Key not valid for signing"
    handle = secret['name']
    alg_name = secret['type']
    key = secret['key']
    timestamp = int(epoch())
    digest = _hmac('\n'.join([handle, str(timestamp), msg]), alg_name, key)
    return {'msg': msg,
            'hmac': digest,
            'handle': handle,
            'timestamp': timestamp}
def make_token(name, secret):
    """Generate a token for right now with the given username and secret.

    :param name: username (str).
    :param secret: shared HMAC key (bytes).
    :returns: "name,timestamp,hexdigest" where the digest is
        HMAC-SHA256(secret, name + timestamp).
    """
    timestamp = str(int(epoch()))
    # hmac.new requires a bytes message on Python 3; the original passed
    # the raw str, which raises TypeError there.
    hmac_obj = hmac.new(secret, (name + timestamp).encode(), sha256)
    return ",".join((name, timestamp, hmac_obj.hexdigest()))
def serialize(self, lazy_computation):
    """Dump *lazy_computation* with dill into the storage's messy area.

    The file name embeds the computation name and the current epoch so
    repeated serializations do not collide. Returns the written path."""
    timestamped = "-".join([lazy_computation.comp_name, str(epoch())]) + ".pkl"
    target = lazy_computation.storage.get_messy_path(timestamped)
    with open(target, "wb") as sink:
        dill.dump(lazy_computation, sink, -1)
    return target
def beacon():
    """Main tracking-beacon endpoint.

    Collects visitor/page data from the query string, stores it against
    the visitor record, and returns personalised component markup for any
    requested placeholders.

    Raises:
        Conflict: when the client sent a DNT header.
        BadRequest: when the page url (dl) or customer id (id) is missing.
    """
    # Respect Do Not Track
    if request.headers.get('DNT', False):
        raise Conflict("Do Not Track enabled on client")

    # Get values from request object
    args = dict()
    args['user_agent'] = request.user_agent.string        # Full UserAgent string
    args['platform_name'] = request.user_agent.platform   # Platform name from UA
    args['browser_name'] = request.user_agent.browser     # Browser name from UA
    args['browser_version'] = request.user_agent.version  # Browser version from UA
    args['request_address'] = request.remote_addr         # Request IP address

    # Get args data or defaults
    args['visitor_id'] = request.args.get('cc', "")       # The APE cookie visitor_id
    args['debug'] = request.args.get('db', "")            # Debug switch
    args['page_url'] = request.args.get('dl', "")         # Page URL
    args['referrer_url'] = request.args.get('dr', "")     # Referrer URL if set
    args['page_title'] = request.args.get('dt', "")       # Page title
    args['event'] = request.args.get('ev', "")            # Event
    args['account_id'] = request.args.get('id', "")       # The customer account ID
    args['timestamp'] = request.args.get('ld', epoch())   # Epoch timestamp
    args['language'] = request.args.get('lg', "")         # Browser language
    args['placeholders'] = request.args.get('pc', "")     # Placeholder ids on this page
    args['prefix'] = request.args.get('px', "ape")        # Placeholder class prefix
    args['screen_color'] = request.args.get('sc', 0)      # Screen colour depth
    args['screen_height'] = request.args.get('sh', 0)     # Screen height
    args['screen_width'] = request.args.get('sw', 0)      # Screen width
    args['script_version'] = request.args.get('vr', "0.0.0")  # Version of this script

    # Convert values to base data types or defaults. Query-string values
    # arrive as strings; fall back to a typed default instead of 500ing.
    # (The original used bare `except:` clauses, which also swallow
    # KeyboardInterrupt/SystemExit.)
    try:
        args['screen_width'] = int(args['screen_width'])
    except (TypeError, ValueError):
        args['screen_width'] = 0
    try:
        args['screen_height'] = int(args['screen_height'])
    except (TypeError, ValueError):
        args['screen_height'] = 0
    try:
        args['screen_color'] = int(args['screen_color'])
    except (TypeError, ValueError):
        args['screen_color'] = 0
    try:
        args['timestamp'] = float(args['timestamp'])
    except (TypeError, ValueError):
        args['timestamp'] = epoch()
    try:
        args['debug'] = (args['debug'].lower() == "true")
    except AttributeError:
        args['debug'] = False

    # Ensure page url and customer id are provided
    if not args['page_url']:
        raise BadRequest("Value required for page url (dl)")
    if not args['account_id']:
        raise BadRequest("Value required for customer id (id)")

    # Extract placeholder identifiers.
    placeholders = args['placeholders'].split(' ')
    prefix = args['prefix'] + "-"
    # Bug fix: the original used c.lstrip(prefix), which strips any of the
    # prefix's *characters* from the left (e.g. "ape-apple" -> "le"), not
    # the prefix string itself. Slice the prefix off instead.
    args['placeholder_ids'] = [c[len(prefix):] for c in placeholders
                               if c.startswith(prefix)]

    # Response payload
    payload = dict()

    # Return args in payload in debug mode
    if args['debug']:
        payload['args'] = args
        # TODO Add timings to response

    # Get account
    account = Account.query.filter_by(uuid=args['account_id']).first()
    if account:
        # Ensure account is enabled and valid for this page
        if account.enabled and account.url_in_sites(args['page_url']):
            # Get/create visitor record for this customer
            visitor = Visitor.get_or_create(account, args['visitor_id'])
            payload['visitor_id'] = visitor.uuid
            # Update visitor data with payload
            visitor_data = visitor.store_payload(args)
            # Ensure we have placeholder ids
            if args['placeholder_ids']:
                # Build list of applicable segments for visitor
                visitor_segments = [s for s in account.segments
                                    if s.matches_data(visitor_data)]
                if args['debug']:
                    # Add list of segment ids to payload
                    payload['segment_names'] = [s.name for s in visitor_segments]
                # Format components for json response
                payload['components'] = dict()
                for placeholder in account.placeholders:
                    # Ignore unrequested placeholders
                    if placeholder.uuid in args['placeholder_ids']:
                        key = "%s-%s" % (args['prefix'], placeholder.uuid)
                        component = placeholder.get_component_for_segments(visitor_segments)
                        if component:
                            payload['components'][key] = dict()
                            payload['components'][key]['content'] = component.markup
    return payload