def set_password(self, password):
    """Store a fresh random salt and the salted SHA-512 digest of *password*.

    Sets ``self.salt`` (16 chars, uppercase letters + digits, drawn from the
    OS CSPRNG) and ``self.password_hash`` (hex digest of password + salt).

    NOTE(review): single-round SHA-512 is fast to brute-force compared to a
    dedicated KDF (pbkdf2/bcrypt/scrypt) — confirm whether the stored format
    can be migrated.
    """
    rng = random.SystemRandom()
    alphabet = string.ascii_uppercase + string.digits
    self.salt = ''.join(rng.choice(alphabet) for _ in range(16))
    salted = password + self.salt
    self.password_hash = hashlib.sha512(salted.encode()).hexdigest()
def gen_random_string(strLength):
    """Return *strLength* random characters (uppercase letters and digits),
    drawn from the OS-backed CSPRNG."""
    rng = random.SystemRandom()
    pool = string.ascii_uppercase + string.digits
    picked = [rng.choice(pool) for _ in range(strLength)]
    return ''.join(picked)
def lambda_handler(event, context):
    """Launch a Neo4j sandbox ECS task for the authenticated API Gateway user.

    Expects an API Gateway proxy event: the request body is JSON with a
    ``usecase`` key, and the custom authorizer supplies ``principalId``.

    Returns an API Gateway proxy response dict:
      403 if the user's email is not verified,
      400 if a sandbox for (user, usecase) already exists,
      200 with status PENDING and the generated DB password on success,
      500 with the raw ECS response if no task was started.

    Fixes applied:
      * ``sys.maxint`` was removed in Python 3 — use ``sys.maxsize``.
      * ``hashlib`` requires bytes in Python 3 — encode the md5 input.
    """
    global SANDBOX_TASK_DEFINITION, SANDBOX_CLUSTER_NAME, LOGGING_LEVEL, SANDBOX_TWITTER_TASK_DEFINITION
    logger.debug('Starting lambda_handler')
    event_json = json.loads(event["body"])
    usecase = event_json["usecase"]
    # principalId is set by the API Gateway custom authorizer.
    user = event['requestContext']['authorizer']['principalId']
    logger.debug('Checking to see if sandbox exists')
    if not user_verified(user):
        logger.error('User email is not verified for user: %s' % (user))
        response_statusCode = 403
        response_contentType = 'application/json'
        response_body = json.dumps(
            {"errorString": "User email is not verified for user: %s" % (user)})
        return {
            "statusCode": response_statusCode,
            "headers": {
                "Content-type": response_contentType,
                "Access-Control-Allow-Origin": "*"
            },
            "body": response_body
        }
    elif not check_sandbox_exists(user, usecase):
        logger.debug('Generating password')
        userDbPassword = get_generated_password()
        logger.debug('Generating hashkey')
        # note: this isn't meant to generate a secure random number,
        # just a key used for later lookup
        # FIX: sys.maxint does not exist on Python 3; sys.maxsize is the
        # equivalent upper bound here.
        randomNumber = random.SystemRandom().randint(0, sys.maxsize)
        md5 = hashlib.md5()
        # FIX: hashlib.update() requires bytes on Python 3.
        md5.update(("%s-%s" % (userDbPassword, randomNumber)).encode())
        sandboxHashKey = md5.hexdigest()
        logger.debug('Running Task on ECS')
        client = boto3.client('ecs')
        # TODO need to catch exceptions such as
        # An error occurred (InvalidParameterException) when calling the
        # RunTask operation: No Container Instances were found in your
        # cluster.: InvalidParameterException
        if usecase == 'twitter' and user[0:8] == 'twitter|':
            # Twitter-specific task definition also needs the user's and the
            # app's Twitter credentials injected into a second container.
            twitterUserCreds = get_twitter_user_creds(user)
            twitterAppCreds = get_twitter_app_creds()
            response = client.run_task(
                cluster=SANDBOX_CLUSTER_NAME,
                taskDefinition=SANDBOX_TWITTER_TASK_DEFINITION,
                overrides={"containerOverrides": [
                    {"name": "neo4j-enterprise-db-only",
                     "environment": [
                         {"name": "USECASE", "value": usecase},
                         {"name": "EXTENSION_SCRIPT",
                          "value": "extension/extension_script.sh"},
                         {"name": "SANDBOX_USER", "value": user},
                         {"name": "NEO4J_AUTH",
                          "value": "neo4j/%s" % (userDbPassword)},
                         {"name": "SANDBOX_HASHKEY",
                          "value": "%s" % (sandboxHashKey)}
                     ]},
                    {"name": "neo4j-twitter",
                     "environment": [
                         {"name": "TWITTER_CONSUMER_KEY",
                          "value": twitterAppCreds['consumer_key']},
                         {"name": "TWITTER_CONSUMER_SECRET",
                          "value": twitterAppCreds['consumer_secret']},
                         {"name": "TWITTER_USER_KEY",
                          "value": twitterUserCreds['access_token']},
                         {"name": "TWITTER_USER_SECRET",
                          "value": twitterUserCreds['access_token_secret']},
                         {"name": "NEO4J_AUTH",
                          "value": "neo4j/%s" % (userDbPassword)}
                     ]}
                ]},
                placementStrategy=[
                    {"type": "spread", "field": "instanceId"}
                ],
                # startedBy is capped at 36 chars by the ECS API.
                startedBy=('SB("%s","%s")' % (user, usecase))[:36]
            )
        else:
            response = client.run_task(
                cluster=SANDBOX_CLUSTER_NAME,
                taskDefinition=SANDBOX_TASK_DEFINITION,
                overrides={"containerOverrides": [
                    {"name": "neo4j-enterprise-db-only",
                     "environment": [
                         {"name": "USECASE", "value": usecase},
                         {"name": "EXTENSION_SCRIPT",
                          "value": "extension/extension_script.sh"},
                         {"name": "SANDBOX_USER", "value": user},
                         {"name": "NEO4J_AUTH",
                          "value": "neo4j/%s" % (userDbPassword)},
                         {"name": "SANDBOX_HASHKEY",
                          "value": "%s" % (sandboxHashKey)}
                     ]},
                    {"name": "neo4j-importer",
                     "environment": [
                         {"name": "USECASE", "value": usecase},
                         {"name": "NEO4J_AUTH",
                          "value": "neo4j/%s" % (userDbPassword)}
                     ]}
                ]},
                placementStrategy=[
                    {"type": "spread", "field": "instanceId"}
                ],
                startedBy=('SB("%s","%s")' % (user, usecase))[:36]
            )
        logger.debug('Adding sandbox to database')
        if 'tasks' in response and len(response['tasks']) > 0:
            res = add_sandbox_to_db(user, usecase,
                                    response['tasks'][0]['taskArn'],
                                    encrypt_user_creds(userDbPassword),
                                    sandboxHashKey)
            response_json = {"status": "PENDING", "password": userDbPassword}
            # Merge any records returned by the DB layer into the response.
            for record in res:
                record_dict = dict(record)
                response_json.update(record_dict)
            response_statusCode = 200
        else:
            response_json = {"status": "FAILED", "ECS response": response}
            response_statusCode = 500
        response_body = json.dumps(response_json, indent=2, cls=MyEncoder)
        response_contentType = 'application/json'
        return {
            "statusCode": response_statusCode,
            "headers": {
                "Content-type": response_contentType,
                "Access-Control-Allow-Origin": "*"
            },
            "body": response_body
        }
    else:
        logger.error('Sandbox already exists for user: %s and usecase %s'
                     % (user, usecase))
        response_statusCode = 400
        response_contentType = 'application/json'
        response_body = json.dumps(
            {"errorString": "Sandbox already exists for user: %s and usecase %s"
             % (user, usecase)})
        return {
            "statusCode": response_statusCode,
            "headers": {
                "Content-type": response_contentType,
                "Access-Control-Allow-Origin": "*"
            },
            "body": response_body
        }
import os import random import unirest import urllib from botocore.exceptions import BotoCoreError, ClientError SENTIMENT_ANALYSIS_API = 'https://twinword-sentiment-analysis.p.mashape.com/analyze/?text={}' logger = logging.getLogger() logger.setLevel(logging.DEBUG) s3 = boto3.resource('s3', region_name='us-east-1') s3_client = boto3.client('s3', region_name='us-east-1') polly = boto3.client('polly', 'us-east-1') bucket = s3.Bucket('shakirachatbot') secure_random = random.SystemRandom() """ --- Intents --- """ def about_album(intent_request): logger.info(intent_request) slots = get_slots(intent_request) album_slot = slots['album'] if album_slot is None: return elicit_slot( intent_request['sessionAttributes'], intent_request['currentIntent']['name'], slots, 'album', 'Ok, of what album do you want me to enlighten you?') albums = [s for s in fetch_albums() if album_slot.lower() in s.lower()]
def random_password(len=32):
    """Return a random password of uppercase letters and digits.

    NOTE(review): the parameter name shadows the builtin ``len``; it is kept
    as-is so keyword callers (``random_password(len=...)``) keep working.
    """
    pool = string.ascii_uppercase + string.digits
    rng = random.SystemRandom()
    return ''.join([rng.choice(pool) for _ in range(len)])
import argparse
import os

# NOTE(review): Flask, Form and TextField are referenced below but their
# imports are not visible in this chunk — presumably imported earlier in the
# file; confirm.
app = Flask(__name__)
# Raises KeyError at import time if INGRESS_TLS is not set in the environment.
INGRESS_TLS = os.environ['INGRESS_TLS']
VERSION = "0.3.0"

# Command-line flags are parsed at module import time.
parser = argparse.ArgumentParser()
parser.add_argument('--debug', '-d', help="Run in debug mode.", action='store_true')
args = parser.parse_args()

# Fresh random secrets are generated on every start, so sessions and CSRF
# tokens do not survive a process restart.
app.config.update(dict(
    SECRET_KEY=''.join(random.SystemRandom().choice(string.ascii_uppercase + string.digits) for _ in range(10)),
    WTF_CSRF_SECRET_KEY=''.join(random.SystemRandom().choice(string.ascii_uppercase + string.digits) for _ in range(10))
))


class Filter_Form(Form):
    # Single free-text filter field.
    filter = TextField("filter")


# This function prevents Flask from telling the browser to cache images indefinitely
@app.after_request
def add_header(response):
    # Only set a policy when the view did not choose one itself.
    if 'Cache-Control' not in response.headers:
        response.headers['Cache-Control'] = 'no-store'
    return response


# Error handlers
def throw_dice(dice, max_val):
    """Roll *dice* dice, each uniform on 1..*max_val* inclusive, printing
    one line per throw.  Returns None.

    Fix: the SystemRandom instance is created once instead of once per die,
    and the unused intermediate local was removed; printed output format is
    unchanged.
    """
    system_random = random.SystemRandom()
    for _ in range(dice):
        throw = system_random.randint(1, max_val)
        print("your throw is: " + str(throw))
import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) import string, random chars = ''.join([string.ascii_letters, string.digits, string.punctuation ]).replace('\'', '').replace('"', '').replace('\\', '') # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = os.environ.get( 'DJANGO_SECRET_KEY', ''.join([random.SystemRandom().choice(chars) for i in range(50)])) # SECURITY WARNING: don't run with debug turned on in production! DEBUG = bool(os.environ.get('DJANGO_DEBUG', True)) ALLOWED_HOSTS = ["*"] # Application definition INSTALLED_APPS = [ 'recycleapp', 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages',
# Fuzz Tests for _string
import os, random, time, _string

rnd = random.SystemRandom()


def fstr():
    """Produce one fuzz input: the str() (repr-style) of 0-10 random bytes."""
    size = rnd.randint(0, 10)
    return str(os.urandom(size))


def test():
    """Throw 9999 random strings at both _string parsing entry points.

    ValueError is the expected rejection for malformed input and is ignored;
    any other exception propagates (that is what the fuzzer is looking for).
    """
    for _ in range(9999):
        try:
            list(_string.formatter_field_name_split(fstr())[1])
        except ValueError:
            pass
        try:
            list(_string.formatter_parser(fstr()))
        except ValueError:
            pass


if __name__ == '__main__':
    print('Start Time:', time.ctime())
    for i in range(1, 99999):
        test()
        print('End of Iteration', i, '-', time.ctime())
def random_filename():
    """Return a random six-character name (digits + lowercase letters)
    with a '.jpg' extension."""
    rng = random.SystemRandom()
    alphabet = string.digits + string.ascii_lowercase
    stem = ''.join(rng.choice(alphabet) for _ in range(6))
    return stem + '.jpg'
def createDeviceId(self):
    """Return a random 16-character alphanumeric device identifier."""
    alphabet = string.ascii_uppercase + string.digits + string.ascii_lowercase
    rng = random.SystemRandom()
    return "".join(rng.choice(alphabet) for _ in range(16))
def RandomNumber(number=8):
    """Return a string of *number* random decimal digits (default 8).

    Fix: replaced the ``map(lambda x: str(x), [...])`` construction with a
    direct generator expression, and reuse a single SystemRandom instance
    instead of one per digit.  Output distribution is unchanged.
    """
    import random
    rng = random.SystemRandom()
    return ''.join(str(rng.randint(0, 9)) for _ in range(number))
def toss(n_dice):
    """Roll the die whose face count is ``dice[n_dice]`` (module-level table),
    append the result to the module-level ``tosses`` list, and return it."""
    rng = random.SystemRandom()
    result = rng.randint(1, dice[n_dice])
    tosses.append(result)
    return result
def data_random(self):
    """Populate the currently selected table with 2000 rows of random data.

    Uses the ``mimesis`` ``Generic('en')`` provider for realistic values.
    Tables with foreign keys (Cinema, Session, Cinema-Session) first load
    the candidate parent rows; if a required parent table is empty the
    method reports via the view and returns without inserting anything.
    Commits every 1000 rows (at k == 0 and k == 1000).
    """
    g = Generic('en')
    # Candidate parent rows for the FK-bearing tables; filled per table type.
    list_for_Cinema = None
    list_for_Session = None
    list_for_Cinema_Session_CinemaID = None
    list_for_Cinema_Session_SessionID = None
    count_succedded_Cinema_Session = 0
    # --- preconditions: required parent tables must be non-empty ---
    if self.model.present_table_type == "Cinema":
        list_for_Cinema = self.model.search_item("Name", False, "Network")
        if len(list_for_Cinema) == 0:
            self.view.message_print(
                "There aren't any Networks for random entering in table Cinema\n"
            )
            return
    elif self.model.present_table_type == "Session":
        list_for_Session = self.model.search_item("ID", False, "Film")
        if len(list_for_Session) == 0:
            self.view.message_print(
                "Error during random session data entering:" +
                "there aren't any films in table Films to set on sessions\n"
            )
            return
    elif self.model.present_table_type == "Cinema-Session":
        list_for_Cinema_Session_CinemaID = self.model.search_item(
            "Address", False, "Cinema")
        list_for_Cinema_Session_SessionID = self.model.search_item(
            "ID", False, "Session")
        if len(list_for_Cinema_Session_CinemaID) == 0 or len(
                list_for_Cinema_Session_SessionID) == 0:
            self.view.message_print(
                "Error:necessary table/tables for table Cinema-Session is/are empty\n"
            )
            return
    chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789'
    previous_date = datetime.datetime.today()
    for k in range(2000):
        array_with_attributes = []
        count = 0
        if self.model.present_table_type == "Network":
            # Retry with a fresh random name until the insert succeeds
            # (create_item raises on e.g. a uniqueness violation).
            while True:
                array_with_attributes = []
                array_with_attributes.append(''.join(
                    random.SystemRandom().choice(chars)
                    for i in range(random.randint(5, 11))))
                array_with_attributes.append(g.person.full_name().replace(
                    "'", ''))
                try:
                    self.model.create_item(array_with_attributes)
                except Exception:
                    continue
                break
        elif self.model.present_table_type == "Cinema":
            # [network name, random "city,street suffix,number" address,
            #  hall count 1..4, capacity = halls * 400..500]
            array_with_attributes.append(g.random.choice(list_for_Cinema))
            array_with_attributes[0] = array_with_attributes[0][0].replace(
                "'", '')
            array_with_attributes.append(
                g.address.city().replace("'", '') + "," +
                ''.join(random.SystemRandom().choice(
                    "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKMNOPQRSTUVWXYZ")
                    for i in range(random.randint(5, 11))) + " " +
                g.address.street_suffix() + "," +
                ''.join(random.SystemRandom().choice("0123456789")
                        for i in range(random.randint(1, 4))))
            array_with_attributes.append(g.random.randint(1, 4))
            array_with_attributes.append(
                g.random.randint(400, 500) *
                array_with_attributes[len(array_with_attributes) - 1])
            # On insert failure, regenerate only the address and retry.
            while True:
                try:
                    self.model.create_item(array_with_attributes)
                except Exception:
                    array_with_attributes[1] = g.address.city().replace(
                        "'", ''
                    ) + "," + ''.join(random.SystemRandom().choice(
                        "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKMNOPQRSTUVWXYZ"
                    ) for i in range(random.randint(
                        5, 11))) + " " + g.address.street_suffix(
                    ) + " " + ''.join(
                        random.SystemRandom().choice("0123456789")
                        for i in range(random.randint(1, 4)))
                    continue
                break
        elif self.model.present_table_type == "Session":
            # Advance one calendar day per row; once the 29th would be
            # reached, jump to the 1st of the next month (avoids having to
            # handle differing month lengths).
            if previous_date.day + 1 == 29:
                if previous_date.month == 12:
                    date = datetime.datetime(previous_date.year + 1, 1, 1)
                else:
                    date = datetime.datetime(previous_date.year,
                                             previous_date.month + 1, 1)
            else:
                date = datetime.datetime(previous_date.year,
                                         previous_date.month,
                                         previous_date.day + 1)
            array_with_attributes.append(str(date))
            previous_date = date
            array_with_attributes.append(g.random.choice(list_for_Session))
            array_with_attributes[1] = array_with_attributes[1][0]
            array_with_attributes.append(g.random.randint(1, 4))
            self.model.create_item(array_with_attributes)
        elif self.model.present_table_type == "Film":
            # [title, genre, year, budget + currency, country, runtime, flag]
            array_with_attributes.append(g.business.company().replace(
                "'", ''))
            array_with_attributes.append(
                g.random.choice(
                    ("Fantastical", "Western", "Thriller", "Detective",
                     "War", "Sci-Fi", "Horror", "Comedy", "Historical",
                     "Musical", "Romantic", "Documentary", "Action", "Drama",
                     "Family", "Sport")))
            array_with_attributes.append(g.datetime.year())
            array_with_attributes.append(
                str(g.random.randint(10000000, 10000000000)) +
                str(g.random.choice((" UAH", " USD", " GBP", " EUR"))))
            # NOTE(review): g.choice here vs g.random.choice elsewhere —
            # confirm mimesis Generic exposes .choice directly.
            array_with_attributes.append(
                g.choice(("Ukraine", "France", "Great Britain", "Germany",
                          "Sweden", "Finland", "Norway", "Switzerland",
                          "Czech Republic", "Hungary", "Bulgaria", "Romania",
                          "Greece", "Spain", "Belgium", "Netherlands",
                          "Luxemburg", "Italy", "Croatia", "Serbia",
                          "Montenegro", "Cyprus", "Slovakia")))
            array_with_attributes.append(
                str(g.random.randint(100, 200)) + " min")
            array_with_attributes.append(g.random.choice(
                ("True", "False")))
            self.model.create_item(array_with_attributes)
        elif self.model.present_table_type == "Cinema-Session":
            # Pick random (cinema, session) pairs until one inserts.
            while True:
                array_with_attributes.append(
                    g.choice(list_for_Cinema_Session_CinemaID))
                array_with_attributes[0] = array_with_attributes[0][0]
                array_with_attributes.append(
                    g.choice(list_for_Cinema_Session_SessionID))
                array_with_attributes[1] = array_with_attributes[1][0]
                try:
                    self.model.create_item(array_with_attributes)
                    count_succedded_Cinema_Session += 1
                    break
                except Exception:
                    # NOTE(review): `count` starts at 0 and is only
                    # incremented inside this `count > 50` branch, so the
                    # retry path looks unreachable and the method gives up
                    # on the first failed pair — the guard may be inverted
                    # (`count < 50`?); confirm intent before changing.
                    if count > 50:
                        count += 1
                        array_with_attributes = []
                        continue
                    self.view.message_print(
                        "There aren't enough Cinemas and Session to enter necessary "
                        + "amount of data rows in table Cinema-Session but {} rows were entered\n"
                        .format(count_succedded_Cinema_Session))
                    return
        # Periodic commit to keep transactions bounded.
        if k % 1000 == 0:
            self.model.connection.commit()
def generate_password(length):
    """Return a random password of *length* letters and digits, using the
    OS-backed CSPRNG."""
    rng = random.SystemRandom()
    alphabet = string.ascii_letters + string.digits
    return ''.join([rng.choice(alphabet) for _ in range(length)])
def change_password():
    """Set, change, or remove the password of a WD Passport drive.

    Prompts for the current password (when one is set) and the new password
    (twice), hashes them with the parameters stored in handy-store block 1
    (creating those parameters if absent), and issues the vendor-specific
    SCSI command.  Exits with status 1 on any validation failure.

    Fixes applied:
      * Reconstructed the getpass prompts that were garbled in the source
        (the ``"..."******"..."`` runs were not valid Python); the variable
        names used below (old_passwd / new_passwd / new_passwd2) pin down
        the intended three prompts.
      * Bare ``except:`` narrowed to ``except Exception:``.
    """
    # Check drive's current status.
    status = get_encryption_status()
    # 0x00 = no password set, 0x02 = unlocked; anything else can't proceed.
    if (status["Locked"] not in (0x00, 0x02)):
        print(
            fail(
                "Device has to be unlocked or without encryption to perform this operation."
            ))
        sys.exit(1)
    # Get and confirm the current and new password.
    if status["Locked"] == 0x00:
        # The device doesn't have a password.
        old_passwd = ""
    else:
        old_passwd = getpass.getpass("Current password: ")
    new_passwd = getpass.getpass("New password: ")
    new_passwd2 = getpass.getpass("New password (again): ")
    if new_passwd != new_passwd2:
        print(fail("Password didn't match."))
        sys.exit(1)
    ## Both passwords shouldn't be empty
    if (len(old_passwd) <= 0 and len(new_passwd) <= 0):
        print(
            fail(
                "Password can't be empty. The device doesn't yet have a password."
            ))
        sys.exit(1)
    # Construct the command.
    pw_block = [0x45, 0x00, 0x00, 0x00, 0x00, 0x00]
    # Get the length in bytes of the key for the drive's current cipher
    # and put that length into the command.
    pwblen = status["PasswordLength"]
    pw_block += list(htons(pwblen))
    # For compatibility with the WD encryption tool, use the same
    # hashing mechanism and parameters to turn the user's password
    # input into a key. The parameters are stored in unencrypted data.
    hash_parameters = read_handy_store_block1()
    if hash_parameters is None:
        # No password hashing parameters are stored on the device.
        # Make some up and write them to the device.
        hash_parameters = (
            1000,
            ''.join(random.SystemRandom().choice(string.ascii_uppercase +
                                                 string.digits)
                    for _ in range(8)).encode("ascii"),  # eight-byte salt
            b'wdpassport-utils'.ljust(202))
        write_handy_store_block1(*hash_parameters)
        # NOTE(review): assert is stripped under ``python -O`` — consider an
        # explicit check if this verification matters in production.
        assert read_handy_store_block1() == hash_parameters
    iteration, salt, hint = hash_parameters
    # Bit 0x10 = old password supplied, 0x01 = new password supplied.
    if (len(old_passwd) > 0):
        old_passwd_hashed = mk_password_block(old_passwd, iteration, salt)
        pw_block[3] = pw_block[3] | 0x10
    else:
        old_passwd_hashed = bytes([0x00] * 32)
    if (len(new_passwd) > 0):
        new_passwd_hashed = mk_password_block(new_passwd, iteration, salt)
        pw_block[3] = pw_block[3] | 0x01
    else:
        new_passwd_hashed = bytes([0x00] * 32)
    # Both set means "change password": clear both flag bits.
    if pw_block[3] & 0x11 == 0x11:
        pw_block[3] = pw_block[3] & 0xEE
    cdb = [0xC1, 0xE2, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x48, 0x00]
    pwblen = 8 + 2 * pwblen
    cdb[8] = pwblen
    try:
        ## If exception isn't raised the unlock operation gone ok.
        py_sg.write(
            dev, _scsi_pack_cdb(cdb),
            _scsi_pack_cdb(pw_block) + old_passwd_hashed + new_passwd_hashed)
        print(success("Password changed."))
    except Exception:
        ## Wrong password or something bad is happened.
        print(fail("Error changing password."))
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import argparse
import operator
import os
import sys
import random as r

# NOTE: this rebinds the name `random` to a SystemRandom instance for the
# whole module — the `random` module itself is only reachable as `r`.
random = r.SystemRandom()

# question-format key -> [question template, operator symbol]
expansions = {
    "d": ["Divide %s by %s and round down.", '//'],
    "a": ["Add %s to %s.", '+'],
    "s": ["Subtract %s from %s.", '-'],
    "m": ["Multiply %s by %s.", '*']
}

# Operator symbol -> function, so createq no longer needs eval() on a
# constructed string (fix: eval replaced with an explicit dispatch table).
_OPS = {
    '//': operator.floordiv,
    '+': operator.add,
    '-': operator.sub,
    '*': operator.mul,
}


def gui(args):
    """Launch the GUI front-end as a subprocess (args is unused, kept for
    the argparse callback interface)."""
    os.system('python3 GUI.py')


def createq(qf):
    """Build one arithmetic question from a format string "op:d1:d2".

    op is a key of `expansions`; d1/d2 are the digit counts of the two
    operands.  Returns (question_text, answer).  For "s" the operands are
    swapped in the text so the question reads "Subtract no2 from no1" while
    the answer is no1 - no2 — preserved from the original.
    """
    qf = qf.split(":")
    question = expansions[qf[0]][0]
    no1 = str(random.randint((10**(int(qf[1]) - 1)), (10**(int(qf[1])))))
    no2 = str(random.randint((10**(int(qf[2]) - 1)), (10**(int(qf[2])))))
    answer = _OPS[expansions[qf[0]][1]](int(no1), int(no2))
    if qf[0] == "s":
        return (question % (no2, no1), answer)
    else:
        return (question % (no1, no2), answer)
def random_name(length=16):
    """Return a random name of *length* uppercase letters and digits
    (default 16)."""
    pool = string.ascii_uppercase + string.digits
    rng = random.SystemRandom()
    return ''.join([rng.choice(pool) for _ in range(length)])
from selenium import webdriver from selenium.webdriver.common.keys import Keys import random import sys import time systemRandom = random.SystemRandom() travel_hashtags = [ 'travel', 'travelphotography', 'travelgram', 'wanderlust', 'travelling', 'travelblogger', 'traveladdict', 'tourism', 'photography', 'naturephotography', 'travelpic', 'wanderlust' ] travel_comments = [ 'Your posts are amazing', 'Amazing work. Keep going!', 'Your photos are magnificent', 'Your work fascinates me!', 'I like how you put your posts together', 'Great job', 'What a really nice photo!', 'Well done!', 'That is so nice', 'That is so cool', 'This makes me sad :(', 'One day! xD' ] coding_hashtags = [ 'coding', 'coder', 'coders', '100daysofcode', 'softwareengineer', 'programmerlife', 'codingdays', 'codingmeme', 'machinelearning', 'codingpics',
def starting_url():
    """Return 'http://www.XXX.com' where XXX is three random lowercase
    letters."""
    rng = random.SystemRandom()
    middle = ''.join(rng.choice(string.ascii_lowercase) for _ in range(3))
    return 'http://www.' + middle + '.com'
class MSL(object): # Is a handshake already performed and the keys loaded handshake_performed = False last_drm_context = '' last_playback_context = '' current_message_id = 0 session = requests.session() rndm = random.SystemRandom() tokens = [] base_url = 'http://www.netflix.com/api/msl/NFCDCH-LX/cadmium/' endpoints = { 'manifest': base_url + 'manifest', 'license': base_url + 'license' } def __init__(self, nx_common): """ The Constructor checks for already existing crypto Keys. If they exist it will load the existing keys """ self.nx_common = nx_common self.crypto = MSLHandler(nx_common) if self.nx_common.file_exists(self.nx_common.data_path, 'msl_data.json'): self.init_msl_data() else: self.crypto.fromDict(None) self.__perform_key_handshake() def load_manifest(self, viewable_id, dolby, hevc): """ Loads the manifets for the given viewable_id and returns a mpd-XML-Manifest :param viewable_id: The id of of the viewable :return: MPD XML Manifest or False if no success """ manifest_request_data = { 'method': 'manifest', 'lookupType': 'PREPARE', 'viewableIds': [viewable_id], 'profiles': [ # Video "playready-h264bpl30-dash", "playready-h264mpl30-dash", "playready-h264mpl31-dash", "playready-h264mpl40-dash", # Audio 'heaac-2-dash', # Subtiltes # 'dfxp-ls-sdh', 'simplesdh', # 'nflx-cmisc', # Unkown 'BIF240', 'BIF320' ], 'drmSystem': 'widevine', 'appId': '14673889385265', 'sessionParams': { 'pinCapableClient': False, 'uiplaycontext': 'null' }, 'sessionId': '14673889385265', 'trackId': 0, 'flavor': 'PRE_FETCH', 'secureUrls': False, 'supportPreviewContent': True, 'forceClearStreams': False, 'languages': ['de-DE'], 'clientVersion': '4.0004.899.011', 'uiVersion': 'akira' } # add hevc profiles if setting is set if hevc is True: hevc = 'hevc-main-' main10 = 'hevc-main10-' prk = 'dash-cenc-prk' cenc = 'dash-cenc' ctl = 'dash-cenc-tl' hdr = 'hevc-hdr-main10-' dv = 'hevc-dv-main10-' dv5 = 'hevc-dv5-main10-' manifest_request_data['profiles'].append(main10 + 'L41-' + cenc) 
manifest_request_data['profiles'].append(main10 + 'L50-' + cenc) manifest_request_data['profiles'].append(main10 + 'L51-' + cenc) manifest_request_data['profiles'].append(hevc + 'L30-' + cenc) manifest_request_data['profiles'].append(hevc + 'L31-' + cenc) manifest_request_data['profiles'].append(hevc + 'L40-' + cenc) manifest_request_data['profiles'].append(hevc + 'L41-' + cenc) manifest_request_data['profiles'].append(hevc + 'L50-' + cenc) manifest_request_data['profiles'].append(hevc + 'L51-' + cenc) manifest_request_data['profiles'].append(main10 + 'L30-' + cenc) manifest_request_data['profiles'].append(main10 + 'L31-' + cenc) manifest_request_data['profiles'].append(main10 + 'L40-' + cenc) manifest_request_data['profiles'].append(main10 + 'L41-' + cenc) manifest_request_data['profiles'].append(main10 + 'L50-' + cenc) manifest_request_data['profiles'].append(main10 + 'L51-' + cenc) manifest_request_data['profiles'].append(main10 + 'L30-' + prk) manifest_request_data['profiles'].append(main10 + 'L31-' + prk) manifest_request_data['profiles'].append(main10 + 'L40-' + prk) manifest_request_data['profiles'].append(main10 + 'L41-' + prk) manifest_request_data['profiles'].append(hevc + 'L30-L31-' + ctl) manifest_request_data['profiles'].append(hevc + 'L31-L40-' + ctl) manifest_request_data['profiles'].append(hevc + 'L40-L41-' + ctl) manifest_request_data['profiles'].append(hevc + 'L50-L51-' + ctl) manifest_request_data['profiles'].append(main10 + 'L30-L31-' + ctl) manifest_request_data['profiles'].append(main10 + 'L31-L40-' + ctl) manifest_request_data['profiles'].append(main10 + 'L40-L41-' + ctl) manifest_request_data['profiles'].append(main10 + 'L50-L51-' + ctl) manifest_request_data['profiles'].append(dv + 'L30-' + cenc) manifest_request_data['profiles'].append(dv + 'L31-' + cenc) manifest_request_data['profiles'].append(dv + 'L40-' + cenc) manifest_request_data['profiles'].append(dv + 'L41-' + cenc) manifest_request_data['profiles'].append(dv + 'L50-' + cenc) 
manifest_request_data['profiles'].append(dv + 'L51-' + cenc) manifest_request_data['profiles'].append(dv5 + 'L30-' + prk) manifest_request_data['profiles'].append(dv5 + 'L31-' + prk) manifest_request_data['profiles'].append(dv5 + 'L40-' + prk) manifest_request_data['profiles'].append(dv5 + 'L41-' + prk) manifest_request_data['profiles'].append(dv5 + 'L50-' + prk) manifest_request_data['profiles'].append(dv5 + 'L51-' + prk) manifest_request_data['profiles'].append(hdr + 'L30-' + cenc) manifest_request_data['profiles'].append(hdr + 'L31-' + cenc) manifest_request_data['profiles'].append(hdr + 'L40-' + cenc) manifest_request_data['profiles'].append(hdr + 'L41-' + cenc) manifest_request_data['profiles'].append(hdr + 'L50-' + cenc) manifest_request_data['profiles'].append(hdr + 'L51-' + cenc) manifest_request_data['profiles'].append(hdr + 'L30-' + prk) manifest_request_data['profiles'].append(hdr + 'L31-' + prk) manifest_request_data['profiles'].append(hdr + 'L40-' + prk) manifest_request_data['profiles'].append(hdr + 'L41-' + prk) manifest_request_data['profiles'].append(hdr + 'L50-' + prk) manifest_request_data['profiles'].append(hdr + 'L51-' + prk) # Check if dolby sound is enabled and add to profles if dolby: manifest_request_data['profiles'].append('ddplus-2.0-dash') manifest_request_data['profiles'].append('ddplus-5.1-dash') request_data = self.__generate_msl_request_data(manifest_request_data) try: resp = self.session.post(self.endpoints['manifest'], request_data) except: resp = None exc = sys.exc_info() msg = '[MSL][POST] Error {} {}' self.nx_common.log(msg=msg.format(exc[0], exc[1])) if resp: try: # if the json() does not fail we have an error because # the manifest response is a chuncked json response resp.json() self.nx_common.log( msg='Error getting Manifest: ' + resp.text) return False except ValueError: # json() failed so parse the chunked response #self.nx_common.log( # msg='Got chunked Manifest Response: ' + resp.text) resp = 
self.__parse_chunked_msl_response(resp.text) #self.nx_common.log( # msg='Parsed chunked Response: ' + json.dumps(resp)) data = self.__decrypt_payload_chunks(resp['payloads']) return self.__tranform_to_dash(data) return False def get_license(self, challenge, sid): """ Requests and returns a license for the given challenge and sid :param challenge: The base64 encoded challenge :param sid: The sid paired to the challengew :return: Base64 representation of the licensekey or False unsuccessfull """ license_request_data = { 'method': 'license', 'licenseType': 'STANDARD', 'clientVersion': '4.0004.899.011', 'uiVersion': 'akira', 'languages': ['de-DE'], 'playbackContextId': self.last_playback_context, 'drmContextIds': [self.last_drm_context], 'challenges': [{ 'dataBase64': challenge, 'sessionId': sid }], 'clientTime': int(time.time()), 'xid': int((int(time.time()) + 0.1612) * 1000) } request_data = self.__generate_msl_request_data(license_request_data) try: resp = self.session.post(self.endpoints['license'], request_data) except: resp = None exc = sys.exc_info() self.nx_common.log( msg='[MSL][POST] Error {} {}'.format(exc[0], exc[1])) if resp: try: # If is valid json the request for the licnese failed resp.json() self.nx_common.log(msg='Error getting license: '+resp.text) return False except ValueError: # json() failed so we have a chunked json response resp = self.__parse_chunked_msl_response(resp.text) data = self.__decrypt_payload_chunks(resp['payloads']) if data['success'] is True: return data['result']['licenses'][0]['data'] else: self.nx_common.log( msg='Error getting license: ' + json.dumps(data)) return False return False def __decrypt_payload_chunks(self, payloadchunks): decrypted_payload = '' for chunk in payloadchunks: payloadchunk = json.JSONDecoder().decode(chunk) payload = payloadchunk.get('payload') decoded_payload = base64.standard_b64decode(payload) encryption_envelope = json.JSONDecoder().decode(decoded_payload) # Decrypt the text plaintext = 
self.crypto.decrypt(base64.standard_b64decode(encryption_envelope['iv']), base64.standard_b64decode(encryption_envelope.get('ciphertext'))) # unpad the plaintext plaintext = json.JSONDecoder().decode(plaintext) data = plaintext.get('data') # uncompress data if compressed if plaintext.get('compressionalgo') == 'GZIP': decoded_data = base64.standard_b64decode(data) data = zlib.decompress(decoded_data, 16 + zlib.MAX_WBITS) else: data = base64.standard_b64decode(data) decrypted_payload += data decrypted_payload = json.JSONDecoder().decode(decrypted_payload)[1]['payload']['data'] decrypted_payload = base64.standard_b64decode(decrypted_payload) return json.JSONDecoder().decode(decrypted_payload) def __tranform_to_dash(self, manifest): self.nx_common.save_file( data_path=self.nx_common.data_path, filename='manifest.json', content=json.dumps(manifest)) manifest = manifest['result']['viewables'][0] self.last_playback_context = manifest['playbackContextId'] self.last_drm_context = manifest['drmContextId'] # Check for pssh pssh = '' keyid = None if 'psshb64' in manifest: if len(manifest['psshb64']) >= 1: pssh = manifest['psshb64'][0] psshbytes = base64.standard_b64decode(pssh) if len(psshbytes) == 52: keyid = psshbytes[36:] seconds = manifest['runtime']/1000 init_length = seconds / 2 * 12 + 20*1000 duration = "PT"+str(seconds)+".00S" root = ET.Element('MPD') root.attrib['xmlns'] = 'urn:mpeg:dash:schema:mpd:2011' root.attrib['xmlns:cenc'] = 'urn:mpeg:cenc:2013' root.attrib['mediaPresentationDuration'] = duration period = ET.SubElement(root, 'Period', start='PT0S', duration=duration) # One Adaption Set for Video for video_track in manifest['videoTracks']: video_adaption_set = ET.SubElement( parent=period, tag='AdaptationSet', mimeType='video/mp4', contentType="video") # Content Protection if keyid: protection = ET.SubElement( parent=video_adaption_set, tag='ContentProtection', value='cenc', schemeIdUri='urn:mpeg:dash:mp4protection:2011') protection.set('cenc:default_KID', 
str(uuid.UUID(bytes=keyid))) protection = ET.SubElement( parent=video_adaption_set, tag='ContentProtection', schemeIdUri='urn:uuid:EDEF8BA9-79D6-4ACE-A3C8-27DCD51D21ED') ET.SubElement( parent=protection, tag='widevine:license', robustness_level='HW_SECURE_CODECS_REQUIRED') if pssh is not '': ET.SubElement(protection, 'cenc:pssh').text = pssh for downloadable in video_track['downloadables']: codec = 'h264' if 'hevc' in downloadable['contentProfile']: codec = 'hevc' hdcp_versions = '0.0' for hdcp in downloadable['hdcpVersions']: if hdcp != 'none': hdcp_versions = hdcp rep = ET.SubElement( parent=video_adaption_set, tag='Representation', width=str(downloadable['width']), height=str(downloadable['height']), bandwidth=str(downloadable['bitrate']*1024), hdcp=hdcp_versions, nflxContentProfile=str(downloadable['contentProfile']), codecs=codec, mimeType='video/mp4') # BaseURL base_url = self.__get_base_url(downloadable['urls']) ET.SubElement(rep, 'BaseURL').text = base_url # Init an Segment block segment_base = ET.SubElement( parent=rep, tag='SegmentBase', indexRange='0-' + str(init_length), indexRangeExact='true') ET.SubElement( parent=segment_base, tag='Initialization', range='0-' + str(init_length)) # Multiple Adaption Set for audio for audio_track in manifest['audioTracks']: impaired = 'false' if audio_track.get('trackType') == 'ASSISTIVE': impaired = 'true' audio_adaption_set = ET.SubElement( parent=period, tag='AdaptationSet', lang=audio_track['bcp47'], contentType='audio', mimeType='audio/mp4', impaired=impaired) for downloadable in audio_track['downloadables']: codec = 'aac' #self.nx_common.log(msg=downloadable) is_dplus2 = downloadable['contentProfile'] == 'ddplus-2.0-dash' is_dplus5 = downloadable['contentProfile'] == 'ddplus-5.1-dash' if is_dplus2 or is_dplus5: codec = 'ec-3' #self.nx_common.log(msg='codec is: ' + codec) rep = ET.SubElement( parent=audio_adaption_set, tag='Representation', codecs=codec, bandwidth=str(downloadable['bitrate']*1024), 
mimeType='audio/mp4') # AudioChannel Config uri = 'urn:mpeg:dash:23003:3:audio_channel_configuration:2011' ET.SubElement( parent=rep, tag='AudioChannelConfiguration', schemeIdUri=uri, value=str(audio_track.get('channelsCount'))) # BaseURL base_url = self.__get_base_url(downloadable['urls']) ET.SubElement(rep, 'BaseURL').text = base_url # Index range segment_base = ET.SubElement( parent=rep, tag='SegmentBase', indexRange='0-' + str(init_length), indexRangeExact='true') ET.SubElement( parent=segment_base, tag='Initialization', range='0-' + str(init_length)) # Multiple Adaption Sets for subtiles for text_track in manifest.get('textTracks'): is_downloadables = 'downloadables' not in text_track if is_downloadables or text_track.get('downloadables') is None: continue subtiles_adaption_set = ET.SubElement( parent=period, tag='AdaptationSet', lang=text_track.get('bcp47'), codecs='stpp', contentType='text', mimeType='application/ttml+xml') for downloadable in text_track['downloadables']: rep = ET.SubElement( parent=subtiles_adaption_set, tag='Representation', nflxProfile=downloadable.get('contentProfile')) base_url = self.__get_base_url(downloadable['urls']) ET.SubElement(rep, 'BaseURL').text = base_url xml = ET.tostring(root, encoding='utf-8', method='xml') xml = xml.replace('\n', '').replace('\r', '') self.nx_common.save_file( data_path=self.nx_common.data_path, filename='manifest.mpd', content=xml) return xml def __get_base_url(self, urls): for key in urls: return urls[key] def __parse_chunked_msl_response(self, message): header = message.split('}}')[0] + '}}' payloads = re.split(',\"signature\":\"[0-9A-Za-z=/+]+\"}', message.split('}}')[1]) payloads = [x + '}' for x in payloads][:-1] return { 'header': header, 'payloads': payloads } def __generate_msl_request_data(self, data): #self.__load_msl_data() header_encryption_envelope = self.__encrypt( plaintext=self.__generate_msl_header()) headerdata = base64.standard_b64encode(header_encryption_envelope) header = { 
'headerdata': headerdata, 'signature': self.__sign(header_encryption_envelope), 'mastertoken': self.mastertoken, } # Serialize the given Data raw_marshalled_data = json.dumps(data) marshalled_data = raw_marshalled_data.replace('"', '\\"') serialized_data = '[{},{"headers":{},"path":"/cbp/cadmium-13"' serialized_data += ',"payload":{"data":"' serialized_data += marshalled_data serialized_data += '"},"query":""}]\n' compressed_data = self.__compress_data(serialized_data) # Create FIRST Payload Chunks first_payload = { 'messageid': self.current_message_id, 'data': compressed_data, 'compressionalgo': 'GZIP', 'sequencenumber': 1, 'endofmsg': True } first_payload_encryption_envelope = self.__encrypt( plaintext=json.dumps(first_payload)) payload = base64.standard_b64encode(first_payload_encryption_envelope) first_payload_chunk = { 'payload': payload, 'signature': self.__sign(first_payload_encryption_envelope), } request_data = json.dumps(header) + json.dumps(first_payload_chunk) return request_data def __compress_data(self, data): # GZIP THE DATA out = StringIO() with gzip.GzipFile(fileobj=out, mode="w") as f: f.write(data) return base64.standard_b64encode(out.getvalue()) def __generate_msl_header( self, is_handshake=False, is_key_request=False, compressionalgo='GZIP', encrypt=True): """ Function that generates a MSL header dict :return: The base64 encoded JSON String of the header """ self.current_message_id = self.rndm.randint(0, pow(2, 52)) esn = self.nx_common.get_esn() # Add compression algo if not empty compression_algos = [compressionalgo] if compressionalgo != '' else [] header_data = { 'sender': esn, 'handshake': is_handshake, 'nonreplayable': False, 'capabilities': { 'languages': ['en-US'], 'compressionalgos': compression_algos }, 'recipient': 'Netflix', 'renewable': True, 'messageid': self.current_message_id, 'timestamp': 1467733923 } # If this is a keyrequest act diffrent then other requests if is_key_request: header_data['keyrequestdata'] = 
self.crypto.get_key_request() else: if 'usertoken' in self.tokens: pass else: account = self.nx_common.get_credentials() # Auth via email and password header_data['userauthdata'] = { 'scheme': 'EMAIL_PASSWORD', 'authdata': { 'email': account['email'], 'password': account['password'] } } return json.dumps(header_data) def __encrypt(self, plaintext): return json.dumps(self.crypto.encrypt(plaintext, self.nx_common.get_esn(), self.sequence_number)) def __sign(self, text): """ Calculates the HMAC signature for the given text with the current sign key and SHA256 :param text: :return: Base64 encoded signature """ return base64.standard_b64encode(self.crypto.sign(text)) def perform_key_handshake(self): self.__perform_key_handshake() def __perform_key_handshake(self): esn = self.nx_common.get_esn() self.nx_common.log(msg='perform_key_handshake: esn:' + esn) if not esn: return False header = self.__generate_msl_header( is_key_request=True, is_handshake=True, compressionalgo='', encrypt=False) request = { 'entityauthdata': { 'scheme': 'NONE', 'authdata': { 'identity': esn } }, 'headerdata': base64.standard_b64encode(header), 'signature': '', } #self.nx_common.log(msg='Key Handshake Request:') #self.nx_common.log(msg=json.dumps(request)) try: resp = self.session.post( url=self.endpoints['manifest'], data=json.dumps(request, sort_keys=True)) except: resp = None exc = sys.exc_info() self.nx_common.log( msg='[MSL][POST] Error {} {}'.format(exc[0], exc[1])) if resp and resp.status_code == 200: resp = resp.json() if 'errordata' in resp: self.nx_common.log(msg='Key Exchange failed') self.nx_common.log( msg=base64.standard_b64decode(resp['errordata'])) return False base_head = base64.standard_b64decode(resp['headerdata']) headerdata=json.JSONDecoder().decode(base_head) self.__set_master_token(headerdata['keyresponsedata']['mastertoken']) self.crypto.parse_key_response(headerdata) self.__save_msl_data() else: self.nx_common.log(msg='Key Exchange failed') 
self.nx_common.log(msg=resp.text) def init_msl_data(self): self.nx_common.log(msg='MSL Data exists. Use old Tokens.') self.__load_msl_data() self.handshake_performed = True def __load_msl_data(self): raw_msl_data = self.nx_common.load_file( data_path=self.nx_common.data_path, filename='msl_data.json') msl_data = json.JSONDecoder().decode(raw_msl_data) # Check expire date of the token raw_token = msl_data['tokens']['mastertoken']['tokendata'] base_token = base64.standard_b64decode(raw_token) master_token = json.JSONDecoder().decode(base_token) exp = int(master_token['expiration']) valid_until = datetime.utcfromtimestamp(exp) present = datetime.now() difference = valid_until - present # If token expires in less then 10 hours or is expires renew it self.nx_common.log(msg='Expiration time: Key:' + str(valid_until) + ', Now:' + str(present) + ', Diff:' + str(difference.total_seconds())) difference = difference.total_seconds() / 60 / 60 if difference < 10 or self.crypto.fromDict(msl_data): self.__perform_key_handshake() return self.__set_master_token(msl_data['tokens']['mastertoken']) def save_msl_data(self): self.__save_msl_data() def __save_msl_data(self): """ Saves the keys and tokens in json file :return: """ data = { 'tokens': { 'mastertoken': self.mastertoken } } data.update(self.crypto.toDict()) serialized_data = json.JSONEncoder().encode(data) self.nx_common.save_file( data_path=self.nx_common.data_path, filename='msl_data.json', content=serialized_data) def __set_master_token(self, master_token): self.mastertoken = master_token raw_token = master_token['tokendata'] base_token = base64.standard_b64decode(raw_token) decoded_token = json.JSONDecoder().decode(base_token) self.sequence_number = decoded_token.get('sequencenumber')
def gen_key_pair(p=nist_p, g=nist_g):
    """Generate a Diffie-Hellman key pair for the group defined by (p, g).

    Returns a tuple ``(private, public)`` where ``private`` is drawn
    uniformly from ``range(p)`` using the OS-backed CSPRNG and
    ``public = g ** private mod p``.
    """
    rng = random.SystemRandom()
    private_key = rng.randrange(p)
    public_key = pow(g, private_key, p)
    return (private_key, public_key)
def index(request, requestedId=""):
    """Django view (Python 2) for a code-plagiarism comparison tool.

    POST: accepts two uploaded source files ('file1', 'file2'), stores them
    under a random 6-digit id, runs the comparison pipeline and renders the
    result page (or redirects to a share URL when 'storecode' is checked).
    GET with an id longer than 3 chars: re-renders a previously stored pair.
    Anything else: redirects back to the upload form.
    """
    if request.method == "POST":
        ##print "="*100
        ##print request
        ##print "="*1000
        afile = request.FILES['file1']
        ##print afile.name,afile.read()
        # Draw a random 6-digit numeric id; re-draw until it is unused.
        # NOTE(review): the retry draw uses string.uppercase + string.digits,
        # so a collision retry can produce a non-numeric id fed to int() --
        # presumably a latent ValueError path; confirm intended alphabet.
        stri = int(''.join(random.SystemRandom().choice(string.digits) for _ in xrange(6)))
        while Tuple.objects.all().filter(filesId=stri).exists():
            stri = int(''.join(random.SystemRandom().choice(string.uppercase + string.digits) for _ in xrange(6)))
        instance = Tuple(file1=request.FILES['file1'], file2=request.FILES['file2'], fancyId=stri)
        instance.save()
        # 'storecode' checked: persist the pair and hand back a share link.
        if 'storecode' in request.POST:
            return redirect('../check/' + str(instance.fancyId))
        # Strip the 2-char storage prefix from the stored file names.
        file1 = str(instance.file1)[2:]
        file2 = str(instance.file2)[2:]
        dire = str(settings.MEDIA_ROOT) + '/'
        # NOTE(review): files are opened without being closed (leaked handles).
        ccod1 = open(dire + file1).read().split('\n')
        ccod2 = open(dire + file2).read().split('\n')
        # datastuff[0] = assembly-level results, datastuff[1] = source-level
        # results; [x][0] is the score pair, [x][1]/[x][2] the rendered sides.
        datastuff = process_it(file1, file2, dire)
        # NOTE(review): shell command built from an uploaded filename --
        # command-injection risk; os.remove() would be safer. Also popen3 is
        # fire-and-forget, so failures are silently ignored.
        popen3('rm ' + dire + file1)
        popen3('rm ' + dire + file2)
        # Derive the generated assembly (.s) names from the source extension.
        if file1[-1] == "c":
            s_file1 = file1.replace('.c', '.s')
            s_file2 = file2.replace('.c', '.s')
        else:
            s_file1 = file1.replace('.cpp', '.s')
            s_file2 = file2.replace('.cpp', '.s')
        popen3('rm ' + dire + s_file1)
        popen3('rm ' + dire + s_file2)
        # Template lines are 1-indexed; pad index 0 with an empty string.
        ccod1.insert(0, '')
        ccod2.insert(0, '')
        instance.delete()
        renderVal = {'code1' : ccod1, 'code2' :ccod2, 'ccode1': ["",datastuff[1][1]], 'ccode2':["",datastuff[1][2]],
                     'assembly1': ["",datastuff[0][1]], 'assembly2': ["",datastuff[0][2]]}
        renderVal['title'] = 'Result'
        # Assembly score of -1 means no assembly comparison was possible.
        if datastuff[0][0][0] != -1:
            renderVal['assemblymatch1'] = int(datastuff[0][0][0])
            renderVal['assemblymatch2'] = int(datastuff[0][0][1])
            # NOTE(review): datastuff[0][0] is the score *pair* (a sequence),
            # compared against an int -- legal in Python 2 but almost
            # certainly meant datastuff[0][0][0]; confirm.
            if datastuff[0][0] < 50:
                renderVal['assemblyLabel'] = 'success'
            elif datastuff[0][0] < 75:
                renderVal['assemblyLabel'] = 'warning'
            else:
                renderVal['assemblyLabel'] = 'danger'
        # Source-level scores arrive as fractions; scale to whole percents.
        datastuff[1][0][0] *= 100
        datastuff[1][0][0] = int(datastuff[1][0][0])
        datastuff[1][0][1] *= 100
        datastuff[1][0][1] = int(datastuff[1][0][1])
        renderVal['codematch1'] = datastuff[1][0][0]
        renderVal['codematch2'] = datastuff[1][0][1]
        # Combine code + assembly scores when assembly data exists.
        if datastuff[0][0][0] != -1:
            renderVal['percent'] = calc(renderVal['codematch1'], renderVal['codematch2'],
                                        renderVal['assemblymatch1'], renderVal['assemblymatch2'])
        else:
            renderVal['percent'] = calc2(renderVal['codematch1'], renderVal['codematch2'])
        # Bootstrap label buckets: <50 green, <75 yellow, else red.
        if datastuff[1][0][0] < 50:
            renderVal['codeLabel'] = 'success'
        elif datastuff[1][0][0] < 75:
            renderVal['codeLabel'] = 'warning'
        else:
            renderVal['codeLabel'] = 'danger'
        if renderVal['percent'] < 50:
            renderVal['totaltitle'] = 'success'
        elif renderVal['percent'] < 75:
            renderVal['totaltitle'] = 'warning'
        else:
            renderVal['totaltitle'] = 'danger'
        return render(request, 'ComparedResult/ComparedResult.html', renderVal)
    elif len(requestedId) > 3:
        # Shared-link path: re-run the comparison for a stored pair.
        # NOTE(review): [0] on the filtered queryset raises IndexError for an
        # unknown id -- no 404 handling.
        instance = Tuple.objects.all().filter(fancyId=requestedId)[0]
        file1 = str(instance.file1)[2:]
        file2 = str(instance.file2)[2:]
        dire = str(settings.MEDIA_ROOT) + '/'
        ccod1 = open(dire + file1).read().split('\n')
        ccod2 = open(dire + file2).read().split('\n')
        datastuff = process_it(file1, file2, dire)
        ccod1.insert(0, '')
        ccod2.insert(0, '')
        renderVal = {'code1' : ccod1, 'code2' :ccod2, 'ccode1': ["",datastuff[1][1]], 'ccode2':["",datastuff[1][2]],
                     'assembly1': ["",datastuff[0][1]], 'assembly2': ["",datastuff[0][2]]}
        renderVal['title'] = requestedId
        if datastuff[0][0][0] != -1:
            renderVal['assemblymatch1'] = int(datastuff[0][0][0])
            renderVal['assemblymatch2'] = int(datastuff[0][0][1])
            # NOTE(review): same sequence-vs-int comparison as in the POST
            # branch; confirm datastuff[0][0][0] was intended.
            if datastuff[0][0] < 50:
                renderVal['assemblyLabel'] = 'success'
            elif datastuff[0][0] < 75:
                renderVal['assemblyLabel'] = 'warning'
            else:
                renderVal['assemblyLabel'] = 'danger'
        #print '!'*100
        #print datastuff
        #print '!'*100
        datastuff[1][0][0] *= 100
        datastuff[1][0][1] *= 100
        renderVal['codematch1'] = int(datastuff[1][0][0])
        renderVal['codematch2'] = int(datastuff[1][0][1])
        if datastuff[1][0][0] < 50:
            renderVal['codeLabel'] = 'success'
        elif datastuff[1][0][0] < 75:
            renderVal['codeLabel'] = 'warning'
        else:
            renderVal['codeLabel'] = 'danger'
        if datastuff[0][0][0] != -1:
            renderVal['percent'] = calc(renderVal['codematch1'], renderVal['codematch2'],
                                        renderVal['assemblymatch1'], renderVal['assemblymatch2'])
        else:
            renderVal['percent'] = calc2(renderVal['codematch1'], renderVal['codematch2'])
        if renderVal['percent'] < 50:
            renderVal['totaltitle'] = 'success'
        elif renderVal['percent'] < 75:
            renderVal['totaltitle'] = 'warning'
        else:
            renderVal['totaltitle'] = 'danger'
        return render(request, 'ComparedResult/ComparedResult.html', renderVal)
    else:
        return redirect('../comparecode')
def pad_generate_url(self, cr, uid, context=None):
    """Create a new pad on the company's Etherpad-Lite server.

    Builds a unique pad path from the database name, the model name and a
    random 10-char salt, creates the pad through the Etherpad API, and
    optionally seeds it with the plain-text content of an existing record
    field (when context carries 'model', 'field_name' and 'object_id').

    Returns a dict {'server', 'path', 'url'}; if no pad server is
    configured, returns just the (empty-server) pad config dict.
    """
    company = self.pool.get('res.users').browse(cr, SUPERUSER_ID, uid, context=context).company_id
    pad = {
        "server": company.pad_server,
        "key": company.pad_key,
    }
    # Normalize the server URL to the form http://hostname (no trailing /).
    if not pad["server"]:
        # No pad server configured: nothing to create, bail out early.
        return pad
    if not pad["server"].startswith('http'):
        pad["server"] = 'http://' + pad["server"]
    pad["server"] = pad["server"].rstrip('/')
    # Generate a 10-character uppercase/digit salt via the OS CSPRNG.
    s = string.ascii_uppercase + string.digits
    salt = ''.join([s[random.SystemRandom().randint(0, len(s) - 1)] for i in range(10)])
    # Build the pad path: <dbname>-<model>-<salt>.
    # Etherpad hardcodes the pad id length limit to 50, so the dbname part
    # is truncated to whatever room the '-model-salt' suffix leaves.
    path = '-%s-%s' % (self._name, salt)
    path = '%s%s' % (cr.dbname.replace('_', '-')[0:50 - len(path)], path)
    # Construct the public pad URL.
    url = '%s/p/%s' % (pad["server"], path)
    # Create the pad through the Etherpad-Lite HTTP API.
    myPad = EtherpadLiteClient(pad["key"], pad["server"] + '/api')
    try:
        myPad.createPad(path)
    except urllib2.URLError:
        raise osv.except_osv(
            _("Error"),
            _("Pad creation failed, \
either there is a problem with your pad server URL or with your connection."
              ))
    # Optionally seed the new pad with existing record content.
    if "field_name" in context and "model" in context and "object_id" in context:
        # Resolve the pad field's backing plain-content field on the model.
        model = self.pool[context["model"]]
        field = model._fields[context['field_name']]
        real_field = field.pad_content_field
        # Push the record's content (HTML stripped to plain text) to the pad.
        for record in model.browse(cr, uid, [context["object_id"]]):
            if record[real_field]:
                myPad.setText(
                    path,
                    (html2plaintext(record[real_field]).encode('utf-8')))
                # Etherpad's HTML import is not functional, hence setText:
                #myPad.setHTML(path, record[real_field])
    return {
        "server": pad["server"],
        "path": path,
        "url": url,
    }
############################### # old_password_generator.py # ############################### import string, random, sys SELECT = string.ascii_letters + string.punctuation + string.digits SAMPLE = random.SystemRandom().sample def main(): while True: size = get_size() password = generate_pw(size) print_pause(password) def get_size(): while True: try: size = int(input('Size: ')) except ValueError: print('Please enter a number.') except EOFError: sys.exit() else: if 1 <= size <= 80: return size print('Valid number range is 1 - 80.') def generate_pw(size): password = ''.join(SAMPLE(SELECT, size)) while not approved(password):
def checkcaptcha():
    """Show an 8-character captcha rendered with figlet and return True
    exactly when the user's typed answer matches it."""
    rng = random.SystemRandom()
    key = ''.join([rng.choice(letters) for _ in range(8)])
    print("Solve this captcha to continue:")
    print(figlet(key))
    answer = input('>>> ')
    return answer == key
def random_str(n=50):
    """Return a random string of length ``n``.

    Characters are drawn from ASCII letters, digits and punctuation,
    excluding the quote characters (' and ") and backslash so the result
    can be embedded in quoted contexts safely.

    Uses random.SystemRandom (backed by os.urandom), so the output is
    suitable for security-sensitive values.

    :param n: desired length of the string (default 50).
    :return: random string of exactly ``n`` characters.
    """
    chars = ''.join([string.ascii_letters, string.digits, string.punctuation
                     ]).replace('\'', '').replace('"', '').replace('\\', '')
    # Instantiate the CSPRNG once; the previous version created a fresh
    # SystemRandom object for every single character drawn.
    rng = random.SystemRandom()
    return ''.join([rng.choice(chars) for i in range(n)])
def main():
    """Interactive (or --quiet) post-configuration of a WAPT server.

    Walks through: SELinux tweaks (RedHat), config backup + secret-key /
    admin-password / server-uuid initialisation, registration-auth choice,
    client-signing CA creation, config write-back, repo index rebuild,
    service start, Nginx reverse-proxy setup, waptwua folder migration,
    optional mongodb->postgres migration, and file-ownership fixes.

    Side effects: mutates the config file, the filesystem and system
    services; must run as root. Exits the process on user cancellation.
    """
    global wapt_folder, NGINX_GID
    parser = OptionParser(usage=usage, version=__version__)
    parser.add_option('-c',
                      '--config',
                      dest='configfile',
                      default=waptserver.config.DEFAULT_CONFIG_FILE,
                      help='Config file full path (default: %default)')
    parser.add_option(
        "-s",
        "--force-https",
        dest="force_https",
        default=False,
        action='store_true',
        help=
        "Use https only, http is 301 redirected to https (default: False). Requires a proper DNS name"
    )
    parser.add_option(
        '-q',
        '--quiet',
        dest='quiet',
        default=False,
        action="store_true",
        help=
        'Run quiet postconfiguration - default password and simple behavior')
    (options, args) = parser.parse_args()
    quiet = options.quiet
    # Interactive mode asks for confirmation before touching anything.
    if not quiet:
        if postconf.yesno("Do you want to launch post configuration tool ?"
                          ) != postconf.DIALOG_OK:
            print("canceling wapt postconfiguration")
            sys.exit(1)
    else:
        print('WAPT silent post-configuration')

    # SELinux rules for CentOS/RedHat: required for the Nginx reverse proxy.
    if type_redhat():
        if re.match('^SELinux status:.*enabled', run('sestatus')):
            if not quiet:
                postconf.msgbox(
                    'SELinux detected, tweaking httpd permissions.')
                selinux_rules()
                postconf.msgbox(
                    'SELinux correctly configured for Nginx reverse proxy')
            else:
                print('[*] Redhat/Centos detected, tweaking SELinux rules')
                selinux_rules()
                print(
                    '[*] Nginx - SELinux correctly configured for Nginx reverse proxy'
                )

    # Load existing config file and keep a timestamped backup of it.
    server_config = waptserver.config.load_config(options.configfile)
    if os.path.isfile(options.configfile):
        print('[*] Making a backup copy of the configuration file')
        datetime_now = datetime.datetime.now()
        shutil.copyfile(
            options.configfile,
            '%s.bck_%s' % (options.configfile, datetime_now.isoformat()))

    wapt_folder = server_config['wapt_folder']

    # Initialise the Flask session-token secret key if missing (64 random
    # alphanumeric chars from the OS CSPRNG).
    if not server_config['secret_key']:
        server_config['secret_key'] = ''.join(
            random.SystemRandom().choice(string.letters + string.digits)
            for _ in range(64))

    # Only provision the local PostgreSQL db when db_host points at this
    # machine (or is unset).
    if server_config['db_host'] in (None, '', 'localhost', '127.0.0.1',
                                    '::1'):
        ensure_postgresql_db(db_name=server_config['db_name'],
                             db_owner=server_config['db_name'],
                             db_password=server_config['db_password'])

    # Password setup/reset screen.
    # NOTE(review): yes_label='skip' / no_label='reset' means a non-OK
    # answer ("reset") — or a missing password — drops into the set-password
    # loop; presumably intentional, but the inverted labels are confusing.
    if not quiet:
        if not server_config['wapt_password'] or \
                postconf.yesno("Do you want to reset admin password ?",yes_label='skip',no_label='reset') != postconf.DIALOG_OK:
            wapt_password_ok = False
            while not wapt_password_ok:
                wapt_password = ''
                wapt_password_check = ''
                # Prompt twice; any dialog cancel aborts the whole run.
                while wapt_password == '':
                    (code, wapt_password) = postconf.passwordbox(
                        "Please enter the wapt server password (min. 10 characters): ",
                        insecure=True,
                        width=100)
                    if code != postconf.DIALOG_OK:
                        exit(0)
                while wapt_password_check == '':
                    (code, wapt_password_check) = postconf.passwordbox(
                        "Please enter the wapt server password again: ",
                        insecure=True,
                        width=100)
                    if code != postconf.DIALOG_OK:
                        exit(0)
                if wapt_password != wapt_password_check:
                    postconf.msgbox('Password mismatch !')
                elif len(wapt_password) < 10:
                    postconf.msgbox(
                        'Password must be at least 10 characters long !')
                else:
                    wapt_password_ok = True
            # Store only the PBKDF2-SHA256 hash, never the clear password.
            password = pbkdf2_sha256.hash(wapt_password.encode('utf8'))
            server_config['wapt_password'] = password
    else:
        # Quiet mode: generate a random password only if none is set, and
        # remember the clear text so it can be printed in the final summary.
        wapt_password = ''
        if not server_config['wapt_password']:
            print('[*] Generating random password for WAPT server')
            wapt_password = pwd.genword(entropy=56, charset="ascii_62")
            print('[*] WAPT admin password : %s' % wapt_password)
            password = pbkdf2_sha256.hash(wapt_password.encode('utf8'))
            server_config['wapt_password'] = password

    if not server_config['server_uuid']:
        server_config['server_uuid'] = str(uuid.uuid1())

    # waptagent authentication method (how machines may register).
    if not quiet:
        choices = [
            ("1",
             "Allow unauthenticated registration, same behavior as WAPT 1.3",
             True),
            ("2",
             "Enable kerberos authentication required for machines registration",
             False),
            ("3",
             "Disable Kerberos but registration require strong authentication",
             False),
        ]
        code, t = postconf.radiolist("WaptAgent Authentication type?",
                                     choices=choices,
                                     width=120)
        if code == 'cancel':
            print("\n\npostconfiguration canceled\n\n")
            sys.exit(1)
        if t == "1":
            server_config['allow_unauthenticated_registration'] = True
            server_config['use_kerberos'] = False
        if t == "2":
            server_config['allow_unauthenticated_registration'] = False
            server_config['use_kerberos'] = True
        if t == "3":
            server_config['allow_unauthenticated_registration'] = False
            server_config['use_kerberos'] = False
    else:
        print(
            '[*] Set default registration method to : Allow anyone to register + Kerberos disabled'
        )
        server_config['allow_unauthenticated_registration'] = True
        server_config['use_kerberos'] = False

    # Guess fqdn using socket
    fqdn = guess_fqdn()

    # Ensure a CA certificate/key pair for signing client certificates;
    # generate paths (and, below, the files) when not configured yet.
    clients_signing_certificate = server_config.get(
        'clients_signing_certificate')
    clients_signing_key = server_config.get('clients_signing_key')
    if not clients_signing_certificate or not clients_signing_key:
        clients_signing_certificate = os.path.join(wapt_root_dir, 'conf',
                                                   'ca-%s.crt' % fqdn)
        clients_signing_key = os.path.join(wapt_root_dir, 'conf',
                                           'ca-%s.pem' % fqdn)
        server_config[
            'clients_signing_certificate'] = clients_signing_certificate
        server_config['clients_signing_key'] = clients_signing_key

    if clients_signing_certificate is not None and clients_signing_key is not None and not os.path.isfile(
            clients_signing_certificate):
        print('Create a certificate and key for clients certificate signing')
        key = SSLPrivateKey(clients_signing_key)
        if not os.path.isfile(clients_signing_key):
            print('Create SSL RSA Key %s' % clients_signing_key)
            key.create()
            key.save_as_pem()
        crt = key.build_sign_certificate(cn=fqdn,
                                         is_code_signing=False,
                                         is_ca=True)
        print('Create X509 cert %s' % clients_signing_certificate)
        crt.save_as_pem(clients_signing_certificate)

    # Persist everything gathered so far back to the ini file.
    waptserver.config.write_config_file(cfgfile=options.configfile,
                                        server_config=server_config,
                                        non_default_values_only=True)

    print('[*] Protecting WAPT config file')
    run("/bin/chmod 640 %s" % options.configfile)
    run("/bin/chown wapt %s" % options.configfile)

    print('[*] Update WAPT repository')
    repo = WaptLocalRepo(wapt_folder)
    repo.update_packages_index(force_all=True)

    final_msg = [
        '[*] Postconfiguration completed.',
    ]
    if not quiet:
        postconf.msgbox("Press ok to start waptserver and wapttasks daemons")
    enable_waptserver()
    start_waptserver()

    # In this new version Apache is replaced with Nginx? Proceed to disable
    # Apache. After migration one can remove the Apache install altogether.
    stop_disable_httpd()

    # Nginx configuration: automatic in quiet mode, dialog-driven otherwise.
    # Failures are collected into final_msg instead of aborting.
    if quiet:
        try:
            generate_dhparam()
            nginx_cleanup()
            make_httpd_config('/opt/wapt/waptserver', fqdn,
                              options.force_https, server_config)
            enable_nginx()
            restart_nginx()
            setup_firewall()
        except subprocess.CalledProcessError as cpe:
            final_msg += [
                'Error while trying to configure Nginx!',
                'errno = ' + str(cpe.returncode) + ', output: ' + cpe.output
            ]
        except Exception as e:
            import traceback
            final_msg += [
                'Error while trying to configure Nginx!',
                traceback.format_exc()
            ]
    else:
        reply = postconf.yesno("Do you want to configure nginx?")
        if reply == postconf.DIALOG_OK:
            try:
                # Let the admin confirm/override the guessed FQDN.
                msg = 'FQDN for the WAPT server (eg. wapt.acme.com)'
                (code, reply) = postconf.inputbox(text=msg,
                                                  width=len(msg) + 4,
                                                  init=fqdn)
                if code != postconf.DIALOG_OK:
                    exit(1)
                else:
                    fqdn = reply
                generate_dhparam()
                nginx_cleanup()
                # Kerberos on Debian needs the spnego Nginx module installed.
                if server_config['use_kerberos']:
                    if type_debian():
                        if not check_if_deb_installed(
                                'libnginx-mod-http-auth-spnego'):
                            print(
                                '[*] Nginx - Missing dependency libnginx-mod-http-auth-spnego, please install first before configuring kerberos'
                            )
                            sys.exit(1)
                make_httpd_config('/opt/wapt/waptserver', fqdn,
                                  options.force_https, server_config)
                final_msg.append('Please connect to https://' + fqdn +
                                 '/ to access the server.')
                postconf.msgbox(
                    "The Nginx config is done. We need to restart Nginx?")
                run_verbose('systemctl enable nginx')
                run_verbose('systemctl restart nginx')
                setup_firewall()
            except subprocess.CalledProcessError as cpe:
                final_msg += [
                    'Error while trying to configure Nginx!',
                    'errno = ' + str(cpe.returncode) + ', output: ' +
                    cpe.output
                ]
            except Exception as e:
                import traceback
                final_msg += [
                    'Error while trying to configure Nginx!',
                    traceback.format_exc()
                ]

    # Migrate files for the new waptwua layout: flatten everything into the
    # top-level folder (bottom-up walk), then remove the emptied subfolders.
    # Syncthing's '.stfolder' marker is left untouched.
    wuafolder = server_config['waptwua_folder']
    for (root, dirs, files) in list(os.walk(wuafolder, topdown=False)):
        if root == os.path.join(wuafolder, '.stfolder'):
            continue
        for f in files:
            oldpath = os.path.join(root, f)
            newpath = os.path.join(wuafolder, f)
            if os.path.isfile(newpath):
                continue
            print('Move %s --> %s' % (oldpath, newpath))
            os.rename(oldpath, newpath)
        for d in dirs:
            if d == '.stfolder':
                continue
            print('Delete folder %s' % os.path.join(root, d))
            shutil.rmtree(os.path.join(root, d))

    final_msg.append('Please connect to https://' + fqdn +
                     '/ to access the server.')

    # Check if a Mongodb -> PostgreSQL migration is necessary (interactive
    # confirmation in dialog mode, automatic in quiet mode).
    if not quiet:
        if check_mongo2pgsql_upgrade_needed(options.configfile) and\
                postconf.yesno("It is necessary to migrate current database backend from mongodb to postgres. Press yes to start migration",no_label='cancel') == postconf.DIALOG_OK:
            upgrade2postgres(options.configfile)
    else:
        if check_mongo2pgsql_upgrade_needed(options.configfile):
            upgrade2postgres(options.configfile)

    WAPT_UID = good_pwd.getpwnam('wapt').pw_uid

    # CHOWN waptservertasks.sqlite -- it may have been created (as root)
    # before the wapt user existed.
    location_waptservertasks = os.path.join(wapt_root_dir, 'db',
                                            'waptservertasks.sqlite')
    if os.path.isfile(location_waptservertasks):
        os.chown(location_waptservertasks, WAPT_UID,
                 os.stat(location_waptservertasks).st_gid)

    # Create empty sync.json and rules.json files for all installations and
    # give them wapt:nginx ownership so both daemons can use them.
    def set_good_rights_nginx(files=[]):
        # Touch each file if missing, then hand it to wapt:nginx.
        for afile in files:
            if not os.path.isfile(afile):
                with open(afile, 'w'):
                    pass
            os.chown(afile, WAPT_UID, NGINX_GID)

    sync_json = os.path.join(
        os.path.abspath(os.path.join(wapt_folder, os.pardir)), 'sync.json')
    rules_json = os.path.join(
        os.path.abspath(os.path.join(wapt_folder, os.pardir)), 'rules.json')
    diff_rules_dir = wapt_folder + u'-diff-repos'
    if not (os.path.isdir(diff_rules_dir)):
        os.mkdir(diff_rules_dir)
        os.chown(diff_rules_dir, WAPT_UID, NGINX_GID)
    set_good_rights_nginx([sync_json, rules_json])

    # Final message: dialog box sized to the collected messages, or plain
    # prints (including the generated admin password) in quiet mode.
    if not quiet:
        width = 4 + max(10, len(max(final_msg, key=len)))
        height = 2 + max(20, len(final_msg))
        postconf.msgbox('\n'.join(final_msg), height=height, width=width)
    else:
        if wapt_password:
            final_msg.append('[*] WAPT admin password : %s\n' % wapt_password)
        for line in final_msg:
            print(line)
def get_random_hostname():
    """Return a random 64-bit value as a lowercase hex string, used as a
    unique identifier for hostnames."""
    bits = random.SystemRandom().getrandbits(64)
    # format(n, 'x') is hex(n) without the leading '0x'.
    return format(bits, 'x')
def sample(self,
           quantity: int,
           duration: int,
           additional_ursulas: float = 1.5,
           attempts: int = 5,
           pagination_size: int = None) -> List[str]:
    """
    Select n random Stakers, according to their stake distribution.

    The returned addresses are shuffled, so one can request more than needed and
    throw away those which do not respond.

    See full diagram here: https://github.com/nucypher/kms-whitepaper/blob/master/pdf/miners-ruler.pdf

    This method implements the Probability Proportional to Size (PPS) sampling algorithm.
    In few words, the algorithm places in a line all active stakes that have locked tokens for
    at least `duration` periods; a staker is selected if an input point is within its stake.
    For example:

    Stakes: |----- S0 ----|--------- S1 ---------|-- S2 --|---- S3 ---|-S4-|----- S5 -----|
    Points: ....R0.......................R1..................R2...............R3...........

    In this case, Stakers 0, 1, 3 and 5 will be selected.

    Only stakers which confirmed the current period (in the previous period) are used.

    :param quantity: number of checksum addresses to return.
    :param duration: minimum number of periods the stake must be locked for.
    :param additional_ursulas: oversampling factor applied per attempt.
    :param attempts: how many oversampled rounds to try before giving up.
    :param pagination_size: page size forwarded to get_all_active_stakers
        (None lets the callee pick a default).
    :raises NotEnoughStakers: if no tokens are locked for `duration`, or if
        `attempts` rounds never yield `quantity` distinct stakers.
    """
    system_random = random.SystemRandom()
    # n_tokens: total locked tokens; stakers: (address, locked_tokens) pairs
    # laid end-to-end on the [0, n_tokens) number line.
    n_tokens, stakers = self.get_all_active_stakers(
        periods=duration, pagination_size=pagination_size)
    if n_tokens == 0:
        raise self.NotEnoughStakers(
            'There are no locked tokens for duration {}.'.format(duration))

    sample_size = quantity
    for _ in range(attempts):
        # Oversample: the factor compounds each attempt, so later rounds
        # draw progressively more points.
        sample_size = math.ceil(sample_size * additional_ursulas)
        # Sorted points let the merge loop below walk both sequences once.
        points = sorted(
            system_random.randrange(n_tokens) for _ in range(sample_size))
        self.log.debug(
            f"Sampling {sample_size} stakers with random points: {points}")

        addresses = set()

        # Two-pointer merge of sorted points against cumulative stake sums:
        # a point falling inside a staker's [sum, sum + tokens) interval
        # selects that staker. Duplicate hits collapse via the set.
        point_index = 0
        sum_of_locked_tokens = 0
        staker_index = 0
        stakers_len = len(stakers)
        while staker_index < stakers_len and point_index < sample_size:
            current_staker = stakers[staker_index][0]
            staker_tokens = stakers[staker_index][1]
            next_sum_value = sum_of_locked_tokens + staker_tokens

            point = points[point_index]
            if sum_of_locked_tokens <= point < next_sum_value:
                addresses.add(to_checksum_address(current_staker))
                point_index += 1
            else:
                staker_index += 1
                sum_of_locked_tokens = next_sum_value

        self.log.debug(
            f"Sampled {len(addresses)} stakers: {list(addresses)}")
        if len(addresses) >= quantity:
            # NOTE(review): random.sample() on a set is deprecated in
            # Python 3.9 and a TypeError from 3.11 — convert to a sorted
            # list/tuple first when upgrading.
            return system_random.sample(addresses, quantity)

    raise self.NotEnoughStakers(
        'Selection failed after {} attempts'.format(attempts))