Example #1
    def create_server(self, zone, startup_script_location, proxy_username_identifier, proxy_username, proxy_password_identifier, proxy_password):
        name_generator = haikunator.Haikunator()
        title = name_generator.haikunate()
        server = {
            "server": {
                "zone": zone,
                "title": title,
                "hostname": title,
                "plan": "1xCPU-1GB", # smallest plan
                "storage_devices": {
                    "storage_device": [
                        {
                            "action": "clone",
                            "storage": "01000000-0000-4000-8000-000030060200", # ubuntu 16.04 lts
                            "title": title + "-disk", 
                            "size": 25,
                            "tier": "maxiops"
                        }
                    ]
                },
                "user_data": self.get_startup_script(startup_script_location, proxy_username_identifier, proxy_username, proxy_password_identifier, proxy_password)
            }
        }
        response = json.loads(
            requests.post(self.base_url + '/server',
                          json.dumps(server),
                          auth=self.api_authentication,
                          headers={"content-type": "application/json"}).text)

        return proxymodels.UpcloudServer(
            response['server']['uuid'],
            response['server']['ip_addresses']['ip_address'][1]['address'],
            proxy_username, proxy_password, 80)
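For context, a minimal sketch of the haikunatorpy API that all of these examples lean on; the outputs are random, so the comments only show plausible values.

from haikunator import Haikunator

h = Haikunator()
h.haikunate()                               # e.g. 'patient-king-8661' (default: '-' delimiter, 4-digit token)
h.haikunate(token_length=0)                 # e.g. 'long-flower' (no token, as in Example #3)
h.haikunate(token_length=0, delimiter='')   # e.g. 'billowingleaf' (as in Example #15)
h.haikunate(delimiter='.', token_hex=True)  # e.g. 'purple.dawn.9f3a' (hex token)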
Example #2
    def create_vm(self, zone, startup_script):
        name_gen = haikunator.Haikunator()
        image_response = self.compute_client.images().getFromFamily(
            project='ubuntu-os-cloud', family='ubuntu-1604-lts').execute()
        vm_name = name_gen.haikunate()
        server = {
            'name': vm_name,
            'machineType': f"zones/{zone}/machineTypes/n1-standard-1",
            'disks': [
                {
                    'boot': True,
                    'autoDelete': True,
                    'initializeParams': {
                        'sourceImage': image_response['selfLink']
                    }
                }
            ],
            'networkInterfaces': [
                {
                    'network': '/global/networks/default',
                    'accessConfigs': [
                        {'type': 'ONE_TO_ONE_NAT', 'name': 'external nat'}
                    ]
                }
            ],
            'metadata': {
                'items': [
                    {
                        'key': 'startup-script',
                        'value': startup_script
                    }
                ]
            }
        }
        creation_result = self.compute_client.instances().insert(
            project=self.project, zone=zone, body=server).execute()
        return vm_name
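The instances().insert() call above only enqueues a zone operation, so the VM may not exist yet when create_vm returns. A hedged sketch of how a caller could block on it, assuming the same googleapiclient compute v1 client; wait_for_operation is a hypothetical helper, not part of the original code:

import time

def wait_for_operation(compute_client, project, zone, operation_name):
    # Poll the zone operation returned by instances().insert() until it finishes.
    while True:
        op = compute_client.zoneOperations().get(
            project=project, zone=zone, operation=operation_name).execute()
        if op['status'] == 'DONE':
            if 'error' in op:
                raise RuntimeError(op['error'])
            return op
        time.sleep(1)

# e.g. wait_for_operation(self.compute_client, self.project, zone, creation_result['name'])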
Example #3
    def __init__(self, args, random_state=42, subdir=''):
        ops.reset_default_graph()
        np.random.seed(random_state)
        tf.set_random_seed(random_state)

        self.batch_size = args['batch_size']
        self.classes = args['classes']
        self.opt = args['optimizer']
        self.learning_rate = args['learning_rate']
        self.run = haikunator.Haikunator().haikunate(token_length=0)
        self.name = ''
        self.data = args['data']
        self.subdir = subdir

        for key, val in args.items():
            if isinstance(val, (tuple, list)):
                if isinstance(val[0], (CNN_description, FF_description)):
                    val = '_'.join(v.printit() for v in val)
                else:
                    val = '_'.join(str(v) for v in val)
            if isinstance(val, FunctionType):
                val = val.__name__
            if isinstance(val, type):
                val = val.__name__
            self.name += str(key)[:3] + '_' + str(val) + '_'
        self.name = self.name[:-1]
Example #4
    def __init__(self, graph=None, *args, **kwargs):
        # Constant seed for uniform results
        tf.set_random_seed(42)
        np.random.seed(42)

        ##
        ## Model Configuration
        if kwargs:
            self.folder = kwargs['type']
            self.S = kwargs['nb_speakers']
            self.args = kwargs
            self.learning_rate = kwargs['learning_rate']
        else:
            raise Exception(
                'Keyword arguments missing! Please pass the required '
                'arguments (see the documentation).')

        if graph is None:
            # Run ID
            self.runID = haikunator.Haikunator().haikunate()
            print('ID: {}'.format(self.runID))

            # Create a graph for this model
            self.graph = tf.Graph()

            with self.graph.as_default():

                with tf.name_scope('inputs'):

                    # Boolean placeholder signaling if the model is in learning/training mode
                    self.training = tf.placeholder(tf.bool, name='is_training')

                    # Batch of raw non-mixed audio
                    # shape = [ batch size , number of speakers, samples ] = [ B, S, L]
                    self.x_non_mix = tf.placeholder("float",
                                                    [None, None, None],
                                                    name='non_mix_input')

                    # Batch of raw mixed audio - Input data
                    # shape = [ batch size , samples ] = [ B , L ]
                    self.x_mix = tf.placeholder("float", [None, None],
                                                name='mix_input')

                    # Speaker indices used in the mixtures
                    # shape = [ batch size, #speakers]
                    self.I = tf.placeholder(tf.int32, [None, None],
                                            name='indicies')

                    shape_in = tf.shape(self.x_mix)
                    self.B = shape_in[0]
                    self.L = shape_in[1]

            # Create a session for this model based on the constructed graph
            config_ = tf.ConfigProto()
            config_.gpu_options.allow_growth = True
            config_.allow_soft_placement = True
            self.sess = tf.Session(graph=self.graph, config=config_)
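Examples #4 and #13 (below) are written against TensorFlow 1.x (tf.placeholder, tf.Session, tf.ConfigProto). Under TensorFlow 2.x the same graph-and-session style is still reachable through the tf.compat.v1 shim; a minimal sketch, assuming TF 2.x is installed:

import tensorflow as tf

tf1 = tf.compat.v1
tf1.disable_eager_execution()  # restore graph/session semantics

tf1.set_random_seed(42)

graph = tf.Graph()
with graph.as_default():
    x_mix = tf1.placeholder(tf.float32, [None, None], name='mix_input')

config_ = tf1.ConfigProto()
config_.gpu_options.allow_growth = True
sess = tf1.Session(graph=graph, config=config_)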
Example #5
def create_proxies(user_option, region):
    name_gen = haikunator.Haikunator()
    proxy_list = []
    proxy_list_name = input("Proxy list name: ")
    proxy_count = int(input("How many proxies would you like to generate: "))
    proxy_list_file = open(
        os.path.join(os.path.realpath(''), 'proxylists',
                     f'{proxy_list_name}.json'), 'a')
    if user_option == 1:
        cloud_provider = 'aws'
        proxy_gen = awsproxygen.AWSProxyGen()
    elif user_option == 2:
        cloud_provider = 'azure'
        proxy_gen = azureproxygen.AzureProxyGen()
    elif user_option == 3:
        cloud_provider = '100tb'
        proxy_gen = proxygen100tb.ProxyGen100TB()
    elif user_option == 4:
        cloud_provider = 'gcs'
        proxy_gen = googlecloudproxygen.GoogleCloudProxyGen()
    elif user_option == 5:
        cloud_provider = 'upcloud'
        proxy_gen = upcloudproxygen.UpcloudProxyGen()
    elif user_option == 6:
        cloud_provider = 'linode'
        proxy_gen = linodeproxygen.LinodeProxygen()
    elif user_option == 7:
        cloud_provider = 'vultr'
        proxy_gen = vultrproxygen.VultrProxyGen()
    elif user_option == 8:
        cloud_provider = 'digitalocean'
        proxy_gen = digitaloceanproxygen.DigitalOceanProxyGen()
    elif user_option == 9:
        cloud_provider = 'vpsie'
        proxy_gen = vpsieproxygen.VpsieProxyGen()
    else:
        # Without this branch, proxy_gen would be unbound for unknown options.
        raise ValueError(f'Unknown cloud provider option: {user_option}')
    servers = proxy_gen.create_proxies(region, proxy_count,
                                       name_gen.haikunate(),
                                       name_gen.haikunate())

    print("Here are your generated proxies: ")
    for x in servers:
        proxy_list.append(x.to_dict())
        print(x.to_string())

    proxy_list_dict = {
        'cloud_provider': cloud_provider,
        'region': region,
        'hourly_cost': len(proxy_list) * get_proxy_price(cloud_provider, str(region)),
        'proxies': proxy_list
    }
    json.dump(proxy_list_dict, proxy_list_file)
    proxy_list_file.close()
Example #6
def new_room(request):
    name_generator = haikunator.Haikunator()
    new_room = None
    while not new_room:
        with transaction.atomic():
            name = name_generator.haikunate()
            if Room.objects.filter(label=name).exists():
                continue
            new_room = Room.objects.create(label=name)
    return redirect(chat_room, label=name)
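The exists()-then-create loop above (also used in Examples #8 and #14) still leaves a small race window between the query and the insert. A hedged variant, assuming Room.label carries a unique constraint, that treats the database conflict itself as the retry signal:

from django.db import IntegrityError, transaction

def new_room(request):
    name_generator = haikunator.Haikunator()
    while True:
        name = name_generator.haikunate()
        try:
            with transaction.atomic():
                room = Room.objects.create(label=name)
            break  # name claimed atomically
        except IntegrityError:
            continue  # collision: draw a new haiku and retry
    return redirect(chat_room, label=room.label)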
Example #7
def main():
    name_generator = haikunator.Haikunator()
    proxy_gen = ProxyGen100TB()
    proxy_count = int(input('How many proxies do you want? '))
    proxies, servers = proxy_gen.create_proxies(proxy_count, 2,
                                                name_generator.haikunate(),
                                                name_generator.haikunate())
    for x in proxies:
        print(x)
    input('Type anything when you are ready to delete the VMs: ')
    proxy_gen.delete_vms(servers)
Example #8
def new_room(request):
    """
    Randomly create a new room, and redirect to it.
    """
    new_room = None
    while not new_room:
        with transaction.atomic():
            label = haikunator.Haikunator().haikunate()
            if Room.objects.filter(label=label).exists():
                continue
            new_room = Room.objects.create(label=label)
    return redirect(reverse('chatroom:chat_room', args=(label, )))
Example #9
    def create_vm(self, region, startup_script):
        name_gen = haikunator.Haikunator()
        server = {
            "name": name_gen.haikunate(),
            "region": region,
            "size": "1gb",
            "image": 49549315,
            "user_data": startup_script,
        }
        response = json.loads(
            requests.post(self.base_url + '/v2/droplets',
                          json.dumps(server),
                          headers=self.request_headers).text)

        return response['droplet']['id']
Example #10
    def create_vm(self, location_id, proxy_username, proxy_password):
        name_gen = haikunator.Haikunator()
        header = {"Authorization": f"Bearer {self.access_token}"}
        request = {
            'hostname': name_gen.haikunate(),
            'offer_id': '9a0e49c6-9f22-11e3-8af5-005056aa8af7',  # 1 CPU, 768 MB RAM
            'datacenter_id': location_id,
            'os_id': 'fbbb3371-283b-11e8-b4ba-005056aadd24'  # Ubuntu 16.04
        }
        response = json.loads(
            requests.post(self.base_url + '/vpsie', request,
                          headers=header).text)

        return proxymodels.VpsieServer(response['vpsie_id'], 'root',
                                       response['password'], response['ipv4'],
                                       proxy_username, proxy_password, 80)
Example #11
    def create_server(self, location, proxy_username, proxy_password,
                      stack_script_id):
        name_gen = haikunator.Haikunator()
        headers = {
            "Authorization": f"Bearer {self.access_token}",
            "content-type": "application/json"
        }
        server = {
            "region": location,
            "type": "g6-nanode-1",
            "backups_enabled": False,
            "booted": True,
            "image": "linode/ubuntu16.04lts",
            "root_pass": name_gen.haikunate(),
            "stackscript_id": stack_script_id
        }
        response = json.loads(
            requests.post(self.base_url + '/linode/instances',
                          json.dumps(server),
                          headers=headers).text)

        return proxymodels.LinodeServer(response['id'], response['ipv4'][0],
                                        proxy_username, proxy_password, 80)
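Note that haikunate() doubles here as the root password, which keeps it human-readable but guessable. A sketch of a stronger alternative using the standard secrets module, in the spirit of Example #15's generate_random_token (random_password is a hypothetical helper, not part of the original code):

import secrets
import string

def random_password(length=24):
    # Cryptographically secure alphanumeric password for "root_pass".
    alphabet = string.ascii_letters + string.digits
    return ''.join(secrets.choice(alphabet) for _ in range(length))

# e.g. "root_pass": random_password()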
Example #12
    def create_vm(self, location_id, proxy_username, proxy_password):
        name_generator = haikunator.Haikunator()
        vm_name = name_generator.haikunate()
        password = name_generator.haikunate()
        request_url = f'https://cp.100tb.com/rest-api/vps.json?api_key={self.api_key}'

        request_params = {
            'planId': 2698,  # v1-19 server
            'locationId': location_id,  # a number 2-20 corresponding to a location
            'templateId': self.get_template_id(location_id),
            'label': vm_name,
            'hostname': vm_name,
            'password': password,
            'billHourly': 'true'
        }
        creation_result = json.loads(
            requests.post(request_url, request_params).text)

        return proxymodels.Server100TB(location_id, creation_result['server'],
                                       vm_name, creation_result['ip'], 'root',
                                       password, proxy_username,
                                       proxy_password, 80)
Example #13
	def __init__(self, graph=None, *args, **kwargs):
		# Constant seed for uniform results
		tf.set_random_seed(42)
		np.random.seed(42)

		##
		## Model Configuration
		if kwargs:
			self.folder = kwargs['type']
			self.S = kwargs['nb_speakers']
			self.args = kwargs
			self.learning_rate = kwargs['learning_rate']
		else:
			raise Exception('Keyword arguments missing! Please pass the required arguments (see the documentation).')

		if graph is None:
			# Run ID
			self.runID = haikunator.Haikunator().haikunate()
			print('ID: {}'.format(self.runID))

			if not kwargs['pipeline']:
				# Create a graph for this model
				self.graph = tf.Graph()

				with self.graph.as_default():

					with tf.name_scope('inputs'):

						# Boolean placeholder signaling if the model is in learning/training mode
						self.training = tf.placeholder(tf.bool, name='is_training')

						# Batch of raw non-mixed audio
						# shape = [ batch size , number of speakers, samples ] = [ B, S, L]
						self.x_non_mix = tf.placeholder("float", [None, None, None], name='non_mix_input')

						# Batch of raw mixed audio - Input data
						# shape = [ batch size , samples ] = [ B , L ]
						self.x_mix = tf.placeholder("float", [None, None], name='mix_input')

						# Speaker indices used in the mixtures
						# shape = [ batch size, #speakers]
						self.I = tf.placeholder(tf.int32, [None, None], name='indicies')

						shape_in = tf.shape(self.x_mix)
						self.B = shape_in[0]
						self.L = shape_in[1]
			else:
				with tf.get_default_graph().as_default():
					with tf.name_scope('inputs'):

						# Boolean placeholder signaling if the model is in learning/training mode
						self.training = tf.placeholder(tf.bool, name='is_training')

						# Batch of raw non-mixed audio
						# shape = [ batch size , number of speakers, samples ] = [ B, S, L]
						self.x_non_mix = tf.identity(kwargs['non_mix'], name='non_mix_input')

						# Batch of raw mixed audio - Input data
						# shape = [ batch size , samples ] = [ B , L ]
						self.x_mix = tf.identity(kwargs['mix'], name='mix_input')

						# Speaker indices used in the mixtures
						# shape = [ batch size, #speakers]
						self.I = tf.identity(kwargs['ind'], name='indicies')

						shape_in = tf.shape(self.x_mix)
						self.B = shape_in[0]
						self.L = shape_in[1]

						tf.summary.audio(name= "audio/input/non-mixed", tensor = tf.reshape(self.x_non_mix, [-1, self.L]), sample_rate = config.fs, max_outputs=2)
						tf.summary.audio(name= "audio/input/mixed", tensor = self.x_mix[:self.B], sample_rate = config.fs, max_outputs=1)
Example #14
from django.shortcuts import render, redirect
from django.db import transaction
import haikunator
from .models import Room


hai = haikunator.Haikunator()

def about(request):
    """
    Starting page.
    """
    return render(request, 'chat/about.html')

def new_room(request):
    new_room = None
    while not new_room:
        with transaction.atomic():
            label = hai.haikunate()
            if Room.objects.filter(label=label).exists():
                continue
            new_room = Room.objects.create(label=label)
    return redirect('chat:chat_room', label=label)


def chat_room(request, label):
    room, created = Room.objects.get_or_create(label=label)
    messages = reversed(room.messages.order_by('-timestamp')[:50])
    template = 'chat/room.html'
    context = {
        'room': room,
        'messages': messages,
    }
    return render(request, template, context)
Example #15
import string
import secrets

from haikunator import Haikunator

haikunator = Haikunator()


def append_random_haiku(name):
    haiku = haikunator.haikunate(token_length=0, delimiter='')
    return "{}_{}".format(name, haiku)


def generate_random_token(size):
    # allow upper- and lowercase alphanumeric characters
    characters = string.ascii_letters + string.digits
    return ''.join(secrets.choice(characters) for _ in range(size))


def iterate_pod_number(course):
    total_pods = course.get_total_number_pods()
    if total_pods == 0:
        return 1
    else:
        # iterate from last pod number
        return course.pods.order_by('-number')[0].number + 1
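Hypothetical usage of the helpers above; the outputs are random and only illustrative, and course stands in for any object exposing get_total_number_pods() and pods:

name = append_random_haiku('pod')        # e.g. 'pod_billowingleaf'
token = generate_random_token(16)        # e.g. 'aZ3kP9qLmX2bN7cd'
next_pod = iterate_pod_number(course)    # 1 for a fresh course, else last pod number + 1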