Example #1
0
def create_token(*args, **kwargs):
    """Create a personal OAuth client plus bearer token for the current user.

    GET: render the creation form.
    POST: validate 'name' and 'scopes'; on success create a public client
    and a personal bearer token, index both in redis, then redirect to the
    token list.
    """
    if request.method == 'GET':
        return render_template("new_token.html")

    errors = dict()
    # (removed unused local: data = dict(client_type='public'))

    # Idiom fix: "x not in y" instead of "not x in y".
    if 'name' not in request.form:
        errors['name_error'] = "Name is required"

    if 'scopes' not in request.form:
        errors['scopes_error'] = 'Select at least one scope'

    if errors:
        return render_template("new_token.html", errors=errors)
    user = auth.current_user()
    # BUG FIX: the original client_id literal contained no %s placeholder,
    # so the "%" operator raised TypeError at runtime.  The domain portion
    # appears redacted -- TODO confirm the intended client_id format.
    client = auth.Client.from_dict(dict(name=request.form.get('name'),
                                        user_id=user,
                                        client_type='public',
                                        redirect_uris='http://localhost',
                                        client_id="%s@*****.**" % random_string(32),
                                        client_secret=random_string(32))).save()
    rc.sadd("user|%s|clients" % user, client.client_id)
    token = auth.Token.from_dict(dict(client_id=client.client_id,
                                      user_id=client.user_id,
                                      token_type='bearer',
                                      personal=True,
                                      access_token=random_string(64),
                                      scopes=" ".join(
                                          [x.encode('ascii') for x in request.form.getlist('scopes')]))).save()
    # Parenthesized print: identical output on Python 2 and 3.
    print("saved token %s" % repr(token))
    rc.sadd("user|%s|tokens" % user, token.token_id)
    return redirect("/oauth/tokens")
Example #2
0
    def run(self):
        """Execute one load-test step (retrieve tweets or create a tweet)
        and record the server-reported timers in self.custom_timers.

        Raises whatever went wrong while parsing the response, after
        printing a diagnostic line.
        """
        self.custom_timers = {}
        ts, action, user_id, tlen = self.next_act()
        if action == 'Retrieve':
            resp = self.s.get(url_get,
                              params={
                                  'user_id': user_id,
                                  'limit': 50,
                                  'pop': 0
                              })
            pref = 'get'
        else:
            tweet_id = self.tid_prefix + '-' + str(ts) + utils.random_string(6)
            resp = self.s.post(url_create,
                               data={
                                   'tweet_id': tweet_id,
                                   'user_id': user_id,
                                   'ts': ts,
                                   'content': utils.random_string(tlen)
                               })
            pref = 'create'

        try:
            respj = resp.json()
            # Prefix every server timer with the action that produced it.
            for k, v in respj['timer'].iteritems():
                self.custom_timers['{}_{}'.format(pref, k)] = v
        except Exception as e:
            print('exception: {}. {} {} {} {}\nresp:{}: {}'.format(
                e, ts, action, user_id, tlen, resp.status_code, resp.text))
            # BUG FIX: "raise e" discards the original traceback on
            # Python 2; a bare raise re-raises with the traceback intact.
            raise
Example #3
0
    def test_construction(self):
        """Construct TimeSeries both from explicit arguments and from an
        existing ChunkBuffer, checking all derived attributes."""
        # valid arguments, individual shape, dtype
        for dtype, maxshape in product((int, float, np.float32, np.int32, None), (None, (None,))):
            filename = random_string(random.randint(1, 10))
            dataset_name = random_string(random.randint(1, 10))
            buffer_length = random.randint(1, 10)
            shape = random_int_tuple(1, 10, random.randint(0, 4))
            # Expand (None,) to one None per dimension plus the time axis.
            maxshape = maxshape if maxshape is None else maxshape * (len(shape) + 1)
            series = TimeSeries(filename, dataset_name, buffer_length,
                                shape=shape, dtype=dtype, maxshape=maxshape)
            self.assertEqual(series.filename, Path(filename))
            self.assertEqual(series.dataset_name.relative_to("/"), Path(dataset_name))
            self.assertEqual(series.buffer_length, buffer_length)
            self.assertEqual(series.shape, shape)
            # A scalar series (empty shape) stores a one-element item.
            self.assertEqual(series.item.shape, shape if shape else (1,))
            # dtype=None is expected to fall back to np.float64.
            self.assertEqual(series.dtype, dtype if dtype else np.float64)
            self.assertEqual(series.item.dtype, dtype if dtype else np.float64)
            self.assertIsNone(series.maxtime)

        # valid arguments, from array
        for dtype in (int, float, np.float32, np.int32, None):
            shape = random_int_tuple(1, 10, random.randint(1, 5))
            array = np.random.uniform(-10, 10, shape).astype(dtype)
            buffer = ChunkBuffer("", "", data=array)
            series = TimeSeries(buffer)
            # Axis 0 of the source array is interpreted as the time axis.
            self.assertEqual(series.shape, shape[1:])
            self.assertEqual(buffer.dtype, dtype if dtype else np.float64)
            for i in range(shape[0]):
                series.select(i)
                np.testing.assert_allclose(series.item, array[i])
def prepape_auth_headers(session, include_cauth=False):
    """
    This function prepares headers with CSRF/CAUTH tokens that can
    be used in POST requests such as login/get_quiz.

    NOTE(review): the function name keeps the historical typo ("prepape")
    because external callers depend on it.

    @param session: Requests session.
    @type session: requests.Session
    @param include_cauth: Flag that indicates whether CAUTH cookies should be
        included as well.
    @type include_cauth: bool
    @return: Dictionary of headers.
    @rtype: dict
    """

    # csrftoken is simply a 20 char random string
    csrftoken = random_string(20)

    # now make a call to the authenticator url
    csrf2cookie = 'csrf2_token_%s' % random_string(8)
    csrf2token = random_string(24)
    cookie = "csrftoken=%s; %s=%s" % (csrftoken, csrf2cookie, csrf2token)

    if include_cauth:
        # NOTE(review): if the session has no CAUTH cookie this forges
        # "CAUTH=None" -- confirm callers guarantee the cookie exists.
        CAUTH = session.cookies.get('CAUTH')
        cookie = "CAUTH=%s; %s" % (CAUTH, cookie)

    logging.debug('Forging cookie header: %s.', cookie)
    headers = {
        'Cookie': cookie,
        'X-CSRFToken': csrftoken,
        'X-CSRF2-Cookie': csrf2cookie,
        'X-CSRF2-Token': csrf2token
    }

    return headers
Example #5
0
	def close(self):
		""" Closes the file and returns a URI with the final address of the file.
		If save_metadata is set, the file metadata is saved in the DHT.
		You can always access to the metadata with File.metadata """
		if self.closed: return
		logger.info('Closing %s'%self.uri.get_readable())
		if self.mode == 'w':
			# Flush pending writes and record the file-level attributes.
			self.flush(True)
			self.metadata.set('Main:parts', len(self.parts))
			self.metadata.set('Main:length', self.filelength)
			self.metadata.set('Main:hash', self.hasher.hexdigest())
			self.metadata.set('Main:p', '')
			if self.save_metadata:
				# variables used to chain metadata blocks
				puri = self.uri
				pmeta = self.metadata
				not_saved = True
				# crypter used to encrypt the metadata. There is always
				# a crypter to protect against casual attackers, but
				# if there is no Kf the crypter is nearly useless
				if SECURED:
					if self.keys[4]:
						mdencrypter = AES.new(self.keys[4], AES.MODE_CBC, self.uri.get_hd())
					else:
						mdencrypter = AES.new(self.uri.get_hd(), AES.MODE_CBC, self.uri.get_hd())
				else:
					mdencrypter = DummyEncrypter()
				for i in range(0, len(self.parts)):
					pmeta.set('Part:%d'%i, self.parts[i])
					# chain the metadata blocks, each block only with
					# DESC_PER_METAPART references to parts of the file
					if i<len(self.parts)-1 and i%self.DESC_PER_METAPART == self.DESC_PER_METAPART-1:
						# Start a new chained block under a fresh URI and
						# link the current one to it via 'Main:n'.
						nuri=URI(self.uri.uid,utils.random_nick(),'',self.keys)
						nuri.hd=utils.random_string(16,False)
						pmeta.set('Main:n',nuri.get_static())
						m=pmeta.save()
						# Pad 'Main:p' with random filler so the encrypted
						# block is exactly BLOCK_SIZE long.
						pmeta.set('Main:p',utils.random_string(self.BLOCK_SIZE-len(m)))
						m=mdencrypter.encrypt(pmeta.save())
						dfs.dht.put(puri.get_hd(),m,puri.nick)
						pmeta=utils.Config()
						pmeta.set('Main:p','')
						puri=nuri
						not_saved=False
					else:
						not_saved=True
				if not_saved:
					# Save the final (or only) partially filled block.
					m=pmeta.save()
					pmeta.set('Main:p',utils.random_string(self.BLOCK_SIZE-len(m)))
					m=mdencrypter.encrypt(pmeta.save())
					dfs.dht.put(puri.get_hd(),m,puri.nick)

			# Create the final metadata
			for i in range(0,len(self.parts)):
				self.metadata.set('Part:%d'%i,self.parts[i])
		else:
			# In read, free the buffer
			self.buffer = None
		self.closed = True
		return self.uri
Example #6
0
 def test_random_string(self):
     """Two independent random strings have the requested length and differ."""
     from utils import random_string

     first = random_string(10)
     self.assertEqual(len(first), 10)
     second = random_string(10)
     self.assertEqual(len(second), 10)
     self.assertNotEqual(first, second)
Example #7
0
 def __init__(self, valid_is_students=(True,)):
     """Build a random test user and print its attributes.

     :param valid_is_students: candidate values for is_student; one is
         picked at random.  FIX: the default was a mutable list ([True]) --
         the classic shared-mutable-default pitfall; an immutable tuple
         behaves identically for random.choice().
     """
     self.first_name = ut.random_string(10)
     self.last_name = ut.random_string(10)
     self.username = ut.random_string(10)
     self.email = ut.random_email()
     self.password = ut.random_string(10)
     self.is_student = random.choice(valid_is_students)
     print(self.__dict__)
Example #8
0
 def random():
     """Build a Map populated with random coordinates, names and metrics."""
     latitude = utils.random_float(-90, 90)
     longitude = utils.random_float(-180, 180)
     return Map(latitude=latitude,
                longitude=longitude,
                building_name=utils.random_string(),
                level_name=utils.random_string(),
                floor_number=utils.random_int(-5, 5),
                scale=utils.random_float(0, 300),
                path=utils.random_string())
Example #9
0
 def test_verify_fail(self):
     """Corrupting one character of the message must break verification."""
     msg = random_string(40)
     # BUG FIX: randint(0, len(msg)) is inclusive at BOTH ends, so index
     # could equal len(msg) and msg[index] raised IndexError.  randrange
     # picks from [0, len(msg)).
     index = random.randrange(len(msg))
     original_char = msg[index]
     new_char = random_string(1)
     if original_char == new_char:
         # Replacement happened to match the original -- nothing to test.
         return
     wrong = msg[:index] + new_char + msg[index + 1:]
     self.assertFalse(verify(wrong, conceal(msg)))
Example #10
0
 def generate_cases(revision, commands):
     """
     Run each generator command to create new test cases for a revision.

     report: [
         {
             success: True / False
             error: ...
             case_number: 1
             detail: ...
         }, { ... }, ...
     ]
     """
     generators = {}
     current_task = Task.objects.create(revision=revision, abstract="GENERATE CASES")
     report = []
     for command_string in commands:
         ret = {"command": command_string}
         command = command_string.split()
         program_name, program_args = command[0], command[1:]
         try:
             # Compile each generator at most once; a cached CompileError is
             # re-raised for any later command that names the same program.
             if program_name not in generators:
                 program = revision.programs.get(name=program_name, tag="generator")
                 generators[program_name] = Runner(program)
             elif isinstance(generators[program_name], CompileError):
                 raise generators[program_name]
             runner = generators[program_name]
             # Next case number is one past the current maximum.
             if revision.cases.all().count():
                 case_number = revision.cases.all().aggregate(Max("case_number"))["case_number__max"] + 1
             else: case_number = 1
             new_case = Case(create_time=datetime.now(),
                             description="Gen \"%s\"" % command_string,
                             case_number=case_number)
             # Placeholder files; the generator writes the real input below.
             new_case.input_file.save("in_" + random_string(), ContentFile(b""), save=False)
             new_case.output_file.save("out_" + random_string(), ContentFile(b""), save=False)
             running_result = runner.run(args=program_args, stdout=new_case.input_file.path,
                                         max_time=revision.time_limit * 5 / 1000,
                                         max_memory=revision.memory_limit * 3)
             CaseManagementTools.reformat_file(new_case.input_file.path, revision.well_form_policy)
             new_case.save_fingerprint(revision.problem_id)
             ret["case_number"] = case_number
             with transaction.atomic():
                 new_case.save()
                 revision.cases.add(new_case)
                 ret.update(case_number=case_number,
                            success=running_result["verdict"] == "OK",
                            detail=running_result,
                            generated=new_case.input_preview)
         except (Program.MultipleObjectsReturned, Program.DoesNotExist):
             ret.update(success=False,
                        error="There should be exactly one program tagged 'generator' that fits the command.")
         except CompileError as e:
             generators[program_name] = e
             ret.update(success=False, error=e.error)
         report.append(ret)
         # NOTE(review): status/report are assigned every iteration but only
         # persisted by the final save() below -- confirm whether an
         # intermediate current_task.save() was intended here.
         current_task.status = -2
         current_task.report = json.dumps(report)
     current_task.status = 0 if all(map(lambda r: r["success"], report)) else -1
     current_task.save()
Example #11
0
    def test_terminals_equal(self):
        """Terminal nodes compare equal iff their values are equal."""
        for _ in range(MAX_REPS):
            first = random_string()
            second = random_string()
            # Re-draw until the two names differ.
            while first == second:
                first = random_string()

            self.assertTrue(
                Node(NodeType.TERMINAL, value=first).equals(
                    Node(NodeType.TERMINAL, value=first)))
            self.assertFalse(
                Node(NodeType.TERMINAL, value=first).equals(
                    Node(NodeType.TERMINAL, value=second)), )
Example #12
0
    def card_pin_lookup(self, card_number, pin):
        """Returns a tuple with its first element indicating pin's validity.

        On a correct pin a fresh (session_id, session_key) pair is
        generated and registered in self.sessions; otherwise both are None.

        :param card_number: card number to be looked up
        :param pin: the pin associated with the card_number
        :return: a Tuple (pin_valid, (session_id, session_key))
        :raises KeyError: if card_number is unknown
        """
        # Guard clause first; also hoist the repeated dict lookup.
        if card_number not in CARD_DATABASE:
            raise KeyError('Card does not exist in the Bank\'s database')
        expected_pin = CARD_DATABASE[card_number]
        session_id, session_key = None, None
        if pin == expected_pin:
            session_id = utils.random_string(26)
            session_key = utils.random_string(8)
            self.sessions[session_id] = session_key
        return pin == expected_pin, (session_id, session_key)
Example #13
0
 def generate_alias(self):
     """Build one (field, new_value) alias for this patient record.

     The altered field is drawn from config.enabled_alias_changes; the new
     value is a plausible variation (random id, initial, blanked field,
     shifted birth date, ...).
     """
     change = random.choice(config.enabled_alias_changes)
     if change == 'patient_id':
         id_length = random.randint(config.min_id_length,
                                    config.max_id_length)
         alias = ('id', utils.random_string(id_length))
     elif change == 'first_name':
         # Replace the first name with its initial.
         alias = (change, self.first_name[0])
     elif change == 'middle_name':
         if len(self.middle_name) > 1:
             if random.randint(0, 1):
                 alias = (change, self.middle_name[0])
             else:
                 alias = (change, '')
         elif len(self.middle_name) == 1:
             alias = (change, '')
         elif len(self.middle_name) == 0:
             # No middle name yet: invent a random initial.
             alias = (change,
                      utils.random_string(1, string.ascii_uppercase))
     elif change == 'last_name':
         alias = (change, names.get_last_name())
     elif change == 'sex':
         if random.randint(0, 1):
             alias = (change, 'O')
         else:
             alias = (change, '')
     elif change == 'birth_date':
         part = random.choice(['day', 'month', 'year'])
         num_days = calendar.monthrange(self.birth_date.year,
                                        self.birth_date.month)
         # Shift toward the valid range when at a lower/upper boundary.
         # BUG FIX: the lower-bound check compared the whole date object to
         # 1900; it must compare the year.
         if (self.birth_date.day == 1 or self.birth_date.month == 1 or
                 self.birth_date.year == 1900):
             amount = 1
         elif (self.birth_date.day == num_days[1] or
               self.birth_date.month == 12):
             amount = -1
         else:
             amount = random.choice([-1, 1])
         if part == 'day':
             delta = self.birth_date.day + amount
             alias = (change, self.birth_date.replace(day=delta))
         # BUG FIX: this branch was a bare "if", so a 'day' alias always
         # fell through to the year branch below and was overwritten.
         elif part == 'month':
             delta = self.birth_date.month + amount
             alias = (change, self.birth_date.replace(month=delta))
         else:
             delta = self.birth_date.year + amount
             alias = (change, self.birth_date.replace(year=delta))
     return alias
Example #14
0
    def get_exam_cal(self, msg, userId):
        """Build an iCalendar file from the user's exam list, upload it and
        return the download URL (WeChat cannot send files directly)."""
        import os  # hoisted: was imported mid-function

        self.class_database_op.step_back(userId)

        jwxt = self.class_database_op.get_jwxt_info(userId)
        if not jwxt:
            return "您还没有保存您的信息,选3来输入登录信息吧~"

        itchatmp.send("正在查询,请稍候。。。", msg['FromUserName'])

        request_utils_ins = request_utils.request_utils()
        exams = request_utils_ins.jwxt_get_exam_list(userId)
        print(exams)

        cal = utils.exams_list_to_cal(exams)

        # Upload under a random public name so the cache path stays hidden.
        file_name = 'cache/' + str(userId) + '.ics'
        file_show_name = utils.random_string(32) + '.ics'
        # BUG FIX: dropped the redundant f.close(); the with-statement
        # already closes the file on exit.
        with open(file_name, 'wb') as f:
            f.write(cal.to_ical())
        file_url = utils.upload_file(file_name, file_show_name)
        os.remove(file_name)
        itchatmp.send("请接收文件~由于微信限制,机器人不能给您直接发送文件,请将以下链接复制到外部浏览器下载~", msg['FromUserName'])
        return file_url
Example #15
0
def create_agent_group_matching_agent(context, amount_of_agent_groups,
                                      amount_of_tags, **kwargs):
    """Create agent group(s) whose tags are a random subset of the tags of
    the agent stored on the behave context.

    :param amount_of_tags: digit string, or "all" to use every agent tag.
    """
    # Idiom fix: truthiness instead of "is False" / "is True" comparisons.
    if not amount_of_tags.isdigit():
        assert_that(amount_of_tags, equal_to("all"),
                    'Unexpected value for amount of tags')

    group_description = kwargs.get("group_description",
                                   agent_group_description)

    tags_in_agent = context.agent["orb_tags"]
    if context.agent["agent_tags"] is not None:
        tags_in_agent.update(context.agent["agent_tags"])
    # BUG FIX: random.sample() requires a sequence on Python 3; a dict keys
    # view raised TypeError, so materialize the keys into a list.
    tags_keys = list(tags_in_agent)

    if amount_of_tags.isdigit():
        amount_of_tags = int(amount_of_tags)
    else:
        amount_of_tags = len(tags_keys)
    assert_that(tags_keys,
                has_length(greater_than_or_equal_to(amount_of_tags)),
                "Amount of tags greater than tags"
                "contained in agent")
    tags_to_group = {
        key: tags_in_agent[key]
        for key in sample(tags_keys, amount_of_tags)
    }

    for _ in range(int(amount_of_agent_groups)):
        agent_group_name = agent_group_name_prefix + random_string()
        # NOTE(review): the returned group data is unused here; the snippet
        # looks truncated -- confirm whether callers rely on it downstream.
        agent_group_data = generate_group_with_valid_json(
            context.token, agent_group_name, group_description, tags_to_group,
            context.agent_groups)
Example #16
0
 def post(self, request, *args, **kwargs):
     """Dispatch a case-management POST by its 'type' field.

     manual: save a hand-written input/output pair.
     upload: store an uploaded case file and ingest it.
     generate: run a generator program with raw parameters.
     stress: stress-test a submission for 1-5 minutes.
     """
     if request.POST['type'] == 'manual':
         # Renamed locals: "input" shadowed the builtin.
         input_text = request.POST['input']
         output_text = request.POST['output']
         well_form = request.POST.get("wellForm") == "on"
         if well_form:
             input_text, output_text = well_form_text(input_text), well_form_text(output_text)
         if not input_text:
             raise ValueError('Input file cannot be empty')
         save_case(self.session, input_text.encode(), output_text.encode(), well_form=well_form)
     elif request.POST['type'] == 'upload':
         # Renamed local: "file" shadowed a builtin (Python 2).
         uploaded = request.FILES['file']
         file_directory = '/tmp'
         file_path = save_uploaded_file_to(uploaded, file_directory, filename=random_string(), keep_extension=True)
         process_uploaded_case(self.session, file_path)
         remove(file_path)
     elif request.POST['type'] == 'generate':
         generator = request.POST['generator']
         raw_param = request.POST['param']
         generate_input('Generate cases', self.session, generator, raw_param)
     elif request.POST['type'] == 'stress':
         generator = request.POST['generator']
         raw_param = request.POST['param']
         submission = request.POST['submission']
         # Minutes converted to seconds; only 1-5 minutes are accepted.
         # Renamed local: "time" shadowed the stdlib module name.
         duration = int(request.POST['time']) * 60
         if duration < 60 or duration > 300:
             raise ValueError('Time not in range')
         stress('Stress test', self.session, generator, submission, raw_param, duration)
     return HttpResponse()
Example #17
0
def encrypt_exploit(exploit_dir, target_team, config, signer=None):
    """Zip an exploit directory and PGP-encrypt it for the instructor and
    the target team, optionally signing it with the signer's key.

    Returns the path of the encrypted archive, or None if gpg failed.
    """
    # Remove trailing slash, for user convenience
    exploit_dir = remove_trailing_slash(exploit_dir)
    out_file = exploit_dir + ".zip.pgp"

    # Retrieve information from config
    teams = config["teams"]
    instructor_pubkey = teams["instructor"]["pub_key_id"]
    target_pubkey = teams[target_team]['pub_key_id']

    # Zip the directory
    tmp_path = "/tmp/gitctf_%s" % random_string(6)
    shutil.make_archive(tmp_path, "zip", exploit_dir)
    zip_file = tmp_path + ".zip"  # make_archive() automatically appends suffix.

    # Encrypt the zipped file
    # NOTE(review): the gpg command line is assembled via string
    # interpolation; paths or key ids containing spaces or shell
    # metacharacters would break or be unsafe. Prefer passing an argument
    # list if run_command supports it -- flagged, behavior kept as-is.

    encrypt_cmd = "gpg -o %s " % out_file
    if signer is not None:
        signer_pubkey = config["individual"][signer]['pub_key_id']
        encrypt_cmd += "--default-key %s --sign " % signer_pubkey
    encrypt_cmd += "-e -r %s -r %s " % (instructor_pubkey, target_pubkey)
    encrypt_cmd += "--armor %s" % zip_file
    _, err, ret = run_command(encrypt_cmd, None)
    rmfile(zip_file)  # Clean up zip file.
    if ret != 0:
        print "[*] Failed to sign/encrypt %s" % zip_file
        print err
        return None

    return out_file
Example #18
0
def edit_profile():
    """
    Profile editing view.

    GET renders the edit form; POST stores an uploaded profile picture,
    copies editable form fields onto the current user, optionally changes
    the password, then redirects to the profile page.
    :return: HTTP Response
    """

    if request.method == 'POST':

        # NOTE(review): the client-supplied filename is used almost verbatim
        # (only prefixed with 5 random chars); a crafted filename containing
        # path separators could escape the target directory -- consider
        # werkzeug.utils.secure_filename. Flagged, behavior kept as-is.
        if 'profile_picture' in request.files and request.files[
                'profile_picture'].filename:
            request.files['profile_picture'].filename = random_string(
                5) + request.files['profile_picture'].filename
            request.files['profile_picture'].save(
                os.path.join(os.getcwd(), 'static', 'media',
                             'profile_pictures',
                             request.files['profile_picture'].filename))
            current_user.profile_photo = os.path.join(
                '/static', 'media', 'profile_pictures',
                request.files['profile_picture'].filename)

        # Copy whitelisted form fields onto the user object.
        for field in Users.editable_fields:
            if field in request.form:
                current_user.__setattr__(field, request.form[field])

        # NOTE(review): this compares 'old_password' to 'old_password2'
        # (the OLD password typed twice) before setting 'new_password';
        # confirm the confirmation field is really the old password by
        # design and not a typo for the new one.
        if request.form.get('old_password', '-') == request.form.get('old_password2', '+') and\
                current_user.check_password(request.form['old_password']) and 'new_password' in request.form:
            current_user.set_password(request.form['new_password'])

        current_user.update()

        return redirect(url_for('core.profile',
                                username=current_user.username))

    return render_template('profile-edit.html')
Example #19
0
 def post(self, request, *args, **kwargs):
     """Store every uploaded file under UPLOAD_DIR/<problem pk> with a
     randomized name (the original extension is kept)."""
     target_directory = path.join(settings.UPLOAD_DIR, str(self.problem.pk))
     for uploaded in request.FILES.getlist("files[]"):
         randomized = path.splitext(uploaded.name)[0] + '.' + random_string(16)
         save_uploaded_file_to(uploaded, target_directory,
                               filename=randomized,
                               keep_extension=True)
     return HttpResponse()
Example #20
0
    def __init__(self):
        """Populate a random synthetic patient: id, sex, names, birth date,
        aliases and studies."""
        id_length = random.randint(config.min_id_length, config.max_id_length)
        self.id = utils.random_string(id_length)

        sex = random.choice(['male', 'female'])
        self.sex = 'M' if sex == 'male' else 'F'

        self.first_name = names.get_first_name(sex)
        self.last_name = names.get_last_name()

        # Optionally add a middle name: either a full name or an initial.
        self.middle_name = ''
        if config.gen_mid_name and random.random() < config.gen_mid_name_chance:
            if random.randint(0, 1):
                self.middle_name = names.get_first_name(sex)
            else:
                self.middle_name = names.get_first_name(sex)[0]

        start = datetime.datetime(1900, 1, 1)
        end = datetime.datetime.now()
        self.birth_date = utils.random_date_between(start, end)

        # Roll gen_alias_chance up to gen_alias_max times.
        self.aliases = []
        if config.gen_alias:
            for _ in xrange(config.gen_alias_max):
                if random.random() < config.gen_alias_chance:
                    self.aliases.append(self.generate_alias())

        self.studies = self.generate_studies(self.birth_date)
Example #21
0
def agent_is_created_matching_group(context, amount_of_group):
    """Create an agent whose tags match the requested number of groups and
    stash it (plus its key) on the behave context."""
    context.agent_name = agent_name_prefix + random_string(10)
    matching_tags = tags_to_match_k_groups(context.token, amount_of_group,
                                           context.agent_groups)
    context.agent = create_agent(context.token, context.agent_name,
                                 matching_tags)
    context.agent_key = context.agent["key"]
Example #22
0
 def test_cipher_stress(self):
     """Encrypt/decrypt many random payloads; decryption must restore the
     original bytes every time."""
     cipher = Cipher(Cipher.generate_key())
     for _ in range(1000):
         plaintext = random_string(1000)
         round_trip = cipher.decrypt(cipher.encrypt(plaintext))
         self.assertEqual(round_trip, str.encode(plaintext))
Example #23
0
File: main.py Project: tbsd/hehmda
 def send():
     """Persist a chat message posted by the current session's user.

     Returns the stored message as JSON, or an error payload when the user
     is not a member of the target chat.
     """
     user = validate_session(users, request)
     data = request.get_json(force=True)
     chat_id = data['chat_id']
     # only if user is member of this chat
     if (user and chat_id in user['chat_list']):
         message_id = random_string()
         # BUG FIX: int(time.time()) * 1000 truncated the timestamp to whole
         # seconds (contradicting the "milliseconds" intent); multiply
         # before truncating to keep millisecond precision.
         timestamp = int(time.time() * 1000)
         content = data['content']
         # replace 'script' with its utf-8 code
         # to prevent malicious code execution
         # NOTE(review): this blacklist is NOT a sound XSS defence (event
         # handlers, alternate encodings, etc. pass through); escape on
         # output instead -- flagged, behavior kept as-is.
         content = content.replace('script',
                                   '&#x73;&#x63;&#x72;&#x69;&#x70;&#x74;')
         message = {
             'id': message_id,
             'author': user['id'],
             'time': timestamp,
             'content': content
         }
         push_to_db(chats, chat_id, 'messages', message, False)
         return json_util.dumps(message)
     return json_util.dumps({
         'code': 401,
         'status_msg': 'Вы не состоите в данном чате.'
     })
Example #24
0
    def run(self):
        """Probe every domain for sensitive URLs, first detecting wildcard
        (catch-all) responses via a random 404 path and excluding their
        status codes from the valid set."""
        path404 = "/" + utils.random_string(30, defaults.ALLOWED_CHARS)
        wildcard_responses = trio.run(self.get_sensitive_urls, {path404: ""})

        valid_status_codes = {}
        for domain, _ in self.iter_domains():
            valid_status_codes[domain] = defaults.DEFAULT_VALID_STATUS_CODES[:]
            if domain in wildcard_responses and wildcard_responses[domain]:
                first_key = list(wildcard_responses[domain].keys())[0]
                wildcard_status_code = wildcard_responses[domain][first_key][
                    'status_code']
                warning(
                    f"Found wildcard status_code={wildcard_status_code} for {domain}"
                )
                # Robustness fix: only remove when present -- .remove()
                # raised ValueError when the wildcard code was not one of
                # the defaults.
                if wildcard_status_code in valid_status_codes[domain]:
                    valid_status_codes[domain].remove(wildcard_status_code)

        sensitive_urls = trio.run(self.get_sensitive_urls, self.url_index,
                                  valid_status_codes)
        print("Sensitive URLS: ", sensitive_urls)
        # Removed dead code: a local "import json" plus a json.dumps() call
        # whose result was discarded.

        for domain, domain_data in self.iter_domains():
            domain_data['paths'] = sensitive_urls.get(domain, {})

        return self.data
Example #25
0
File: case.py Project: qinmaye/eoj3
    def form_valid(self, form):
        """Create test cases for the current revision from the validated
        form: either one case from pasted text, or many from an uploaded
        zip batch (with optional per-case data.json configuration).

        NOTE(review): this excerpt ends right after collecting `cases`;
        persistence of the list presumably follows outside the visible
        snippet -- confirm against the full source.
        """
        global_create_time = datetime.now()
        option = form.cleaned_data["option"]
        case_number_start = form.cleaned_data["case_number"]
        cases = []

        if option == "text":
            # Manual creation: reformat pasted input/output per policy.
            input_binary = REFORMAT(form.cleaned_data["input_text"].encode(),
                                    self.revision.well_form_policy)
            output_binary = REFORMAT(form.cleaned_data["output_text"].encode(),
                                     self.revision.well_form_policy)
            description = "手动创建."
            case = Case(create_time=global_create_time,
                        in_samples=form.cleaned_data["in_samples"],
                        description=description,
                        activated=form.cleaned_data["activated"])
            case.input_file.save("in", ContentFile(input_binary), save=False)
            case.output_file.save("out",
                                  ContentFile(output_binary),
                                  save=False)
            case.save_fingerprint(self.problem.id)
            cases.append(case)

        elif option == "batch":
            # Batch creation: unzip into /tmp, read optional data.json.
            tmp_directory = '/tmp/' + random_string()
            with zipfile.ZipFile(form.cleaned_data["batch_file"]) as myZip:
                myZip.extractall(path=tmp_directory)
            case_config = {}
            if path.exists(path.join(tmp_directory, "data.json")):
                with open(path.join(tmp_directory,
                                    "data.json")) as json_config:
                    case_config = json.loads(json_config.read())
            for inf, ouf in sort_data_list_from_directory(tmp_directory):
                with open(path.join(tmp_directory, inf),
                          'rb') as ins, open(path.join(tmp_directory, ouf),
                                             'rb') as ous:
                    conf = case_config.get(inf, {})
                    case = Case(
                        create_time=global_create_time,
                        description=conf.get(
                            "description", "\"%s\": (%s, %s)." %
                            (form.cleaned_data["batch_file"].name, inf, ouf)),
                        in_samples=conf.get("in_samples", False),
                        activated=conf.get("activated", True),
                        group=conf.get("group", 0),
                        points=conf.get("points", 10))
                    if self.revision.well_form_policy:
                        case.input_file.save("in",
                                             ContentFile(
                                                 REFORMAT(ins.read(), True)),
                                             save=False)
                        case.output_file.save("out",
                                              ContentFile(
                                                  REFORMAT(ous.read(), True)),
                                              save=False)
                    else:
                        case.input_file.save("in", File(ins), save=False)
                        case.output_file.save("out", File(ous), save=False)
                    case.save_fingerprint(self.problem.id)
                    cases.append(case)
Example #26
0
 def run_case_output(revision, case_set, solution):
     """
     Run the model solution over every unlocked case to (re)generate its
     output file, persisting progress into a Task.

     report: similar to generating cases, [{ }, { }, ... { }]

     NOTE(review): the try-block is never closed in this excerpt (no
     except/finally visible) -- the snippet appears truncated.
     """
     current_task = Task.objects.create(revision=revision, abstract="RUN OUTPUT, %d cases" % len(case_set))
     try:
         runner = Runner(solution)
         result = []
         failed = False
         for case in case_set:
             if case.output_lock: continue  # output content protected
             with UpdateManager(case, revision) as case:
                 # Fresh placeholder output file; the runner writes into it.
                 case.output_file.save("out_" + random_string(), ContentFile(b''), save=False)
                 run_result = runner.run(stdin=case.input_file.path, stdout=case.output_file.path,
                                         max_time=revision.time_limit * 3 / 1000,
                                         max_memory=revision.memory_limit * 2)
                 CaseManagementTools.reformat_file(case.output_file.path, revision.well_form_policy)
                 case.save_fingerprint(revision.problem_id)
                 with transaction.atomic():
                     case.save()
                     result.append({
                         "case_number": case.case_number,
                         "success": run_result["verdict"] == "OK",
                         "detail": run_result
                     })
                     if run_result["verdict"] != "OK":
                         failed = True
                     # Status -2 marks the task in progress; the report is
                     # persisted after every case.
                     current_task.status = -2
                     current_task.report = json.dumps(result)
                     current_task.save()
Example #27
0
def create_new_dataset(context, amount_of_datasets, group_order, amount_of_sinks, sink_number):
    """Create dataset(s) binding agent group(s), the current policy and sink(s).

    :param context: behave context carrying token, policy, sink(s) and agent_groups
    :param amount_of_datasets: number of datasets to create (string or int)
    :param group_order: "first" / "second" / "last" / "an existing" agent group
    :param amount_of_sinks: number of sinks to bind to each dataset
    :param sink_number: grammatical marker; must be "sink" or "sinks"
    """
    assert_that(sink_number, any_of(equal_to("sink"), equal_to("sinks")), "Unexpected value for sink")
    assert_that(group_order, any_of(equal_to("first"), equal_to("second"), equal_to("last"), equal_to("an existing")),
                "Unexpected value for group.")

    if group_order == "an existing":
        # Pick N distinct existing groups at random, one per dataset.
        groups_to_be_used = random.sample(list(context.agent_groups.keys()), int(amount_of_datasets))
    else:
        # Positional selection only makes sense for a single dataset.
        assert_that(str(amount_of_datasets), equal_to(str(1)), "For more than one dataset, pass 'an existing' as group"
                                                          " parameter")
        order_convert = {"first": 0, "last": -1, "second": 1}
        groups_to_be_used = [list(context.agent_groups.keys())[order_convert[group_order]]]

    for i in range(int(amount_of_datasets)):
        context.considered_timestamp = datetime.now().timestamp()
        token = context.token
        if amount_of_sinks == 1:
            context.used_sinks_id = [context.sink['id']]
        else:
            # todo create scenario with multiple sinks
            context.used_sinks_id = context.existent_sinks_id[:int(amount_of_sinks)]
        policy_id = context.policy['id']
        dataset_name = dataset_name_prefix + random_string(10)
        context.dataset = create_dataset(token, dataset_name, policy_id, groups_to_be_used[i], context.used_sinks_id)
        local_orb_path = configs.get("local_orb_path")
        dataset_schema_path = local_orb_path + "/python-test/features/steps/schemas/dataset_schema.json"
        is_schema_valid = validate_json(context.dataset, dataset_schema_path)
        assert_that(is_schema_valid, equal_to(True), f"Invalid dataset json. \n Dataset = {context.dataset}. \n"
                                                     f"Policy: {context.policy}. \n Group: {groups_to_be_used[i]}. \n"
                                                     f"Sink(s): {context.used_sinks_id}")
        # De-duplicated from the original if/else: initialise the registry
        # once, then record the dataset unconditionally.
        if 'datasets_created' not in context:
            context.datasets_created = dict()
        context.datasets_created[context.dataset['id']] = context.dataset['name']
Example #28
0
File: main.py Project: tbsd/hehmda
 def authorization():
     """Authenticate a user, rotate their session token, and return it.

     Reads {"login", "password"} from the JSON body. On success, returns the
     new session token as JSON and sets it as a `session` cookie; on failure,
     returns a 401-style JSON payload.
     """
     # Read login and password from the request body.
     data = request.get_json(force=True)
     login = data['login']
     password = data['password']
     # Check whether this identity exists in the database.
     # NOTE(review): MD5 is unsuitable for password hashing -- consider
     # bcrypt/scrypt/argon2.
     password_hash = hashlib.md5(
         password.strip().encode('utf-8')).hexdigest()
     if users.find({
             "login": login,
             "password_hash": password_hash
     }).count() == 1:
         token = random_string()
         user = users.find_one({
             "login": login,
             "password_hash": password_hash
         })
         users.find_one_and_update({'id': user['id']},
                                   {'$set': {
                                       'session': token
                                   }})
         # After the update the stored session equals `token`, so use it
         # directly instead of re-fetching the user document.
         # Bug fix: the original set the cookie on a response object but then
         # returned a bare JSON string, so the cookie was never sent.
         response = make_response(json_util.dumps({'session': token}))
         response.set_cookie('session', token)
         return response
     else:
         return json_util.dumps({
             'code': 401,
             'status_msg': 'Неверный логин или пароль.'
         })
Example #29
0
    def get(self, request, cid):
        """Export contest submissions' source code as a zip download.

        Query param `t`: containing 'all' (privileged users only) exports
        everyone's submissions, otherwise only the requester's; containing
        'accepted' filters to accepted submissions.
        """
        # NOTE: `type` shadows the builtin; kept as-is for compatibility.
        type = request.GET.get('t')
        if type and 'all' in type and self.privileged:
            submissions = self.contest.submission_set
        else:
            submissions = self.contest.submission_set.filter(
                author=request.user)
        if type and 'accepted' in type:
            submissions = submissions.filter(status=SubmissionStatus.ACCEPTED)
        submissions = submissions.select_related("author")
        self.contest.add_contest_problem_to_submissions(submissions)
        # Map user_id -> participant comment (used as a display-name override).
        # NOTE(review): direct indexing below raises KeyError if a submission
        # author is not a contest participant -- TODO confirm that cannot happen.
        participants = dict(
            self.contest.contestparticipant_set.values_list(
                'user_id', 'comment'))

        file_path = path.join(settings.GENERATE_DIR, random_string())
        lang_ext_dict = dict(LANG_EXT)
        # NOTE: `zip` shadows the builtin inside this block.
        with zipfile.ZipFile(file_path, "w", zipfile.ZIP_DEFLATED) as zip:
            zip.writestr('/contest.nfo', '')
            for submission in submissions:
                user = submission.author.username
                if participants[submission.author_id]:
                    user = participants[submission.author_id]
                user = self.__class__.slugify_filename(user)
                if getattr(submission,
                           'contest_problem') and submission.contest_problem:
                    # /<user>_<uid>/<problem>_#<pk>_<status>.<ext>
                    zip.writestr(
                        "/%s_%s/%s_#%d_%s.%s" %
                        (user, submission.author_id,
                         submission.contest_problem.identifier, submission.pk,
                         submission.get_status_display().replace(' ', '_'),
                         lang_ext_dict.get(submission.lang, 'txt')),
                        submission.code)
        return respond_generate_file(
            request, file_path, "ContestCode - %s.zip" % self.contest.title)
Example #30
0
    def propose_session(self, recv_dtg):
        """Answer a session proposal from a peer.

        Re-sends the stored session if one already exists for the peer's IP;
        otherwise creates a new session with a fresh AES key, stores it, sends
        it, and caches an AES cipher for the peer.
        """
        dtg = Datagram(TransportAim.SESSION_PROPOSAL, self.gainer.ip,
                       self.gainer.port, recv_dtg.source_ip,
                       recv_dtg.source_port)
        # Membership test directly on the dict ( `.keys()` was redundant ).
        if recv_dtg.source_ip in self.gainer.sessions:
            session = self.gainer.sessions[recv_dtg.source_ip]
            dtg.set_payload(session)
            self.send_datagram(dtg)
        else:
            session = {
                'session_id': len(self.gainer.sessions),
                'server_ip': self.gainer.ip,
                'server_port': self.gainer.port,
                'client_ip': recv_dtg.source_ip,
                'client_port': recv_dtg.source_port,
                'AES_key': utils.random_string()
            }
            self.gainer.sessions[session['client_ip']] = session

            dtg.set_payload(session)
            self.send_datagram(dtg)

            # (Removed a dead `AES_ciphers[...] = None` that was immediately
            # overwritten by the cipher below.)
            # NOTE(review): ECB mode leaks plaintext patterns -- consider an
            # authenticated mode such as GCM.
            cipher = AES.new(session['AES_key'].encode(), AES.MODE_ECB)
            self.gainer.AES_ciphers[recv_dtg.source_ip] = cipher
async def tracks_search(query: types.InlineQuery):
    """Answer an inline query with Deezer search results, 10 per page.

    Cached tracks are returned as cached audio; otherwise a 30-second preview
    is offered with a "finish download" keyboard. Pagination is driven by the
    inline query's string offset.
    """
    q = query.query
    if await utils.answer_empty_inline_query(query, q):
        return
    offset = int(query.offset) if query.offset.isdecimal() else 0

    search_results = await deezer_api.search(q=q)
    inline_results = []

    # One page = 10 results starting at `offset`.
    for result in search_results[offset:offset + 10]:
        file_id = await db_utils.get_track(result.id)
        if file_id:
            inline_results.append(
                types.InlineQueryResultCachedAudio(id='done:' +
                                                   utils.random_string(),
                                                   audio_file_id=file_id))
        elif result.preview:
            inline_results.append(
                types.InlineQueryResultAudio(
                    id=f'finish_download:{result.id}:{utils.random_string(4)}',
                    audio_url=result.preview,
                    title=result.title,
                    performer=result.artist.name,
                    audio_duration=30,
                    reply_markup=inline_keyboards.finish_download_keyboard))

    # Bug fix: the original compared `offset + 11 < len(...)`, which reported
    # 'done' when exactly one more result remained past this page.
    if offset + 10 < len(search_results):
        next_offset = str(offset + 10)
    else:
        next_offset = 'done'
    await bot.answer_inline_query(inline_query_id=query.id,
                                  results=inline_results,
                                  next_offset=next_offset,
                                  cache_time=30)
Example #32
0
def save_image(data, category, olid, author=None, ip=None, source_url=None):
    """Save the provided image data, creates thumbnails and adds an entry in the database.

    ValueError is raised if the provided data is not a valid image.
    """
    # Unique on-disk prefix for this image and its thumbnail variants.
    prefix = olid + '-' + random_string(5)

    img = write_image(data, prefix)
    if img is None:
        raise ValueError("Bad Image")

    width, height = img.size
    record = web.storage({
        'category': category,
        'olid': olid,
        'author': author,
        'source_url': source_url,
        'width': width,
        'height': height,
        'ip': ip,
        'filename': prefix + '.jpg',
        'filename_s': prefix + '-S.jpg',
        'filename_m': prefix + '-M.jpg',
        'filename_l': prefix + '-L.jpg',
    })
    # Persist and remember the generated row id.
    record.id = db.new(**record)
    return record
Example #33
0
def moveExistConfig():
    """Move an existing ~/.kube/config aside, restoring it when the process exits."""
    home = utils.getHome()
    config_path = "{0}/.kube/config".format(home)
    if not os.path.exists(config_path):
        return
    backup_path = "{0}/.kube/config_temp_{1}".format(home,
                                                     utils.random_string(16))
    os.rename(config_path, backup_path)
    # Undo the move automatically at interpreter shutdown.
    atexit.register(os.rename, backup_path, config_path)
Example #34
0
def fetch(team, issue_no, config, token=None):
    """Fetch an exploit GitHub issue from a team's repo and decrypt it locally.

    Downloads the issue body to a temp file, decrypts it into an
    "exploit-<submitter>-<time>" directory, and removes the temp file.
    """
    repo_owner = config['repo_owner']
    repo_name = config['teams'][team]['repo_name']
    github = Github(config["player"], token)

    _, submitter, create_time, content = \
        get_github_issue(repo_owner, repo_name, issue_no, github)

    # Write the fetched issue content to temp file
    tmpfile = "/tmp/gitctf_%s.issue" % random_string(6)
    with open(tmpfile, "w") as f:
        f.write(content)

    # Decrypt the exploit
    out_dir = "exploit-%s-%s" % (submitter, create_time)
    prompt_rmdir_warning(out_dir)
    rmdir(out_dir)
    mkdir(out_dir)
    team = config["player_team"]
    out_dir = decrypt_exploit(tmpfile, config, team, out_dir, submitter)
    if out_dir is not None:
        # Parenthesized form prints identically under Python 2 and works on
        # Python 3 (the original used the Python-2-only print statement).
        print("Exploit fetched into %s" % out_dir)

    # Clean up
    rmfile(tmpfile)
Example #35
0
 def __init__(self, url,devel=False,debug=False,info=None,rtt=None):
   """Initialise connection state and derive the per-connection transport URL."""
   self._base_url = url
   self._server = utils.random_number_string(1000)
   self._connid = utils.random_string(8)
   # Endpoint layout: <base>/<server>/<connid>
   self._trans_url = "/".join([self._base_url, self._server, self._connid])
   self.protocol = "websocket"
   self.readyState = SockPy.CONNECTING
   self._transport = None
   REventTarget.__init__(self)
Example #36
0
def build_xml():
    """Yield the lines of a randomly populated XML document, one tag per item."""
    yield '<root>'
    yield '<var name="id" value="%s"/>' % uuid4()
    yield '<var name="level" value="%d"/>' % randint(1, 100)
    yield '<objects>'
    # Between 1 and 10 randomly named child objects.
    object_count = randint(1, 10)
    for _ in range(object_count):
        yield '<object name="%s"/>' % random_string()
    yield '</objects>'
    yield '</root>'
Example #37
0
def hash_pwd(email, pwd, salt=None):
    """Hash the raw password for datastore storage.

    Returns "<hexdigest>,<salt>". The hex digest never contains ',', so a
    comma is a safe delimiter. A fresh random salt is generated when none
    is supplied.
    """
    if salt is None:
        salt = utils.random_string()

    digest = hmac.new(keys.HASH_SECRET, email + pwd + salt, sha256).hexdigest()
    return ','.join((digest, salt))
Example #38
0
 def write(self, path):
     """
     Writes the page to the SVG file with the provided path.

     Layer order (bottom to top): backgrounds, content, labels, contours.
     Containers flagged `needs_clipping` get their content/labels wrapped in
     a group clipped to the container's rectangle via a <clipPath> in <defs>.
     """
     # Create a new SVG document
     doc = svg(
         x=0, y=0, 
         width=mm_to_px(self.width), height=mm_to_px(self.height)
     )
     # Draw all the content: first the background, then the content,
     # and finally the labels
     background_group = g()
     background_group.setAttribute('id', 'background')
     content_group = g()
     content_group.setAttribute('id', 'content')
     label_group = g()
     label_group.setAttribute('id', 'labels')
     contour_group = g()
     contour_group.setAttribute('id', 'contours')
     my_defs = defs()
     for c in self.containers:
         if c.has_background: c.draw_background(background_group)
         if c.needs_clipping and (c.has_content or c.has_labels):
             # Random id ties this container's groups to its clip path.
             path_id = random_string(16)
             clprect = rect(
                 x=mm_to_px(c.x), y=mm_to_px(c.y),
                 width=mm_to_px(c.width), height=mm_to_px(c.height)
             )
             clppath = clipPath(id=path_id)
             clppath.addElement(clprect)
             my_defs.addElement(clppath)
             # Draw content with clipping path
             if c.has_content:
                 container_grp = g()
                 container_grp.set_clip_path('url(#%s)' % path_id)
                 c.draw_content(container_grp)
                 content_group.addElement(container_grp)
             # The labels on top of the content
             if c.has_labels:
                 container_grp = g()
                 container_grp.set_clip_path('url(#%s)' % path_id)
                 c.draw_labels(container_grp)
                 label_group.addElement(container_grp)
         else:
             # No clipping required: draw straight into the shared layers.
             if c.has_content: c.draw_content(content_group)
             if c.has_labels: c.draw_labels(label_group)
         if c.has_contour: c.draw_contour(contour_group)
     # Add each of the base groups
     doc.addElement(my_defs)
     doc.addElement(background_group)
     doc.addElement(content_group)
     doc.addElement(label_group)
     doc.addElement(contour_group)
     # Write the SVG document to the file
     doc.save(path)
Example #39
0
    def __init__(self, name, attack, defense, cost, special=None):
        """Create a card with a random 20-character id.

        `special` defaults to an empty dict when not provided.
        """
        self.id = random_string(20)
        self.attack = attack
        self.defense = defense
        self.cost = cost
        self.name = name

        # Bug fix: the original branches were inverted -- it stored None when
        # no special ability was given and discarded any special that WAS
        # given (always replacing it with {}).
        self.special = special if special is not None else {}
Example #40
0
 def __init__(self, session):
     """Snapshot dialog state from the session and stamp identity/time fields."""
     # Call identity taken from the owning session.
     self.dialog_number = session.dialog_number
     self.caller_id = session.session_id
     self.initial_prompt = "Hello, how may I help you?"
     # Per-turn collections, filled in as the dialog progresses.
     self.asr_results = []
     self.recordings = []
     self.responses = []
     # a unique id for this dialog
     self.id = utils.timestamp() + "_" + utils.random_string()
     # Human-readable date/time stamps for when the dialog was created.
     self.date = utils.dateString()
     self.time = utils.timeString()
Example #41
0
 def _oauth_parameter(self, has_token=True):
     """Build the base OAuth parameter dict, optionally including the token.

     Key insertion order is preserved from the original implementation.
     """
     parameters = {
         'oauth_consumer_key': self.consumer_key,
         'oauth_timestamp': timestamp(),
         'oauth_nonce': random_string(),
         'oauth_signature_method': self.SIG_METHOD,
         'oauth_version': self.VERSION,
     }
     if has_token:
         parameters['oauth_token'] = self.oauth_token
     return parameters
Example #42
0
	def __init__(self,url,dirname,ipsfile):
		"""Set up node state and the (not yet started) server thread."""
		self.url = url
		self.dirname = dirname
		self.ipsfile = ipsfile
		self.secret = random_string(SECRET_LENGTH)
		# indicate whether node server is running (Event flag is false by default)
		self.event_running = Event()

		# server thread instance
		self.server_thread = NodeServerThread('Thread-SERVER',self.url,self.dirname,self.secret,self.ipsfile,self.event_running)
		# node server proxy for client use
		self.server = None
Example #43
0
 def __init__(self, from_datetime):
     """Build a random study dated between `from_datetime` and now.

     Random calls happen in the same order as the original so any seeded
     RNG reproduces identical studies.
     """
     now = datetime.datetime.now()
     self.study_datetime = utils.random_date_between(from_datetime, now)
     accession_length = random.randint(
         config.min_accn_length, config.max_accn_length)
     self.accession_number = utils.random_string(accession_length)
     modality = config.sample_data.random_modality(
         config.enabled_modalities)
     self.study_description = config.sample_data.random_study_description(
         modality)
     self.study_instance_uid = utils.generate_uid_with_delay()
     # Each study gets a random number of series within configured bounds.
     num_series = random.randint(config.min_series, config.max_series)
     self.series = self.generate_series(
         num_series, self.study_datetime, modality)
Example #44
0
def init_default_conf():
	""" Sets up a default configuration for DFS: uses $HOME/.dfs as
	the config dir, reads the default configuration from
	$HOME/.dfs/dfsrc, and sets up the logging system to use the file
	dfs.log in the configuration directory """
	
	global default_config_dir, default_config, default_log_file, default_config_file, dht
	
	# Creates a directory to the default config, if it does not exists.
	default_config_dir=os.path.expanduser('~%s.dfs'%os.path.sep)
	# Create the default config path if it does not exists
	if not os.path.exists(default_config_dir): os.mkdir(default_config_dir)
	default_config_file=default_config_dir+os.path.sep+'dfsrc'
	default_log_file=default_config_dir+os.path.sep+'dfs.log'
	# Load the default config file
	if not os.path.exists(default_config_file): open(default_config_file,'w').close()
	default_config=utils.Config()
	default_config.load(open(default_config_file,'r'))
	
	# Configures the logging system
	utils.configure_logging(level=logging.INFO,
		format='%(asctime)s %(name)s %(levelname)s %(message)s',
		datefmt='%H:%M:%S',
		filename=default_log_file,
		filemode='w')
	
	logging.info('Default configuration: %s'%default_config_file)
	
	# sets default configuration, if not set
	changed=False
	if not default_config.get('DHT:datadir'):
		default_config.set('DHT:datadir',default_config_dir+os.path.sep+'dhtdata')
		changed=True
	# Bug fix: the lookup key was 'Main:UID' while the stored key is
	# 'Main:uid', and the flag typo `changes=True` meant the generated UID
	# was never saved -- a fresh UID was minted on every run.
	if not default_config.get('Main:uid'):
		default_config.set('Main:uid',utils.random_string(16))
		changed=True
	if not default_config.get('Main:nick'):
		default_config.set('Main:nick',utils.random_nick())
		changed=True
	if not default_config.get('Keys:kf'):
		logging.warning('There are not file key')
	if not default_config.get('Keys:kd'):
		logging.warning('There are not description key')
	if changed:
		default_config.save(open(default_config_file,'w'))
		
	# Default DHT: a local DHT
	dht=DHT.LocalDHT(default_config)
def write_image_to_file(row):
    """Render a row's adversarial MNIST image to a PNG in the module-level
    `outdir`, named "<true_label>-<adv_label>-<random>.png".

    Assumes `row` is a mapping holding a flat 784-value image under
    'Adverserial Image' plus 'True Label' / 'Predicted Label Adverserial'
    keys -- the misspelling matches the stored data (see FIXME below);
    TODO confirm schema against the producer.
    """
    # FIXME: CORRECT SPELLING
    data = np.reshape(row['Adverserial Image'], [28,28])
    # data = np.reshape(row['Adversarial Image'], [28,28])
    true_label = row['True Label']
    adv_label = row['Predicted Label Adverserial']
    # adv_label = row['Predicted Label Adversarial']
    img = plt.imshow(data, cmap='Greys')
    plt.axis('off')
    # Hide axes so only the raster image is written out.
    img.axes.get_xaxis().set_visible(False)
    img.axes.get_yaxis().set_visible(False)
    filename = '/{}-{}-{}.png'.format(
        str(int(true_label)),
        str(int(adv_label)),
        utils.random_string(8))
    plt.savefig(outdir + filename, bbox_inches='tight', pad_inches = 0)
    print("Saved " + outdir + filename)
Example #46
0
    def save_video(video_url):
        """Saves a video file to the file system.

        Args:
            video_url (str): URL of the MP4 to save to the file system.
        Returns:
            Filename (not path) of saved video as a string.
        """

        logger.debug("Saving video")
        response = requests.get(video_url, stream=True)
        video_name = "{0}.mp4".format(random_string())
        # Stream the body to disk in 1 KiB chunks to bound memory use.
        with open(video_name, 'wb') as out:
            for chunk in response.iter_content(chunk_size=1024):
                if not chunk:
                    continue
                out.write(chunk)
                out.flush()
        return video_name
Example #47
0
File: api.py Project: adrianp/cartz
def game_new(id):
    """Join the game with the given id, creating it first when unknown.

    Returns (payload, status): 200 with player details when a seat is free,
    403 when the game already has two players.
    (`id` shadows the builtin but is part of the public signature.)
    """
    # if games does not exist, create id
    if id not in games:
        games[id] = Game(id)
    game = games[id]

    # Guard clause: a game holds at most two players.
    if game.getPlayerCount() >= 2:
        return ({
            "joined": False,
            "id": id
        }, 403)

    player = Player(random_string(uid_length))
    game.addPlayer(player)
    return ({
        "joined": True,
        "id": game.id,
        "player": player.id,
        "started": game.started
    }, 200)
Example #48
0
    def post(self):
        """Register a new user: validate the repeated password, create the
        account, set the login cookie and queue a verification SMS.
        """

        uname = self.get_argument("uname", "")
        cell = self.get_argument("cell", "")
        upass = self.get_argument("upass", "")
        upass2 = self.get_argument("upass2", "")

        # if upass2 is not upass:
        if upass2 != upass:
            self.write({'code': 'error', 'reason': 'pass2'})
            return

        # Per-user random salt; password stored as SHA-1(password + salt).
        # NOTE(review): SHA-1 is weak for password storage -- consider bcrypt.
        salt = random_string(16)
        authcode = random_number(6)
        password = hashlib.sha1((upass + salt).encode('ascii')).hexdigest()

        info = {}

        # status=0 / type=2 presumably mean "unverified" / "student" --
        # TODO confirm against the User model.
        user = User(
            username=uname,
            password=password,
            status=0,
            type=2,
            salt=salt,
            reg_date=datetime_str(),
            courses='',
            info=info)

        self.db.add(user)
        self.db.commit()

        if user.id:
            # Log the new user in straight away.
            self.set_secure_cookie("userinfo", json.dumps({
                'type': user.type,
                'name': user.username,
                'id': user.id})
            )
            # TODO: send the SMS
            jobq.enqueue("job_send_sms_auth.tpl_send_sms", "#code#=%s" % authcode, cell)
            self.write({'code': 'ok', 'id': user.id})
        else:
            self.write({'code': 'error'})
Example #49
0
	def flush(self, alldata=False):
		""" Flushes the contents of the file.
		Actually, only multiples of BLOCK_SIZE are flushed. If alldata is
		set, all the data in the buffer is flushed (padding the last data to
		a full BLOCK_SIZE block) Warning: do NOT use alldata=True except
		in the last block of the file (close() internally calls to
		flush(True)) """
		if self.closed: raise IOError('Closed')
		if not self.mode == 'w': raise IOError('In read mode')
		logger.info('Flushing %s'%self.uri.get_readable())
		
		bl = len(self.buffer)
		# Number of complete blocks buffered; `//` is explicit floor
		# division, identical on Python 2 ints and correct on Python 3.
		fl = bl // self.BLOCK_SIZE
		if alldata and not bl == fl * self.BLOCK_SIZE: fl = fl + 1
		
		for i in range(0,fl):
			# Bug fix: the slice end was the hard-coded literal 1024 instead
			# of BLOCK_SIZE, which corrupted output whenever
			# BLOCK_SIZE != 1024.
			p = ''.join(self.buffer[i * self.BLOCK_SIZE:(i + 1) * self.BLOCK_SIZE])
			if len(p) < self.BLOCK_SIZE:
				# pad random data at the end of the block
				p += utils.random_string(self.BLOCK_SIZE-len(p))
			# encrypt data if there is a crypter
			if self.crypter: p = self.crypter.encrypt(p)
			# create a random nick and calculate the hash of the part
			u = random_uri(self.config)
			self.hasher.update(p)

			logger.info('Saving part ' + u.get_static())
			# Save the part in the DHT
			dfs.dht.put(u.get_hd(), p, u.nick)

			# Save the reference to the part
			self.parts.append(u.get_static())
		
		# Keep only the unflushed remainder in the buffer.
		if fl * self.BLOCK_SIZE >= bl:
			self.buffer = []
		else:
			self.buffer = self.buffer[fl * self.BLOCK_SIZE:]
 def create_session(self):
     """Create a fresh HttpSession bound to the dispatcher and return its id."""
     new_id = random_string(20)
     # Constructing the session registers it; only the id is handed back.
     HttpSession(self.dispatcher, new_id)
     return new_id
Example #51
0
            buf = StringIO.StringIO()
            img.save(buf, "PNG")
            image_b64 = buf.read().encode('base64')
            data['icon'] = image_b64
        except Exception, ex:
            traceback.print_exc()
            errors['file_error'] = "Unknown file format: %s" % ex

    if errors:
        return render_template('new_client.html', **errors)

    user = auth.current_user()
    data.update(dict(name=request.form.get('name'),
                     user_id=user,
                     redirect_uris=" ".join([x.encode('ascii') for x in request.form.getlist('request_uris')]),
                     client_id="*****@*****.**" % random_string(32),
                     client_secret=random_string(32)))

    client = auth.Client.from_dict(data).save()
    rc.sadd("user|%s|clients" % user, client.client_id)

    return redirect("/oauth/clients")


@app.route('/oauth/clients', methods=['GET'])
@auth.requires_auth
def list_client():
    """Render the clients page listing the logged-in user's OAuth clients."""
    client_ids = rc.smembers("user|%s|clients" % session['user'])
    clients = [auth.get_client(client_id)
               for client_id in client_ids
               if client_id is not None]
    return render_template("clients.html", clients=clients)
def random_temp_shared_key():
    """Return a fresh 8-character temporary shared key."""
    return random_string(8)
Example #53
0
def store_image(image):
	"""Save an uploaded image under a random name in the upload folder.

	Returns the public '/static/image/...' path, or None when the upload is
	missing or has a disallowed extension.
	"""
	# Bug fix: the original used `and`, which evaluated image.filename
	# exactly when image was None (AttributeError) and never rejected a
	# disallowed extension. `or` implements the intended guard.
	if image is None or not allowed_file(image.filename):
		return None
	filename = utils.random_string()+'.'+image.filename.rsplit('.', 1)[1]
	image.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
	return '/static/image/' + filename
Example #54
0
def make_path_prefix(olid, date=None):
    """Makes a file prefix for storing an image.

    Layout is YYYY/MM/DD/<olid>-<5 random chars>; defaults to today.
    """
    if date is None:
        date = datetime.date.today()
    return "%04d/%02d/%02d/%s-%s" % (date.year, date.month, date.day, olid, random_string(5))
Example #55
0
def provision(instance_id):
    """Provisions an RDS instance.

    Handles an async service-broker provisioning request: requires a JSON
    body and `accepts_incomplete=true`; creates the instance fresh or from a
    snapshot, and stores the encrypted credentials record in DynamoDB.
    Responds 202 on success.
    """
    # TODO: Break this up into more maintainable chunks.
    if bottle.request.content_type != 'application/json':
        bottle.abort(
            415,
            'Unsupported Content-Type: expecting application/json'
        )
    incompletes = bottle.request.query.getone('accepts_incomplete')
    bottle.response.content_type = 'application/json'
    if incompletes is None:
        return _abort_async_required()
    if incompletes.lower() == 'true':
        data = json.loads(bottle.request.body.read())
        for plan in CONFIG['plan_settings']:
            if plan['id'] == data['plan_id']:
                plan_params = dict(plan)
                # Remove the id value from the params so we can just
                # pass the whole dict along to the RDS class.
                del plan_params['id']
                break
        else:
            bottle.response.status = 400
            return json.dumps({'description': 'Plan ID does not exist'})
        rds = RDS(**CONFIG['aws'])
        # Update the rds class instance with the parameters for the
        # plan as defined in the configuration.
        rds.__dict__.update(plan_params)
        # Parse and use extra parameters that have been passed in by
        # the user.
        #
        # TODO: Move allowed_params to config so operator can determine
        #       what they want to allow.
        allowed_params = ['DBName', 'AllocatedStorage']
        if CONFIG['deploy_from_snapshots'] is True:
            allowed_params.append('DBSnapshotIdentifier')
        if 'parameters' in data.keys():
            user_params = dict([
                (k, v) for (k, v) in data['parameters'].items()
                if k in allowed_params
            ])
            rds.__dict__.update(user_params)
        else:
            user_params = {}
        params_to_update = {}
        rds.DBInstanceIdentifier = '-'.join([rds.Engine.lower(), instance_id])
        rds.MasterUserPassword = utils.random_string()
        if rds.DBSnapshotIdentifier is None:
            last_operation = 'create'
            source_snapshot = 'NONE'
            step = 'NONE'
            # Usernames must start with a letter; string.ascii_letters works
            # on both Python 2 and 3 (string.letters was Python-2-only).
            first_char = random.choice(string.ascii_letters)
            rds.MasterUsername = ''.join([
                first_char,
                utils.random_string(15)
            ])
            rds.create_instance()
        else:
            last_operation = 'create_from_snapshot'
            source_snapshot = rds.DBSnapshotIdentifier
            step = 'deploy'
            try:
                snapshot_metadata = rds.snapshot_metadata()
            except botocore.exceptions.ClientError as e:
                if e.response['Error']['Code'] == 'DBSnapshotNotFound':
                    bottle.response.status = 400
                    return json.dumps(
                        {'description': 'Invalid snapshot identifier'}
                    )
                else:
                    raise
            if snapshot_metadata['Engine'] != rds.Engine.lower():
                bottle.response.status = 400
                return json.dumps(
                    {'description': 'Database engine in snapshot differs from '
                                    'database engine in plan settings.'}
                )
            rds.MasterUsername = snapshot_metadata['MasterUsername']
            rds.Port = snapshot_metadata['Port']
            # If the user is requesting a bigger disk than the snapshot
            # was generated from store this parameter so we can change
            # it during the modify operation after initial provisioning.
            if rds.AllocatedStorage > snapshot_metadata['AllocatedStorage']:
                params_to_update['AllocatedStorage'] = rds.AllocatedStorage
            if rds.StorageType != snapshot_metadata['StorageType']:
                # Bug fix: this line was a bare subscript expression with no
                # assignment, so the storage type change was silently dropped.
                params_to_update['StorageType'] = rds.StorageType
            # When deploying from snapshot the security groups is always
            # set to the default security group.  The only way to change
            # it is to modify the instance after provisioning is done.
            # If the security group IDs are provided then they take
            # precedence over the named security groups.  If named
            # groups are provided they will be validated now before the
            # instance is created and stored to be applied after the
            # instance is done with initial bootstrapping.
            if rds.VpcSecurityGroupIds:
                params_to_update['VpcSecurityGroupIds'] = rds.VpcSecurityGroupIds
            else:
                group_ids = rds.validate_security_groups()
                if group_ids[0]:
                    params_to_update['VpcSecurityGroupIds'] = group_ids[1]
                else:
                    bottle.response.status = 400
                    return json.dumps(
                        {'description': 'Invalid AWS security group id'}
                    )
            rds.create_from_snapshot()
        iv = utils.Crypt.generate_iv()
        credentials = {
            'username': rds.MasterUsername,
            'password': rds.MasterUserPassword,
            'hostname': '',
            'port': rds.Port,
            'db_name': rds.DBName,
            'uri': '',
        }
        # Credentials are stored encrypted; only the IV is kept in the clear.
        with utils.Crypt(iv=iv, key=CONFIG['encryption_key']) as c:
            creds = c.encrypt(json.dumps(credentials))
        dynamodb = utils.boto3_session(**CONFIG['aws']).resource('dynamodb')
        table = dynamodb.Table(name=CONFIG['dynamodb_table'])
        record = {
            'instance_id': instance_id,
            'iv': iv,
            'hostname': rds.DBInstanceIdentifier,
            'credentials': creds,
            'engine': rds.Engine,
            'binding_ids': [],
            'parameters': user_params,
            'last_operation': last_operation,
            'source_snapshot': source_snapshot,
            'step': step,
            'params_to_update': params_to_update
        }
        record.update(data)
        table.put_item(Item=record)
    else:
        return _abort_async_required()
    bottle.response.status = 202
    return json.dumps({"dashboard_url": ""})
Example #56
0
	def _manage_key(self):
		"""Lazily generate the key pair the first time it is needed."""
		if self.key:
			return
		self.key = random_string(32)
		self.secret_key = random_string(32)
Example #57
0
    def _match_species_abundances(self, element, ion, additional_columns=None,
        scaled=False, include_flagged_lines=False):
        """
        Return an array of matched line abundances for the given species.

        Builds a temporary table of distinct (rounded wavelength, spectrum)
        pairs for the species, then left-outer-joins each node's abundances
        against it so every node's measurements line up row-for-row.

        Returns (X, nodes, data): X is an (n_lines, n_nodes) float array of
        abundances (NaN where a node has no measurement), or (None, None,
        None) when nothing matches.
        """

        column = "scaled_abundance" if scaled else "abundance"
        flag_query = "" if include_flagged_lines else "AND l.flags = 0"

        # Do outer joins for all the nodes?
        nodes = self.retrieve_column("""SELECT DISTINCT ON (node) node 
            FROM line_abundances l
            WHERE trim(l.element) = %s AND l.ion = %s {0}""".format(flag_query),
            (element, ion), asarray=True)

        rounding = self.config.round_wavelengths
        # Randomized temp table name avoids collisions between concurrent calls.
        tmp_name = "tmp_" + utils.random_string(size=6)

        query = "DROP TABLE IF EXISTS {tbl}; DROP INDEX IF EXISTS {tbl}_index;"
        if additional_columns is None:
            query += """CREATE TABLE {tbl} AS (SELECT DISTINCT ON
                (round(wavelength::numeric, {rounding}), spectrum_filename_stub)
                wavelength,
                round(wavelength::numeric, {rounding}) AS rounded_wavelength,
                spectrum_filename_stub
                FROM line_abundances l
                WHERE TRIM(l.element) = %s AND l.ion = %s {flag_query})"""
        else:
            # Also carry per-star columns from node_results (one row per cname).
            additional_columns = ", ".join(set(additional_columns))
            query += """CREATE TABLE {tbl} AS (SELECT DISTINCT ON 
                (round(wavelength::numeric, {rounding}), spectrum_filename_stub)
                wavelength,
                round(wavelength::numeric, {rounding}) AS rounded_wavelength,
                spectrum_filename_stub, n.*
                FROM line_abundances l
                JOIN (SELECT DISTINCT ON (cname) cname, {additional_columns} 
                    FROM node_results ORDER BY cname) n 
                ON (trim(l.element) = %s AND l.ion = %s
                    AND l.cname = n.cname {flag_query}))"""

        self.execute(query.format(tbl=tmp_name, rounding=rounding,
            flag_query=flag_query, additional_columns=additional_columns or ""),
            (element, ion))
        # Create an index.
        self.execute("""CREATE INDEX {0}_index ON {0} 
            (rounded_wavelength, spectrum_filename_stub)""".format(tmp_name))
        self._database.commit()

        N_nodes = len(nodes)

        # Do a left outer join against the table.
        query = """SELECT DISTINCT ON (T.rounded_wavelength, T.spectrum_filename_stub)
            T2.{5} {6}
            FROM {0} T LEFT OUTER JOIN line_abundances T2 ON (
                T.spectrum_filename_stub = T2.spectrum_filename_stub AND
                T.rounded_wavelength = round(T2.wavelength::numeric, {1}) AND
                TRIM(T2.element) = '{2}' AND T2.ion = {3} AND 
                T2.node = '{4}') 
            ORDER BY T.spectrum_filename_stub, T.rounded_wavelength ASC"""

        # First node also retrieves the full table (", T.*") as metadata.
        data = self.retrieve_table(query.format(
            tmp_name, rounding, element, ion, nodes[0], column, ", T.*"),
            disable_rounding=True)

        if data is None or len(data) == 0:
            # Nothing matched: clean up the temp table before returning.
            self.execute("DROP TABLE {0}".format(tmp_name))
            self._database.commit()
            return (None, None, None)

        data["wavelength"] = data["wavelength"].astype(float)
        if self.config.round_wavelengths >= 0:
            data["wavelength"] = np.round(data["wavelength"],
                self.config.round_wavelengths)

        # One column per node; NaN marks "no measurement from this node".
        X = np.nan * np.ones((len(data), N_nodes))
        X[:, 0] = data[column].astype(float)
        del data[column]

        for i, node in enumerate(nodes[1:], start=1):
            logger.debug("doing node {0} {1}".format(i, node))
            X[:, i] = self.retrieve_column(
                query.format(tmp_name, rounding, element, ion, node, column, ""),
                asarray=True).astype(float)

        self.execute("DROP TABLE {0}".format(tmp_name))
        self._database.commit()

        return (X, nodes, data)
Example #58
0
def generate_branch():
    """Return a new Via branch parameter.

    "z9hG4bK" is the RFC 3261 magic cookie marking the branch as globally
    unique; seven random characters follow it.
    """
    return "z9hG4bK" + random_string(7)