def login():
    """Handle the login page.

    GET renders the empty form; POST validates the submitted credentials
    and, on success, logs the user in and redirects to ``next`` (or index).
    """
    if request.method == 'GET':
        return render_template('login.html')
    elif request.method == 'POST':
        # Partial renderers so every error response re-renders the form,
        # keeping the already-entered username where we have one.
        render = partial(render_template, 'login.html')
        username = request.form.get('username')
        if not username:
            return render(error='No username provided')
        render_u = partial(render, username=username)
        user = UserAccount.query.filter(UserAccount.username == username).first()
        if not user:
            return render_u(error='Username does not exist')
        password = request.form.get('password')
        if not password:
            return render_u(error='No password entered')
        try:
            # BUG FIX: `bytes(password)` without an encoding raises TypeError
            # on Python 3 and would never hit the UnicodeEncodeError handler.
            # Encoding explicitly preserves the original (Py2 str()) semantics:
            # non-ASCII input raises UnicodeEncodeError and is rejected below.
            password = password.encode('ascii')
        except UnicodeEncodeError:
            return render_u(error='Invalid characters used in password')
        password_salt = user.password_salt
        password_hash = generate_hash(password, password_salt)
        if password_hash != user.password_hash:
            return render_u(error='Password for this username is incorrect')
        login_user(user)
        # NOTE(review): redirecting to a caller-supplied `next` is an open
        # redirect; consider validating the target is a local URL.
        # (renamed from `next` to avoid shadowing the builtin)
        next_url = request.args.get('next')
        return redirect(next_url) if next_url else redirect(url_for('index'))
def get(self):
    """ Given a long URL, returns a short URL. """
    long_url = self.get_argument('longUrl', None)  # Is decoded by Tornado.
    domain = self.get_argument('domain', self.settings['default_domain'])
    # Normalize and validate long_url.
    try:
        long_url = utils.normalize_url(long_url)
        # `is True` instead of `== True` (matches the sibling handler).
        assert utils.validate_url(long_url) is True
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are not swallowed; any validation failure yields INVALID_URI.
        logging.info('Wrong URL', exc_info=1)
        return self.finish({'status_code': 500,
                            'status_txt': 'INVALID_URI',
                            'data': []})
    # Validate domain.
    if not utils.validate_url('http://' + domain):
        return self.finish({'status_code': 500,
                            'status_txt': 'INVALID_ARG_DOMAIN',
                            'data': []})
    # Prefer the deterministic per-URL hash; if that slot is already taken
    # (e.g. a customized alias), fall back to a fresh unique hash.
    url_hash = utils.get_hash_from_map(long_url)
    is_exist = self.load_url(url_hash)
    if is_exist:
        url_hash = utils.generate_hash(self.application.redis,
                                       self.settings['redis_namespace'],
                                       self.settings['hash_salt'])
    short_url = 'http://' + domain + '/' + url_hash
    self.store_url(url_hash, long_url)
    # Return success response.
    data = {'long_url': long_url, 'url': short_url,
            'hash': url_hash, 'global_hash': url_hash}
    self.finish({'status_code': 200, 'status_txt': 'OK', 'data': data})
def generate_tgs_service_password(service_id):
    """Create, record and persist a shared secret between the TGS and a service.

    The hashed secret is appended to the module-level list and written out
    under a per-service key.
    """
    global tgs_service_password
    print('Generating password between tgs and service ' + str(service_id))
    secret = str(random.randint(0, 9999))
    tgs_service_password.append(utils.generate_hash(secret))
    # Persist the entry for this service (list is assumed to be filled in
    # service_id order, hence the index below).
    utils.persist_txt('tgs_service',
                      'our_secret_' + str(service_id),
                      tgs_service_password[service_id - 1])
def get(self):
    """ Given a long URL (plus optional mobile deep-link URLs), returns a short URL. """
    long_url = self.get_argument('longUrl', None)  # decoded by Tornado.
    android_url = self.get_argument('androidUrl', None)  # decoded by Tornado.
    android_fallback_url = self.get_argument('androidFallbackUrl', None)  # decoded by Tornado.
    ios_url = self.get_argument('iosUrl', None)  # decoded by Tornado.
    ios_fallback_url = self.get_argument('iosFallbackUrl', None)  # decoded by Tornado.
    domain = self.get_argument('domain', self.settings['default_domain'])
    # Normalize and validate long_url (and the optional fallback URLs).
    try:
        long_url = utils.normalize_url(long_url)
        assert utils.validate_url(long_url) is True
        if android_url:
            # TODO: Validate and normalize!
            pass
            # android_url = utils.normalize_url(android_url)
            # assert utils.validate_url(android_url) is True
        if android_fallback_url:
            android_fallback_url = utils.normalize_url(android_fallback_url)
            assert utils.validate_url(android_fallback_url) is True
        if ios_url:
            # TODO: Validate and normalize!
            pass
            # ios_url = utils.normalize_url(ios_url)
            # assert utils.validate_url(ios_url) is True
        if ios_fallback_url:
            ios_fallback_url = utils.normalize_url(ios_fallback_url)
            assert utils.validate_url(ios_fallback_url) is True
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are not swallowed.
        logging.info('Wrong URL', exc_info=1)
        return self.finish({'status_code': 500,
                            'status_txt': 'INVALID_URI',
                            'data': []})
    # Validate domain.
    if not utils.validate_url('http://' + domain):
        return self.finish(
            {'status_code': 500, 'status_txt': 'INVALID_ARG_DOMAIN', 'data': []})
    # Generate a unique hash, assemble short url and store result in Redis.
    url_hash = utils.generate_hash(self.application.redis,
                                   self.settings['redis_namespace'],
                                   self.settings['hash_salt'])
    short_url = 'http://' + domain + '/' + url_hash
    self.store_url(url_hash, long_url, android_url, android_fallback_url,
                   ios_url, ios_fallback_url)
    # Return success response.
    data = {
        'long_url': long_url,
        'android_url': android_url,
        'android_fallback_url': android_fallback_url,
        'ios_url': ios_url,
        'ios_fallback_url': ios_fallback_url,
        'url': short_url,
        'hash': url_hash,
        'global_hash': url_hash
    }
    self.finish({'status_code': 200, 'status_txt': 'OK', 'data': data})
def register_user():
    """Register a new user from the JSON payload.

    Returns 201 on success, 409 when the insert fails (e.g. duplicate
    email), 400 when the input fails validation.
    """
    body = request.json
    first_name = body["first_name"]
    last_name = body["last_name"]
    user_email = body["email"]
    user_password = body["password"]
    created = datetime.datetime.utcnow()
    # user_confirm_password = request.json["confirm_password"]
    # Guard clause: reject invalid input up front.
    if not validate_user_input("authentication", email=user_email,
                               password=user_password):
        return Response(status=400)
    password_salt = generate_salt()
    password_hash = generate_hash(user_password, password_salt)
    stored = db_write(
        """INSERT INTO users (email, first_name, last_name, password_salt, password_hash, created) VALUES (%s, %s, %s, %s, %s, %s)""",
        (user_email, first_name, last_name, password_salt, password_hash, created),
    )
    if not stored:
        return Response(status=409)
    print("Registered" + user_email)
    return Response(status=201)
def post(self):
    """Register a new user and return fresh access/refresh tokens.

    Returns the validation error message when input is invalid, an error
    dict when the username exists, and a 500 tuple on persistence failure.
    """
    data = get_registration_info()
    is_user_valid, error_message = validate_register_user(data)
    if not is_user_valid:
        return error_message
    user = UserFactory.factory(data['usertype'])
    user.username = data['username']
    user.password = generate_hash(data['password'])
    user.uuid = generate_uuid()
    if UserDataModelFactory.factory(user.usertype).find_by_username(
            user.username):
        return {
            'error': True,
            'message': 'User {} already exists'.format(user.username)
        }
    new_user = UserDataModelFactory.factory(user.usertype)
    new_user.set_data_fields(user)
    try:
        new_user.save_to_db()
        access_token = create_access_token(identity=user.username)
        refresh_token = create_refresh_token(identity=user.username)
        return {
            'error': False,
            'message': 'User {} was created'.format(user.username),
            'access_token': access_token,
            'refresh_token': refresh_token,
            'user_id': user.uuid
        }
    except Exception:
        # Narrowed from a bare `except:` so interpreter-exit exceptions
        # (SystemExit/KeyboardInterrupt) still propagate.
        return {'error': True, 'message': 'Something went wrong'}, 500
def __init__(
        self, host, path, port=80, params=None, headers=None, timeout=61,
        username=None, password=None, min_tcp_ip_delay=0.25,
        max_tcp_ip_delay=16, min_http_delay=10, max_http_delay=240
    ):
    """Store config and build the connection headers.

    BUG FIX: ``params={}`` / ``headers={}`` were mutable default arguments.
    ``headers`` is mutated below (Authorization is inserted), so with the
    old defaults the header leaked between every instance constructed
    without an explicit ``headers`` dict.  Caller-supplied dicts are still
    mutated in place, preserving the original behaviour.
    """
    params = {} if params is None else params
    headers = {} if headers is None else headers
    self.host = host
    self.port = port
    self.path = path
    self.body = unicode_urlencode(params)
    if username and password:
        headers['Authorization'] = generate_auth_header(username, password)
    header_lines = [
        'POST %s HTTP/1.1' % self.path,
        'Host: %s' % self.host,
        'Content-Length: %s' % len(self.body),
        'Content-Type: application/x-www-form-urlencoded'
    ]
    # Trailing '' entries produce the blank line that terminates the
    # HTTP header section.
    header_lines.extend([
        '%s: %s' % (k, v) for k, v in headers.iteritems()
    ] + ['', '']
    )
    self.headers = '\r\n'.join(header_lines)
    self.timeout = timeout
    self.min_tcp_ip_delay = min_tcp_ip_delay
    self.max_tcp_ip_delay = max_tcp_ip_delay
    self.min_http_delay = min_http_delay
    self.max_http_delay = max_http_delay
    self.id = generate_hash()
def generate_x509_certificate(self, issuer_name: str, issuer_id: int,
                              subject_name: str,
                              issuer_public_parameters: list,
                              issuer_public_key: list,
                              not_valid_before: dateutil,
                              not_valid_after: dateutil,
                              hash_type=SHA):
    """Build, ElGamal-sign and persist a simplified X.509-style certificate.

    The stored (database) document has all numeric fields converted to
    strings; the returned copy keeps the original Python objects.

    :returns: ``(returned_cert, signature)`` — the pre-signature cert copy
        and the (int-converted) signature pair.
    """
    # Generating the certificate
    cert = collections.OrderedDict()
    cert['issuer_name'] = issuer_name
    cert['issuer_id'] = issuer_id
    cert['subject_name'] = subject_name
    cert['issuer_public_parameters'] = issuer_public_parameters
    cert['issuer_public_key'] = issuer_public_key
    cert['serial_number'] = CA.__x509_SN
    cert['not_valid_before'] = not_valid_before
    cert['not_valid_after'] = not_valid_after
    # Class-level serial number counter; bump for the next certificate.
    CA.__x509_SN += 1
    # NOTE(review): OrderedDict.copy() is shallow — returned_cert shares the
    # list values with cert, so the in-place str() conversions of
    # issuer_public_key below also affect returned_cert (only
    # issuer_public_parameters is re-copied). Confirm this is intended.
    returned_cert = cert.copy()
    # Signing the certificate (hash is computed over the *pre-signature*
    # string form of the cert).
    m = generate_hash(str(cert), hash_type)
    signature = ElGamalDS.sign(self.__x_ca, self.a_ca, self.q_ca, m)
    cert['signature'] = signature.copy()
    # Conversion from MPZ to integer
    for i in range(len(signature)):
        signature[i] = int(signature[i])
    # Conversion into strings to be stored in the database properly
    for i in range(len(cert['issuer_public_key'])):
        cert['issuer_public_key'][i] = str(cert['issuer_public_key'][i])
    for i in range(len(cert['signature'])):
        cert['signature'][i] = str(cert['signature'][i])
    cert['issuer_public_parameters'] = returned_cert[
        'issuer_public_parameters'].copy()
    for i in range(len(cert['issuer_public_parameters'])):
        cert['issuer_public_parameters'][i] = str(
            cert['issuer_public_parameters'][i])
    cert['not_valid_before'] = str(cert['not_valid_before'])
    cert['not_valid_after'] = str(cert['not_valid_after'])
    # Insert into the database or update if it exists
    cur = self.certificates.find({'issuer_id': issuer_id})
    if cur.count() > 0:
        self.certificates.update({'issuer_id': issuer_id}, cert)
    else:
        self.certificates.insert_one(cert)
    return returned_cert, signature
def xsrf_token(self):
    """
    A token we can check to prevent `XSRF`_ attacks.

    Cached on the instance after the first call; persisted client-side in
    the ``_xsrf`` cookie so the same token survives across requests.

    .. _`xsrf`: http://en.wikipedia.org/wiki/Cross-site_request_forgery
    """
    if hasattr(self, '_xsrf_token'):
        return self._xsrf_token
    token = self.cookies.get('_xsrf')
    if not token:
        # No cookie yet: mint a fresh token and set a session cookie.
        token = generate_hash()
        self.cookies.set('_xsrf', token, expires_days=None)
    self._xsrf_token = token
    return self._xsrf_token
def run():
    """Prompt for credentials, then offer the Kerberos-style action menu.

    NOTE(review): the original prompt strings were redacted in the source
    (``'******'`` breaking the syntax); reconstructed here as plain input
    prompts — confirm against the original project.
    """
    global username, password
    username = str(input('Username: '))
    password = str(input('Password: '))
    print('Actions:')
    print('1 - Authenticate by authentication service')
    print('2 - Request ticket by ticket granting service')
    print('3 - Execute action by resource service')
    desired_action = int(input('Desired action: '))
    execute_action(desired_action)
def create_cookie(self, session_obj):
    """ Create cookie for current session.

    Generates a session id (hash of the current time), emits it as a
    ``Set-Cookie`` header, and pickles ``session_obj`` into
    ``.sessions/<hash>``.

    :param session_obj: dictionary having the current user id.
    """
    if not os.path.exists('.sessions'):
        os.mkdir('.sessions')
    hash_value = generate_hash(time.time())
    # NOTE(review): `expires` is written as epoch-seconds (+5h), not the
    # HTTP cookie date format — confirm clients accept this.
    self.headers['Set-Cookie'] = 'session_id=%s;expires=%s' % (
        hash_value, str(time.time() + (60 * 60 * 5)))
    self.print_headers()
    # Use a context manager so the file handle is closed even if
    # pickling raises.
    with open(os.path.join('.sessions', hash_value), 'wb') as session_file:
        pickle.dump(session_obj, session_file, 1)
def activation(self):
    """ Create activation details new registered users

    Builds the activation link from the user's email and its hash.

    :return: dictionary having activation url and password.
    """
    hash_value = generate_hash(self.email)
    activation_url = "http://localhost/activate.py?email=%s&value=%s" % (
        self.email, hash_value)
    return {
        'activation_url': activation_url,
        'temp_password': self.password,
        'user_name': ' '.join([self.first_name, self.last_name]),
    }
def __init__(self, encrypted): print("Bob Part!") # Get Alice's certificate alice_cert, ca_alice_signature = ca.get_x509_certificate(id_a) # Verify the certificate from CA if verify_certificate(alice_cert, ca_alice_signature, ca.y_ca, ca.a_ca, ca.q_ca): print("Alice Certificate verified!") else: raise ValueError("Alice Certificate is incorrect!") # Get Alice's public key alice_public_key_elgamal, alice_public_key_rsa = alice_cert[ 'issuer_public_key'] alice_a, alice_q, alice_n = alice_cert['issuer_public_parameters'] # Assert that it's the true public key (Used for debugging) assert alice_public_key_elgamal == y_a assert alice_public_key_rsa == e_a print("Encrypted Message that is received: " + str(encrypted)) decrypted = rsa.decrypt(encrypted, d_b) print("Decrypted Message that is received: " + str(decrypted)) msg_r = int.to_bytes(decrypted, decrypted.bit_length(), byteorder='big').decode('UTF-8').replace( '\0', '') print("Decrypted Received Message after conversion from int: " + str(msg_r)) q_len_r = ord(msg_r[0]) sig0_r = int(msg_r[1:q_len_r + 1]) sig1_r = int(msg_r[q_len_r + 1:2 * q_len_r + 1]) M_r = msg_r[2 * q_len_r + 1:] print("Original Message at Bob: ", M_r) # Calculate the hash for the received message m_r = generate_hash(M_r, SHA) # Verify the signature of the message if ElGamalDS.verify(alice_public_key_elgamal, alice_a, m_r, alice_q, [sig0_r, sig1_r]): print("Message signature verified!") else: print("Message signature failed!")
def __init__(self):
    """Alice's side: sign a message with ElGamal and RSA-encrypt it for Bob.

    Produces ``self.encrypted`` for transmission. Wire format: one char
    holding the signature field width (len of str(q)), the two zero-padded
    signature halves at that width, then the plaintext message.
    """
    print("Alice Part!")
    M = "Hey, I'd like to play!"
    print("Original Message at Alice: ", M)
    m = generate_hash(M, SHA)
    # Sign the message by Alice
    m_alice_signature = ElGamalDS.sign(x_a, a, q, m)
    # Get Bob's certificate
    bob_cert, ca_bob_signature = ca.get_x509_certificate(id_b)
    # Verify the certificate from CA
    if verify_certificate(bob_cert, ca_bob_signature, ca.y_ca, ca.a_ca,
                          ca.q_ca):
        print("Bob Certificate verified!")
    else:
        raise ValueError("Bob Certificate is incorrect!")
    # Get Bob's public key
    bob_public_key_elgamal, bob_public_key_rsa = bob_cert[
        'issuer_public_key']
    # Assert that it's the true public key (Used for debugging)
    assert bob_public_key_elgamal == y_b
    assert bob_public_key_rsa == e_b
    # First byte specifies the length of each of the signature parts, then
    # each of the signature parts are added, then the message.
    # Concatenate to send
    q_len = len(str(q))
    q_len_str = chr(q_len)
    print("Length of q: " + str(q_len))
    # Zero-pad each signature half to exactly q_len characters so the
    # receiver can slice them back out by fixed width.
    sig0 = str(m_alice_signature[0])
    for _ in range(q_len - len(sig0)):
        sig0 = '0' + sig0
    sig1 = str(m_alice_signature[1])
    for _ in range(q_len - len(sig1)):
        sig1 = '0' + sig1
    msg = q_len_str + sig0 + sig1 + str(M)
    print("Message before conversion to int: " + msg)
    total_message = int.from_bytes(msg.encode('UTF-8'), byteorder='big')
    print("Message after conversion to int: " + str(total_message))
    self.encrypted = rsa.encrypt(bob_public_key_rsa, int(total_message))
    print("Encrypted Message to be sent: " + str(self.encrypted) + "\n")
def _start_task(self, task):
    """Run the airfoil simulation for `task`, reusing a cached result when
    one exists in storage."""
    hash_key = generate_hash(task.model_params, task.compute_params)
    if self._storage.has_result(task.model_params, task.compute_params):
        # Cache hit: mark finished and deserialize the stored result.
        task.finished = True
        raw = self._storage.get_result(task.model_params,
                                       task.compute_params)[0]
        task.result = json.loads(raw)
        return
    # Cache miss: encrypt the swift config (padded to the 16-byte block
    # size with spaces) and dispatch an async worker task.
    payload = json.dumps(self._config.swift_config)
    payload += " " * ((-len(payload)) % 16)
    config = self._config.crypt_obj.encrypt(payload)
    workertask = workertasks.simulate_airfoil.apply_async(
        (task.model_params, task.compute_params, config,
         self._config.container),
        task_id=hash_key)
    # Reserve the result slot so concurrent submissions see it as pending.
    self._storage.save_result(task.model_params, task.compute_params, None)
    task.workertask = workertask
    task.id = workertask.id
def get(self):
    """ Given a long URL, returns a short URL. """
    long_url = self.get_argument('longUrl', None)  # Is decoded by Tornado.
    domain = self.get_argument('domain', self.settings['default_domain'])
    # Normalize and validate long_url.
    try:
        long_url = utils.normalize_url(long_url)
        # `is True` instead of `== True` (matches the sibling handler).
        assert utils.validate_url(long_url) is True
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are not swallowed.
        logging.info('Wrong URL', exc_info=1)
        return self.finish({
            'status_code': 500,
            'status_txt': 'INVALID_URI',
            'data': []
        })
    # Validate domain.
    if not utils.validate_url('http://' + domain):
        return self.finish({
            'status_code': 500,
            'status_txt': 'INVALID_ARG_DOMAIN',
            'data': []
        })
    # Prefer the deterministic per-URL hash; if that slot is already taken
    # (e.g. a customized alias), fall back to a fresh unique hash.
    url_hash = utils.get_hash_from_map(long_url)
    is_exist = self.load_url(url_hash)
    if is_exist:
        url_hash = utils.generate_hash(self.application.redis,
                                       self.settings['redis_namespace'],
                                       self.settings['hash_salt'])
    short_url = 'http://' + domain + '/' + url_hash
    self.store_url(url_hash, long_url)
    # Return success response.
    data = {
        'long_url': long_url,
        'url': short_url,
        'hash': url_hash,
        'global_hash': url_hash
    }
    self.finish({'status_code': 200, 'status_txt': 'OK', 'data': data})
def __init__(self, address): self.server = None # 현재 이 서버가 구동되고 있는 port self.address = address # data table 생성 self.data_table = TableEntry() # node table 생성 self.node_table = NodeTable(generate_hash(self.address), self.address, self.data_table) # 기능 구현 대기 self.command_listener = threading.Thread(target=self.listen_command) # 작동 시작 self.serve()
def register_user():
    """Register a user from the JSON payload.

    201 on success, 409 when the insert fails, 400 when the passwords
    don't match or validation fails.
    """
    payload = request.json
    user_email = payload["email"]
    user_password = payload["password"]
    user_confirm_password = payload["confirm_password"]
    # Guard clause; validation is only attempted when the passwords match
    # (same short-circuit as the original combined condition).
    if user_password != user_confirm_password or not validate_user_input(
            "authentication", email=user_email, password=user_password):
        return Response(status=400)
    password_salt = generate_salt()
    password_hash = generate_hash(user_password, password_salt)
    stored = db_write(
        """INSERT INTO users (email, password_salt, password_hash) VALUES (%s, %s, %s)""",
        (user_email, password_salt, password_hash),
    )
    return Response(status=201) if stored else Response(status=409)
def update_info(self, datastore):
    """ Datastore needs to be provided so we can update hashes of parent dirs.

    Refreshes this node's type, hash, size, permissions, timestamps and
    depth from the filesystem.

    :raises IOError: if the path is neither a regular file nor a directory.
    """
    if os.path.isfile(self.path):
        self.type = "f"
    elif os.path.isdir(self.path):
        self.type = "d"
    else:
        raise IOError("Path '%s' is an unsupported type" % self.path)
    stats = os.stat(self.path)
    if self.type == "f":
        # We will generate dir hashes in a second pass once all the nodes
        # are in the database
        self.hash = generate_hash(self)
    self.bytes = stats.st_size
    self.permissions = stats.st_mode
    self.created = format_time_iso(datetime.fromtimestamp(stats.st_ctime))
    self.modified = format_time_iso(datetime.fromtimestamp(stats.st_mtime))
    self.last_seen = format_time_iso(datetime.now())
    # Depth = number of path components below the root separator.
    self.level = len(self.path.split(os.path.sep)[1:])
def _start_task(self, task):
    """Start the simulation for `task`, short-circuiting to the cached
    result when storage already has one for these parameters."""
    hash_key = generate_hash(task.model_params, task.compute_params)
    if self._storage.has_result(task.model_params, task.compute_params):
        # Cache hit: mark finished and deserialize the stored JSON result.
        task.finished = True
        task.result = json.loads(
            self._storage.get_result(task.model_params,
                                     task.compute_params)[0])
    else:
        # Pad the serialized swift config with spaces to a multiple of 16
        # bytes before encrypting (block-cipher alignment).
        string = json.dumps(self._config.swift_config)
        while len(string) % 16 != 0:
            string += " "
        config = self._config.crypt_obj.encrypt(string)
        workertask = workertasks.simulate_airfoil.apply_async(
            (task.model_params, task.compute_params, config,
             self._config.container),
            task_id=hash_key)
        # Reserve the result slot (None = pending) so duplicate submissions
        # are detected as already-started.
        self._storage.save_result(task.model_params, task.compute_params,
                                  None)
        task.workertask = workertask
        task.id = workertask.id
def access(req_id):
    """Gate access to a job's results behind its (optional) passphrase.

    Jobs without a passphrase grant access immediately; otherwise a POSTed
    passphrase is hashed and compared against the stored hash.
    """
    # Get details from redis
    j = retrieve_job(req_id)
    # If no passphrase, no need to bother, just redirect
    if 'passphrase' not in j:
        session['req_id'] = req_id
        return redirect(url_for('results', req_id=req_id))
    if request.method == 'POST':
        passphrase = request.form['passphrase']
        if not passphrase:
            flash(u'Error handling your passphrase', 'danger')
            return render_template('access.html', req_id=req_id)
        # compare passphrases, after hashing
        hpass = generate_hash(passphrase)
        if hpass == j['passphrase']:
            # Correct!
            session['req_id'] = req_id
            return redirect(url_for('results', req_id=req_id))
        flash(u'Passphrase does not match', 'danger')
        session.pop('req_id', None)
        return render_template('access.html', req_id=req_id)
    # GET: redirect to password form
    flash('This job is protected by a passphrase', 'info')
    return render_template('access.html', req_id=req_id)
def generate_client_service_password():
    """Create and hash a random 4-digit shared secret for the
    client<->service pair."""
    print('Generating password between client and service')
    secret = random.randint(0, 9999)
    return utils.generate_hash(str(secret))
def save_mapping(self, feed, contents):
    """Persist `contents` under a key hashed from the feed's link."""
    # TODO: change this to provide the data that you want not the feed
    key = utils.generate_hash(feed.get('link'))
    self._save_to_file(key, contents)
def run():
    """Flask view: accept a CONTIGUATOR job submission, stage the uploaded
    files in a per-request working directory, and queue the Celery task."""
    # Here handle submissions and run the analysis
    # Send emails on failures, success
    # Use redis to store user stats (hashed for privacy)
    if request.method == 'POST':
        # First things first, compute user hash
        req_id = generate_time_hash(request.remote_addr)
        # To avoid slow-downs in the running directory
        # create subdirs w/ the first 2 chars of the hash
        h2c = req_id[:2]
        try:
            os.mkdir(os.path.join(app.config['UPLOAD_FOLDER'], h2c))
        except:
            # Best-effort: the shard directory may already exist.
            pass
        # Prepare the working directory
        # Our hash scheme ensures that it should be unique
        wdir = os.path.join(app.config['UPLOAD_FOLDER'], h2c, req_id)
        wdir = os.path.abspath(wdir)
        os.mkdir(wdir)
        # Sanity check for form entries: each field must be convertible to
        # its expected type before we queue the job.
        float_entries = ['maxtemp', 'mingc', 'mintemp', 'maxgc', 'opttemp',
                         'multitreshold', 'evalue', 'optgc']
        int_entries = ['contigcoverage', 'minsize', 'threads', 'exclude',
                       'contiglength', 'optsize', 'flanksize', 'maxsize',
                       'minprod', 'maxprod', 'numN', 'hitlength', 'gcclamp']
        bool_entries = ['pcr', 'blastn', 'non']
        str_entries = ['email', 'jobname']
        for f, entries in ((float, float_entries), (str, str_entries),
                           (bool, bool_entries), (int, int_entries)):
            for e in entries:
                try:
                    if e in request.form:
                        f(request.form[e])
                except Exception as e:
                    flash(u'Something went wrong while processing your options %s'%e,
                          'danger')
                    return redirect(url_for('index'))
        # Save input files
        draft = request.files['contigs']
        if draft:
            filename = secure_filename(draft.filename)
            draft.save(os.path.join(wdir, filename))
            dname = filename
        else:
            flash(u'Forgot to provide your draft genome?', 'danger')
            return redirect(url_for('index'))
        # Save the genomes files
        genomes = set()
        try:
            for genome in request.files.getlist('reference'):
                filename = secure_filename(genome.filename)
                genome.save(os.path.join(wdir, filename))
                genomes.add(filename)
        except:
            flash(u'Forgot to provide your reference genome?', 'danger')
            return redirect(url_for('index'))
        # Save the genomes files (optional ptt annotation files)
        ptts = request.files['pttfile']
        if ptts:
            try:
                for ptt in request.files.getlist('pttfile'):
                    filename = secure_filename(ptt.filename)
                    ptt.save(os.path.join(wdir, filename))
            except:
                flash(u'Something went wrong with your ptt files', 'danger')
                return redirect(url_for('index'))
        # Check email, hash it (stored hashed for privacy)
        email = request.form['email']
        if email:
            hemail = generate_hash(email)
        else:
            flash(u'Something went wrong with your email', 'danger')
            return redirect(url_for('index'))
        # Submit the job
        # Then redirect to the waiting page
        try:
            # Handle the bool flags (checkbox present == True)
            if 'non' in request.form:
                non = True
            else:
                non = False
            if 'pcr' in request.form:
                pcr = True
            else:
                pcr = False
            if 'inner' in request.form:
                inner = True
            else:
                inner = False
            if 'blastn' in request.form:
                blastn = True
            else:
                blastn = False
            result = run_contiguator.delay(
                wdir, dname, genomes,
                evalue=request.form.get('evalue', 1e-20),
                contiglength=request.form.get('contiglength', 1000),
                contigcoverage=request.form.get('contigcoverage', 20),
                hitlength=request.form.get('hitlength', 1100),
                multitreshold=request.form.get('multitreshold', 1.5),
                non=non,
                numN=request.form.get('numN', 100),
                pcr=pcr, inner=inner, blastn=blastn,
                threads=request.form.get('threads', 1),
                optsize=request.form.get('optsize', 20),
                minsize=request.form.get('minsize', 18),
                maxsize=request.form.get('maxsize', 27),
                opttemp=request.form.get('opttemp', 60),
                mintemp=request.form.get('mintemp', 57),
                maxtemp=request.form.get('maxtemp', 63),
                flanksize=request.form.get('flanksize', 1000),
                minprod=request.form.get('minprod', 1000),
                maxprod=request.form.get('maxprod', 7000),
                optgc=request.form.get('optgc', 50),
                mingc=request.form.get('mingc', 20),
                maxgc=request.form.get('maxgc', 80),
                gcclamp=request.form.get('gcclamp', 1),
                exclude=request.form.get('exclude', 100),
                jobname=request.form.get('jobname', ''))
        except:
            flash(u'Could not submit your job', 'danger')
            return redirect(url_for('index'))
        try:
            # Send details to redis
            add_job(req_id, request.remote_addr, hemail, result.task_id)
        except:
            flash(u'Could not save your job details', 'danger')
            return redirect(url_for('index'))
        return redirect(url_for('results', req_id=req_id))
    # No POST, return to start
    flash(u'No job details given, would you like to start a new one?',
          'warning')
    return redirect(url_for('index'))
def process_payment(request, transaction_id):
    """Render the PayU payment page for ONLINE transactions; for other
    payment modes, deactivate the cart and show the order-placed page.

    Python 2 code (print statement). `transaction_id` is the UUID string of
    the Transaction's payment_id.
    """
    txid = uuid.UUID(transaction_id)
    tx_obj = Transaction.objects.get(payment_id=txid)
    order_obj = tx_obj.order.all()[0]
    if tx_obj.payment_mode == 'ONLINE':
        first_name = order_obj.shipping_firstname
        purl = settings.PAYU_INFO['payment_url']
        surl = settings.PAYU_INFO['surl']
        key = settings.PAYU_INFO['merchant_key']
        curl = settings.PAYU_INFO['curl']
        furl = settings.PAYU_INFO['furl']
        cleaned_data = {
            'key': key, 'txnid': txid, 'amount': tx_obj.payment_amount,
            'productinfo': order_obj.product_id, 'firstname': first_name,
            'email': order_obj.email, 'udf1': '', 'udf2': '', 'udf3': '',
            'udf4': '', 'udf5': '', 'udf6': '', 'udf7': '', 'udf8': '',
            'udf9': '', 'udf10': ''
        }
        # The generate_hash function generates the PayU hash value from
        # cleaned_data.
        hash_o = utils.generate_hash(cleaned_data)
        context = {
            'firstname': first_name, 'purl': purl, 'surl': surl,
            'phone': order_obj.phone, 'key': key, 'hash': hash_o,
            'curl': curl, 'furl': furl, 'txnid': str(txid),
            'productinfo': order_obj.product_id,
            'amount': tx_obj.payment_amount, 'email': order_obj.email,
        }
        print context
        # Snapshot the full request payload (transaction + orders + PayU
        # context), stringified, for auditing on the transaction row.
        transaction_raw = tx_obj.__dict__.copy()
        transaction_raw.update(
            {'order': o.__dict__ for o in tx_obj.order.all()})
        transaction_raw.update(context)
        transaction_raw = {
            k: str(v) if type(v) != str else v
            for k, v in transaction_raw.items()
        }
        tx_obj.payment_request = str(transaction_raw)
        tx_obj.save()
        return render(request, "process-payment.html", context)
    else:
        # Offline payment: deactivate the user's (or anonymous token's)
        # cart entries and confirm the order by email.
        total_item = 0
        if request.user.is_authenticated():
            cart_obj = Cart.objects.filter(user=request.user,
                                           listed_type="CART")
            products = [obj.product for obj in cart_obj]
            total_item = len(products)
            for i in cart_obj:
                i.active = False
                i.save()
        else:
            try:
                token = uuid.UUID(request.COOKIES.get("token"))
                cart_obj = Cart.objects.filter(token=token,
                                               listed_type="CART")
                products = [obj.product for obj in cart_obj]
                total_item = len(products)
                for i in cart_obj:
                    i.active = False
                    i.save()
            except TypeError:
                # No "token" cookie present (get() returned None).
                cart = []
        if total_item > 0:
            AbMailUtils.send_email_of_order(request.user, products)
        context = {}
        return render(request, "offline-page-orderpaced.html", context)
def get(self):
    """ Given a long URL (plus optional mobile deep-link URLs), returns a short URL. """
    long_url = self.get_argument('longUrl', None)  # decoded by Tornado.
    android_url = self.get_argument('androidUrl', None)  # decoded by Tornado.
    android_fallback_url = self.get_argument('androidFallbackUrl', None)  # decoded by Tornado.
    ios_url = self.get_argument('iosUrl', None)  # decoded by Tornado.
    ios_fallback_url = self.get_argument('iosFallbackUrl', None)  # decoded by Tornado.
    domain = self.get_argument('domain', self.settings['default_domain'])
    # Normalize and validate long_url (and the optional fallback URLs).
    try:
        long_url = utils.normalize_url(long_url)
        assert utils.validate_url(long_url) is True
        if android_url:
            # TODO: Validate and normalize!
            pass
            # android_url = utils.normalize_url(android_url)
            # assert utils.validate_url(android_url) is True
        if android_fallback_url:
            android_fallback_url = utils.normalize_url(
                android_fallback_url)
            assert utils.validate_url(android_fallback_url) is True
        if ios_url:
            # TODO: Validate and normalize!
            pass
            # ios_url = utils.normalize_url(ios_url)
            # assert utils.validate_url(ios_url) is True
        if ios_fallback_url:
            ios_fallback_url = utils.normalize_url(ios_fallback_url)
            assert utils.validate_url(ios_fallback_url) is True
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are not swallowed.
        logging.info('Wrong URL', exc_info=1)
        return self.finish({
            'status_code': 500,
            'status_txt': 'INVALID_URI',
            'data': []
        })
    # Validate domain.
    if not utils.validate_url('http://' + domain):
        return self.finish({
            'status_code': 500,
            'status_txt': 'INVALID_ARG_DOMAIN',
            'data': []
        })
    # Generate a unique hash, assemble short url and store result in Redis.
    url_hash = utils.generate_hash(self.application.redis,
                                   self.settings['redis_namespace'],
                                   self.settings['hash_salt'])
    short_url = 'http://' + domain + '/' + url_hash
    self.store_url(url_hash, long_url, android_url, android_fallback_url,
                   ios_url, ios_fallback_url)
    # Return success response.
    data = {
        'long_url': long_url,
        'android_url': android_url,
        'android_fallback_url': android_fallback_url,
        'ios_url': ios_url,
        'ios_fallback_url': ios_fallback_url,
        'url': short_url,
        'hash': url_hash,
        'global_hash': url_hash
    }
    self.finish({'status_code': 200, 'status_txt': 'OK', 'data': data})
def run():
    """Flask view: accept a job submission, stage the uploaded genomes in a
    per-request working directory, and queue the pipeline via `at`."""
    # Here handle submissions and run the analysis
    # Send emails on failures, success
    # Use redis to store user stats (hashed for privacy)
    if request.method == 'POST':
        # First things first, compute user hash
        req_id = generate_time_hash(request.remote_addr)
        # To avoid slow-downs in the running directory
        # create subdirs w/ the first 2 chars of the hash
        h2c = req_id[:2]
        try:
            os.mkdir(os.path.join(app.config['UPLOAD_FOLDER'], h2c))
        except:
            # Best-effort: the shard directory may already exist.
            pass
        # Prepare the working directory
        # Our hash scheme ensures that it should be unique
        wdir = os.path.join(app.config['UPLOAD_FOLDER'], h2c, req_id)
        wdir = os.path.abspath(wdir)
        os.mkdir(wdir)
        # Save input files
        draft = request.files['draft']
        #if draft and allowed_file(draft.filename):
        if draft:
            filename = secure_filename(draft.filename)
            draft.save(os.path.join(wdir, filename))
            dname = filename
        else:
            flash(u'Something went wrong with your draft genome', 'danger')
            return redirect(url_for('index'))
        # Save the genomes files
        genomes = set()
        try:
            for genome in request.files.getlist('genomes'):
                filename = secure_filename(genome.filename)
                genome.save(os.path.join(wdir, filename))
                genomes.add(filename)
        except:
            flash(u'Something went wrong with your target genomes', 'danger')
            return redirect(url_for('index'))
        # Check email, hash it (stored hashed for privacy)
        email = request.form['email']
        if email:
            hemail = generate_hash(email)
        else:
            flash(u'Something went wrong with your email', 'danger')
            return redirect(url_for('index'))
        # Secure my results?  Hash the optional passphrase.
        passphrase = request.form['passphrase']
        if passphrase:
            hpass = generate_hash(passphrase)
        else:
            hpass = None
        # In case of a passphrase, don't bother the current submitter
        session['req_id'] = req_id
        # Submit the job
        # Then redirect to the waiting page
        try:
            cmd = 'python tasks.py %s %s %s %s' % (req_id, wdir, dname,
                                                   ' '.join(genomes))
            f = open(os.path.join(wdir, 'cmd.sh'), 'w')
            f.write(cmd + '\n')
            f.close()
            # Queue the script through the `at` batch queue.
            cmd = 'at -q b -M now -f %s' % os.path.join(wdir, 'cmd.sh')
            proc = subprocess.Popen(cmd, shell=(sys.platform!="win32"),
                                    stdin=subprocess.PIPE,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE)
            out = proc.communicate()
            return_code = proc.returncode
            if return_code != 0:
                raise Exception('%s'%str(out[1]))
        except Exception as e:
            flash(u'Could not submit your job "%s"' % e, 'danger')
            return redirect(url_for('index'))
        try:
            # Send details to redis
            add_job(req_id, request.remote_addr, hemail, hpass)
        except Exception as e:
            flash(u'Could not save your job details (%s)' % e, 'danger')
            return redirect(url_for('index'))
        return redirect(url_for('results', req_id=req_id))
    # No POST, return to start
    flash(u'No job details given, would you like to start a new one?',
          'warning')
    return redirect(url_for('index'))
def _generate_name(self, name):
    """Map `name` to its hashed on-disk filename (``<hash>.txt``)."""
    hashed = generate_hash(name)
    return '{}.txt'.format(hashed)
def generate_hash(self, model_params, compute_params):
    """Delegate hashing of the (model, compute) parameter pair to the
    shared utils helper so all components derive identical keys."""
    return utils.generate_hash(model_params, compute_params)
# CGI activation page: checks the emailed activation hash against the
# hash of the user's email and flips the account to active.
config = configparser.ConfigParser()
config.read('constants.cnf')
cgitb.enable()
form = cgi.FieldStorage()
# Template variables for the shared page header.
header = dict()
header["title"] = "Activate"
header["homeUrl"] = "http://localhost/register.py"
header[
    "navTopRight"] = '<li class="active"><a href="http://localhost/register.py">Register Now</a></li>'
header["css_version"] = config.get("version", "css")
# NOTE(review): js_version is read from the "css" config key — confirm
# whether this should be config.get("version", "js").
footer = {"js_version": config.get("version", "css")}
email = form.getvalue("email")
value = form.getvalue("value")
active = login(email)["active"]
# Expected activation value: hash of the email address.
email_hash = generate_hash(email)
if os.environ['REQUEST_METHOD'] == 'GET':
    print("Content-type: text/html\n")
    f = open('./template/header.html', encoding='utf-8')
    print(f.read() % header)
    f.close()
    if value == email_hash and not active:
        activate(email)
        print(
            "<p>Your account has been activated. Please log in with the password assigned to you</p>"
        )
    elif active:
        print(
            "<p>Your account was already activated. Please log in with the password assigned to you</p>"
        )
def get_result(self, model_params, compute_params):
    """Fetch the stored result for the parameter pair, or None when no
    result exists yet."""
    if not self.has_result(model_params, compute_params):
        return None
    return self.get_result_hash(generate_hash(model_params, compute_params))
def has_result(self, model_params, compute_params):
    """Return True when an object named by the parameters' hash exists in
    the container."""
    _, objects = self.connection.get_container(self._container)
    hash_key = generate_hash(model_params, compute_params)
    return any(obj["name"] == hash_key for obj in objects)
def save_result(self, model_params, compute_params, result):
    """Persist *result* under the hash key of the parameter combination."""
    hash_key = generate_hash(model_params, compute_params)
    self.save_result_hash(hash_key, result)
def run():
    """Handle a job submission: save the uploads, queue the task via `at`,
    record the job in redis, then redirect to the results page.

    User identifiers (email, passphrase) are hashed before storage for
    privacy; failures flash a message and return to the index page.
    """
    if request.method == 'POST':
        # Request id doubles as the working-directory name.
        req_id = generate_time_hash(request.remote_addr)
        # Shard working dirs by the first two hash chars to avoid
        # slow-downs from one huge directory.
        h2c = req_id[:2]
        try:
            os.mkdir(os.path.join(app.config['UPLOAD_FOLDER'], h2c))
        except OSError:
            # Best-effort: the shard dir usually already exists.
            pass
        # Our hash scheme ensures this path should be unique.
        wdir = os.path.join(app.config['UPLOAD_FOLDER'], h2c, req_id)
        wdir = os.path.abspath(wdir)
        os.mkdir(wdir)
        # Save the draft genome upload.
        draft = request.files['draft']
        #if draft and allowed_file(draft.filename):
        if draft:
            filename = secure_filename(draft.filename)
            draft.save(os.path.join(wdir, filename))
            dname = filename
        else:
            flash(u'Something went wrong with your draft genome', 'danger')
            return redirect(url_for('index'))
        # Save the target genome uploads.
        genomes = set()
        try:
            for genome in request.files.getlist('genomes'):
                filename = secure_filename(genome.filename)
                genome.save(os.path.join(wdir, filename))
                genomes.add(filename)
        except Exception:
            flash(u'Something went wrong with your target genomes', 'danger')
            return redirect(url_for('index'))
        # Email is stored hashed only.
        email = request.form['email']
        if email:
            hemail = generate_hash(email)
        else:
            flash(u'Something went wrong with your email', 'danger')
            return redirect(url_for('index'))
        # Optional passphrase to secure the results, stored hashed.
        passphrase = request.form['passphrase']
        if passphrase:
            hpass = generate_hash(passphrase)
        else:
            hpass = None
        # In case of a passphrase, don't bother the current submitter.
        session['req_id'] = req_id
        # Queue the job through `at`, then redirect to the waiting page.
        try:
            cmd = 'python tasks.py %s %s %s %s' % (req_id, wdir, dname,
                                                   ' '.join(genomes))
            # `with` guarantees the script is flushed and closed before
            # `at` reads it (the original leaked the handle on error).
            with open(os.path.join(wdir, 'cmd.sh'), 'w') as fh:
                fh.write(cmd + '\n')
            cmd = 'at -q b -M now -f %s' % os.path.join(wdir, 'cmd.sh')
            proc = subprocess.Popen(cmd,
                                    shell=(sys.platform != "win32"),
                                    stdin=subprocess.PIPE,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE)
            out = proc.communicate()
            if proc.returncode != 0:
                raise Exception('%s' % str(out[1]))
        except Exception as e:
            flash(u'Could not submit your job "%s"' % e, 'danger')
            return redirect(url_for('index'))
        try:
            # Record the job details in redis.
            add_job(req_id, request.remote_addr, hemail, hpass)
        except Exception as e:
            flash(u'Could not save your job details (%s)' % e, 'danger')
            return redirect(url_for('index'))
        return redirect(url_for('results', req_id=req_id))
    # No POST, return to start
    flash(u'No job details given, would you like to start a new one?',
          'warning')
    return redirect(url_for('index'))
def save_result(self, model_params, compute_params, result):
    """Store *result* keyed by the hash of (model_params, compute_params)."""
    key = generate_hash(model_params, compute_params)
    self.save_result_hash(key, result)
def generate_client_tgs_password():
    """Generate and return the hashed shared password between TGS and client.

    Uses the `secrets` module: `random` is not cryptographically secure
    and must not be used to mint credentials. Range (0-9999) is unchanged.
    """
    import secrets  # stdlib; local import keeps the fix self-contained
    print('Generating password between tgs and client')
    password = str(secrets.randbelow(10000))
    hashed_password = utils.generate_hash(password)
    return hashed_password
def generate_auth_service_tgs_password():
    """Generate, cache (module global) and persist the hashed auth-service/TGS
    shared password.

    Uses the `secrets` module: `random` is not cryptographically secure
    and must not be used to mint credentials. Range (0-9999) is unchanged.
    """
    global auth_tgs_password
    import secrets  # stdlib; local import keeps the fix self-contained
    print('Generating password between me and tgs')
    password = str(secrets.randbelow(10000))
    auth_tgs_password = utils.generate_hash(password)
    utils.persist_txt('auth_tgs', 'our_secret', auth_tgs_password)
def generate_password_hash(self):
    """Hash this account's stored plaintext password."""
    hashed = generate_hash(self.password)
    return hashed
def run():
    """Handle a CONTIGuator job submission: validate the form options, save
    the uploads, queue the analysis via `at` and redirect to the results
    page. The submitter's email is stored hashed for privacy.
    """
    if request.method == 'POST':
        # Request id doubles as the working-directory name.
        req_id = generate_time_hash(request.remote_addr)
        # Shard working dirs by the first two hash chars to avoid
        # slow-downs from one huge directory.
        h2c = req_id[:2]
        try:
            os.mkdir(os.path.join(app.config['UPLOAD_FOLDER'], h2c))
        except OSError:
            # Best-effort: the shard dir usually already exists.
            pass
        # Our hash scheme ensures this path should be unique.
        wdir = os.path.join(app.config['UPLOAD_FOLDER'], h2c, req_id)
        wdir = os.path.abspath(wdir)
        os.mkdir(wdir)
        # Sanity check: every submitted option must parse as its type.
        float_entries = ['maxtemp', 'mingc', 'mintemp', 'maxgc', 'opttemp',
                         'multitreshold', 'evalue', 'optgc']
        int_entries = ['contigcoverage', 'minsize', 'threads', 'exclude',
                       'contiglength', 'optsize', 'flanksize', 'maxsize',
                       'minprod', 'maxprod', 'numN', 'hitlength', 'gcclamp']
        bool_entries = ['pcr', 'blastn', 'non']
        str_entries = ['email', 'jobname']
        for caster, entries in ((float, float_entries), (str, str_entries),
                                (bool, bool_entries), (int, int_entries)):
            for entry in entries:
                # Distinct names: the original reused `e` for both the
                # entry and the caught exception (shadowing).
                try:
                    if entry in request.form:
                        caster(request.form[entry])
                except Exception as e:
                    flash(u'Something went wrong while processing your options %s'%e,
                          'danger')
                    return redirect(url_for('index'))
        # Save the draft genome upload.
        draft = request.files['contigs']
        if draft:
            filename = secure_filename(draft.filename)
            draft.save(os.path.join(wdir, filename))
            dname = filename
        else:
            flash(u'Forgot to provide your draft genome?', 'danger')
            return redirect(url_for('index'))
        # Save the reference genome uploads.
        genomes = set()
        try:
            for genome in request.files.getlist('reference'):
                filename = secure_filename(genome.filename)
                genome.save(os.path.join(wdir, filename))
                genomes.add(filename)
        except Exception:
            flash(u'Forgot to provide your reference genome?', 'danger')
            return redirect(url_for('index'))
        # Save the optional ptt files.
        ptts = request.files['pttfile']
        if ptts:
            try:
                for ptt in request.files.getlist('pttfile'):
                    filename = secure_filename(ptt.filename)
                    ptt.save(os.path.join(wdir, filename))
            except Exception:
                flash(u'Something went wrong with your ptt files', 'danger')
                return redirect(url_for('index'))
        # Email is stored hashed only.
        email = request.form['email']
        if email:
            hemail = generate_hash(email)
        else:
            flash(u'Something went wrong with your email', 'danger')
            return redirect(url_for('index'))
        # Queue the job through `at`, then redirect to the waiting page.
        try:
            # Boolean flags: presence in the form means True.
            non = 'non' in request.form
            pcr = 'pcr' in request.form
            inner = 'inner' in request.form
            blastn = 'blastn' in request.form
            evalue = request.form.get('evalue', 1e-20)
            contiglength = request.form.get('contiglength', 1000)
            contigcoverage = request.form.get('contigcoverage', 20)
            hitlength = request.form.get('hitlength', 1100)
            multitreshold = request.form.get('multitreshold', 1.5)
            numN = request.form.get('numN', 100)
            threads = request.form.get('threads', 1)
            optsize = request.form.get('optsize', 20)
            minsize = request.form.get('minsize', 18)
            maxsize = request.form.get('maxsize', 27)
            opttemp = request.form.get('opttemp', 60)
            mintemp = request.form.get('mintemp', 57)
            maxtemp = request.form.get('maxtemp', 63)
            flanksize = request.form.get('flanksize', 1000)
            minprod = request.form.get('minprod', 1000)
            maxprod = request.form.get('maxprod', 7000)
            optgc = request.form.get('optgc', 50)
            mingc = request.form.get('mingc', 20)
            maxgc = request.form.get('maxgc', 80)
            gcclamp = request.form.get('gcclamp', 1)
            exclude = request.form.get('exclude', 100)
            jobname = request.form.get('jobname', 'CONTIGuator job')
            # NOTE(review): `jobname` is raw user input interpolated into a
            # shell script below ("%s" in quotes) — a quote or $() in it
            # would break or inject the command; consider shlex.quote.
            cmd = 'python tasks.py %s %s %s ' % (req_id, wdir, dname)
            cmd += '%s %s %s %s ' % (evalue, contiglength, contigcoverage,
                                     hitlength)
            cmd += '%s %s %s %s %s %s ' % (multitreshold, non, numN, pcr,
                                           inner, blastn)
            cmd += '%s %s %s %s %s %s ' % (threads, optsize, minsize,
                                           maxsize, opttemp, mintemp)
            cmd += '%s %s %s %s %s %s %s ' % (maxtemp, flanksize, minprod,
                                              maxprod, optgc, mingc, maxgc)
            cmd += '%s %s "%s" ' % (gcclamp, exclude, jobname)
            cmd += ' '.join(genomes)
            # `with` guarantees the script is flushed and closed before
            # `at` reads it (the original leaked the handle on error).
            with open(os.path.join(wdir, 'cmd.sh'), 'w') as fh:
                fh.write(cmd + '\n')
            cmd = 'at -q b -M now -f %s' % os.path.join(wdir, 'cmd.sh')
            proc = subprocess.Popen(cmd,
                                    shell=(sys.platform != "win32"),
                                    stdin=subprocess.PIPE,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE)
            out = proc.communicate()
            if proc.returncode != 0:
                raise Exception('%s' % str(out[1]))
        except Exception as e:
            flash(u'Could not submit your job "%s"' % e, 'danger')
            return redirect(url_for('index'))
        try:
            # Record the job details in redis.
            add_job(req_id, request.remote_addr, hemail)
        except Exception as e:
            flash(u'Could not save your job details (%s)' % e, 'danger')
            return redirect(url_for('index'))
        return redirect(url_for('results', req_id=req_id))
    # No POST, return to start
    flash(u'No job details given, would you like to start a new one?',
          'warning')
    return redirect(url_for('index'))
def update_airflow_image():
    """Build the Airflow image and push it to ECR twice: once under the
    static tag and once under a freshly generated random tag."""
    image = build_image()
    tag_and_push_to_ecr(image, IMAGE_TAG)
    # Random tag is generated only after the static push, as before.
    random_tag = generate_hash(16)
    tag_and_push_to_ecr(image, random_tag)
def command_handler(self, command):
    """Dispatch a console command against this Chord node.

    Supported commands (first whitespace token selects the branch):
    get/set/delete for key-value data, join/disjoin for ring membership,
    show/summary for diagnostics, ft_update to refresh finger tables.
    Operations for keys not owned locally are forwarded to the nearest
    alive node.
    """
    commands = command.split()
    if commands[0] == 'get':
        key = commands[1]
        try:
            value = self.data_table.get(key)
            logging.info(
                f"request key:{key[:10]}'s value is {value}, stored in {self.address}"
            )
        except ValueError:
            # (translated) First we need a health check: is the matching
            # finger-table node alive? If it is dead, keep forwarding to
            # the next node down.
            nearest_node = self.node_table.find_nearest_alive_node(key)
            data_request(self.node_table.cur_node, nearest_node,
                         Data(key, ""), d.get)
    elif commands[0] == 'set':
        key, value = commands[1].split(":")
        key = generate_hash(key)
        # (translated) Check whether this node itself should store the key.
        cur_key = self.node_table.cur_node.key
        successor_key = self.node_table.finger_table.entries[
            n.successor].key
        # (translated) If the key falls in this node's arc of the ring,
        # store it locally.
        if cur_key <= key < successor_key or successor_key < cur_key <= key or key < successor_key < cur_key:
            self.data_table.set(key, value)
            logging.info(
                f"request key:{key[:10]}'s value is set to {value}, stored in {self.address}"
            )
        # (translated) Otherwise find the nearest alive node and store it
        # there.
        else:
            nearest_node = self.node_table.find_nearest_alive_node(key)
            data_request(self.node_table.cur_node, nearest_node,
                         Data(key, value), d.set)
    elif commands[0] == 'delete':
        key = commands[1]
        try:
            self.data_table.delete(key)
            logging.info(
                f"request key:{key[:10]} is deleted from {self.address}")
        except ValueError:
            # Key not held locally: forward the delete to the owner.
            nearest_node = self.node_table.find_nearest_alive_node(key)
            data_request(self.node_table.cur_node, nearest_node,
                         Data(key, ""), d.delete)
    elif commands[0] == 'join':
        # Announce the joining node, then trigger a finger-table rebuild
        # starting from our successor.
        toss_message(self.node_table.cur_node, Data("", commands[1]),
                     t.join_node)
        logging.info(f"finishing join node, will update finger table...")
        toss_message(self.node_table.cur_node,
                     self.node_table.finger_table.entries[n.successor],
                     t.finger_table_setting, 1, t.join_node)
    elif commands[0] == 'disjoin':
        self.server.stop(0)  # (translated) stop all servers so no more health checks are received
        self.node_table.stop_flag = True  # (translated) stop sending health checks
        logging.info(
            'Waiting for other nodes to update their finger tables')
        time.sleep(10)  # (translated) wait until the other nodes have updated their finger tables
        # Hand our data over to the predecessor, one thread per entry.
        for entry in self.data_table.entries:
            threading.Thread(target=data_request,
                             args=(self.node_table.cur_node,
                                   self.node_table.predecessor, entry,
                                   d.set)).start()
        toss_message(self.node_table.cur_node,
                     self.node_table.finger_table.entries[n.successor],
                     t.finger_table_setting, 1, t.left_node)
    elif commands[0] == 'show':
        # (translated) added: print the node-table info
        self.node_table.log_nodes()
    elif commands[0] == 'summary':
        self.data_table.summary()
    elif commands[0] == 'ft_update':
        toss_message(self.node_table.cur_node,
                     self.node_table.finger_table.entries[0],
                     t.finger_table_setting, 1)
# Smoke-test script for the Datastore/Node pair.
# NOTE(review): written in Python 2 syntax (print statements) — this chunk
# is not runnable under Python 3 as-is.
from utils import generate_hash, walk_directory

DB_NAME = 'test.db'
# Start every run from a clean database file.
if os.path.exists(DB_NAME):
    os.remove(DB_NAME)
ds = Datastore(DB_NAME)
ds.clear()
print 'Can a Node be added to the Datastore and increase the row count?'
node = Node('testdata/hello.txt')
ds.add_or_update_node(node)
assert(ds.num_items() == 1)
print 'PASSED\n'
print 'Does a file Node generate an expected hash?'
# Expected value is the known hash of testdata/hello.txt.
assert(generate_hash(node) == '60fde9c2310b0d4cad4dab8d126b04387efba289')
print 'PASSED\n'
print 'If we run the Node\'s update_info() method do we see valid attributes set on the Node?'
node.update_info(ds)
assert(node.hash == '60fde9c2310b0d4cad4dab8d126b04387efba289')
assert(node.bytes == 14)
assert(node.type == 'f')
assert(node.permissions > 0)
# Timestamps are 19-char strings ("YYYY-MM-DD HH:MM:SS"-sized).
assert(len(node.created) == 19)
assert(len(node.modified) == 19)
assert(len(node.last_seen) == 19)
print 'PASSED\n'
print 'Can the Datastore add all the Nodes of a given directory?'
ds.add_all_nodes_from_dir('testdata')