def generate_response(intent, order_id, amount):
    status = intent['status']
    db_handler = DBHandler()
    if status == 'requires_action' or status == 'requires_source_action':
        # Card requires authentication
        print(status)
        return jsonify({
            'requiresAction': True,
            'paymentIntentId': intent['id'],
            'clientSecret': intent['client_secret']
        })
    elif status == 'requires_payment_method' or status == 'requires_source':
        # Card was not properly authenticated, suggest a new payment method
        print(status)
        # db_handler.registerPayment(order_id, amount)
        return jsonify({
            'error': 'Your card was denied, please provide a new payment method'
        })
    elif status == 'succeeded':
        # Payment is complete, authentication not required
        # To cancel the payment you will need to issue a Refund (https://stripe.com/docs/api/refunds)
        print("💰 Payment received!")
        db_handler.registerPayment(order_id, amount)
        return jsonify({'clientSecret': intent['client_secret']})
def send_verification():
    try:
        data = request.get_json()
        totalAmount, order_id = calculate_order_amount(data['items'][0]['id'])
        db_handler = DBHandler()
        db_handler.registerPayment(order_id, totalAmount)
        return jsonify({'totalAmount': totalAmount})
    except stripe.error.CardError as e:
        print(e.user_message)
        return jsonify({'error': e.user_message})
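# A hedged sketch of how calculate_order_amount/generate_response might be wired
# into a Stripe confirm flow. The route name, request fields, API key placeholder,
# and currency are assumptions for illustration, not taken from the original code.
import stripe
from flask import Flask, jsonify, request

app = Flask(__name__)
stripe.api_key = 'sk_test_...'  # placeholder


@app.route('/pay', methods=['POST'])
def pay():
    data = request.get_json()
    amount, order_id = calculate_order_amount(data['items'][0]['id'])
    try:
        intent = stripe.PaymentIntent.create(
            amount=amount,                        # amount in cents
            currency='usd',
            payment_method=data['paymentMethodId'],
            confirmation_method='manual',
            confirm=True,
        )
        return generate_response(intent, order_id, amount)
    except stripe.error.CardError as e:
        return jsonify({'error': e.user_message})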
def __init__(self, port):
    self.BACKLOG = 1024  # size of the queue for pending connections
    self.ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
    self.ssl_context.load_cert_chain('ssl/certificate.crt', 'ssl/private.key')
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.bind(('', port))  # Bind to the port
    self.s = self.ssl_context.wrap_socket(s, server_side=True)
    self.db_handler = DBHandler()
    self.logger = self.configure_logging()
def calculate_order_amount(job_id):
    """Get the order's start/end timestamps from the DB and bill for the elapsed time."""
    # Calculate the order total on the server to prevent
    # people from directly manipulating the amount on the client.
    db_handler = DBHandler()
    order_id = db_handler.getOrderId(job_id)
    start, end, hourlyPrice = db_handler.getOrderBillingInfo(order_id)
    start = datetime.strptime(start, '%Y-%m-%d %H:%M:%S')
    end = datetime.strptime(end, '%Y-%m-%d %H:%M:%S')
    elapsed = int((end - start).total_seconds())  # total_seconds() also counts full days
    hour = elapsed // 3600
    minutes = elapsed // 60 % 60
    price = hourlyPrice * (minutes / 60 + hour)
    price = price * 1.029 + 0.5  # payment-processor fee markup
    return int(price * 100), order_id  # amount in cents
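# A worked example of the same elapsed-time billing math with assumed values
# (1 h 45 min at an hourly price of 8.0); no DBHandler involved.
from datetime import datetime

start = datetime.strptime('2021-03-01 10:00:00', '%Y-%m-%d %H:%M:%S')
end = datetime.strptime('2021-03-01 11:45:00', '%Y-%m-%d %H:%M:%S')
hourly_price = 8.0

elapsed = int((end - start).total_seconds())
hours = elapsed // 3600                          # 1
minutes = elapsed // 60 % 60                     # 45
price = hourly_price * (minutes / 60 + hours)    # 8.0 * 1.75 = 14.0
price = price * 1.029 + 0.5                      # fee markup, as above
print(int(price * 100))                          # amount in cents passed to Stripe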
def __init__(self, num):
    self.db = DBHandler()
    self.machineNumbers = num
    self.count = 0
    self.config = configparser.ConfigParser()  # Python 3 module name
    self.config.read('config.properties')
    self.cryptTool = CryptoTool()
def __init__(self, app=None):
    super(PostHandler, self).__init__()
    self.handlers = {
        '/submit': self.handleSubmit,
        '/diff': self.handleDiff,
        '/savedraft': self.handleSaveDraft,
        '/loadone': self.handleLoadOne,
        '/status': self.handleStatus,
        '/submissions': self.handleSubmissions,
        '/logout': self.handleLogout,
        '/login': self.handleLogin
    }
    self.app = app or current_app
    self.dbHandler = DBHandler(app)
    self.fileSystemHandler = FileSystemHandler(app)
    if app is not None:
        self.init_app(app)
class TestHander(unittest.TestCase):
    dbHandler = None
    fileHandler = None

    def setUp(self):
        self.dbHandler = DBHandler()
        self.fileHandler = FileHandler(5)

    def testFileHanderSave(self, url='aa'):
        path = '/tmp/license.txt'
        import os
        if not os.path.isfile(path):
            os.mknod(path)
        fileId, physicalName = self.fileHandler.save(path, url)
        return fileId, physicalName

    def testGetFilePathByPhysicalName(self):
        fileId, physicalName = self.testFileHanderSave()
        path = self.fileHandler.getFilePathByDiskName(physicalName)
        print('path: ' + path)

    def testGetFiletypeByURL(self):
        url = 'http://www.abc.com'
        self.testFileHanderSave(url)
        print('file type is: ' + self.fileHandler.getTypeByUrl(url))

    def testGetFileByID(self):
        fileId, physicalName = self.testFileHanderSave()
        opendFile = self.fileHandler.getFileByID(fileId)

    def testGetFiletypeByID(self):
        fileId, physicalName = self.testFileHanderSave()
        print(self.fileHandler.getTypeByID(fileId))

    def testGetTotalFileCount(self):
        num = self.fileHandler.getTotalFileCount()
        print("file count: " + str(num))

    def testExecute(self):
        sql = 'select * from files'
        rows = self.dbHandler.excute(sql)
def main():
    parser = argparse.ArgumentParser(
        description='Process GitHub issue records and record to SQLite database')
    parser.add_argument('-t', '--token', help='Token for authorization')
    parser.add_argument('-db', '--database', default='', help='specify db filename')
    args = parser.parse_args()
    dbFile = args.database
    token = args.token
    dbHandler = DBHandler(dbFile)
    dbHandler.createTables()
    createUserMap()
    with open("repos.txt", 'r') as f:
        repos = f.readlines()
    for repo in repos:
        extractRepoData(token, repo.strip(), dbHandler)
    dbHandler.populateUserGroups(User_group_map)
    dbHandler.closeConnection()
class Authentication(object):
    def __init__(self):
        self.HASH_ITERATIONS = 100000
        self.HASH_ALGO = 'sha512'
        self.db_handler = DBHandler()

    def register_user(self, email, password, username):
        if self.db_handler.checkEmailAvailability(email):
            user_id = self.generate_user_id()
            pswd = self.__hash_password(password)
            self.db_handler.registerUser(user_id, email, pswd, username)
            authToken = self.generate_auth_token()
            self.db_handler.addAuthToken(user_id, authToken)
            return (user_id, authToken)
        else:
            return 1

    def sign_in_user(self, email, password, uid):
        authToken = self.db_handler.getAuthToken(email)
        if authToken:
            return authToken
        else:
            stored_password = self.db_handler.getStoredPasswordHash(email)
            if self.__verify_password(stored_password, password):
                authToken = self.generate_auth_token()
                self.db_handler.addAuthToken(uid, authToken)
                return authToken
            else:
                return 1

    def __hash_password(self, password):
        salt = hashlib.sha256(os.urandom(60)).hexdigest().encode('ascii')
        pwdhash = hashlib.pbkdf2_hmac(self.HASH_ALGO, password.encode('utf-8'),
                                      salt, self.HASH_ITERATIONS)
        pwdhash = binascii.hexlify(pwdhash)
        return (salt + pwdhash).decode('ascii')

    def __verify_password(self, stored_password, provided_password):
        salt = stored_password[:64]
        stored_password = stored_password[64:]
        pwdhash = hashlib.pbkdf2_hmac(self.HASH_ALGO, provided_password.encode('utf-8'),
                                      salt.encode('ascii'), self.HASH_ITERATIONS)
        pwdhash = binascii.hexlify(pwdhash).decode('ascii')
        return pwdhash == stored_password

    def generate_user_id(self):
        usrid = int(random.random() * 900000) + 100000
        while not self.db_handler.checkUserIdAvailability(usrid):
            usrid = int(random.random() * 900000) + 100000
        return usrid

    def generate_auth_token(self):
        chars = string.ascii_letters + string.digits
        token = ''.join(random.choice(chars) for i in range(13))
        while not self.db_handler.checkAuthTokenAvailability(token) or not self.db_handler.checkAuthTokenBList(token):
            token = ''.join(random.choice(chars) for i in range(13))
        return token
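# A self-contained sketch of the same salted-PBKDF2 scheme used above
# (sha512, 100000 iterations, 64-hex-char salt prefix); the function names
# here are illustrative and no DBHandler is required.
import binascii
import hashlib
import os

HASH_ITERATIONS = 100000
HASH_ALGO = 'sha512'


def hash_password(password):
    salt = hashlib.sha256(os.urandom(60)).hexdigest().encode('ascii')
    pwdhash = hashlib.pbkdf2_hmac(HASH_ALGO, password.encode('utf-8'),
                                  salt, HASH_ITERATIONS)
    return (salt + binascii.hexlify(pwdhash)).decode('ascii')


def verify_password(stored, provided):
    salt, stored_hash = stored[:64], stored[64:]
    pwdhash = hashlib.pbkdf2_hmac(HASH_ALGO, provided.encode('utf-8'),
                                  salt.encode('ascii'), HASH_ITERATIONS)
    return binascii.hexlify(pwdhash).decode('ascii') == stored_hash


stored = hash_password('hunter2')
assert verify_password(stored, 'hunter2')
assert not verify_password(stored, 'wrong-password')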
def __init__(self):
    self.HASH_ITERATIONS = 100000
    self.HASH_ALGO = 'sha512'
    self.db_handler = DBHandler()
def main():
    inputFilePath = 'all-measles-rates.csv'
    dbPath = 'measlesData.db'
    dbfile = Path(dbPath)
    dbfile.touch(exist_ok=True)
    dbh = DBHandler(dbPath)
    print('Welcome to my MMR data analysis program. '
          'Please wait while the database is constructed.')
    with dbh:
        # dbh.resetDB()
        dbh.createTables()
        populateDBFromFile(dbh, inputFilePath)
        running = True
        while running:
            userin = input('Please enter "US", "State", "SchoolType", or "Exit": ').lower()
            if userin == 'us':
                print('National overall rate: ' + str(dbh.getOverallImmunizationRate()))
                for schooltype in dbh.getAllSchoolTypes():
                    if schooltype is None:
                        continue
                    print('National ' + schooltype + ' school rate: ' +
                          str(dbh.getOverallImmunizationRatePerSchoolType(schooltype)))
            elif userin == 'state':
                userin = input('Please choose a state: ').lower().title()
                if userin not in dbh.getAllStates():
                    print('No data exists for state ' + userin)
                else:
                    print(userin + ' overall rate: ' + str(dbh.getStateImmunizationRate(userin)))
                    for schooltype in dbh.getAllSchoolTypes():
                        if schooltype is None:
                            continue
                        print(userin + ' ' + schooltype + ' school rate: ' +
                              str(dbh.getStateImmunizationRatePerSchoolType(userin, schooltype)))
            elif userin == 'schooltype':
                userin = input('Please choose a school type: ').lower().capitalize()
                if userin not in dbh.getAllSchoolTypes():
                    print('No data exists for school type ' + userin)
                else:
                    print('National ' + userin + ' school rate: ' +
                          str(dbh.getOverallImmunizationRatePerSchoolType(userin)))
            elif userin == 'exit':
                running = False
            else:
                print('Unrecognized input.')
    print('Goodbye!')
class Server:
    def __init__(self, port):
        self.BACKLOG = 1024  # size of the queue for pending connections
        self.ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
        self.ssl_context.load_cert_chain('ssl/certificate.crt', 'ssl/private.key')
        self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)  # must be set before bind()
        self.s.bind(('', port))  # Bind to the port
        self.s = self.ssl_context.wrap_socket(self.s, server_side=True)
        self.db_handler = DBHandler()
        self.auth = Authentication()
        self.logger = self.configure_logging()

    def run(self):
        self.s.listen(self.BACKLOG)
        # Now wait for client connection.
        self.logger.info('Server up and running.\n')
        while True:
            try:
                conn, addr = self.s.accept()
            except Exception as e:
                self.logger.error('Error in accepting request:' + str(e))
                continue
            try:
                Thread(target=self.serve_client, args=(conn, addr)).start()
            except:
                self.refuse_client(conn, addr)
                self.logger.error(
                    f'Couldn\'t create thread. Refused client at {addr}')

    def configure_logging(self):
        logger = logging.getLogger('Server.logger')
        logger.setLevel(logging.INFO)
        currentDT = str(datetime.datetime.now()).replace(' ', '_')
        format_ = logging.Formatter(
            '%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
        # create log file handler
        file_handler = logging.FileHandler('logs/logfile_' + currentDT)
        file_handler.setLevel(logging.INFO)
        file_handler.setFormatter(format_)
        # create console handler with a higher log level
        console_handler = logging.StreamHandler()
        console_handler.setLevel(logging.WARNING)
        console_handler.setFormatter(format_)
        logger.addHandler(file_handler)
        logger.addHandler(console_handler)
        logger.info('begin log')
        return logger

    def serve_client(self, conn, addr):
        self.logger.info(
            f'Thread {threading.get_ident()} initialized to serve request from {addr}')
        req_pipe = Messaging(conn, addr)
        req_pipe.read()
        if not req_pipe.jsonheader or not req_pipe.request:
            self.logger.warning(f'invalid request from {addr}.')
        else:
            # self.authHandler = Authentication()
            if req_pipe.request.get('request-type') == 'sign-in':
                self.sign_in_user(req_pipe, conn, addr)
            elif req_pipe.request.get('request-type') == 'sign-up':
                self.register_user(req_pipe, conn, addr)
            else:
                if 'authToken' not in req_pipe.request or 'request-type' not in req_pipe.request:
                    self.logger.warning(f'invalid request from {addr}.')
                    response_content = {
                        'status': 'error',
                        'error-msg': 'invalid request. check that you have authToken and request-type in the request',
                    }
                    req_pipe.write(response_content, 'text/json')
                else:
                    authToken = req_pipe.request.get('authToken')
                    if self.db_handler.checkAuthToken(authToken):
                        uid = self.db_handler.getUserIdFromAuthToken(authToken)
                        if req_pipe.request.get('request-type') == 'sign-out':
                            self.sign_out_user(req_pipe, conn, addr, uid)
                        else:
                            if 'role' not in req_pipe.request:
                                self.logger.warning(f'invalid request from {addr}.')
                                response_content = {
                                    'status': 'error',
                                    'error-msg': 'invalid request. check that you have authToken, role and request-type in the request',
                                }
                                req_pipe.write(response_content, 'text/json')
                                return
                            if req_pipe.request.get('role') == 'renter':
                                self.serve_renter_request(req_pipe, conn, addr, uid)
                            elif req_pipe.request.get('role') == 'leaser':
                                self.serve_leaser_request(req_pipe, conn, addr, uid)
                    else:
                        # log error, send error msg
                        self.logger.warning(
                            f'invalid request from {addr}: no/invalid credentials')

    def register_user(self, req_pipe, conn, addr):
        request = req_pipe.request
        if 'email' not in request or 'password' not in request or 'username' not in request \
                or 'user-type' not in request or 'machine-chars' not in request:
            self.logger.warning(
                f'could not sign up: missing field(s) in a sign up request from {addr}.')
            response_content = {
                'status': 'error',
                'error-msg': 'could not sign up: some field(s) missing'
            }
            req_pipe.write(response_content, 'text/json')
        else:
            email, pswd, username, usr_type, chars = request['email'], request['password'], \
                request['username'], request['user-type'], request['machine-chars']
            res = self.auth.register_user(email, pswd, username)
            if res == 1:
                self.logger.warning('could not sign up: email already in use')
                response_content = {
                    'status': 'error',
                    'error-msg': 'could not sign up: email already in use'
                }
                req_pipe.write(response_content, 'text/json')
            else:
                user_id, authToken = res
                self.logger.info(f'successfully registered user {user_id}')
                response_content = {
                    'status': 'success',
                    'user-id': user_id,
                    'authToken': authToken,
                    'username': username
                    # 'user-type': usr_type
                }
                req_pipe.write(response_content, 'text/json')

    def sign_in_user(self, req_pipe, conn, addr):
        request = req_pipe.request
        if 'email' not in request or 'password' not in request:
            self.logger.warning(
                f'could not sign in: missing field(s) in the request from {addr}.')
            response_content = {
                'status': 'error',
                'error-msg': 'could not sign in: some field(s) missing'
            }
            req_pipe.write(response_content, 'text/json')
        else:
            email, pswd = request['email'], request['password']
            res = self.db_handler.getUserInfo(email)
            if res is None:
                self.logger.warning('could not sign in: email not registered.')
                response_content = {
                    'status': 'error',
                    'error-msg': 'bad credentials'
                }
                req_pipe.write(response_content, 'text/json')
            else:
                user_id, username, user_type = res
                authToken = self.auth.sign_in_user(email, pswd, user_id)
                if authToken == 1:
                    self.logger.warning(
                        f'could not sign in: bad credentials from {addr}.')
                    response_content = {
                        'status': 'error',
                        'error-msg': 'bad credentials'
                    }
                    req_pipe.write(response_content, 'text/json')
                else:
                    leasing_status = self.db_handler.getLeasingStatus(user_id)
                    self.logger.info(f'successful sign in. uid: {user_id}')
                    response_content = {
                        'status': 'success',
                        'authToken': authToken,
                        'user-type': user_type,
                        'username': username,
                        'leasing-status': leasing_status
                    }
                    req_pipe.write(response_content, 'text/json')

    def sign_out_user(self, req_pipe, conn, addr, uid):
        self.db_handler.removeAuthToken(uid)
        self.logger.info(f'logged out user {uid}.')
        response_content = {
            'status': 'success',
        }
        req_pipe.write(response_content, 'text/json')

    def refuse_client(self, conn, addr):
        req_pipe = Messaging(conn, addr)
        req_pipe.read()
        response_content = {
            'status': 'error',
            'error-msg': 'Server busy, can\'t serve at the time.'
        }
        req_pipe.write(response_content, 'text/json')

    def serve_renter_request(self, req_pipe, conn, addr, uid):
        header, request_content = req_pipe.jsonheader, req_pipe.request
        if request_content['request-type'] == 'get-job-statuses':
            self.logger.info(
                f'connection: renter from {addr}; request type: get-job-statuses')
            job_statuses = self.db_handler.getJobStatuses(uid)
            response_content = {
                'status': 'success',
                'statuses': job_statuses,
            }
            req_pipe.write(response_content, 'text/json')
            self.logger.info(f'job statuses sent to renter at {addr}')
        elif request_content['request-type'] == 'get-job-status':
            self.logger.info(
                f'connection: renter from {addr}; request type: get-job-status')
            job_id = request_content['job-id']
            job_status = self.db_handler.getJobStatus(job_id)
            response_content = {
                'status': 'success',
                'job-status': job_status,
            }
            req_pipe.write(response_content, 'text/json')
            self.logger.info(f'job status sent to renter at {addr}')
        elif request_content['request-type'] == 'executable-upload-permission':
            self.logger.info(
                f'connection: renter {uid} from {addr}; request type: executable-upload-permission')
            job_id = self.generate_job_id()  # some unique id generator
            db_token = self.generate_db_token()
            job_type = request_content['file-type']
            file_size = request_content['file-size']
            job_description = request_content['job-description']
            response_content = {
                'status': 'success',
                'db-token': db_token,
                'job-id': job_id
            }
            req_pipe.write(response_content, 'text/json')
            # add job to DB
            self.db_handler.addJob(uid, job_id, job_type, file_size, db_token,
                                   job_description, status='xtbu')
            self.logger.info(
                f'issued permission to renter {uid} to submit job {job_id} via token {db_token}')
        elif request_content['request-type'] == 'get-available-leasers':
            self.logger.info(
                f'connection: renter from {addr}; request type: get-available-leasers')
            available_leasers = self.db_handler.queryLeasers(status='a')
            response_content = {
                'status': 'success',
                'leasers': available_leasers,
            }
            req_pipe.write(response_content, 'text/json')
            self.logger.info(f'available leasers sent to renter at {addr}')
        elif request_content['request-type'] == 'submit-job-order':
            self.logger.info(
                f'connection: renter from {addr}; request type: submit-job-order')
            job_id = request_content['job-id']
            job_mode = request_content['job-mode']
            leaser_username = request_content['leaser']
            job_description = request_content['job-description']
            order_id = self.generate_order_id()
            leaser_id = self.db_handler.getUserId(leaser_username)
            price = self.db_handler.getLeaserPrice(leaser_id)
            self.db_handler.submitJobOrder(order_id, uid, job_id, job_description,
                                           job_mode, leaser_id, price, status='p')
            response_content = {'status': 'success', 'order-id': order_id}
            req_pipe.write(response_content, 'text/json')
            self.logger.info(
                f'job order ({job_id}) successfully submitted (from {uid} to {leaser_id})')
        elif request_content['request-type'] == 'get-payment-verification':
            self.logger.info(
                f'connection: renter from {addr}; request type: get-payment-verification')
            job_id = request_content['job-id']
            order_id = self.db_handler.getOrderId(job_id)
            status = self.db_handler.get_payment_verification(order_id)
            if status:
                response_content = {
                    'status': 'success',
                    'payment-status': 'verified'
                }
            else:
                response_content = {
                    'status': 'success',
                    'payment-status': 'not verified'
                }
            req_pipe.write(response_content, 'text/json')
        elif request_content['request-type'] == 'output-download-permission':
            self.logger.info(
                f'connection: renter from {addr}; request type: output-download-permission')
            if 'job-id' not in request_content:
                response_content = {
                    'status': 'error',
                    'error-msg': 'no job id provided'
                }
                req_pipe.write(response_content, 'text/json')
                return
            requested_job_id = request_content['job-id']
            user_submitted_jobs = self.db_handler.getUserJobs(uid, status='f')  # TODO optimize this
            if requested_job_id not in user_submitted_jobs:
                response_content = {
                    'status': 'error',
                    'error-msg': 'not your job'
                }
                req_pipe.write(response_content, 'text/json')
                self.logger.warning(
                    f'couldn\'t issue permission to renter {uid} to download output of job {requested_job_id}: job doesn\'t belong to this user')
                return
            job_is_finished = self.db_handler.isFinished(requested_job_id)
            requested_token = self.db_handler.getOutputToken(requested_job_id)
            if job_is_finished and requested_token:
                file_size = self.db_handler.getOutputFileSize(requested_job_id)
                response_content = {
                    'status': 'success',
                    'file-size': file_size,
                    'db-token': requested_token
                }
                req_pipe.write(response_content, 'text/json')
                self.logger.info(
                    f'issued permission to renter {uid} to download output of job {requested_job_id} via token {requested_token}')
            else:
                response_content = {
                    'status': 'error',
                    'error-msg': 'no output files found for this job id'
                }
                req_pipe.write(response_content, 'text/json')
                self.logger.warning(
                    f'couldn\'t issue permission to renter {uid} to download output of job {requested_job_id}: no output files for this job')
        else:
            self.logger.warning(
                f'connection: renter {uid} from {addr}; request type: invalid')
            response_content = {
                'status': 'error',
                'error-msg': 'unable to serve request. unknown request type'
            }
            req_pipe.write(response_content, 'text/json')

    def serve_leaser_request(self, req_pipe, conn, addr, uid):
        header, request_content = req_pipe.jsonheader, req_pipe.request
        if request_content['request-type'] == 'get-job-status':
            self.logger.info(
                f'connection: leaser from {addr}; request type: get-job-status')
            job_id = request_content['job-id']
            job_status = self.db_handler.getJobStatus(job_id)
            response_content = {
                'status': 'success',
                'job-status': job_status,
            }
            req_pipe.write(response_content, 'text/json')
            self.logger.info(f'job status sent to leaser at {addr}')
        elif request_content['request-type'] == 'mark-available':
            self.logger.info(
                f'connection: leaser from {addr}; request type: mark-available')
            oneliner = request_content['oneline-machine-info']
            full_machine_info = request_content['full-machine-info']
            hourly_price = request_content['price']
            self.db_handler.markAvailable(uid, oneliner, full_machine_info, hourly_price)
            response_content = {
                'status': 'success',
            }
            req_pipe.write(response_content, 'text/json')
            self.logger.info(f'leaser {uid} marked available')
        elif request_content['request-type'] == 'mark-unavailable':
            self.logger.info(
                f'connection: leaser from {addr}; request type: mark-unavailable')
            self.db_handler.markUnavailable(uid)
            response_content = {
                'status': 'success',
            }
            req_pipe.write(response_content, 'text/json')
            self.logger.info(f'leaser {uid} marked unavailable')
        elif request_content['request-type'] == 'get-job-requests':
            self.logger.info(
                f'connection: leaser from {addr}; request type: get-job-requests')
            job_requests = self.db_handler.getJobRequests(uid)
            response_content = {
                'status': 'success',
                'jobs': job_requests,
            }
            req_pipe.write(response_content, 'text/json')
            self.logger.info(f'job requests sent to leaser at {addr}')
        elif request_content['request-type'] == 'decline-job-order':
            self.logger.info(
                f'connection: leaser from {addr}; request type: decline-job-order')
            order_id = request_content['order-id']
            self.db_handler.updateJobOrderStatus(order_id, 'd')
            response_content = {
                'status': 'success',
            }
            req_pipe.write(response_content, 'text/json')
            self.logger.info(f'leaser {uid} declined order {order_id}')
        elif request_content['request-type'] == 'accept-job-order':
            self.logger.info(
                f'connection: leaser from {addr}; request type: accept-job-order')
            if 'order-id' not in request_content:
                response_content = {
                    'status': 'error',
                    'error-msg': 'no order id provided'
                }
                req_pipe.write(response_content, 'text/json')
                return
            order_id = request_content['order-id']
            job_id = self.db_handler.getOrderJobId(order_id)
            requested_token = self.db_handler.getExecfileToken(job_id)
            if requested_token:
                file_size = self.db_handler.getJobFileSize(job_id)
                response_content = {
                    'status': 'success',
                    'file-size': file_size,
                    'db-token': requested_token
                }
                req_pipe.write(response_content, 'text/json')
                now = datetime.datetime.now()
                formatted_date = now.strftime('%Y-%m-%d %H:%M:%S')
                self.db_handler.setExecStartTime(order_id, formatted_date)
                self.db_handler.updateJobOrderStatus(order_id, 'x')
                self.logger.info(
                    f'leaser {uid} accepted order #{order_id}. given permission to download {job_id} via token {requested_token}')
            else:
                response_content = {
                    'status': 'error',
                    'error-msg': 'no files found for this order'
                }
                req_pipe.write(response_content, 'text/json')
                self.logger.warning(
                    f'leaser {uid} couldn\'t accept to download executable of job {job_id}: no files for this job')
        elif request_content['request-type'] == 'output-upload-permission':
            self.logger.info(
                f'connection: leaser from {addr}; request type: output-upload-permission')
            job_id = request_content['job-id']
            file_size = request_content['file-size']
            db_token = self.generate_db_token()
            # so that leaser can upload the output file
            self.db_handler.addOutputFileToken(job_id, db_token, file_size)
            self.db_handler.changeJobStatus(job_id, 'otbu')
            response_content = {'status': 'success', 'db-token': db_token}
            req_pipe.write(response_content, 'text/json')
            now = datetime.datetime.now()
            formatted_date = now.strftime('%Y-%m-%d %H:%M:%S')
            self.db_handler.setExecFinishTime(
                self.db_handler.getOrderId(job_id), formatted_date)
            # TODO tie this job id to its corresponding token so that the file can be accessed knowing job id
            self.logger.info(
                f'issued permission to leaser {uid} to upload output of job {job_id} via token {db_token}')
        else:
            self.logger.warning(
                f'connection: leaser from {addr}; request type: invalid')
            response_content = {
                'status': 'error',
                'error-msg': 'unable to serve request. unknown request type'
            }
            req_pipe.write(response_content, 'text/json')

    def sendmail(self, receiver_email, message):
        port = 587  # For starttls
        smtp_server = "smtp.gmail.com"
        sender_email = "*****@*****.**"
        password = "******"
        """\
        Subject: no-reply Rendt
        """
        context = ssl.create_default_context()
        with smtplib.SMTP(smtp_server, port) as server:
            server.ehlo()  # Can be omitted
            server.starttls(context=context)
            server.ehlo()  # Can be omitted
            server.login(sender_email, password)
            server.sendmail(sender_email, receiver_email, message)

    def generate_db_token(self):
        token = int(random.random() * 90000) + 10000
        while not self.db_handler.checkDBTokenAvailability(token):
            token = int(random.random() * 90000) + 10000
        return token

    def generate_job_id(self):
        job_id = int(random.random() * 9000000) + 1000000
        while not self.db_handler.checkJobIdAvailability(job_id):
            job_id = int(random.random() * 9000000) + 1000000
        return job_id

    def generate_order_id(self):
        order_id = int(random.random() * 9000000) + 1000000
        while not self.db_handler.checkOrderIdAvailability(order_id):
            order_id = int(random.random() * 9000000) + 1000000
        return order_id

    def shutdown_server(self):
        self.s.close()
        self.logger.critical('Server shut down.')
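# Illustrative request payloads for the Server dispatch above. The field names
# ('request-type', 'email', 'password', 'authToken', 'role') come from the
# handler code; the concrete values are made up, and the wire framing handled
# by the Messaging class is not shown here.
sign_in_request = {
    'request-type': 'sign-in',
    'email': 'renter@example.com',
    'password': 'hunter2',
}

authenticated_renter_request = {
    'request-type': 'get-job-statuses',
    'authToken': 'aBc123XyZ0qW7',  # 13-char token issued at sign-in/sign-up
    'role': 'renter',
}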
from flask import Flask
from flask import jsonify
from flask import request
from base64 import decodestring  # note: removed in Python 3.9; base64.decodebytes is the replacement
import logging
import json

from dbHandler import DBHandler
from image_processor import ImageProccessor

app = Flask(__name__)
dbHandler = DBHandler()
image_processor = ImageProccessor()
logging.basicConfig(level=logging.DEBUG,
                    format='%(asctime)s - %(levelname)s - %(message)s')

# DEFINES THE ImageProcessor VOTING / RULING ALGO ###########
vote_algo = 'Vote1'
rule = 'Majority'
image_processor_debug_mode = 'Debug'
#############################################################


# stores a photo that is uploaded from camera unit
@app.route('/store/<id>', methods=['GET', 'POST'])
def store_photo(id):
    logging.debug('<Photo POST received>')
    photo = request.get_json()['photo_data']
    roi = dbHandler.getROIcoord(id)
    image_processor.Process(int(id), photo, roi, vote_algo, rule)
class Storage:
    def __init__(self, port):
        self.BACKLOG = 1024  # size of the queue for pending connections
        self.ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
        self.ssl_context.load_cert_chain('ssl/certificate.crt', 'ssl/private.key')
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.bind(('', port))  # Bind to the port
        self.s = self.ssl_context.wrap_socket(s, server_side=True)
        self.db_handler = DBHandler()
        self.logger = self.configure_logging()

    def run(self):
        self.s.listen(self.BACKLOG)
        # Now wait for client connection.
        self.logger.info('Storage up and running.\n')
        while True:
            try:
                conn, addr = self.s.accept()
            except Exception as e:
                self.logger.error('Error in accepting request:' + str(e))
                continue
            try:
                Thread(target=self.serve_client, args=(conn, addr)).start()
            except:
                self.refuse_client(conn, addr)
                self.logger.error(
                    f'Couldn\'t create thread. Refused client at {addr}')

    def configure_logging(self):
        logger = logging.getLogger('Storage.logger')
        logger.setLevel(logging.INFO)
        currentDT = str(datetime.datetime.now()).replace(' ', '_')
        format_ = logging.Formatter(
            '%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
        # create log file handler
        file_handler = logging.FileHandler('logs/logfile_' + currentDT)
        file_handler.setLevel(logging.INFO)
        file_handler.setFormatter(format_)
        # create console handler with a higher log level
        console_handler = logging.StreamHandler()
        console_handler.setLevel(logging.WARNING)
        console_handler.setFormatter(format_)
        logger.addHandler(file_handler)
        logger.addHandler(console_handler)
        logger.info('begin log')
        return logger

    def serve_client(self, conn, addr):
        self.logger.info(
            f'Thread {threading.get_ident()} initialized to serve request from {addr}')
        req_pipe = Messaging(conn, addr)
        req_pipe.read()
        if not req_pipe.jsonheader or not req_pipe.request or 'role' not in req_pipe.request or 'request-type' not in req_pipe.request:
            self.logger.warning(f'invalid request from {addr}.')
        elif req_pipe.request.get('role') == 'renter':
            self.serve_renter_request(req_pipe, conn, addr)
        elif req_pipe.request.get('role') == 'leaser':
            self.serve_leaser_request(req_pipe, conn, addr)

    def refuse_client(self, conn, addr):
        req_pipe = Messaging(conn, addr)
        req_pipe.read()
        response_content = {
            'status': 'error',
            'error-msg': 'Storage server busy, can\'t serve at the time.'
        }
        req_pipe.write(response_content, 'text/json')

    def serve_renter_request(self, req_pipe, conn, addr):
        header, request_content = req_pipe.jsonheader, req_pipe.request
        if 'db-token' not in request_content:
            response_content = {
                'status': 'error: no/invalid token provided',
            }
            req_pipe.write(response_content, 'text/json')
            self.logger.warning(
                f'invalid connection from renter at {addr[0]}: no/invalid token')
            return
        client_db_token = request_content['db-token']
        if request_content['request-type'] == 'executable-upload':
            self.logger.info(
                f'connection: renter from {addr}; request type: executable-upload')
            job_id = self.db_handler.getJobIdFromToken(client_db_token, 'x')
            st = self.recv_file(conn, f'jobs/toexec{job_id}.zip')
            if st:
                self.db_handler.changeJobStatus(job_id, 'a')
                self.logger.info(
                    f'successfully received exec files for job {job_id} from renter {addr[0]}')
            else:
                self.db_handler.changeJobStatus(job_id, 'uf')
                self.logger.info(
                    f'received invalid exec files for job {job_id} from renter {addr[0]}')
            return
        elif request_content['request-type'] == 'output-download':
            self.logger.info(
                f'connection: renter from {addr}; request type: output-download')
            job_id = self.db_handler.getJobIdFromToken(client_db_token, 'o')
            requested_file_path = f'outputs/output{job_id}.zip'
            if os.path.exists(requested_file_path):
                self.send_file(conn, requested_file_path)
                self.logger.info(
                    f'successfully sent output file for job {job_id} to renter {addr[0]}')
            return
            # TODO handle error

    def serve_leaser_request(self, req_pipe, conn, addr):
        header, request_content = req_pipe.jsonheader, req_pipe.request
        if 'db-token' not in request_content:
            response_content = {
                'status': 'error: no/invalid token provided',
            }
            req_pipe.write(response_content, 'text/json')
            self.logger.warning(
                f'invalid connection from leaser at {addr[0]}: no/invalid token')
            return
        client_db_token = request_content['db-token']
        if request_content['request-type'] == 'executable-download':
            self.logger.info(
                f'connection: leaser from {addr}; request type: executable-download')
            job_id = self.db_handler.getJobIdFromToken(client_db_token, 'x')
            requested_file_path = f'jobs/toexec{job_id}.zip'
            if os.path.exists(requested_file_path):
                self.send_file(conn, requested_file_path)
                self.logger.info(
                    f'sent exec file for job {job_id} to leaser {addr[0]}')
            return
        elif request_content['request-type'] == 'output-upload':
            self.logger.info(
                f'connection: leaser from {addr}; request type: output-upload')
            job_id = self.db_handler.getJobIdFromToken(client_db_token, 'o')
            self.recv_file(conn, f'outputs/output{job_id}.zip')
            self.db_handler.changeJobStatus(job_id, 'f')
            order_id = self.db_handler.getOrderId(job_id)
            self.db_handler.updateJobOrderStatus(order_id, 'f')
            self.logger.info(
                f'successfully received output file for job {job_id} from leaser {addr[0]}')
            # TODO send success message somehow
            return

    def recv_file(self, conn, file_name):
        # receive checksum (assumes the 32-byte md5 hex digest arrives in a single recv)
        checksum_received = conn.recv(32)
        # receive file
        checksum_computed = hashlib.md5()
        f = open(file_name, "wb")
        while True:
            chunk = conn.recv(4096)
            if not chunk:
                break
            checksum_computed.update(chunk)
            f.write(chunk)
        f.close()
        # check for integrity
        if checksum_computed.hexdigest().encode('utf-8') == checksum_received:
            return 1
        else:
            return 0

    def send_file(self, conn, file_name):
        # calculate checksum
        checksum = hashlib.md5()
        with open(file_name, "rb") as fl:
            for chunk in iter(lambda: fl.read(4096), b""):
                checksum.update(chunk)
        checksum = checksum.hexdigest()
        # send file with checksum (sendall to avoid partial writes)
        f = open(file_name, "rb")
        conn.sendall(checksum.encode('utf-8'))
        for chunk in iter(lambda: f.read(4096), b""):
            conn.sendall(chunk)
        f.close()
        conn.shutdown(socket.SHUT_WR)
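# A self-contained sketch of the checksum-prefixed transfer framing used by
# send_file/recv_file above, demonstrated over a local socket pair with an
# in-memory payload instead of a file. Like the original, it assumes the
# 32-byte md5 hex digest arrives ahead of the streamed body.
import hashlib
import socket

payload = b'example output data' * 100

sender, receiver = socket.socketpair()

# sender side: 32-byte md5 hex digest, then the payload, then close the write end
sender.sendall(hashlib.md5(payload).hexdigest().encode('utf-8'))
sender.sendall(payload)
sender.shutdown(socket.SHUT_WR)

# receiver side: read the checksum, stream the body, verify integrity
checksum_received = receiver.recv(32)
checksum_computed = hashlib.md5()
received = b''
while True:
    chunk = receiver.recv(4096)
    if not chunk:
        break
    checksum_computed.update(chunk)
    received += chunk

assert checksum_computed.hexdigest().encode('utf-8') == checksum_received
assert received == payload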
def setUp(self):
    self.dbHandler = DBHandler()
    self.fileHandler = FileHandler(5)
def accessDB(self):
    conn = DBHandler.connect()
    cursor = conn.cursor()
    db = DBHandler(conn, cursor)
    return db
import json
import re

import requests

from dbHandler import DBHandler


def getToken():
    with open("config.json", "r") as f:
        dat = json.load(f)
    return dat['telegram_Bot_Token']


def getDBConfig():
    with open("config.json", "r") as f:
        dat = json.load(f)
    return dat['cluster_name'], dat["dataBase_name"]


cName, dbName = getDBConfig()
db = DBHandler(cName, dbName)
TOKEN = getToken()
URL = "https://api.telegram.org/bot{}/".format(TOKEN)
commands = {
    '/links': 'display all the links',
    '/chcur': 'change the default currency',
    '/add <link>': 'provide link to save in database for price retrieval'
}


def get_url(url):
    response = requests.get(url)
    content = response.content.decode("utf8")
    return content


def check_url(link):
    retList = ['0', 'ERROR_PROG', 'AppID', 'ProductID', 'URL', 'JSON']
    ret = get_price(link)
def processMessage(msg):
    # print("Inside message -->" + msg)
    split_msg = msg.split('$')
    if not isSourceJava(split_msg[0]):
        return
    userID = getSecondColumn(split_msg[1])
    connectionID = getSecondColumn(split_msg[2])
    typeOfTesting = getSecondColumn(split_msg[3])
    timestamp = getSecondColumn(split_msg[4])
    url = getSecondColumn(split_msg[5])
    if url == 'none':
        print("url is none so return")
        return
    isFile = getSecondColumn(split_msg[6])  # isFile -> 0 then only url check ... so DO NOT add to database
    periodicity = getSecondColumn(split_msg[7])
    isPeriodic = getSecondColumn(split_msg[8])
    fileNamePeriodic = "NULL"
    iterationNumber = -1  # Initialise to -1
    if periodicity == 'forced':
        iterationNumber = 0  # Initial check is 0th
    if isPeriodic == 1:
        fileNamePeriodic = getSecondColumn(split_msg[9])
        iterationNumber = getSecondColumn(split_msg[10])
    if typeOfTesting == 'DNS':
        print('Run DNS .... inside pythonServer.py')
        dns_check = DNS_CENSORSHIP()
        if isFile == 0:
            dns_check.ADD_TO_DATABASE = 0
        db = DBHandler()
        # db.checkAndMakeConnection(userID)  # TO DO [Problem [NoneType etc]]
        dns_check.dns_censorship_check(url)  # Actually does NOT RETURN
        report = dns_check.report
        # Further modifications ....
        report.url = url
        report.time_stamp = datetime.now().strftime('%d-%m-%Y %H:%M:%S')  # In this format
        report.is_file_check = isFile
        report.is_periodic = isPeriodic
        report.file_name_periodic = fileNamePeriodic
        report.iteration_number = iterationNumber
        report.type_of_testing = "DNS"
        # print("Inside pythonServer.py ... typeOfTesting('dns') <PRINTING REPORT> url = " + url)
        # report.printReport()
        db.handleReport_DNS(report)
    elif typeOfTesting == 'TCP':
        print('Run TCP ..... inside pythonServer.py')
        tcp_check = TCP_3_WAY_HANDSHAKE()
        db = DBHandler()
        # Check for 5 iterations
        report_arr = tcp_check.tcp_handshake_check(url, 5)
        report = Report()
        report.url = url
        report.time_stamp = datetime.now().strftime('%d-%m-%Y %H:%M:%S')  # In this format
        report.is_file_check = isFile
        report.is_periodic = isPeriodic
        report.file_name_periodic = fileNamePeriodic
        report.iteration_number = iterationNumber
        report.type_of_testing = "TCP"
        # print("INSIDE pythonServer.py report_arr.len = " + len(report_arr).__str__())
        # for rep in report_arr:
        #     rep.printReport()
        # print("DONE PRINTING REPORT [dbHandler.handle report is commented out for now]")
        if len(report_arr) > 0:
            report.censorship_details = report_arr[0].censorship_details
            # report.is_censored = report_arr[0].is_censored
            is_cens = 1
            for rep in report_arr:
                if rep.tcp_description.is_censored_TCP == 0:
                    is_cens = 0
            report.is_censored = is_cens
            report.censorship_details = report_arr[0].censorship_details
            report.tcp_description_arr = []
            for rep in report_arr:
                rep.tcp_description.is_censored_TCP = rep.is_censored  # For each TCP_Description ...
                report.tcp_description_arr.append(rep.tcp_description)
        db.handleReport_TCP(report)
        # print("Now inside pythonServer.py ... printing report")
        # report.printReport()
    elif typeOfTesting == 'HTTP':
        print('>>>> Run HTTP ..... inside pythonServer.py')
        http_obj = http_https_data()  # For HTTP Checking
        db = DBHandler()
        rep_HTTP, rep_HTTPS = http_obj.check_http_https_censorship(url)
        rep_HTTP.url = url
        rep_HTTP.time_stamp = datetime.now().strftime('%d-%m-%Y %H:%M:%S')  # In this format
        rep_HTTP.is_file_check = isFile
        rep_HTTP.is_periodic = isPeriodic
        rep_HTTP.file_name_periodic = fileNamePeriodic
        rep_HTTP.iteration_number = iterationNumber
        rep_HTTP.type_of_testing = "HTTP"
        rep_HTTPS.url = url
        rep_HTTPS.time_stamp = datetime.now().strftime('%d-%m-%Y %H:%M:%S')  # In this format
        rep_HTTPS.is_file_check = isFile
        rep_HTTPS.is_periodic = isPeriodic
        rep_HTTPS.file_name_periodic = fileNamePeriodic
        rep_HTTPS.iteration_number = iterationNumber
        rep_HTTPS.type_of_testing = "HTTPS"
        # Put everything in one report ...
        rep_HTTP.copyHTTPSDescription(rep_HTTPS)
        db.handleReport_HTTP(rep_HTTP)
    elif typeOfTesting == 'ALL':
        print('Run All ..... ')
    else:
        return
class FileHandler(object):
    def __init__(self, num):
        self.db = DBHandler()
        self.machineNumbers = num
        self.count = 0
        self.config = configparser.ConfigParser()  # Python 3 module name
        self.config.read('config.properties')
        self.cryptTool = CryptoTool()

    def nextMachine(self):
        self.count = self.getTotalFileCount()
        nextIndex = self.count % self.machineNumbers
        return nextIndex

    def parseFileMeta(self, filePath):
        basename = os.path.basename(filePath)
        name, ext = os.path.splitext(basename)
        nextIndex = self.nextMachine()
        # currentTimestamp = int(time.time())
        savedPath = self.config.get('Section Folder', 'folder' + str(nextIndex))
        # savedPath = '/home/michael/1/'
        # mixedFileName = name + '|' + ext + '|' + str(currentTimestamp) + '|' + savedPath
        return name, ext, nextIndex, savedPath

    def getTotalFileCount(self):
        getAllFilesCount = ''' select count(*) from files '''
        rows = self.db.excute(getAllFilesCount)
        return rows.getresult()[0][0]

    def save(self, path, url):
        fileName, ext, nextIndex, savedPath = self.parseFileMeta(path)
        fileDiskName = str(uuid.uuid1()) + ext
        fileSavedPath = savedPath + fileDiskName
        shutil.move(path, fileSavedPath)
        cryptUrl = self.cryptTool.encryptString(url)
        fileId = self.save_to_db(cryptUrl, ext, fileName, fileDiskName, nextIndex)
        return fileId, fileDiskName

    def getTypeByUrl(self, url):
        sql = ''' select file_type from files where from_url = '{url}' '''.format(url=url)
        rows = self.db.excute(sql)
        return rows.getresult()[0][0]

    def getTypeByID(self, fileId):
        sql = ''' select file_type from files where id = '{fileId}' '''.format(fileId=fileId)
        rows = self.db.excute(sql)
        return rows.getresult()[0][0]

    def getFileByID(self, fileId):
        sql_get_file = """ select physical_name,saved_position from files where id = {fileId} """.format(
            fileId=fileId)
        rows = self.db.excute(sql_get_file)
        physical_name = rows.getresult()[0][0]
        saved_position = rows.getresult()[0][1]
        folder = self.config.get('Section Folder', 'folder' + str(saved_position))
        path = folder + physical_name
        print('file path is: ' + path)
        return open(path, 'r')

    def getFilePathByDiskName(self, physicalName):
        sql_select = """ select physical_name,saved_position from files where physical_name ='{physical_name}'""".format(
            physical_name=physicalName)
        rows = self.db.excute(sql_select)
        physical_name = rows.getresult()[0][0]
        saved_position = rows.getresult()[0][1]
        path = self.config.get('Section Folder', 'folder' + str(saved_position))
        return path

    def save_to_db(self, from_url, file_type, logical_name, physical_name, saved_position):
        sql_insert = """INSERT INTO files(logical_name,physical_name,file_type,from_url,saved_position)
                        values('{logical_name}','{physical_name}','{file_type}','{from_url}','{saved_position}')""".format(
            logical_name=logical_name,
            physical_name=physical_name,
            file_type=file_type,
            from_url=from_url,
            saved_position=saved_position)
        self.db.excute(sql_insert)
        sql_id_selector = """ select id from files where from_url = '{from_url}' and logical_name = '{logical_name}'
                              and physical_name = '{physical_name}' and saved_position = '{saved_position}' """.format(
            logical_name=logical_name,
            physical_name=physical_name,
            from_url=from_url,
            saved_position=saved_position)
        rows = self.db.excute(sql_id_selector)
        return rows.getresult()[0][0]