def check_webshell(file_id):
    """Return the malware verdict for an uploaded file as a JSON response.

    A previously stored verdict is reused when available; otherwise the
    model is run, its result persisted, and returned.  Unknown file ids
    yield a response with null verdict fields.
    """
    # NOTE(review): file_id is joined into a filesystem path — confirm the
    # caller sanitizes it against path traversal.
    path = os.path.join(app.config['UPLOAD_FOLDER'], file_id)
    if not os.path.isfile(path):
        return jsonify({
            'file_id': file_id,
            'malicious_judge': None,
            'malicious_chance': None,
        })

    db = Database()
    cached = db.check_result(file_id)
    if cached:
        logging.info('got previous record: {0}'.format(file_id))
        verdict = {
            'file_id': file_id,
            'malicious_judge': cached[0] == 1,
            'malicious_chance': cached[1],
        }
    else:
        logging.info('checking file: {0}'.format(file_id))
        outcome = check_with_model(file_id)
        verdict = {
            'file_id': file_id,
            'malicious_judge': outcome['judge'],
            'malicious_chance': outcome['chance'],
        }
        db.create_result(file_id, outcome['judge'], outcome['chance'])
        logging.info('record created: {0}'.format(file_id))
    return jsonify(verdict)
def remove(data_id):
    """Delete the stored data row identified by *data_id*."""
    # The id may arrive as a float-like string (e.g. "3.0"), hence the
    # float -> int round-trip before the database call.
    Database.remove_data(int(float(data_id)))
    return jsonify(text='success')
def update():
    """Apply a client-submitted update for the (currently hard-coded) user."""
    # Angular sometimes posts formData instead of JSON, so fall back to
    # request.values when no JSON body is present.
    data = request.get_json() if request.json else request.values
    user = {'id': 1}
    Database.update(user, data)
    return jsonify(text='success')
def login(preprocessed_data):
    """Log a user in by email and return their login token as JSON.

    Aborts with HTTP 400 when no user with the submitted email exists.
    """
    email = preprocessed_data['email']
    # NOTE(review): printing raw email addresses leaks PII into logs —
    # consider removing or masking this in production.
    print('email: {}'.format(email))
    # Bug fix: look the user up by the submitted email; the previous code
    # queried a hard-coded placeholder address and could never match.
    user_entry = Database.find_user({'email': email})
    if user_entry is None:
        abort(400, 'User not found')
    return jsonify(login_token=user_entry['login_token'])
def setUp(self):
    """Give every test a pristine DBManager singleton with an open cursor."""
    # Clear the class-level singleton so each test starts from scratch.
    D.DBManager.instance = None
    manager = D.DBManager('test.sqlite')
    manager.create_cursor()
    self.dbm = manager
def test_fail_create_table_wo_cursor(self):
    """create_table must raise DBException when no cursor is available."""
    D.DBManager('test.sqlite')  # construct (or reuse) the singleton
    self.assertRaises(
        D.DBException,
        D.DBManager.instance.create_table,
        T.create_items_table,
    )
def test_create_cursor(self):
    """create_cursor must attach a cursor to the shared instance."""
    manager = D.DBManager('test.sqlite')
    manager.create_cursor()
    self.assertIsNotNone(manager.instance.cursor)
def test_create_db(self):
    """A fresh DBManager must populate the singleton and open a connection."""
    # Expect no singleton before construction.
    self.assertIsNone(D.DBManager.instance)
    manager = D.DBManager('test.sqlite')
    self.assertIsNotNone(manager)
    self.assertIsNotNone(manager.instance.conn)
def main(arguments) -> None:
    """Main function for pira framework. Used to invoke the various components.

    :param arguments: parsed command-line arguments (config path, version, ...)
    :raises SystemExit: exits with -1 when a RuntimeError is caught.
    """
    show_pira_invoc_info(arguments)
    invoc_cfg = process_args_for_invoc(arguments)
    use_extra_p, extrap_config = process_args_for_extrap(arguments)
    home_dir = U.get_cwd()
    U.set_home_dir(home_dir)
    U.make_dir(invoc_cfg.get_pira_dir())
    BackendDefaults(invoc_cfg)
    csv_config = process_args_for_csv(arguments)

    try:
        # Fix: use '==' — 'is' checks object identity and only worked for
        # small ints by CPython implementation accident (SyntaxWarning on 3.8+).
        if arguments.config_version == 1:
            config_loader = CLoader()
        else:
            config_loader = SCLoader()

        configuration = config_loader.load_conf(invoc_cfg.get_path_to_cfg())
        checker.check_configfile(configuration, arguments.config_version)

        if B.check_queued_job():
            # FIXME: Implement — batch-system submission is not supported yet.
            L.get_logger().log(
                'In this version of PIRA it is not yet implemented', level='error')
            # Fix: raise instead of 'assert False' so the handler below restores
            # the working directory and exits cleanly (asserts vanish under -O).
            raise RuntimeError('Batch-system support is not implemented')
        else:
            # This branch is running PIRA actively on the local machine.
            # It is blocking, and the user can track the progress in the terminal.
            L.get_logger().log('Running the local case')

            # The FunctorManager manages loaded functors and generates the respective names
            F.FunctorManager(configuration)
            dbm = D.DBManager(D.DBManager.db_name + '.' + D.DBManager.db_ext)
            dbm.create_cursor()
            analyzer = A(configuration)
            runner_factory = PiraRunnerFactory(invoc_cfg, configuration)
            runner = runner_factory.get_simple_local_runner()
            if use_extra_p:
                L.get_logger().log('Running with Extra-P runner')
                runner = runner_factory.get_scalability_runner(extrap_config)

            if runner.has_sink():
                analyzer.set_profile_sink(runner.get_sink())

            # A build/place is a top-level directory
            for build in configuration.get_builds():
                L.get_logger().log('Build: ' + str(build))
                app_tuple = (U.generate_random_string(), build, '', '')
                dbm.insert_data_application(app_tuple)

                # An item is a target/software in that directory
                for item in configuration.get_items(build):
                    L.get_logger().log('Running for item ' + str(item))

                    # A flavor is a specific version to build
                    if configuration.has_local_flavors(build, item):
                        for flavor in configuration.get_flavors(build, item):
                            L.get_logger().log('Running for local flavor ' + flavor,
                                               level='debug')
                            # prepare database, and get a unique handle for current item.
                            db_item_id = dbm.prep_db_for_build_item_in_flavor(
                                configuration, build, item, flavor)
                            # Create configuration object for the item currently processed.
                            place = configuration.get_place(build)
                            t_config = TargetConfiguration(
                                place, build, item, flavor, db_item_id,
                                invoc_cfg.is_compile_time_filtering(),
                                invoc_cfg.get_hybrid_filter_iters())
                            # Execute using a local runner, given the generated target description
                            execute_with_config(runner, analyzer,
                                                invoc_cfg.get_pira_iters(),
                                                t_config, csv_config)
                    # If global flavor
                    else:
                        # TODO: Implement — global flavors are not supported yet.
                        L.get_logger().log(
                            'In this version of PIRA it is not yet implemented',
                            level='error')
                        raise RuntimeError('Global flavors are not implemented')

        U.change_cwd(home_dir)

    except RuntimeError as rt_err:
        U.change_cwd(home_dir)
        L.get_logger().log('Runner.run caught exception. Message: ' + str(rt_err),
                           level='error')
        L.get_logger().dump_tape()
        sys.exit(-1)
def upload():
    """Accept an upload request and respond with fixed test data."""
    user, data = _parse_data_from_upload_request()
    # The real persistence path (Database.upload(user, data)) is disabled
    # for now; a fixed test record (id 1) is returned instead.
    return _data_as_response(Database.test_data_uploaded_data(1))
def uploaded_data_by_id(data_id):
    """Return the stored test data record for *data_id*."""
    print('id {}'.format(data_id))
    # Load test data for the requested id.
    return _data_as_response(Database.test_data_uploaded_data(data_id))
def uploaded_data():
    """Return an overview of all uploaded test data as a JSON string."""
    # NOTE(review): unlike sibling endpoints this returns a raw JSON string
    # rather than a Flask response via jsonify — confirm that is intentional.
    overview = Database.test_data_uploaded_data_overview()
    return json.dumps(overview)
import time
import RPi.GPIO as GPIO
from lib import Database, MoistureSensor, TemperatureSensor

# Setup GPIO stuff.
# BCM mode: pins are addressed by Broadcom channel number, not board position.
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
GPIO.setup(17, GPIO.OUT)
GPIO.setup(22, GPIO.OUT)
GPIO.setup(23, GPIO.OUT)
GPIO.setup(24, GPIO.OUT)
GPIO.setup(27, GPIO.OUT)

# Create instances of components.
# NOTE(review): database host and credentials are hard-coded in source —
# consider moving them to configuration or environment variables.
database = Database("provil-ict.be", "gip_agritower", "agritower", "gip_2019_agritower")
moisture_sensor = MoistureSensor(0)
temperature_sensor = TemperatureSensor()

# Main program.
# (Python 2 syntax — note the statement-form 'print'.)
def main():
    # NOTE(review): the function body continues beyond this excerpt.
    count = 0
    print "Program has started"
    # Move existing readings into the archive tables before logging new data.
    database.moveMoistureArchive()
    database.moveTemperatureArchive()
    print "Moved data to archive"
    previous_status_led1 = 0
import sys
from lib import open_file
from lib import DataSanitation
from lib import Database

# Load a raw input file given on the command line, sanitize it, and insert
# each resulting line into the local SQLite database.
if __name__ == '__main__':
    try:
        print(sys.argv[1])
        # NOTE(review): hard-coded absolute Windows paths — consider making
        # these configurable.
        schema_file = r'C:\Users\userab\PycharmProjects\MomWorkHelper\schema.sql'
        db_file = r'C:\Users\userab\PycharmProjects\MomWorkHelper\test.db'
        raw = open_file(sys.argv[1])
        data = DataSanitation(raw)()
        database = Database(schema_file, db_file)
        for line in data:
            database.insert(line)
        print(f'TOTAL = {len(data)}')
    except Exception:
        # Fix: the bare 'except:' also swallowed SystemExit and
        # KeyboardInterrupt; 'except Exception' keeps the print-and-pause
        # behaviour for ordinary errors while letting Ctrl-C terminate.
        import traceback
        traceback.print_exc()
        input()
# Application bootstrap: load JSON config, build service clients, create the
# Flask app, and register error handlers.
with open(os.environ['AYL_CONFIG'], 'r') as f:
    config = json.load(f)
with open(config['header_data'], 'r') as f:
    header_data = f.read()

if config['cache_buster'] == 'RANDOM':
    # Replace the 'RANDOM' sentinel with a random 5-digit number (presumably
    # to vary static-asset URLs between restarts — not security-sensitive,
    # so the 'random' module is acceptable here).
    config['cache_buster'] = random.randint(10000, 99999)

# Service clients, all configured from the loaded JSON.
mailer = Mailer(config['sendgrid']['api_key'])
lastfm = LastFM(config['lastfm']['api_key'],
                cache=config['lastfm']['cache'],
                use_api_for_top=config['lastfm']['use_api_for_top'],
                top_albums_file=config['lastfm']['top_albums_file'],
                cache_age=config['lastfm']['cache_age'])
imgcache = ImageCache(data=config['covers']['data_file'],
                      dump=config['covers']['dump_dir'])
paypal = Paypal(config['paypal']['client_id'],
                config['paypal']['client_secret'],
                api_url=config['paypal']['api_url'])
database = Database(host=config['database']['host'],
                    username=config['database']['username'],
                    password=config['database']['password'],
                    database=config['database']['database'])

app = Flask(__name__, template_folder=config['templates_folder'])


# ERRORS
@app.errorhandler(400)
def error_400(e):
    # Render the shared error template for bad requests.
    return render_template('error.html', error_code=400), 400


@app.errorhandler(404)
def error_404(e):
    # Render the shared error template for missing pages.
    return render_template('error.html', error_code=404), 404


@app.errorhandler(500)
def error_500(e):
    # Record the failure in the tracking table.
    # NOTE(review): the handler body appears to continue beyond this excerpt
    # (no return statement is visible here).
    database.add_tracking_event('ERROR', 'none', request, data='generic error')