def recipe_id(id):
    """Flask view for a single recipe page.

    Handles the add-ingredient form on POST (then redirects, PRG pattern) and
    renders the recipe with its ingredients on GET.

    :param id: recipe primary key
                # NOTE(review): parameter shadows the builtin id()
    """
    delete = delete_form()
    delete.id.label = id
    # Get all ingredients
    # Build the ingredient <select>: placeholder entry plus every known ingredient.
    form = add_to_recipe_form()
    form.select.choices = [ ( 0, '- Select Ingredient -' ) ] \
        + Database().get_all_ingredient_names()
    if form.validate_on_submit():
        Database().add_recipe_item(id, int(form.data['select']),
                                   form.data['quantity'], form.data['unit'])
        return redirect('/recipe/id={}'.format(id))
    # Get ingredients for the recipe
    ingredients = Database().get_ingredients_for_recipe(id)
    title = Database().get_recipe_name(id)
    return render_template('recipe.html',
                           title=title,
                           ingredients=ingredients,
                           new_ingredient=form,
                           delete=delete)
def save_into_db(data):
    """Persist Ireland (IE) advisory data into countries.sqlite.

    :param data: mapping of country key -> dict with 'country-iso', 'name',
                 'advisory-text' and 'visa-info' entries
    """
    # Recreate the IE table from scratch so stale rows never survive a refresh.
    db = Database("countries.sqlite")
    db.drop_table("IE")
    db.add_table("IE", country_iso="text", name="text",
                 advisory_text="text", visa_info="text")
    # BUG FIX: initialize `name` so the except-clause log below cannot raise
    # UnboundLocalError when the loop fails before its first iteration.
    name = None
    try:
        for country in data:
            iso = data[country].get('country-iso')
            name = data[country].get('name')
            # Strip double quotes — they break downstream consumers of the text.
            advisory = data[country].get('advisory-text').replace('"', '')
            LOGGER.info(f'Saving {name} into the IE table')
            visa = data[country].get('visa-info')
            db.insert("IE", iso, name, advisory, visa)
            LOGGER.success(
                f"{name} was saved into the IE table with the following information: {visa}. {advisory}"
            )
        LOGGER.info('IE table successfully saved to the database')
    except Exception as error_msg:
        LOGGER.error(
            f'An error has occured while saving {name} into the IE table because of the following error: {error_msg}'
        )
    db.close_connection()
def save_to_canada():
    """Scrape Canadian travel advisories for every country and persist them
    into the CA table of countries.sqlite."""
    # Recreate the CA table so each run stores a fresh snapshot.
    db = Database("countries.sqlite")
    db.drop_table("CA")
    db.add_table("CA", country_iso="text", name="text",
                 advisory_text="text", visa_info="text")
    LOGGER.info('Saving CA table into the databse')
    #getting the data from all countries
    all_countries = get_all_countries()
    countries_data = advisory_canada(all_countries)
    #saving the data in db
    try:
        for country in countries_data:
            iso = countries_data[country].get('country-iso')
            name = countries_data[country].get('name')
            advisory = countries_data[country].get('advisory-text')
            visa = countries_data[country].get('visa-info')
            LOGGER.info(f'Saving {name} into the CA table')
            db.insert("CA", iso, name, advisory, visa)
            LOGGER.success(
                f'{name} was successfully saved into the CA table with the following table: {advisory}. {visa}'
            )
        LOGGER.success('CA table was successfully saved into the database')
    except Exception as error_msg:
        LOGGER.error(
            f'An error has occurred while saving the countries into the CA table because of the following error: {error_msg}'
        )
    db.close_connection()
def save_cities_timezones():
    """Fetch timezone and geolocation info for all cities and store it in the
    `timezones` table of countries.sqlite."""
    LOGGER.info("Retreiving timezones information for all countries...")
    data = adding_lat_and_lng(get_cities_info())
    # Recreate the table on every run so the data is always a fresh snapshot.
    db = Database("countries.sqlite")
    db.drop_table("timezones")
    # BUG FIX: the latitude column was named "la"; renamed to "lat" to match
    # the value inserted into it (safe: the table is recreated every run).
    db.add_table("timezones", city="VARCHAR", country_name="VARCHAR",
                 country_iso="VARCHAR", timezone="VARCHAR",
                 lat="REAL", lng="REAL", utc_offset="int")
    for city_info in data:
        city = city_info["city"]
        country_name = city_info["country_name"]
        country_iso = city_info["country_iso"]
        timezone = city_info["timezone"]
        lat = city_info["lat"]
        lng = city_info["lng"]
        utc_offset = city_info["utc_offset"]
        db.insert("timezones", city, country_name, country_iso, timezone,
                  lat, lng, utc_offset)
        LOGGER.success(
            f"{country_name} was sucefuly save into the timezone table with the following information: {country_iso} and {timezone}")
        # BUG FIX: this call used braces — LOGGER.success{...} — which is a
        # SyntaxError; restored to a normal call.
        LOGGER.success(f'{country_name} successfully saved to the database.')
    db.close_connection()
def make_chunks(physiological_file_id, config_file, verbose):
    """
    Chunk one physiological file for visualization.

    Looks up the file by its PhysiologicalFileID and, when found, delegates to
    Physiological.create_chunks_for_visualization.

    :param physiological_file_id: PhysiologicalFileID of the file to chunk
     :type physiological_file_id: int
    :param config_file: path to the config file with database connection information
     :type config_file: str
    :param verbose : flag for more printing if set
     :type verbose : bool
    """
    # database connection
    db = Database(config_file.mysql, verbose)
    db.connect()

    # grep config settings from the Config module
    data_dir = db.get_config('dataDirBasepath')
    # making sure that there is a final / in data_dir
    if not data_dir.endswith('/'):
        data_dir += "/"

    # load the Physiological object
    physiological = Physiological(db, verbose)

    # create the chunked dataset
    if physiological.grep_file_path_from_file_id(physiological_file_id):
        print('Chunking physiological file ID ' + str(physiological_file_id))
        physiological.create_chunks_for_visualization(physiological_file_id,
                                                      data_dir)
def title_scan(domain, ret, now_time):
    """Probe every known subdomain of `domain` concurrently and record each
    one's URL, title, HTTP status and content length in srcscan.db.

    :param domain: domain being scanned
    :param ret: iterable of subdomains to check
    :param now_time: timestamp written with each status update
    """
    ret = list(ret)
    # DB lives next to the package root (two directories above this file).
    database = Database(
        os.path.join(
            os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
            'srcscan.db'))
    database.connect()
    database.init()
    logger.sysinfo('Checking %d subdomains of %s.' % (len(ret), domain))
    loop = asyncio.get_event_loop()
    thread_num = int(conf['config']['basic']['thread_num'])
    # BUG FIX: the original ternary returned thread_num in both branches
    # (a no-op); cap the worker count at the number of subdomains instead.
    if ret and len(ret) < thread_num:
        thread_num = len(ret)
    tasks = []
    # Stripe the subdomain list across workers: worker i takes indices
    # i, i+thread_num, i+2*thread_num, ...
    for i in range(0, thread_num):
        tasks.append(
            asyncio.ensure_future(
                get_title([ret[x] for x in range(0 + i, len(ret), thread_num)])))
    loop.run_until_complete(asyncio.wait(tasks))
    for task in tasks:
        for subdomain, url, title, status, content_length in task.result():
            database.update_subdomain_status(subdomain, url, title, status,
                                             content_length, now_time)
    database.disconnect()
    logger.sysinfo("Checked subdomains' status of %s." % domain)
def subdomain_scan(domain, ret, now_time):
    """Run every configured search engine against `domain`, collect the
    subdomains they find into `ret`, and record them in srcscan.db.

    :param domain: domain being scanned
    :param ret: set-like accumulator updated in place with found subdomains
    :param now_time: timestamp stored with each inserted subdomain
    :return: the updated `ret`
    """
    # DB lives next to the package root (two directories above this file).
    database = Database(
        os.path.join(
            os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
            'srcscan.db'))
    database.connect()
    database.init()
    logger.sysinfo("Scanning domain %s." % domain)
    _engines = [_(domain) for _ in engines.values()]
    loop = asyncio.get_event_loop()
    if debug:
        loop.set_debug(True)
    # Each engine task is awaited before the next starts (sequential).
    for task in [asyncio.ensure_future(_engine.run()) for _engine in _engines]:
        loop.run_until_complete(task)
    # loop.close()
    for _engine in _engines:
        logger.sysinfo("{engine} Found {num} sites".format(
            engine=_engine.engine_name,
            num=len(_engine.results['subdomain'])))
        ret.update(_engine.results['subdomain'])
    logger.sysinfo("Found %d subdomains of %s." % (len(ret), domain))
    # Insert with null URL/title and zeroed status — title_scan fills these in.
    for subdomain in ret:
        database.insert_subdomain(subdomain, None, None, 0, 0, now_time,
                                  domain)
    database.disconnect()
    return ret
def save_into_db(data):
    """Persist United States (US) advisory data into countries.sqlite.

    :param data: mapping of country key -> dict with 'country-iso', 'name',
                 'advisory-text' and 'visa-info' entries
    """
    # Rebuild the US table so each run stores a clean snapshot.
    db = Database("countries.sqlite")
    db.drop_table("US")
    db.add_table("US", country_iso="text", name="text",
                 advisory_text="text", visa_info="text")
    LOGGER.info('Saving United State table into the database')
    try:
        for country in data:
            entry = data[country]
            iso = entry.get('country-iso')
            name = entry.get('name')
            advisory = entry.get('advisory-text')
            visa = entry.get('visa-info')
            LOGGER.info(f"Saving {name} into the US table")
            db.insert("US", iso, name, advisory, visa)
            LOGGER.info(
                f"{name} was succesfully saved into the US table with the following information: {visa}. {advisory}"
            )
        LOGGER.success('US table has been successfully saved into the databse')
    except Exception as error_msg:
        LOGGER.error(
            f'Error has occured while saving the countries into the US table because of the following error: {error_msg}'
        )
    db.close_connection()
    #save_to_united_states()
def createFromNodeJs(self, data):
    """Validate and persist a request payload coming from the NodeJS side.

    Returns False when `data` fails validation, True after the request row
    and all of its messages have been stored.
    """
    if not self._validateNodeJsData(data):
        return False
    self._populateFromNodeJs(data)
    Debugger.log("Storing request data to Database...", 'debug')
    db = Database()
    # Storing main table
    query = "INSERT INTO requests (id, datetime, url, memory, stats, get, post, server, level, read)" \
            " VALUES (NULL, ?,?,?,?,?,?,?,?,0)"
    bind = (self._datetime, self._url, self._memory, JSON.encode(self._stats),
            JSON.encode(self._get), JSON.encode(self._post),
            JSON.encode(self._server), self._level)
    db.execute(query, bind)
    self._id = db.last_inserted_id
    # Storing messages
    # Re-create each Message so it is linked to this request's new id.
    messages = self._messages
    self._messages = []
    for message in messages:
        m = Message()
        message['request'] = self._id
        m.createFromNodeJs(message)
        self._messages.append(m)
    """ Modify data (this will also affect the Request table model) """
    settings = Settings()
    if Request._RECORDS is not None:
        Request._RECORDS.insert(0, self)
        # Keep the in-memory cache bounded by the configured limit.
        if len(Request._RECORDS) > settings.requests_limit:
            del Request._RECORDS[-1]
    return True
def save_to_MU():
    """Parse Mauritius (MU) visa requirements from Wikipedia and persist them
    into the MU table plus a JSON snapshot (advisory-mu.json)."""
    LOGGER.info(f'Saving and parsing Mauritius into the databse')
    driver = create_driver()
    LOGGER.info('Begin parsing for Mauritius advisory')
    try:
        wiki_visa_url = wiki_visa_url_MU
        wiki_visa_ob = wiki_visa_parser(wiki_visa_url, driver)
        visas = wiki_visa_ob.visa_parser_table()
        LOGGER.success(
            'Parsing for Mauritius advisory has been successfully completed')
    except Exception as error_msg:
        # NOTE(review): if parsing fails, `visas` stays unbound and the loop
        # below raises NameError (swallowed by the second try) — confirm.
        LOGGER.error(
            f'Error has occured while parsing for Mauritius advisory because of the following error: {error_msg}'
        )
    info = {}
    array_info = []
    # create an an sqlite_advisory object
    db = Database("countries.sqlite")
    db.drop_table("MU")
    db.add_table("MU", country_iso="text", name="text",
                 advisory_text="text", visa_info="text")
    LOGGER.info('Saving Mauritius table into the database')
    try:
        for country in visas:
            iso = find_iso_of_country(country)
            # Countries without a known ISO code are skipped entirely.
            if (iso != ""):
                name = country
                LOGGER.info(f'Saving {name}')
                visa = visas[country].get(
                    'visa')  #dictionary for visa info is country{visa:text}
                advisory = "Not available yet"
                info = {
                    "country_iso": iso,
                    "name": name,
                    "advisory": advisory,
                    "visa_info": visa
                }
                array_info.append(info)
                print(name, " ", visa, " ", advisory)
                db.insert("MU", iso, name, advisory, visa)
                LOGGER.success(
                    f'{name} was sucessfully saved to the database with the following information: {visa}. {advisory}.'
                )
        LOGGER.success(
            'Mauritius table successfully saved to the database')
    except Exception as error_msg:
        LOGGER.error(
            f'An error has occured while saving Mauritius table to the database because of the following error: {error_msg}'
        )
    db.close_connection()
    quit_driver(driver)
    # Mirror the inserted rows to a JSON file for consumers outside the DB.
    with open('./advisory-mu.json', 'w') as outfile:
        json.dump(array_info, outfile)
def save(self):
    """Flush every in-memory setting back to the settings table."""
    db = Database()
    query = "UPDATE settings SET value = ? WHERE key = ?"
    for key, value in Settings._SETTINGS.items():
        # log_levels is a structured value; serialize it before storing.
        if key == 'log_levels':
            value = JSON.encode(value)
        db.execute(query, (value, key))
def __test():
    """Smoke-test the CVE spider against a fresh database session."""
    db = Database()
    db.db_init()
    session = db.db_session()
    spider = Spider(session)
    ok, err = spider.get_vulns()
    if not ok:
        print(err)
def vul_scan(domain, now_time):
    """Run crawlergo-based vulnerability scans on every responsive subdomain
    of `domain` recorded in srcscan.db.

    :param domain: domain whose subdomains are scanned
    :param now_time: timestamp passed through to crawlergo_scan
    """
    datas = []
    database = Database(
        os.path.join(
            os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
            'srcscan.db'))
    database.connect()
    database.init()
    logger.sysinfo("Scaning vul for: %s " % (domain))
    # Collect the stored rows into dicts keyed by column meaning.
    for _row in database.select_mondomain(domain):
        data = {
            "subdomain": _row[0],
            "url": _row[1],
            "title": _row[2],
            "status": _row[3],
            "len": _row[4],
            "update_time": _row[5],
            "domain": _row[6]
        }
        datas.append(data)
    for data in datas:
        # status 0 marks subdomains that never answered; skip those.
        if data['status'] != 0:
            logger.sysinfo("Scaning vul for %s." % (data['url']))
            crawlergo_scan(data['url'], data['domain'], now_time, database)
    logger.sysinfo("Scaned vul for: %s " % (domain))
    database.disconnect()
def create():
    """Interactively prompt for a weapon's fields and persist it."""
    db = Database()
    weapon = Weapon()
    weapon.weapon_name = read_input(text="Weapon Name (str): ")
    weapon.variant_id = read_input(text="Variant ID (int): ",
                                   data_type=lambda x: int(x))
    weapon.wear_from = read_input(text="Wear from (int) [0]: ",
                                  default=0,
                                  data_type=lambda x: int(x))
    weapon.wear_to = read_input(text="Wear to (int) [100]: ",
                                default=100,
                                data_type=lambda x: int(x))
    # BUG FIX: default was 100 (copy-paste from wear_to) although the prompt
    # advertises [0]; also parse via int() first so an input of "0" becomes
    # False — bool("0") on the raw string is True.
    weapon.stat_track = read_input(text="Stat-Track (int) [0]: ",
                                   default=0,
                                   data_type=lambda x: bool(int(x)))
    db.save_weapon(weapon)
def save_to_UK():
    """Parse UK (GB) visa requirements and advisories for all countries and
    persist them into the GB table plus a JSON snapshot (advisory-uk.json)."""
    LOGGER.info("Begin parsing and saving for United Kingdom table...")
    driver = create_driver()
    LOGGER.info('Parsing the visa requirements of all countries for United Kingdom advisory')
    try:
        wiki_visa_url = "https://en.wikipedia.org/wiki/Visa_requirements_for_British_citizens"
        wiki_visa_ob = wiki_visa_parser(wiki_visa_url, driver)
        visas = wiki_visa_ob.visa_parser_table()
        data = parse_all_countries_advisory()
        LOGGER.success('Successfully parsed the visa requirements of all countries for United Kingdom advisory')
    except Exception as error_msg:
        LOGGER.error(f'An error has occured while retrieving the visa reuirements of all countries for United Kingdom advisory because of the following error: {error_msg}')
    info = {}
    array_info = []
    # create an an sqlite_advisory object]
    db = Database("countries.sqlite")
    db.drop_table("GB")
    db.add_table("GB", country_iso="text", name="text",
                 advisory_text="text", visa_info="text")
    LOGGER.info('Saving countries informations into the UK table')
    try:
        for country in visas:
            iso = find_iso_of_country(country)
            if (iso != ""):
                try:
                    name = country
                    advisory = data[iso].get('advisory')  #dictionary for the travel advisory is iso{advisory:text}
                    visa_info = visas[country].get('visa')  #dictionary for visa info is country{visa:text}
                    info = {
                        "country_iso": iso,
                        "name": name,
                        "advisory": advisory,
                        "visa_info": visa_info
                    }
                    array_info.append(info)
                    # BUG FIX: this f-string was split by a raw newline in the
                    # source, which is a SyntaxError; rejoined onto one line.
                    LOGGER.success(f"Saving {name} into the UK table with the following information: {visa_info}. {advisory}")
                    db.insert("GB", iso, name, advisory, visa_info)
                    LOGGER.success(f'{name} sucesfully saved to the database.')
                except KeyError:
                    LOGGER.warning(f'This country doesn\'t have advisory info: {country}')
                    print("This country doesn't have advisory info: ", country)
                    LOGGER.info(f'Its ISO is {iso}')
                    print("Its ISO is: ", iso)
        LOGGER.success('All countries have been succesfully saved into the UK table')
    except Exception as error_msg:
        LOGGER.error(f'An error has occured while saving countries into the UK table because of the following: {error_msg}')
    db.close_connection()
    with open('./advisory-uk.json', 'w') as outfile:
        json.dump(array_info, outfile)
def __init__(self):
    """Wire up the DB connection, the CGI form, and the user/content helpers."""
    try:
        self.db = Database(
            None)  #reading all connection data from config.ini
        logging.debug('Database connected...')
    except:  #if DB server is not accessible should wrap up an close connection here
        # NOTE(review): bare except; on failure self.db stays unset and the
        # attribute accesses below raise AttributeError — confirm intent.
        logging.debug(
            'Connection to DB failed, try running admin first...')
    self.form = cgi.FieldStorage()
    self.user = EditorialUser(self.form, self.db)
    self.content = EditorialContent(self.form, self.user, self.db)
def save_drug_law():
    """Combine marijuana/cocaine/methamphetamine legality data per country
    and write it into the `drugs` table."""
    marijuana = get_countries_canabaislaw()
    cocaine = get_countries_cocainelaw()
    methaphetamine = get_countries_methaphetaminelaw()
    # Recreate the table so each run is a fresh snapshot.
    DB = Database(sqlite_db)
    DB.drop_table('drugs')
    DB.add_table('drugs', country_iso='text', name="text",
                 methaphetamine_possession='text',
                 methaphetamine_sale='text',
                 methaphetamine_transport='text',
                 methaphetamine_cultivation='text',
                 cocaine_possession='text',
                 cocaine_sale='text',
                 cocaine_transport='text',
                 cocaine_cultivation='text',
                 canabais_recreational='text',
                 canabais_medical='text')
    drug_info = combine_dictionaries(marijuana, cocaine, methaphetamine)
    for iso in drug_info:
        country_iso = drug_info[iso].get("iso")
        country_name = drug_info[iso].get("name")
        methaphetamine_possession = drug_info[iso].get("methaphetamine_possession")
        methaphetamine_sale = drug_info[iso].get("methaphetamine_sale")
        methaphetamine_transport = drug_info[iso].get("methaphetamine_transport")
        methaphetamine_cultivation = drug_info[iso].get("methaphetamine_cultivation")
        cocaine_possession = drug_info[iso].get("cocaine_possession")
        cocaine_sale = drug_info[iso].get("cocaine_sale")
        cocaine_transport = drug_info[iso].get("cocaine_transport")
        cocaine_cultivation = drug_info[iso].get("cocaine_cultivation")
        canabais_recreational = drug_info[iso].get("canabais_recreational")
        canabais_medical = drug_info[iso].get("canabais_medical")
        LOGGER.info(f"Parsing {country_name} to insert into drug table with the following information: {canabais_recreational}. {canabais_medical}.{cocaine_possession}.{methaphetamine_possession}")
        # Column order must match add_table above.
        DB.insert('drugs', country_iso, country_name,
                  methaphetamine_possession, methaphetamine_sale,
                  methaphetamine_transport, methaphetamine_cultivation,
                  cocaine_possession, cocaine_sale, cocaine_transport,
                  cocaine_cultivation, canabais_recreational,
                  canabais_medical)
def main():
    """Entry point for the pntest GUI: resolve paths, open (or create) the
    database, build the main window, apply style and icon, and run the app."""
    QtCore.QCoreApplication.setApplicationName('pntest')
    app = QtWidgets.QApplication(sys.argv)
    app_path = get_app_path()
    src_path = os.path.join(app_path, 'src')
    data_path = get_resource_path(app_path, 'include')
    tmp_db_path = get_resource_path(app_path, 'pntest-tmp.db')
    style_dir_path = get_resource_path(app_path, 'style')
    print(f'[Gui] App path: {app_path}')
    print(f'[Gui] Data path: {data_path}')
    print(f'[Gui] DB path: {tmp_db_path}')
    print(f'[Gui] style dir path: {style_dir_path}')
    # Load DB from the CLI if argument given
    try:
        tmp_db_path = sys.argv[1]
        print(f'[Frontend] Overridding DB path from CLI: {tmp_db_path}')
        database = Database(tmp_db_path)
        database.load_or_create()
    except IndexError:
        # No CLI argument: start from a clean temporary database.
        database = Database(tmp_db_path)
        database.delete_existing_db()
        database.load_or_create()
    process_manager = ProcessManager(src_path)
    main_window = MainWindow()
    main_window.set_process_manager(process_manager)
    main_window.show()
    app.aboutToQuit.connect(main_window.about_to_quit)  # type: ignore
    # Style:
    app.setStyle('Fusion')
    style_loader = StyleheetLoader(style_dir_path)
    stylesheet = style_loader.load_theme(THEME)
    if stylesheet is not None:
        app.setStyleSheet(stylesheet)
    # Icon:
    # NOTE(review): hard-coded developer-machine path — confirm it resolves
    # on end-user installs.
    app.setWindowIcon(
        QtGui.QIcon(QtGui.QPixmap('/home/evan/Code/pntest/pntest-icon32.ico')))
    main_window.setWindowIcon(
        QtGui.QIcon(QtGui.QPixmap('/home/evan/Code/pntest/pntest-icon32.ico')))
    sys.exit(app.exec_())
def test_installer_simple():
    """End-to-end installer test: run main() on the SIMPLE fixture, then check
    the generated metadata CSV, the Solr index, and the documents/filters
    tables in the database."""
    silentremove("/tmp/simple.meta")
    error_code = main([SIMPLE, "/tmp/simple.meta"])
    assert error_code == 0
    # The metadata CSV should list 8 tables, exactly one of them 'Products'.
    with open("/tmp/simple.meta") as metadata:
        reader = csv.reader(metadata)
        n_tables = 0
        products_table_exists = 0
        for row in reader:
            n_tables += 1
            if row[1] == 'Products':
                products_table_exists += 1
                assert "xmin" in row[2]
                assert "productDescription" in row[2]
        assert n_tables == 8
        assert products_table_exists == 1
    # check solr
    s = Solr()
    res = s.list()
    assert res["hits"] == 8
    assert len(res["docs"]) == 8
    # check database
    db = Database()
    db.connect()
    cursor = db.cursor()
    cursor.execute("SELECT * FROM documents ORDER BY ID")
    rows = cursor.fetchall()
    assert len(rows) == 8
    assert rows[0]["universal_id"] == "ClassicModels.public.OrderDetails"
    assert rows[1]["universal_id"] == "ClassicModels.public.Offices"
    assert rows[2]["universal_id"] == "ClassicModels.public.Payments"
    assert rows[3]["universal_id"] == "ClassicModels.public.ProductLines"
    assert rows[4]["universal_id"] == "ClassicModels.public.Customers"
    assert rows[5]["universal_id"] == "ClassicModels.public.Orders"
    assert rows[6]["universal_id"] == "ClassicModels.public.Employees"
    assert rows[7]["universal_id"] == "ClassicModels.public.Products"
    # Filters form a two-level hierarchy: database -> schema.
    cursor.execute("SELECT * FROM filters ORDER BY ID")
    rows = cursor.fetchall()
    assert len(rows) == 2
    assert rows[0]["name"] == "ClassicModels"
    assert rows[1]["name"] == "public"
    assert rows[0]["id"] == rows[1]["parent_id"]
def __init__(self, report_config, state):
    """Create a Mailer bound to the shared state's config, logger and database.

    :param report_config: NOTE(review): appears unused — self.report_config is
        taken from state.config instead; confirm this is intentional.
    :param state: shared application state providing config and logger
    """
    self.report_config = state.config
    self.logger = state.logger
    self.state = state
    # Connection parameters all come from the [database] config section.
    self.db = Database(self.state.config.get('database', 'host'),
                       self.state.config.get('database', 'port'),
                       self.state.config.get('database', 'database'),
                       self.state.config.get('database', 'collection'))
    self.query_filter = self.get_filter()
    self.get_time_interval()
    self.date_field = self.state.config.get('general', 'date field')
    self.logger.info('Creating Mailer instance')
def reinitialize(self):
    """Reset all per-session state before attaching to a new target.

    Clears process/device handles, hook registries and pending arguments,
    and creates a fresh Database bound to this instance.
    """
    self.database = Database(self)
    self._pid = 0
    self._package = None
    # CLEANUP: _process/_script were each assigned None twice; the duplicate
    # assignments under the "# process" comment are kept, the earlier pair
    # removed.
    self._spawned = False
    self._resumed = False
    self.java_available = False
    # frida device
    self._device = None
    # process
    self._process = None
    self._script = None
    # hooks
    self.hooks = {}
    self.native_on_loads = {}
    self.java_on_loads = {}
    self.java_hooks = {}
    self.temporary_input = ''
    self.native_pending_args = None
    self.java_pending_args = None
    self.context_tid = 0
def __init__(self) -> None:
    """Configure the bot: prefix from $PREFIX (default 'aria '), mention
    triggering enabled, no built-in help command, and attach the database."""
    super().__init__(
        command_prefix=commands.when_mentioned_or(
            environ.get('PREFIX', 'aria ')),
        help_command=None,
    )
    self.db = Database(self)
def get_database_connection():
    """Build a Database handle from the [DATABASE] section of app_config."""
    cfg = app_config["DATABASE"]
    return Database(cfg["host"], cfg["username"], cfg["password"],
                    cfg["dbname"])
def insert_or_update(cls, info):
    """Upsert `info` into cls.TABLE_NAME, keyed on the slack_id column.

    :param info: mapping of column name -> value; keys become the column list
    :return: None both on empty input and on failure
    """
    if not info:
        print("Nothing to write...")
        return None
    table_name = cls.TABLE_NAME
    keys = ", ".join(info.keys())
    values = ", ".join(["%s"] * len(info))
    # ON CONFLICT ... SET col = excluded.col for every supplied column.
    update_keys = ", ".join(map(lambda x: f"{x}= excluded.{x}", info.keys()))
    conn = None
    cursor = None
    query = (
        f"INSERT INTO {table_name} ({keys}) VALUES ({values}) "
        f"ON CONFLICT (slack_id) DO UPDATE "
        f"SET {update_keys}"
    )
    try:
        conn = Database.connect()
        cursor = conn.cursor()
        cursor.execute(query, list(info.values()))
    except Exception as e:
        # TODO: Change print to logger
        print(e)
        return None
    finally:
        # NOTE(review): commit also runs after a failed execute — confirm.
        if cursor:
            cursor.close()
        if conn:
            conn.commit()
            conn.close()
def test_init_repository(self):
    """_init_repository clones the project into repository_path and returns
    a path that contains the repository name ('squib')."""
    with tempfile.TemporaryDirectory() as directory:
        # Arrange
        project_id = 10868464
        repository_path = os.path.join(directory, str(project_id))
        expected = os.path.join(
            directory, str(project_id), 'squib'
        )
        # Act
        attributes = Attributes(
            self.rawattributes,
            database=Database(self.rawsettings),
            goptions=self.rawgoptions
        )
        try:
            attributes.database.connect()
            actual = attributes._init_repository(
                project_id, repository_path
            )
            # Assert
            self.assertTrue(len(os.listdir(repository_path)) > 0)
            self.assertTrue(expected in actual)
        finally:
            # Always release the DB connection, even on assertion failure.
            attributes.database.disconnect()
def main():
    """
    Main execution flow.

    Reads config + manifest, wires up the database and attribute plugins,
    ensures the repositories root exists, and runs the sampled repository
    ids against either the normal or the golden-set results table.
    """
    try:
        args = process_arguments()
        config = utilities.read(args.config_file)
        manifest = utilities.read(args.manifest_file)
        # TODO: Refactor
        core.config = config
        utilities.TOKENIZER = core.Tokenizer()
        database = Database(config['options']['datasource'])
        globaloptions = {
            'today': config['options']['today'],
            'timeout': config['options']['timeout']
        }
        attributes = Attributes(manifest['attributes'], database,
                                args.cleanup, args.key_string,
                                **globaloptions)
        if not os.path.exists(args.repositories_root):
            os.makedirs(args.repositories_root, exist_ok=True)
        # Golden-set runs write to a separate results table.
        table = 'reaper_results'
        if args.goldenset:
            table = 'reaper_goldenset'
        _run = run.Run(args.repositories_root, attributes, database,
                       config['options']['threshold'], args.num_processes)
        _run.run([int(line) for line in args.repositories_sample], table)
    except Exception as e:
        # Print the full traceback instead of failing silently.
        extype, exvalue, extrace = sys.exc_info()
        traceback.print_exception(extype, exvalue, extrace)
def insert(cls, message, match):
    """Insert one chat message row into cls.TABLE_NAME.

    :param message: message object with id, text, room and user attributes
    :param match: truthy flag, stored as 't' / 'f'
    :return: None on empty message or on failure
    """
    if not message:
        print("No message to write...")
        return None
    data = {
        "message_id": message.id or -1,
        "text": message.text,
        "room": message.room,
        "user_id": message.user.id,
        "match": "t" if match else "f",
    }
    table_name = cls.TABLE_NAME
    keys = ", ".join(data.keys())
    values = ", ".join(["%s"] * len(data))
    conn = None
    cursor = None
    query = f"INSERT INTO {table_name} ({keys}) VALUES ({values})"
    try:
        conn = Database.connect()
        cursor = conn.cursor()
        cursor.execute(query, list(data.values()))
    except Exception as e:
        # TODO: Change print to logger
        print(e)
        return None
    finally:
        # NOTE(review): commit also runs after a failed execute — confirm.
        if cursor:
            cursor.close()
        if conn:
            conn.commit()
            conn.close()
def test_run_timeout(self):
    """A sabotaged (too short) timeout makes the architecture attribute come
    back as None while run() still completes with a score of 0."""
    with tempfile.TemporaryDirectory() as directory:
        # Arrange
        project_id = 10868464
        repository_path = directory
        # Deep-copy so the sabotage does not leak into other tests.
        rawattributes = copy.deepcopy(self.rawattributes)
        for attribute in rawattributes:
            if 'architecture' in attribute['name']:
                attribute['options']['timeout'] = '1S'  # Sabotage
        expected = (0, {'architecture': None})
        # Act
        attributes = Attributes(
            rawattributes,
            database=Database(self.rawsettings),
            keystring='a',
            goptions=self.rawgoptions
        )
        try:
            attributes.database.connect()
            actual = attributes.run(project_id, repository_path)
            # Assert
            self.assertEqual(expected, actual)
        finally:
            attributes.database.disconnect()
def execute_worker(args):
    """Worker: fetch one blog by URL through the API and insert it into the
    blog_list collection.

    :param args: (url, country) tuple; a null country aborts the insert
    """
    try:
        url, country = args
        print("[migrate][debug] URL: %s" % url.encode("utf8"))
        get_by_url = APIFactory.get_api(APIFactory.GET_BY_URL)
        res = get_by_url.execute(url=url)
        if country is None:
            raise CannotFindBlog("Country is null")
        document = {
            "id": res["id"],
            "name": res["name"],
            "description": res["description"],
            "created": arrow.get(res["published"]).datetime,
            "updated": arrow.get(res["updated"]).datetime,
            "url": res["url"],
            "domain": NetworkTools.get_domain(res["url"], with_scheme=False),
            "country": country,
            "_insert_time": arrow.utcnow().datetime,
            "is_active": True
        }
        db = Database.get_db()
        db.blog_list.insert_one(document)
    except CannotFindBlog as ex:
        print(fmtstr("[migrate][debug] %s" % ex, "red"))
    except pymongo.errors.DuplicateKeyError:
        # A duplicate insert just means the blog was migrated already.
        print(fmtstr("[migrate][debug] Blog has already exists.", "red"))
def recipes():
    """Flask view listing all recipes; on a valid POST, create the new recipe
    and redirect back to the list (POST/redirect/GET)."""
    new_recipe = add_recipe_form()
    if new_recipe.validate_on_submit():
        Database().add_recipe(new_recipe.data['name'])
        # BUG FIX: the redirect response was built but never returned, so the
        # browser re-rendered the page on the POST itself (and a refresh would
        # resubmit the form).
        return redirect('/recipes')
    delete = delete_form()
    recipes = Database().get_all_recipes()
    return render_template('recipe_list.html',
                           title='Recipes',
                           recipes=recipes,
                           new_recipe=new_recipe,
                           delete=delete)
def index(request):
    """Django view: add a malicious IP/domain record from the POSTed form.

    Expects POST fields dn_ip, port, source, note; re-renders the form on GET
    or when required fields are missing.
    """
    if request.method == "POST":
        dn_ip = request.POST.get("dn_ip")
        date = datetime.date.today()
        port = force_int(request.POST.get("port"))
        source = request.POST.get("source")
        note = source_parse(request.POST.get("note"))
        db = Database()
        msg = ""
        if dn_ip and source:
            # IPs and domains go to separate tables.
            # NOTE(review): when BOTH port and note are supplied, neither is
            # passed (falls into the bare else) — confirm that is intended.
            if check_ip(dn_ip):
                if not port and note:
                    msg = db.insert_malicious_ip_record(ip=dn_ip, date=date,
                                                        source=source,
                                                        note=note)
                elif port and not note:
                    msg = db.insert_malicious_ip_record(ip=dn_ip, date=date,
                                                        source=source,
                                                        port=port)
                else:
                    msg = db.insert_malicious_ip_record(ip=dn_ip, date=date,
                                                        source=source)
            else:
                if not port and note:
                    msg = db.insert_malicious_dn_record(dn=dn_ip, date=date,
                                                        source=source,
                                                        note=note)
                elif port and not note:
                    msg = db.insert_malicious_dn_record(dn=dn_ip, date=date,
                                                        source=source,
                                                        port=port)
                else:
                    msg = db.insert_malicious_dn_record(dn=dn_ip, date=date,
                                                        source=source)
        else:
            return render_to_response("add/index.html",
                                      context_instance=RequestContext(request))
    else:
        return render_to_response("add/index.html",
                                  context_instance=RequestContext(request))
    # A non-empty msg from the insert helpers signals an error.
    if msg:
        return render_to_response("error.html", {"error": str(msg)},
                                  context_instance=RequestContext(request))
    else:
        return render_to_response(
            "sucess.html",
            {"message": "Add {} to DB sucessfully".format(dn_ip)},
            context_instance=RequestContext(request))
class Importer(Initiator):
    """Load questions, mystery objects and their answers from disk into the
    database, exiting with status 1 on any persistence failure."""

    def __init__(self, qpath="data/src", wpath="data", cnx_string=database):
        super(Importer, self).__init__(qpath, wpath)
        self.cnx_string = cnx_string
        # NOTE(review): listdir() scans wpath but isfile() checks getcwd()/f —
        # confirm which directory the .bindata files actually live in.
        self.files = (f for f in listdir(join(getcwd(), wpath))
                      if isfile(join(getcwd(), f))
                      and splitext(f)[1] == ".bindata")
        try:
            self.database = Database(cnx_string, reset=True)
        except Exception as e:
            print(e)
            print("Unable to connect to database `{}`".format(self.cnx_string))
            sys.exit(1)

    def run(self):
        """Persist questions, then objects, then every (question, object)
        answer found in the word files."""
        qids = {}
        for q in self.questions:
            try:
                qids[q] = self.database.addQuestion(q).id
            except Exception as e:
                print(e)
                print("Unable to persist question `{}` to database".format(q))
                sys.exit(1)
        oids = {}
        for o in self.objects:
            try:
                oids[o] = self.database.addMystery(o).id
            except Exception as e:
                print(e)
                print("Unable to persist object `{}` to database".format(o))
                sys.exit(1)
        for f in self.files:
            name = splitext(f)[0]
            # NOTE(review): `wpath` is not defined in this scope unless it is
            # a module-level global — confirm.
            word = Word(name, wpath)
            oid = oids[name]
            for q in word.questions:
                if q in self.questions:
                    # BUG FIX: the body referenced an undefined name
                    # `question`; the loop variable is `q` (NameError at
                    # runtime before this fix).
                    answer = word.data["answers"][q]
                    qid = qids[q]
                    try:
                        self.database.addAnswer(qid, oid, answer)
                    except Exception as e:
                        print(e)
                        print("Unable to persist answer to database : ")
                        print("[{}] {} : {}".format(name, q, answer))
                        sys.exit(1)
def createFromNodeJs(self, data):
    """Validate and persist one log message coming from the NodeJS side.

    Returns False when `data` fails validation, True once the row is stored
    and self._id holds the new primary key.
    """
    if not self._validateNodeJsData(data):
        return False
    self._populateFromNodeJs(data)
    Debugger.log("Storing message data to Database...", 'debug')
    db = Database()
    # Storing main table
    # `group` is backtick-quoted because it is an SQL keyword.
    query = "INSERT INTO messages (`group`, message, file, line, memory, level, datetime, context, extra, request) " \
            "VALUES (?,?,?,?,?,?,?,?,?,?)"
    bind = (
        self._group, self._message, self._file, self._line, self._memory,
        self._level, self._datetime, JSON.encode(self._context),
        JSON.encode(self._extra), self._request)
    db.execute(query, bind)
    self._id = db.last_inserted_id
    return True
class Editorial(object):
    """CGI controller: authenticates the user and serves/updates the
    editorial content page."""

    def __init__(self):
        try:
            self.db = Database(
                None)  #reading all connection data from config.ini
            logging.debug('Database connected...')
        except:  #if DB server is not accessible should wrap up an close connection here
            # NOTE(review): bare except leaves self.db unset; the attribute
            # uses below would then raise AttributeError — confirm.
            logging.debug(
                'Connection to DB failed, try running admin first...')
        self.form = cgi.FieldStorage()
        self.user = EditorialUser(self.form, self.db)
        self.content = EditorialContent(self.form, self.user, self.db)

    def getPage(self):
        """Render the content page for a verified user, applying any content
        or password update submitted with the form; otherwise re-prompt."""
        contenterror = ''
        pwerror = ''
        if self.user.verifyId(
        ):  #check of username succeeded: prepare content page
            if 'title' in self.form.keys(
            ):  #if we are calling the cgi from the content page
                contenterror = self.content.ecSubmit(
                )  #add to content table in DB
                # and get message for pageupdate
                self.content.updateNewsfile(
                    HTML_CONTENT_PATH)  #update news.html
            if 'newpass' in self.form.keys(
            ):  #if we are calling the cgi from the content page AND have checked the changePW box
                pwerror = self.user.updatePass(
                    self.form)  #update PW in persons table in DB
            self.db.closeDb()  #DB server close
            logging.debug('Branch1: Database connection closed...')
            logging.debug('pwerror: {}'.format(pwerror))
            logging.debug('contenterror: {}'.format(contenterror))
            print(
                HTMLTEMPLATE1.format(
                    self.user.name,
                    self.user.pw,  #hidden form data
                    self.content.text,  #text will be carried over
                    contenterror,
                    pwerror))
        else:  #bad username: prepare another login prompt
            self.db.closeDb()
            logging.debug('Branch2: Database connection closed...')
            print(HTMLTEMPLATE2)
class Authentication():
    """Username/password authentication backed by the JSON database."""

    def __init__(self, db_name='db.json'):
        self.db = Database(name=db_name)

    def _gen_salt(self, length=6, chars=string.ascii_letters):
        """Random letters-only salt of the requested length."""
        return ''.join(random.choice(chars) for _ in range(length))

    def _gen_password(self):
        """Fixed initial password handed to newly created users."""
        return '12345'

    def create_user(self, kind, username, **kwargs):
        """Create a client/employee record plus its login row.

        :param kind: 'client' or 'employee'; anything else raises
        :return: the new user's id  # Client needs to know the password
        """
        # Create client in the database
        if kind == 'client':
            user_id = self.db.new_client(**kwargs)
        elif kind == 'employee':
            user_id = self.db.new_employee(**kwargs)
        else:
            raise Exception('Invalid user.')
        # Create new user in the database: store sha256(password + salt).
        salt = self._gen_salt()
        digest = hashlib.sha256()
        digest.update(self._gen_password().encode('utf-8'))
        digest.update(salt.encode('utf-8'))
        self.db.new_user(username, digest.hexdigest(), salt, user_id)
        return user_id

    def login(self, username, password):
        """True iff `username` exists and `password` matches its stored hash."""
        # retrieve the data from db
        info = self.db.get_login_data(username)
        if not info:
            # wrong username
            return False
        # Recompute sha256(password + salt) and compare against the record.
        digest = hashlib.sha256()
        digest.update(password.encode('utf-8'))
        digest.update(info['salt'].encode('utf-8'))
        return info['password'] == digest.hexdigest()
def find(where=None):
    """Fetch Message rows, optionally filtered by exact-match column values.

    :param where: optional mapping of column name -> required value
    :return: list of populated Message instances
    """
    db = Database()
    query = "SELECT * FROM messages"
    bind = []
    if where is not None:
        # "WHERE 1" lets every condition be appended uniformly with AND.
        clauses = " WHERE 1 "
        for column in where:
            clauses += "AND " + column + " = ?"
            bind.append(where[column])
        query += clauses
    rows = db.execute(query, bind).fetchAll()
    messages = []
    for row in rows:
        msg = Message()
        msg._populateFromDb(row)
        messages.append(msg)
    return messages
def index(request):
    """Django view: add a malicious IP/domain record from the POSTed form.

    Expects POST fields dn_ip, port, source, note; re-renders the form on GET
    or when required fields are missing.
    """
    if request.method == "POST":
        dn_ip = request.POST.get("dn_ip")
        date = datetime.date.today()
        port = force_int(request.POST.get("port"))
        source = request.POST.get("source")
        note = source_parse(request.POST.get("note"))
        db = Database()
        msg = ""
        if dn_ip and source:
            # IPs and domains go to separate tables.
            # NOTE(review): when BOTH port and note are supplied, neither is
            # passed (falls into the bare else) — confirm that is intended.
            if check_ip(dn_ip):
                if not port and note:
                    msg = db.insert_malicious_ip_record(ip=dn_ip, date=date,
                                                        source=source,
                                                        note=note)
                elif port and not note:
                    msg = db.insert_malicious_ip_record(ip=dn_ip, date=date,
                                                        source=source,
                                                        port=port)
                else:
                    msg = db.insert_malicious_ip_record(ip=dn_ip, date=date,
                                                        source=source)
            else:
                if not port and note:
                    msg = db.insert_malicious_dn_record(dn=dn_ip, date=date,
                                                        source=source,
                                                        note=note)
                elif port and not note:
                    msg = db.insert_malicious_dn_record(dn=dn_ip, date=date,
                                                        source=source,
                                                        port=port)
                else:
                    msg = db.insert_malicious_dn_record(dn=dn_ip, date=date,
                                                        source=source)
        else:
            return render_to_response("add/index.html",
                                      context_instance=RequestContext(request))
    else:
        return render_to_response("add/index.html",
                                  context_instance=RequestContext(request))
    # A non-empty msg from the insert helpers signals an error.
    if msg:
        return render_to_response("error.html", {"error": str(msg)},
                                  context_instance=RequestContext(request))
    else:
        return render_to_response("sucess.html",
                                  {"message": "Add {} to DB sucessfully".format(
                                      dn_ip)},
                                  context_instance=RequestContext(request))
def __init__(self, config):
    """Set up the Telegram bot: API client, logging, database, admin list,
    command registry and message handlers.

    config: a ConfigParser-like object with a 'bot' section.
    """
    self._config = config
    # skip_pending drops updates that arrived while the bot was offline.
    self.telegram = telebot.TeleBot(config.get('bot', 'key'), skip_pending=True)
    self.bot_user = self.telegram.get_me()  # network call: fetch own identity
    telebot.logger.setLevel(logging.WARNING)  # quiet the library's logger
    self._logger = logging.getLogger('pyper')
    self.database = Database('data/pyper.json')
    self.admins = []
    self.__init_config()
    self.commands = {}
    self.__init_commands()
    self.__init_handlers()
def __init__(self, qpath="data/src", wpath="data", cnx_string=database):
    """Importer over the .bindata files found in `wpath`.

    qpath/wpath are forwarded to the base class; cnx_string defaults to the
    module-level `database` connection string.  Exits the process when the
    database connection cannot be established.
    """
    super(Importer, self).__init__(qpath, wpath)
    self.cnx_string = cnx_string
    # Lazy generator over the .bindata files inside wpath.
    # BUG FIX: isfile() used to be checked against cwd/f instead of
    # cwd/wpath/f, so listed files failed the test unless they also
    # happened to exist in the current directory itself.
    self.files = (f for f in listdir(join(getcwd(), wpath))
                  if isfile(join(getcwd(), wpath, f))
                  and splitext(f)[1] == ".bindata")
    try:
        self.database = Database(cnx_string, reset=True)
    except Exception as e:
        print(e)
        print("Unable to connect to database `{}`".format(self.cnx_string))
        sys.exit(1)
def index(request):
    """Search view: look up passive-DNS history and the malicious-record
    flag for an IP or domain over an optional date range.
    """
    db = Database()
    query_result = []
    if request.method == "POST":
        # BUG FIX: .get() returns None when the field is absent, and
        # None.strip() raised AttributeError; guard before stripping.
        query = (request.POST.get("query") or "").strip()
        start_date = request.POST.get("Start_Date")
        end_date = request.POST.get("End_Date")
        # Default the range to [2016-01-01, today].
        if not start_date:
            start_date = '2016-01-01'
        if not end_date:
            end_date = datetime.date.today()
        if query:
            if check_ip(query):
                query_result = db.get_pdns_record(ip=query, start=start_date,
                                                  end=end_date)
                record = db.get_malicious_ip_record(query)
                dn_ip = "ip"
            else:
                query_result = db.get_pdns_record(dn=query, start=start_date,
                                                  end=end_date)
                record = db.get_malicious_dn_record(query)
                dn_ip = "domain"
            return render_to_response("search/result.html",
                                      {"results": query_result,
                                       "record": record,
                                       "type": dn_ip},
                                      context_instance=RequestContext(request))
        else:
            # Empty query: re-render the bound form so errors display.
            form = ToDoForm(request.POST)
            return render_to_response("search/index.html", {'form': form},
                                      context_instance=RequestContext(request))
    else:
        form = ToDoForm()
        return render_to_response("search/index.html", {'form': form},
                                  context_instance=RequestContext(request))
def delete(index):
    """Delete the cached request at position `index` (and its messages)
    from the database, then back-fill the in-memory cache from the DB."""
    if len(Request._RECORDS) == 0:
        return
    request = Request._RECORDS[index]
    db = Database()
    settings = Settings()
    query = "DELETE FROM requests WHERE id = ?"
    db.execute(query, [request.getId()])
    # Messages reference their request row; delete them too.
    query = "DELETE FROM messages WHERE request = ?"
    db.execute(query, [request.getId()])
    del Request._RECORDS[index]
    # Back-fill one record so the cache stays at requests_limit entries:
    # "LIMIT ?, 1" is offset/count syntax — skip limit-1 rows and take the
    # single next row, if the table still has one.
    query = "SELECT * FROM requests ORDER BY id DESC LIMIT ?, 1"
    results = db.execute(query, [settings.requests_limit - 1]).fetchAll()
    if results:
        req = Request()
        req._populateFromDb(results.pop())
        Request._RECORDS.append(req)
def all(force=False):
    """Return the cached list of requests, loading the newest rows from the
    database on first use or when `force` is True."""
    cache = Request._RECORDS
    if cache is None:
        # First access: create the shared cache list and load it.
        cache = Request._RECORDS = []
        stale = True
    else:
        stale = force
        if force:
            # Clear in place so existing references keep seeing fresh data.
            del cache[:]
    if stale:
        db = Database()
        settings = Settings()
        rows = db.execute("SELECT * FROM requests ORDER BY id DESC LIMIT ?",
                          [settings.requests_limit]).fetchAll()
        for row in rows:
            record = Request()
            record._populateFromDb(row)
            cache.append(record)
    return Request._RECORDS
def __init__(self, sample, rsrc=None, reset=True, **mysql_config):
    """Combined database client + SUSS PA300 prober controller for one sample.

    Loads the sample's mask/geometry row from v02_sample, then builds
    mesa-id lookups and default/override probe coordinates.
    `rsrc`/`reset` go to the instrument base class, `mysql_config` to the DB.
    """
    Database.__init__(self, **mysql_config)
    SussPA300.__init__(self, rsrc, reset=reset)
    self.sample = sample
    # One row: mask name, die pitch (dX, dY) and die-index range — the
    # unpack order must match the SELECT column order exactly.
    query = 'SELECT mask, dX, dY, Xmin, Xmax, Ymin, Ymax FROM v02_sample ' \
            'WHERE sample=%s'
    self.mask, self.dX, self.dY, \
        self.X_min, self.X_max, self.Y_min, self.Y_max = \
        self.q_row_abs(query, (self.sample,))
    # mesa_id -> mesa name for this mask.
    dat = self.q_all_abs('SELECT mesa_id, mesa FROM mesa '
                         'WHERE mask=%s', (self.mask,))
    self.dic_id_mesa = {mesa_id: mesa for mesa_id, mesa in dat}
    # mesa_id -> default probe position [xm_probe, ym_probe] from the mask.
    dat = self.q_all_abs('SELECT mesa_id, xm_probe, ym_probe FROM mesa '
                         'WHERE mask=%s', (self.mask,))
    self.dic_mid_xypr_default = {mesa: xm_ym for mesa, *xm_ym in dat}
    # Per-sample probe-position overrides, keyed the same way.
    dat = self.q_all('SELECT mesa_id, xm_probe, ym_probe FROM suss_xm_ym '
                     'WHERE sample=%s', (self.sample,))
    self.dic_mid_xypr_specified = {mesa: xp_yp for mesa, *xp_yp in dat}
def index(request):
    """Render the dashboard page with summary statistics from the database."""
    db = Database()
    report = {
        "total_dn": db.count_dn(),
        "total_alive_dn": db.count_alive_dn(),
        "last_dn": db.last_added_dn(),
        "top_mapping_ip": db.top_mapping_ip(),
        "top10_country_amount": db.top10_country_amount(),
    }
    return render_to_response("dashboard/index.html",
                              {"report": report},
                              context_instance=RequestContext(request))
def load_file(self, filename=None):
    """Load a binary into the database and build its disassembler.

    filename defaults to self.filename.  Returns True on success; on
    failure returns False in interactive mode, otherwise calls die().
    """
    if filename is None:
        filename = self.filename

    if not os.path.exists(filename):
        # BUG FIX: the message always printed self.filename, even when an
        # explicit `filename` argument was the path being checked.
        error("file {} doesn't exist".format(filename))
        if self.interactive_mode:
            return False
        die()

    if not os.path.isfile(filename):
        # (the old ".format(self=self)" had no placeholders — removed)
        error("this is not a file")
        if self.interactive_mode:
            return False
        die()

    self.db = Database()
    self.db.load(filename)

    try:
        dis = Disassembler(filename, self.raw_type, self.raw_base,
                           self.raw_big_endian, self.db)
    except ExcArch as e:
        error("arch %s is not supported" % e.arch)
        if self.interactive_mode:
            return False
        die()
    except ExcFileFormat:
        error("the file is not PE or ELF binary")
        if self.interactive_mode:
            return False
        die()
    except ExcPEFail as e:
        error(str(e.e))
        # BUG FIX: "shoul" -> "should" in the user-facing message.
        error("it seems that there is a random bug in pefile, you should retry.")
        error("please report here https://github.com/joelpx/reverse/issues/16")
        if self.interactive_mode:
            return False
        die()

    self.dis = dis
    self.libarch = dis.load_arch_module()
    return True
def server_loop():
    """Accept TCP clients forever, handing each connection to a daemon thread.

    NOTE(review): the `while True` loop never breaks, so db.disconnect()
    and the final return are unreachable dead code.  The single Database
    connection is also shared by every handler thread — confirm the
    Database class is thread-safe.
    """
    global bind_ip
    global bind_port
    global threads
    db = Database()
    db.connect()
    #db.init()
    db.commit()
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.bind((bind_ip, bind_port))
    s.listen(20)  # backlog of up to 20 pending connections
    while True:
        connection, address = s.accept()
        t = threading.Thread(target=handle_client, args=(connection, db))
        threads.append(t)
        t.setDaemon(True)  # daemon threads don't block interpreter exit
        t.start()
    db.disconnect()
    return
def __init__(self, malware_path, vt_key):
    """Wire up the string-classification pipeline for one malware sample.

    malware_path: path to the sample under analysis.
    vt_key: VirusTotal API key handed to the Vt client.
    """
    self.malware_path = malware_path
    self.db = Database(malware_path)
    Rules.db = self.db  # share the DB handle with every rule class
    self.string_list = None  # presumably filled by a later string-recovery step — TODO confirm
    self.vt = Vt(vt_key, self.malware_path)
    # One matcher object per string category.
    self.ipAddr = IpAddr()
    self.url = Url()
    self.cmd = Cmd()
    self.id = Id()
    self.path = Path()
    self.section = Section()
    self.symbol = Symbol(self.malware_path)
    self.formatStr = FormatStr()
    self.msg = Msg()
    self.undef = Undefined()
    # Canned error messages (typo "invalide" left as-is: runtime string).
    self.err_bad_path = "[Error] invalide malware file path."
    self.err_string_recovery = "[Error] string recovery failed."
    self.err_empty_bin = "[Error] no strings found."
def setUp(self):
    """Load manifest.json and config.json from the repository root (one
    level above this test file) and build the shared Database/Attributes
    fixtures used by the tests."""
    parentpath = (
        os.path.abspath(
            os.path.join(
                os.path.dirname(os.path.realpath(__file__)), os.pardir
            )
        )
    )
    manifestpath = os.path.join(parentpath, 'manifest.json')
    configpath = os.path.join(parentpath, 'config.json')

    rawsettings = None
    with open(configpath, 'r') as file_:
        rawsettings = json.load(file_)
    self.database = Database(rawsettings['options']['datasource'])

    rawmanifest = None
    with open(manifestpath, 'r') as file_:
        rawmanifest = json.load(file_)
    self.attributes = Attributes(rawmanifest['attributes'], self.database)

    self.threshold = rawsettings['options']['threshold']
    self.processes = 2  # fixed worker-pool size for the tests
class RunTestCase(unittest.TestCase):
    """Integration tests for Run against a live reaper database.

    NOTE(review): these tests require the datasource configured in
    config.json to be reachable; they create and delete real rows.
    """

    def setUp(self):
        # Resolve manifest.json / config.json in the repository root
        # (one level above this test file).
        parentpath = (
            os.path.abspath(
                os.path.join(
                    os.path.dirname(os.path.realpath(__file__)), os.pardir
                )
            )
        )
        manifestpath = os.path.join(parentpath, 'manifest.json')
        configpath = os.path.join(parentpath, 'config.json')

        rawsettings = None
        with open(configpath, 'r') as file_:
            rawsettings = json.load(file_)
        self.database = Database(rawsettings['options']['datasource'])

        rawmanifest = None
        with open(manifestpath, 'r') as file_:
            rawmanifest = json.load(file_)
        self.attributes = Attributes(rawmanifest['attributes'], self.database)

        self.threshold = rawsettings['options']['threshold']
        self.processes = 2  # fixed worker-pool size for the tests

    def test_init(self):
        with tempfile.TemporaryDirectory() as directory:
            try:
                # Act
                run = Run(
                    directory, self.attributes, self.database,
                    self.threshold, self.processes
                )

                # Assert
                self.assertIsNotNone(run.run_id)
            finally:
                # Remove the row created by Run's constructor.
                self.database.post(
                    'DELETE FROM reaper_runs WHERE id = {0}'.format(run.run_id)
                )
                self.database.disconnect()

    def test_save(self):
        with tempfile.TemporaryDirectory() as directory:
            # Arrange
            rresults = {
                'architecture': 9.9,
                'continuous_integration': True,
                'community': 9,
                'documentation': 9.9,
                'history': 9.9,
                'license': True,
                'management': 9.9,
                'unit_test': 9.9,
                'state': 'active'
            }
            run = Run(
                directory, self.attributes, self.database,
                self.threshold, self.processes
            )

            # Act
            run._save(10868464, 99.99, rresults)

            # Assert
            try:
                self.database.connect()
                actual = self.database.get(
                    '''
                    SELECT project_id, architecture, continuous_integration,
                        community, documentation, history, license,
                        management, unit_test, state, score
                    FROM reaper_results WHERE run_id = {0}
                    '''.format(run.run_id)
                )
                self.assertEqual(10868464, actual[0])
                self.assertEqual(9.9, actual[1])
                self.assertEqual(True, actual[2])
                self.assertEqual(9, actual[3])
                self.assertEqual(9.9, actual[4])
                self.assertEqual(9.9, actual[5])
                self.assertEqual(True, actual[6])
                self.assertEqual(9.9, actual[7])
                self.assertEqual(9.9, actual[8])
                self.assertEqual('active', actual[9])
                # 99.99 comes back as 99.989998 — presumably the score
                # column is a single-precision FLOAT; confirm against schema.
                self.assertEqual(99.989998, actual[10])
            finally:
                self.database.post(
                    'DELETE FROM reaper_runs WHERE id = {0}'.format(run.run_id)
                )
                self.database.disconnect()
def __init__(self):
    """Populate the shared settings cache from the database on first use."""
    if Settings._SETTINGS:
        return  # already loaded by an earlier instance
    rows = Database().execute("SELECT * FROM settings").fetchAll()
    self._makeSettingsDictionary(rows)
class Bot(object):
    """Telegram bot: loads command plugins, dispatches messages, polls."""

    def __init__(self, config):
        """Set up the API client, logging, database, admins, commands and
        message handlers.  `config` is a ConfigParser-like object."""
        self._config = config
        # skip_pending drops updates that arrived while the bot was offline.
        self.telegram = telebot.TeleBot(config.get('bot', 'key'), skip_pending=True)
        self.bot_user = self.telegram.get_me()  # network call
        telebot.logger.setLevel(logging.WARNING)
        self._logger = logging.getLogger('pyper')
        self.database = Database('data/pyper.json')
        self.admins = []
        self.__init_config()
        self.commands = {}
        self.__init_commands()
        self.__init_handlers()

    def __init_config(self):
        """Read the optional JSON list of admin user ids from the config."""
        self._logger.info('Loading config.')
        try:
            if self._config.has_option('bot', 'admins'):
                self.admins = json.loads(self._config.get('bot', 'admins'))
        except ValueError as ex:
            # Malformed JSON: log it and keep the empty admin list.
            self._logger.exception(ex)
        self._logger.info('Bot admin IDs: %s', self.admins)

    def __init_commands(self):
        """Import command modules and register every non-disabled Command
        subclass found after the imports."""
        self._logger.info('Loading commands.')
        importdir.do('commands', globals())
        # Optionally load extra commands from a user-configured directory.
        if self._config.has_option('bot', 'extra_commands_dir'):
            extra_commands_dir = self._config.get('bot', 'extra_commands_dir')
            if extra_commands_dir and os.path.exists(extra_commands_dir):
                self._logger.info('Added %s to command load path.', extra_commands_dir)
                importdir.do(extra_commands_dir, globals())
        disabled_commands = []
        try:
            if self._config.has_option('bot', 'disabled_commands'):
                disabled_commands = json.loads(self._config.get('bot', 'disabled_commands'))
        except ValueError as ex:
            self._logger.exception(ex)
        for command in Command.__subclasses__():
            if command.name not in disabled_commands:
                self.__enable_command(command)
            else:
                # NOTE(review): this only deletes the loop variable; it does
                # not unload the disabled command class.
                del command
        self._logger.info('Enabled commands: %s.', list(self.commands.keys()))
        if disabled_commands:
            self._logger.info('Disabled commands: %s.', disabled_commands)

    def __init_handlers(self):
        """Register Telegram handlers: '/...' texts go to the command
        dispatcher, everything else is only recorded in the database."""
        @self.telegram.message_handler(func=lambda m: m.text and m.from_user and m.text.startswith('/'),
                                       content_types=['text'])
        def handle_command(m):
            self.database.process_message(m)
            self.__handle_command(m)

        @self.telegram.message_handler(func=lambda m: True)
        def handle_message(m):
            self._logger.debug('Update: %s', m)
            self.database.process_message(m)

    def __parse_command(self, message):
        """Split a '/command@bot arg …' message into (command, args).

        Raises CommandParseException when the text is empty or, in a group
        chat, when the command is addressed to a different bot.
        """
        message_text = message.text.lstrip('/ \n\r')
        if not message_text:
            raise CommandParseException('Could not parse message into command: {0}'.format(message_text))
        try:
            command_split = shlex.split(message_text)
        except ValueError:
            # Unbalanced quotes: fall back to naive whitespace splitting.
            command_split = message_text.split()
        command, __, bot_name = command_split.pop(0).partition('@')
        command = command.lower()
        if bot_name and bot_name != self.bot_user.username and message.chat.type != 'private':
            raise CommandParseException('Command not intended for this bot: {0}'.format(message_text))
        args = list(filter(bool, command_split))
        return command, args

    def __handle_command(self, message):
        """Find the command matching the message and run it if authorized."""
        user = message.from_user
        if self.database.get_user_value(message.from_user, 'ignored'):
            self._logger.debug('Ignoring message because user %s is ignored.', user_to_string(user))
            return
        try:
            command_trigger, args = self.__parse_command(message)
        except CommandParseException:
            return
        for command in self.commands:
            command = self.commands[command]
            if command_trigger == command.name or command_trigger in command.aliases:
                log_msg = 'Command \'{0}\' with args {1} invoked by user {2}'.format(command.name, args, user_to_string(user))
                if message.chat.type != 'private':
                    log_msg += ' from chat {0}'.format(chat_to_string(message.chat))
                if command.authorized(user):
                    self._logger.info(log_msg)
                    if self._logger.getEffectiveLevel() == logging.DEBUG:
                        # Time the command only when debug logging is on.
                        t = timeit.Timer(lambda: command.run(message, args), 'gc.enable()')
                        self._logger.debug('Command \'{0}\' finished in {1:.0f} ms'.format(
                            command.name, t.timeit(number=1) * 1000))
                    else:
                        command.run(message, args)
                else:
                    log_msg += ', but access was denied.'
                    self._logger.info(log_msg)
                    command.reply(message, 'You are not authorized to use that command!')

    def poll(self):
        """Run long polling; restart (recursively) on network errors."""
        try:
            self._logger.info('Started polling as @%s', self.bot_user.username)
            self.telegram.polling(none_stop=True, timeout=5)
        except requests.exceptions.RequestException as ex:
            self._logger.exception(ex)
            self._logger.warn('Restarting polling due to RequestException.')
            self.telegram.stop_polling()
            self.poll()

    def __enable_command(self, command):
        # Instantiate the command class once, with its own config section
        # (if the config file has one) or None.
        if command not in self.commands.values():
            config = dict(self._config.items(command.name)) if self._config.has_section(command.name) else None
            command = command(self, config)
            self.commands[command.name] = command

    def ignore(self, user):
        """Mark a user so their messages are dropped by the dispatcher."""
        self._logger.info('Ignored user %s', user_to_string(user))
        self.database.set_user_value(user, 'ignored', True)

    def unignore(self, user):
        """Clear a user's ignored flag."""
        self._logger.info('Unignored user %s', user_to_string(user))
        self.database.set_user_value(user, 'ignored', False)

    def is_me(self, user):
        """True when `user` is this bot's own account."""
        return user.id == self.bot_user.id

    def is_admin(self, user):
        """True when `user` is in the configured admin list."""
        return user.id in self.admins
class TicketSystem:
    """Ticket store plus search index, with AES-protected QR barcodes.

    Column-family names and index keys are Vietnamese runtime strings and
    must not be changed: "Thông tin vé" = ticket info, "Tìm kiếm" = search,
    "Vé chưa bán" = unsold tickets, "Vé đã bán" = sold tickets,
    "Người mua" = buyer.

    NOTE(review): md5(str(...)) implies Python 2 — on Python 3 hashlib
    functions require bytes.
    """

    def __init__(self):
        self.tdb = Database(column_family="Thông tin vé")  # ticket info store
        self.index = Database(column_family="Tìm kiếm")    # search index
        self.barcode = Encoder()
        self.secure = Secure()

    def add(self, ticket_info):
        """Add a ticket to the unsold list.

        ticket_info is a dict like:
            {"Từ": "Hà Nội", "Đến": "Thanh Hóa",
             "Ngày": "26/5/2010", "Giờ xuất phát": "3:30"}
        Returns the ticket_id on success, None on failure.
        """
        try:
            # The ticket id is the md5 of the dict's string representation.
            ticket_id = md5(str(ticket_info)).hexdigest()
            self.tdb.insert(ticket_id, ticket_info)
            self.index.insert("Vé chưa bán", {ticket_id: str(time())})
            # Index the ticket under each field name and "field: value" pair.
            for key in ticket_info.keys():
                self.index.insert(key, {ticket_id: str(time())})
                self.index.insert("%s: %s" % (key, ticket_info[key]),
                                  {ticket_id: str(time())})
            return ticket_id
        except KeyError:
            return None

    def buy(self, ticket_id, user_id=None):
        """Mark a ticket as sold and issue its barcode.

        (The original docstring claimed a timestamp return, but the code
        returns the generated barcode image; None on error.)
        """
        try:
            self.tdb.insert(ticket_id, {"Người mua": str(user_id)})
            self.index.insert("Vé đã bán", {ticket_id: str(time())})
            self.index.remove("Vé chưa bán", ticket_id)
            # Remove the ticket from every search-index entry.
            ticket_info = self.tdb.get(ticket_id)
            for key in ticket_info.keys():
                self.index.remove(key, ticket_id)
                self.index.remove("%s: %s" % (key, ticket_info[key]), ticket_id)
            # Encrypt the id and store the QR image under the encrypted id.
            ticket_id = self.secure.aes_encode(ticket_id, AES_SECRET_FILE)
            barcode_image = self.barcode.qrcode(ticket_id)
            self.tdb.insert(ticket_id, {"Barcode Image": barcode_image})
            # TODO: write log
            return barcode_image
        except KeyError:
            return None

    def get_info(self, ticket_id, key=None):
        """Return the whole ticket dict, or a single field when `key` is
        given; None when the ticket or field does not exist."""
        try:
            if key:
                return self.tdb.get(ticket_id)[key]
            return self.tdb.get(ticket_id)
        except KeyError:
            return None

    def available_list(self):
        """Ids of all unsold tickets."""
        return self.index.get("Vé chưa bán").keys()

    def check(self, encrypted_ticket_id):
        """Return 1 when the encrypted id decodes to a known ticket, else 0."""
        try:
            ticket_id = self.secure.aes_decode(encrypted_ticket_id,
                                               AES_SECRET_FILE)
            self.tdb.get(ticket_id)
            return 1
        except (NotFoundException, ValueError):
            return 0

    def resend(self, ticket_id):
        """Re-send the ticket to its rightful owner in case of a dispute.
        (Not implemented.)"""
        pass

    def search(self, query):
        """Return tickets matching a 'field: value' query string, e.g.
        "Từ: Hà Nội"; empty list when nothing matches."""
        try:
            match_ids = self.index.get(query).keys()
            results = []
            for id in match_ids:
                result = self.tdb.get(id)
                results.append(result)
            return results
        except NotFoundException:
            return []

    def notification(self):
        # Not implemented.
        pass
#! /usr/bin/env python #! coding: utf-8 # pylint: disable-msg=W0311 from lib.database import Database database = Database(column_family="Tìm kiếm") ticket = database.get(database.get("Từ: Hà Nội").keys()[0]) for i in ticket.keys(): print i, ticket[i]
# Kodi add-on artwork settings (Python 2: translatePath returns bytes,
# hence the .decode('utf-8')).  Each flag setting is stored as the
# string 'true'/'false'.
enable_force_update = (__addon__.getSetting("enable_force_update") == 'true')
enable_search_folders = (__addon__.getSetting("enable_search_folders") == 'true')
enable_artwork_folder = (__addon__.getSetting("enable_artwork_folder") == 'true')
artwork_folder_path = xbmc.translatePath( __addon__.getSetting( "artwork_folder_path" ) ).decode('utf-8')
include_set_prefix = (__addon__.getSetting("include_set_prefix") == 'true')
recurse_artwork_folder = (__addon__.getSetting("recurse_artwork_folder") == 'true')

# Comma-separated candidate file names for each artwork type.
setting_thumb_filenames = [ name.strip() for name in __addon__.getSetting( "filename_thumb" ).split(',') ]
setting_poster_filenames = [ name.strip() for name in __addon__.getSetting( "filename_poster" ).split(',') ]
setting_fanart_filenames = [ name.strip() for name in __addon__.getSetting( "filename_fanart" ).split(',') ]
setting_logo_filenames = [ name.strip() for name in __addon__.getSetting( "filename_logo" ).split(',') ]
setting_clearart_filenames = [ name.strip() for name in __addon__.getSetting( "filename_clearart" ).split(',') ]
setting_banner_filenames = [ name.strip() for name in __addon__.getSetting( "filename_banner" ).split(',') ]
setting_extrafanart1_filenames = [ name.strip() for name in __addon__.getSetting( "filename_extrafanart1" ).split(',') ]

# Module-level singletons shared by the scanner functions.
DB = Database()
TBN = Thumbnails()
cached_file_maps = {}


def file_exists( file_path ):
    """Return True when file_path exists on the (possibly virtual) FS."""
    exists = vfs_exists( file_path )
    # if not exists:
    #     log( "File doesn't exist: %s" % (file_path) )
    return exists


def join_path ( base, name ):
    """Join base and name; network-style URLs (scheme://) always use '/'."""
    # Check if base folder is a network path, if so always use '/' separator
    if re.search("^.*://.*", base):
        return "%s/%s" % (base, name)
    else:
        return os.path.join(base, name)
from lib.database import Database
from lib.termcolor import Termcolor

# Ad-blocking script entry point.
# Argument parsing: -a (apply), -d (deactivate) and -u (update) are
# mutually exclusive; -o names the output file.
parser = ArgumentParser(description='A python3 script to block publicity')
parser.add_argument('-o', dest='filename', help='output file')
group = parser.add_mutually_exclusive_group()
group.add_argument('-a', action='store_true', help='apply blocking')
group.add_argument('-d', action='store_true', help='deactivate blocking')
group.add_argument('-u', action='store_true', help='update database')
args = parser.parse_args()

config = Config()
config.write()
database = Database()

# Deactivation short-circuits everything else.
if args.d:
    database.export(None, True)
    sys.exit(0)

# NOTE(review): args.u is repurposed here as a "database already existed"
# flag — presumably create() returns True when it built a fresh database;
# confirm against lib.database before touching this logic.
args.u = not database.create()
if not args.a or args.u:
    database.populate()
# Export when not updating, when applying, or when run with no arguments.
if not args.u or args.a or not sys.argv[1:]:
    database.export(args.filename)
def __init__(self):
    """Open the ticket stores and helpers used by the ticket system."""
    # Column-family names are Vietnamese runtime strings:
    # "Thông tin vé" = ticket info, "Tìm kiếm" = search index.
    self.tdb = Database(column_family="Thông tin vé")
    self.index = Database(column_family="Tìm kiếm")
    self.barcode = Encoder()  # QR/barcode generator
    self.secure = Secure()    # AES encode/decode helper
class Controller():
    """Facade that routes application calls to the Database and
    Authentication layers; most methods are thin delegations."""

    def __init__(self, db_name='db.json'):
        self.db = Database(name=db_name)
        self.a = Authentication(db_name=db_name)

    def login(self, username, password):
        """True when the credentials match a stored user."""
        return self.a.login(username, password)

    def create_client(self, username, **kwargs):
        """Create a client account; returns its user id."""
        return self.a.create_user('client', username, **kwargs)

    def create_client_req(self, **kwargs):
        """Create an event and link it to the owning client (kwargs must
        contain 'client_id'); returns the new event id."""
        event_id = self.db.new_event(**kwargs)
        # Append the new event id to the client's existing event list.
        clients_events = self.db.get_client('id',kwargs['client_id'], all_data=False)[0]['events']
        clients_events.append(event_id)
        self.db.update_client_events(kwargs['client_id'], clients_events)
        return event_id

    def create_employee(self, username, **kwargs):
        """Create an employee account; returns its user id."""
        return self.a.create_user('employee', username, **kwargs)

    def update_event(self, **kwargs):
        self.db.update_event(kwargs)

    def create_task(self, **kwargs):
        return self.db.new_task(**kwargs)

    def update_task(self, **kwargs):
        self.db.update_task(kwargs)

    def create_financial_req(self, **kwargs):
        return self.db.new_financial_req(**kwargs)

    def create_recruitment_req(self, **kwargs):
        return self.db.new_recruitment_req(**kwargs)

    def update_recruitment_req(self, **kwargs):
        return self.db.update_recruitment_req(kwargs)

    def update_financial_req(self, **kwargs):
        self.db.update_financial_req(kwargs)

    def get_user_id(self, username):
        """Resolve a username to its stored user id."""
        return self.db.get_login_data(username)['user_id']

    def update_client_events(self, cl_id, events):
        self.db.update_client_events(cl_id, events)

    # Typed lookups, all funnelled through _get_data.
    def get_client(self, col_name='', criteria='', all_data=True):
        return self._get_data('client', col_name, criteria, all_data)

    def get_employee(self, col_name='', criteria='', all_data=True):
        return self._get_data('employee', col_name, criteria, all_data)

    def get_task(self, col_name='', criteria='', all_data=True):
        return self._get_data('task', col_name, criteria, all_data)

    def get_event(self, col_name='', criteria='', all_data=True):
        return self._get_data('event', col_name, criteria, all_data)

    def get_financial_req(self, col_name='', criteria='', all_data=True):
        return self._get_data('financial_req', col_name, criteria, all_data)

    def get_recruitment_req(self, col_name='', criteria='', all_data=True):
        return self._get_data('recruitment_req', col_name, criteria, all_data)

    def _get_data(self, table, col_name, criteria, all_data):
        # Dispatch to the matching Database.get_<table>() accessor.
        return getattr(self.db, "get_%s" % table)(col_name, criteria, all_data)
def markAsRead(self):
    """Persist this request's read flag and mirror it on the instance."""
    Database().execute("UPDATE requests SET read = 1 WHERE id = ?",
                       (self._id,))
    self._read = True
def __init__(self, db_name='db.json'):
    """Open the shared data store and an Authentication helper bound to
    the same database file."""
    self.db = Database(name=db_name)
    self.a = Authentication(db_name=db_name)
# Sync script: pulls sales-receipt and inventory data from QuickBooks
# (via the QODBC ODBC driver) and mirrors it into the Rackspace MySQL schema.
conf = ConfigParser()
# BUG FIX: conf.read_file(open('config.ini')) leaked the file handle;
# close it deterministically with a context manager.
with open('config.ini') as config_file:
    conf.read_file(config_file)

# configuration
mysql_host = conf.get('mysql', 'host')
mysql_port = conf.getint('mysql', 'port')
mysql_user = conf.get('mysql', 'user')
mysql_password = conf.get('mysql', 'password')
mysql_db = conf.get('mysql', 'schema')
qodbc_dsn = conf.get('qodbc', 'dsn')

# set defaults
Entity.company_file = conf.get('company', 'file_number')
if(conf.get('company', 'refresh_from')):
    Entity.last_entry_datetime = conf.get('company', 'refresh_from')

rackspace = Database(pymysql.connect(host=mysql_host, port=mysql_port,
                                     user=mysql_user, passwd=mysql_password,
                                     db=mysql_db, use_unicode=True,
                                     charset="utf8"))
quickbooks = Database(pypyodbc.connect('DSN='+qodbc_dsn, autocommit=True))
print('connected')

SalesReceipt(quickbooks,rackspace).sync()
SalesReceiptItem(quickbooks,rackspace).sync()
Inventory(quickbooks,rackspace).sync()

rackspace.disconnect()
quickbooks.disconnect()
print('disconnected')