def check_connection(self, tbl, pw):
    con = Connection(table=tbl, password=pw)
    con.open()
    if con.status is not None:
        self.error = con.status
        return False
    con.close()
    return True
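# Hypothetical usage sketch (not from the original source): check_connection()
# above is written as a method, so a minimal host class is assumed here purely
# for illustration; the table name and password are placeholders.
class ConnectionChecker:
    def __init__(self):
        self.error = None

    check_connection = check_connection  # reuse the function above as a method

checker = ConnectionChecker()
if not checker.check_connection('example_table', 'example_pw'):
    print('connection failed:', checker.error)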
def clear(agent_id=None, all_agents=False):
    """
    Clears the database.

    :param agent_id: For an agent.
    :param all_agents: For all agents.
    :return: Message.
    """
    # Clear DB
    conn = Connection(common.database_path)
    regex = re.compile(r'^\d{,3}-\S+$')
    db_agents_list = []

    if not int(all_agents):
        raw_str = r'^' + "{}".format(int(agent_id)).zfill(3) + r'-\S+$'
        regex = re.compile(raw_str)

    for db_agent in conn.getDbsName():
        if regex.search(db_agent) is not None:
            db_agents_list.append(db_agent)

    if len(db_agents_list) <= 0:
        raise OssecAPIException(1600)

    for db_agent in db_agents_list:
        conn.connect(db_agent)
        if conn.getDb() is not None:
            doc = conn.getDb()['pm_event']
            if doc is not None:
                doc.drop()
                conn.vacuum()
            doc = conn.getDb()['pmCounterInfo']
            if doc is not None:
                doc.drop()
                conn.vacuum()

    # Clear OSSEC info
    if int(all_agents):
        rootcheck_files = glob('{0}/queue/rootcheck/*'.format(common.ossec_path))
    else:
        if agent_id == "000":
            rootcheck_files = ['{0}/queue/rootcheck/rootcheck'.format(common.ossec_path)]
        else:
            agent_info = Agent(agent_id).get_basic_information()
            rootcheck_files = glob('{0}/queue/rootcheck/({1}) {2}->rootcheck'.format(
                common.ossec_path, agent_info['name'], agent_info['ip']))

    for rootcheck_file in rootcheck_files:
        if path.exists(rootcheck_file):
            remove(rootcheck_file)

    return "Rootcheck database deleted"
def print_db(agent_id=None, status='all', pci=None, offset=0,
             limit=common.database_limit, sort=None, search=None):
    """
    Returns a list of events from the database.

    :param agent_id: Agent ID.
    :param status: Filters by status: outstanding, solved, all.
    :param pci: Filters by PCI DSS requirement.
    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string.
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    """
    # Connection
    db_url = common.database_path
    conn = Connection(db_url)
    conn.connect(conn.getDbById(str(agent_id).zfill(3)))
    if conn.getDb() is None:
        raise OssecAPIException(1600)

    request = {"$and": []}

    lastRootcheckEndTime = None
    lastRootcheckEndTimeObjs = list(conn.getDb()['pm_event'].find(
        {"log": 'Ending rootcheck scan.'}).sort([('date_last', -1)]).limit(1))
    if lastRootcheckEndTimeObjs:
        lastRootcheckEndTime = lastRootcheckEndTimeObjs[0].get('date_last', datetime.now())

    fields = {
        'status': 'status',
        'event': 'log',
        'oldDay': 'date_first',
        'readDay': 'date_last'
    }

    request['$and'].append({
        'log': {
            '$nin': [
                'Starting rootcheck scan.', 'Ending rootcheck scan.',
                'Starting syscheck scan.', 'Ending syscheck scan.'
            ]
        }
    })

    if status == 'outstanding':
        if lastRootcheckEndTime is not None:
            request['$and'].append({
                'date_last': {'$gt': (lastRootcheckEndTime - timedelta(seconds=86400))}
            })
    elif status == 'solved':
        if lastRootcheckEndTime is not None:
            request['$and'].append({
                'date_last': {'$lte': (lastRootcheckEndTime - timedelta(seconds=86400))}
            })

    if pci:
        request["$and"].append({"pci_dss": pci})

    # Search
    if search:
        regex = re.compile(".*{0}.*".format(int(search['value']) if search['value'].isdigit()
                                            else search['value']), re.IGNORECASE)
        search_con = {"$or": []}
        for x in fields.values():
            search_con["$or"].append({x: regex})
        if bool(search['negation']):
            if search_con["$or"]:
                request["$and"].append({"$not": search_con})
        else:
            if search_con["$or"]:
                request["$and"].append(search_con)

    # Sorting
    sort_con = []
    if sort:
        if sort['fields']:
            allowed_sort_fields = set(fields.keys())
            # Check if every element in sort['fields'] is in allowed_sort_fields
            if not set(sort['fields']).issubset(allowed_sort_fields):
                uncorrect_fields = list(map(lambda x: str(x),
                                            set(sort['fields']) - set(allowed_sort_fields)))
                raise OssecAPIException(1403, 'Allowed sort fields: {0}. Fields: {1}'.format(
                    allowed_sort_fields, uncorrect_fields))
            for i in sort['fields']:
                str_order = 1 if sort['order'] == 'asc' else -1
                sort_con.append((fields[i], str_order))
        else:
            sort_con.append((fields["readDay"], 1 if sort['order'] == 'asc' else -1))
    else:
        sort_con.append((fields["readDay"], -1))

    if limit:
        if limit > common.maximum_database_limit:
            raise OssecAPIException(1405, str(limit))
    elif limit == 0:
        raise OssecAPIException(1406)

    select = ["status", "date_first", "date_last", "log", "pci_dss"]
    select_fields = {}
    for x in set(select):
        select_fields[x] = 1

    if not request["$and"]:
        request = {}

    data = {}
    db_data = conn.getDb()['pm_event'].find(request, select_fields)
    data['totalItems'] = db_data.count()
    db_data = db_data.sort(sort_con).skip(offset).limit(limit)

    # Process the returned data
    data['items'] = []
    for pmEvent in db_data:
        pmEvent.pop('_id')
        if pmEvent.get("date_last") is not None and lastRootcheckEndTime is not None:
            if pmEvent['date_last'] > lastRootcheckEndTime:
                pmEvent['status'] = 'outstanding'
            elif pmEvent['date_last'] <= lastRootcheckEndTime:
                pmEvent['status'] = 'solved'
        if pmEvent.get("date_first") is not None:
            pmEvent['date_first'] = (pmEvent.get("date_first") +
                                     timedelta(seconds=timeoffset)).__str__()
        else:
            pmEvent['date_first'] = pmEvent.get("date_first").__str__()
        if pmEvent.get("date_last") is not None:
            pmEvent['date_last'] = (pmEvent.get("date_last") +
                                    timedelta(seconds=timeoffset)).__str__()
        else:
            pmEvent['date_last'] = pmEvent.get("date_last").__str__()
        data['items'].append(pmEvent)

    return data
def process_item(self, item, spider):
    """For each item, check if uname exists. Write tweet to database."""
    db = Connection(host=HOST_NAME, database=MYSQL_DB_NAME,
                    user=MYSQL_USER_NAME, password=MYSQL_PASSWORD)
    tweet = item["tweet"]
    user = db.get("SELECT id FROM users WHERE username=%s", item["username"])
    twt = db.get("SELECT * FROM tweets WHERE tweet=%s", tweet)
    if not twt:
        if user:
            db.execute("INSERT INTO tweets (user, tweet) VALUES (%s, %s)",
                       user["id"], tweet)
        else:
            db.execute("INSERT INTO users (username) VALUES (%s)", item["username"])
            user = db.get("SELECT id FROM users WHERE username=%s", item["username"])
            db.execute("INSERT INTO tweets (user, tweet) VALUES (%s, %s)",
                       user["id"], tweet)
    db.close()
    return item
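# Hypothetical sketch (an assumption, not from the original project):
# process_item() above is a Scrapy item pipeline, which is normally enabled in
# the project's settings.py. The module path and priority below are
# placeholders for illustration only.
ITEM_PIPELINES = {
    'myproject.pipelines.TweetPipeline': 300,
}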
from get_info import grabber
from send_email import Email
from database import Connection
import configparser

### User credential details block
config = configparser.ConfigParser()
config.read('config.cfg')
MysqlHost = config.get('MOVIES', 'MYSQL_HOST')
username = config.get('MOVIES', 'MYSQL_USER')
password = config.get('MOVIES', 'MYSQL_PASSWORD')
##-----------------------------------------------------------------------------
##-----------------------------------------------------------------------------

conn = Connection(username, password)
conn.connect()
conn.create_table()

print("Enter the number of users you want to register:", end=" ")
n = int(input())
for i in range(n):
    x = input("Email address: ")
    tv = input("TV series: ")
    insert = True
    for ser in conn.select_value(x):
        if ser[0] == tv:
            insert = False
    if insert:
        conn.insert_value(x.lower(), tv.lower())
    else:
        print("Database already has your information")
from ldif3 import LDIFParser
from asn1crypto import crl, x509
import re
#from database.storage.DSC import CertX509
from pymrtd.pki.crl import writeToDB, readFromDB
from pymrtd.pki.crl import CertificateRevocationList
from database import Connection

conn = Connection("nejko", "nejko", "icao")

certificateList = {}
revocationList = {}
parser = LDIFParser(open('C://Users/nejko/Desktop/ZeroPass/B1/random/parseCSCAandCRL/database/icaopkd-001-dsccrl-003749.ldif', 'rb'))
for dn, entry in parser.parse():
    if 'userCertificate;binary' in entry:
        countryCode = re.findall(r'[c,C]{1}=(.*)(,dc=database){1}', dn)[0][0]
        cert = x509.Certificate.load(*entry['userCertificate;binary'])
        if countryCode not in certificateList:
            certificateList[countryCode] = {}
        certificateList[countryCode][cert.serial_number] = cert
    if 'certificateRevocationList;binary' in entry:
        countryCode = re.findall(r'[c,C]{1}=(.*)(,dc=database){1}', dn)[0][0]
        ##revocationList[countryCode] = x509.load_der_x509_crl(*entry['certificateRevocationList;binary'], default_backend())
        revocationList[countryCode] = crl.CertificateList.load(*entry['certificateRevocationList;binary'])
        revocationListInObject = revocationList[countryCode]
        #revocationListInObject1 = CertificateRevocationList(revocationListInObject)
def dblist():
    cnxn = Connection(cfg.db_system, cfg.db_server, cfg.db_uid,
                      cfg.db_pwd, cfg.db_name)
    return {'data': {'records': cnxn.get_databases()}}
def __init__(self):
    args = Argument.get()
    self.db = Connection(args.driver, args.server, args.database,
                         args.userid, args.password)
def test_99teardown():
    # connect and drop the test database
    with Connection():
        util_drop_all()
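# Hypothetical sketch (an assumption, not the original class): for the
# "with Connection():" statement above to work, Connection must implement the
# context-manager protocol. A minimal version might look like this.
class Connection:
    def __enter__(self):
        # open the underlying database connection here
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # always close the connection, even if the body raised
        self.close()
        return False  # do not suppress exceptions

    def close(self):
        pass  # placeholder; the real class would release resources here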
# coding=UTF-8
# script created by Konstantyn Davidenko
# mail: [email protected]

from database import Connection
from flask import render_template, redirect, session, url_for, request

conn = Connection()


class PrivateMess:
    def __init__(self, username, mess):
        from sqlalchemy import func
        self.time = func.now()
        self.author = username
        self.message = mess


def home():
    kw = {}
    if session.get('logged_in'):
        user = conn.get_user_by_name(session.get('username'))
        kw['mychats'] = conn.get_my_chats(user.id)
    kw['chats'] = conn.get_chats()
    kw['username'] = session.get('username')
    kw['title'] = 'Chats'
    return render_template('index.html', **kw)


def login(user, password):
    user = conn.check_credention(login=user, password=password)
from database import Connection
from connectdb import Connection  # note: this shadows the import above
from db_helper import *

connection = Connection.session()
all_users = connection.query(Mortalidade).all()
print(all_users)
from flask import Flask, render_template, redirect, url_for, request, session, make_response, flash
from database import Connection  # assumed import; the original snippet uses Connection without importing it
import hashlib
import json
import tweepy
import re
import csv
import os

app = Flask(__name__)
app.secret_key = 'somerandomvalue'
app.config['UPLOAD_FOLDER'] = 'user-content/'

# consumer_token, consumer_secret, key, and secret are expected to be defined elsewhere
auth = tweepy.OAuthHandler(consumer_token, consumer_secret)
auth.set_access_token(key, secret)
twapi = tweepy.API(auth, wait_on_rate_limit=True)

db = Connection(app, 'remote.thekrishna.in', 27017)


# Push to Database Functions
def db_push_commons(username, email, full_name, date_of_birth, address,
                    state, city, pincode, crime_type, platform, post_content):
    """ Push Common Data for all platforms database """
    complaint = {
        'username': username,
        'email': email,
        'full_name': full_name,
        'date_of_birth': date_of_birth,
        'address': address,
        'state': state,
import config
# app.py
from exceptions import InvalidLoginError, UsernameTakenError

# Initialize flask
from flask import Flask, render_template, url_for, session, redirect, request, escape
app = Flask(__name__)
app.debug = True
app.secret_key = config.secret_key  # yeah, i don't care. hack me

# Establish connection to the database
from database import Connection
db = Connection(app)

# Initialize chatlogger
from chatlog import Logger
logger = Logger('chat.log')
# logger.message('global', 'admin', 'test')


# Index, shows the chatrooms
@app.route('/')
def index():
    if 'username' not in session:
        return redirect(url_for('login'))
    rooms = db.get_rooms()
    return render_template('rooms.html', username=session['username'],
def main():
    print("Connecting")
    db = Connection()
    db.connect()

    print("Compressing thumbnails")
    db.execute("""SELECT id FROM thumbnail""")
    for (id,) in db.cursor.fetchall():
        db.execute("""SELECT thumbnail FROM thumbnail WHERE id = %d""" % id)
        thumbnail, = db.cursor.fetchone()
        new_thumbnail = compress(id, thumbnail)
        if new_thumbnail:
            db.execute(
                """UPDATE thumbnail SET thumbnail = %(tn)s WHERE id = %(id)s""",
                {"tn": db.make_binary(new_thumbnail), "id": id},
            )
            db.commit()
            print("Compressed thumbnail %d from %d bytes to %d bytes"
                  % (id, len(thumbnail), len(new_thumbnail)))
"SELECT * FROM `" + tableName + "` WHERE `news_id`=%s limit 0,1", data['news_id']) if not dataGet: dataId = self.db.insert(tableName, **dict(data)) else: dataId = dataGet.id pass self.db.commit() return dataId def updateData(self, table_name, data): if 'id' in data.keys(): for key in data.keys(): affect = self.db.execute( 'update ' + table_name + ' set ' + key + '=%s where id=%s', unicode(data[key]), data['id']) self.db.commit() return affect else: return 0 if __name__ == "__main__": db = Connection(host='localhost', database='spider', user='******', password='******') spider = youtholSpider(db) spider.doFetchContentJob() #spider.fetchArticleList()
import uuid

from celery import Celery
from celery.utils.log import get_task_logger

from fmail import Message
from utils import signer, mail, DEFAULT_MAIL_SENDER
from database import Connection
import settings

logger = get_task_logger(__name__)

celery = Celery('tasks', broker='redis://localhost:6379/0')

mysql = Connection(host=settings.MYSQL_HOST,
                   database=settings.MYSQL_DATABASE,
                   user=settings.MYSQL_USER,
                   password=settings.MYSQL_PASSWORD,
                   autocommit=False)


@celery.task
def exchange():
    """docstring for exchange"""
    try:
        lowest_ask_order = mysql.get('select * from ask_order_view limit 1')
        highest_bid_order = mysql.get('select * from bid_order_view limit 1')
        # debug
        if settings.CELERY_DEBUG:
            logger.info('++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++')
            logger.info(highest_bid_order)
        log_message('Error Hospital %s ', hospital.url)
        yield hospital.id, hospital.province_id, hospital.city_id, \
            hospital.city_uri, hospital.page_uri, hospital.url

    def isHospitalExisted(self, hospital_url):
        print(hospital_url)
        hospital = self.db.get(
            "SELECT * FROM `hospitals` WHERE url=%s limit 0,1", hospital_url)
        if hospital is not None:
            log_message('Hospital existed %s', hospital_url)
            return True
        else:
            return False

    def isPageGot(self, page_uri):
        count = self.db.count(
            "SELECT count(*) FROM `hospitals` WHERE page_uri=%s", page_uri)
        if count == 10:
            log_message('Page pass %s', page_uri)
            return True
        else:
            return False


if __name__ == "__main__":
    db = Connection(host='localhost', database='python_spider_test',
                    user='******', password='******')
    spider = hospitalSpider(db)
    spider.fetchHospitalList()
# app.py

# Initialize flask
from flask import Flask, render_template, url_for, session, redirect, request
from exceptions import InvalidLoginError
app = Flask(__name__)
app.debug = True
app.secret_key = b'JPtUKpetQiyfzGpBS5SM'  # yeah, i don't care. hack me

# Establish connection to the database
from database import Connection  # See database.py file (line #13)
db = Connection(app, 'db1;db2;db3', 27017)

# Initialize chatlogger
from chatlog import Logger
logger = Logger('chat.log')
logger.message('global', 'admin', 'test')


# Index, shows the chatrooms
@app.route('/')
def index():
    if 'username' not in session:
        return redirect(url_for('login'))
    rooms = db.get_rooms()
mysql_auth = {
    'hostname': 'localhost',
    'user': '******',
    'password': '******',
    'database': 'edu_raw',
}

# Creating tables if they don't exist
create_tables(**mysql_auth)

# Adding a book, fetching 'em all, truncate table
dummy.run(**mysql_auth)

# Working with models
conn = Connection(**mysql_auth)
with conn:
    group = Group(conn, 'СГН3-71')
    group.save()

    groups = group.objects.all()
    for i, g in enumerate(groups, 1):
        print('Group #{}: {}'.format(i, g))
    print('> get :: Group #1: {}'.format(group.objects.get(id=1)))

    student = Student(conn, 'Михаил', 'Волынов', 1)
    student.save()

    students = student.objects.all()
    for i, s in enumerate(students, 1):
        print('Student #{}: {}'.format(i, s))
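# Hypothetical sketch (an assumption, not the original library): the usage
# above implies an active-record pattern where each instance exposes save()
# and an .objects manager offering all() and get(id=...). A minimal in-memory
# version might look like this; the real Group/Student presumably issue SQL
# through conn instead.
class Manager:
    def __init__(self):
        self._rows = {}
        self._next_id = 1

    def add(self, obj):
        self._rows[self._next_id] = obj
        self._next_id += 1

    def all(self):
        return list(self._rows.values())

    def get(self, id):
        return self._rows[id]


class Group:
    objects = Manager()  # one shared manager per model class

    def __init__(self, conn, title):
        self.conn = conn  # kept for parity with the snippet above
        self.title = title

    def save(self):
        Group.objects.add(self)

    def __str__(self):
        return self.title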
# app.py
from exceptions import InvalidLoginError

# Initialize flask
from flask import Flask, render_template, url_for, session, redirect, request
app = Flask(__name__)
app.debug = True
app.secret_key = b'JPtUKpetQiyfzGpBS5SM'  # yeah, i don't care. hack me

# Establish connection to the database
from database import Connection
db = Connection(app, 'db1,db2,db3', 27017)

# Initialize chatlogger
from chatlog import Logger
logger = Logger('chat.log')
# logger.message('global', 'admin', 'test')


# Index, shows the chatrooms
@app.route('/')
def index():
    if 'username' not in session:
        return redirect(url_for('login'))
    rooms = db.get_rooms()
import cv2
import os
import glob
from database import Connection

##===================Connection Establishment=====================================================================
username, password = '******', 'password'
conn = Connection(username, password)
conn.connect()
conn.create_table()

name = input('Enter your name ')
x = conn.insert_value(name)
ids = conn.select_value(name)
conn.close_connection()

##=====================Camera Starts for dataset creation==========================================================
cam = cv2.VideoCapture(0)
detector = cv2.CascadeClassifier('cascade/haarcascade_frontalface_default.xml')

if int(x) == 1:
    q = "dataset/" + str(ids) + "*.jpg"
    w = []
    for file in glob.glob(q):
        w.append(int(file.strip().split('\\')[-1].split('.')[1]))
    i = sorted(w)[-1]
    print("i =", i)
else:
    i = 0

offset = 50
os.makedirs('dataset', exist_ok=True)
def connect(self, connstr=None):
    self.db = Connection(connstr)
    self.db.connect()
    self.prepare_queries()
def files(agent_id=None, event=None, filename=None, filetype='file',
          md5=None, sha1=None, hash=None, summary=False, offset=0,
          limit=common.database_limit, sort=None, search=None):
    """
    Return a list of files from the database that match the filters.

    :param agent_id: Agent ID.
    :param event: Filters by event: added, readded, modified, deleted.
    :param filename: Filters by filename.
    :param filetype: Filters by filetype: file or registry.
    :param md5: Filters by md5 hash.
    :param sha1: Filters by sha1 hash.
    :param hash: Filters by md5 or sha1 hash.
    :param summary: Returns a summary grouping by filename.
    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string.
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    """
    # Connection
    db_url = common.database_path
    conn = Connection(db_url)
    conn.connect(conn.getDbById(str(agent_id).zfill(3)))
    if conn.getDb() is None:
        raise OssecAPIException(1600)

    agent_info = Agent(agent_id).get_basic_information()
    if 'os' in agent_info:
        windows_agent = 'windows' in agent_info['os']['name'].lower()
    else:
        windows_agent = False

    eventRequest = {"$and": []}
    fileRequest = {"$and": []}

    eventFields = {
        'scanDate': 'date',
        'modificationDate': 'mtime',
        'size': 'size',
        'user': '******',
        'group': 'gname'
    }
    fileFields = {'file': 'path', 'filetype': 'type'}

    if search:
        regex = re.compile(".*{0}.*".format(int(search['value']) if search['value'].isdigit()
                                            else search['value']), re.IGNORECASE)
        event_search_con = {"$or": []}
        file_search_con = {"$or": []}
        for x in ['path', 'date', 'size', 'md5', 'sha1', 'uname', 'gname', 'perm']:
            if x == 'path':
                file_search_con["$or"].append({x: regex})
            else:
                event_search_con["$or"].append({x: regex})
        if bool(search['negation']):
            if event_search_con["$or"]:
                eventRequest["$and"].append({"$not": event_search_con})
            if file_search_con["$or"]:
                fileRequest["$and"].append({"$not": file_search_con})
        else:
            if event_search_con["$or"]:
                eventRequest["$and"].append(event_search_con)
            if file_search_con["$or"]:
                fileRequest["$and"].append(file_search_con)

    # Total items
    events = []
    if summary:
        db_data = conn.getDb()['fim_file'].aggregate([
            {
                '$lookup': {
                    'from': 'fim_event',
                    'localField': '_id',
                    'foreignField': 'file_id',
                    'as': 'fim_events'
                }
            },
        ], cursor={})
        for sysFile in db_data:
            item = sysFile
            # Keep only the most recent event for each file
            for fEvent in item['fim_events']:
                if not item.get('fim_event'):
                    item['fim_event'] = fEvent
                elif fEvent['date'] > item['fim_event']['date']:
                    item['fim_event'] = fEvent
            item.pop('fim_events')
            if item['type'] != filetype:
                continue
            if event and item['fim_event']['type'] != event:
                continue
            if filename and item['path'] != filename:
                continue
            if md5 and item['fim_event']['md5'] != md5:
                continue
            if sha1 and item['fim_event']['sha1'] != sha1:
                continue
            if hash and (item['fim_event']['sha1'] != hash) and (item['fim_event']['md5'] != hash):
                continue
            if search:
                search_value = int(search['value']) if search['value'].isdigit() else search['value']
                if (search_value not in item['path']) and (search_value not in item['fim_event']['date']) \
                        and (search_value not in item['fim_event']['size']) and (search_value not in item['fim_event']['md5']) \
                        and (search_value not in item['fim_event']['sha1']) and (search_value not in item['fim_event']['uname']) \
                        and (search_value not in item['fim_event']['gname']) and (search_value not in item['fim_event']['perm']):
                    continue
            item['sha1'] = item['fim_event']['sha1']
            item['uid'] = item['fim_event']['uid']
            item['date'] = item['fim_event']['date']
            item['gid'] = item['fim_event']['gid']
            item['perm'] = item['fim_event']['perm']
            item['md5'] = item['fim_event']['md5']
            item.pop('fim_event')
            item['fim_event.type'] = item['type']
            item.pop('type')
            events.append(item)
    else:
        db_data = conn.getDb()['fim_event'].aggregate([
            {
                '$lookup': {
                    'from': 'fim_file',
                    'localField': 'file_id',
                    'foreignField': '_id',
                    'as': 'fim_file'
                }
            },
        ], cursor={})
        for sysEvent in db_data:
            if sysEvent['fim_file'][0]['type'] != filetype:
                continue
            if event and sysEvent['type'] != event:
                continue
            if filename and sysEvent['fim_file'][0]['path'] != filename:
                continue
            if md5 and sysEvent['md5'] != md5:
                continue
            if sha1 and sysEvent['sha1'] != sha1:
                continue
            if hash and (sysEvent['sha1'] != hash) and (sysEvent['md5'] != hash):
                continue
            if search:
                search_value = int(search['value']) if search['value'].isdigit() else search['value']
                if (search_value not in sysEvent['fim_file'][0]['path']) and (search_value not in sysEvent['date']) \
                        and (search_value not in sysEvent['size']) and (search_value not in sysEvent['md5']) \
                        and (search_value not in sysEvent['sha1']) and (search_value not in sysEvent['uname']) \
                        and (search_value not in sysEvent['gname']) and (search_value not in sysEvent['perm']):
                    continue
            item = sysEvent
            item['fim_event.type'] = item['fim_file'][0]['type']
            item['path'] = item['fim_file'][0]['path']
            item.pop('_id')
            item.pop('fim_file')
            events.append(item)

    data = {'totalItems': len(events)}

    # Sorting
    if sort:
        if sort['fields']:
            allowed_sort_fields = set(eventFields.keys()) | set(fileFields.keys())
            # Check if every element in sort['fields'] is in allowed_sort_fields
            if not set(sort['fields']).issubset(allowed_sort_fields):
                uncorrect_fields = list(map(lambda x: str(x),
                                            set(sort['fields']) - set(allowed_sort_fields)))
                raise OssecAPIException(1403, 'Allowed sort fields: {0}. Fields: {1}'.format(
                    allowed_sort_fields, uncorrect_fields))
            sort_order = False if sort['order'] == 'asc' else True
            # Apply the fields in reverse so the first listed field ends up
            # with the highest priority (stable sort)
            for i in reversed(sort['fields']):
                sort_key = eventFields.get(i, fileFields.get(i, i))
                events.sort(key=lambda e: e[sort_key], reverse=sort_order)
        else:
            sort_order = False if sort['order'] == 'asc' else True
            events.sort(key=lambda e: e['date'], reverse=sort_order)
    else:
        events.sort(key=lambda e: e['date'], reverse=True)

    if limit:
        if limit > common.maximum_database_limit:
            raise OssecAPIException(1405, str(limit))
        if offset >= 0:
            events = events[int(offset):(int(offset) + int(limit))]
    elif limit == 0:
        raise OssecAPIException(1406)

    data['items'] = []
    for fEvent in events:
        data_tuple = {}
        if fEvent.get('date') is not None:
            data_tuple['scanDate'] = (fEvent.get('date') + timedelta(seconds=timeoffset)).__str__()
        else:
            data_tuple['scanDate'] = fEvent.get('date').__str__()
        if fEvent.get('fim_event.type') is not None:
            data_tuple['event'] = fEvent.get('fim_event.type')
        if fEvent.get('path') is not None:
            data_tuple['file'] = fEvent.get('path')
        if not summary:
            try:
                permissions = filemode(int(fEvent.get('perm'), 8))
            except TypeError:
                permissions = None
            if fEvent.get('size') is not None:
                data_tuple['size'] = fEvent.get('size')
            if fEvent.get('md5') is not None:
                data_tuple['md5'] = fEvent.get('md5')
            if fEvent.get('sha1') is not None:
                data_tuple['sha1'] = fEvent.get('sha1')
            if not windows_agent:
                if fEvent.get('uid') is not None:
                    data_tuple['uid'] = fEvent.get('uid')
                if fEvent.get('gid') is not None:
                    data_tuple['gid'] = fEvent.get('gid')
            if fEvent.get('perm') is not None:
                data_tuple['octalMode'] = fEvent.get('perm')
            if permissions:
                data_tuple['permissions'] = permissions
        data['items'].append(data_tuple)

    return data
class Filesys(object):
    def __init__(self):
        self.db = None

    def connect(self, connstr=None):
        self.db = Connection(connstr)
        self.db.connect()
        self.prepare_queries()

    def prepare_queries(self):
        self.db.prepare("""PREPARE get_latest_rev AS
            SELECT MAX(rev_id) AS rev_id FROM revision""")
        self.db.prepare("""PREPARE get_root(INTEGER) AS
            SELECT rev_id, time, id, name, size, modified
            FROM revision JOIN file ON root_id = id
            WHERE rev_id = $1""")
        self.db.prepare("""PREPARE get_all_roots AS
            SELECT rev_id, time, id, name, size, modified
            FROM revision JOIN file ON root_id = id""")
        self.db.prepare("""PREPARE get_subdirs(INTEGER) AS
            SELECT id, name, size, modified, md5, children, descendants,
                   free_space, total_space
            FROM file_in_dir
            JOIN file ON file_id = id
            NATURAL JOIN directory
            NATURAL LEFT JOIN drive
            WHERE dir_id = $1
            ORDER BY LOWER(name)""")
        self.db.prepare("""PREPARE get_children(INTEGER) AS
            SELECT id, name, size, modified, md5, children, descendants,
                   free_space, total_space
            FROM file_in_dir
            JOIN file ON file_id = id
            NATURAL LEFT JOIN directory
            NATURAL LEFT JOIN drive
            WHERE dir_id = $1
            ORDER BY LOWER(name)""")

    def get_latest_rev(self):
        self.db.execute("""EXECUTE get_latest_rev""")
        return self.db.fetchone()['rev_id']

    def get_root(self, rev_id):
        params = {'rev_id': rev_id}
        self.db.execute("""EXECUTE get_root(%(rev_id)s)""", params)
        row = self.db.fetchone()
        return Item(row)

    def get_all_roots(self):
        self.db.execute("""EXECUTE get_all_roots""", {})
        return [Item(row) for row in self.db.fetchall()]

    def get_subdirs(self, id):
        params = {'id': id}
        self.db.execute("""EXECUTE get_subdirs(%(id)s)""", params)
        return [Item(row) for row in self.db.fetchall()]

    def get_children(self, id):
        params = {'id': id}
        self.db.execute("""EXECUTE get_children(%(id)s)""", params)
        return [Item(row) for row in self.db.fetchall()]
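# Hypothetical usage sketch (assumed, not from the original source): walking
# the latest revision with the Filesys class above. The connection string is a
# placeholder, and Item is assumed to expose the selected columns (id, name,
# size, ...) as attributes.
fs = Filesys()
fs.connect('dbname=filesys user=postgres')
rev = fs.get_latest_rev()
root = fs.get_root(rev)
for child in fs.get_children(root.id):
    print(child.name, child.size)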
import cv2
from database import Connection

username, password = '******', 'password'
conn = Connection(username, password)
conn.connect()
data = conn.select_all()
names_dict = {}
for ids, name in data:
    names_dict[str(ids)] = name
conn.close_connection()

recognizer = cv2.face.LBPHFaceRecognizer_create()
recognizer.read('trainer/trainer.yml')
cascadePath = "cascade/haarcascade_frontalface_default.xml"
faceCascade = cv2.CascadeClassifier(cascadePath)
path = 'dataSet'

cam = cv2.VideoCapture(0)
fontFace = cv2.FONT_HERSHEY_SIMPLEX
fontScale = 1
fontColor = (255, 255, 255)
##width_d, height_d = 720, 720

while True:
    ret, im = cam.read()
    gray = cv2.cvtColor(im, cv2.COLOR_BGR2GRAY)
    faces = faceCascade.detectMultiScale(gray, scaleFactor=1.1,
def __init__(self):
    self.mysql = Connection()
    self.db = self.mysql.connect()
    self.list_products = []
    self.list_ch = []
def connect_bd():
    con = Connection("localhost", "5432", "tcs7", "postgres", "nadaesimposible")
    con.connect()
    return con
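# Hypothetical usage (an assumption): the positional arguments above read as
# (host, port, database, user, password). A caller might do the following;
# close() is assumed to exist on this Connection class.
con = connect_bd()
# ... execute queries through con here ...
con.close()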
class Base_action():
    def __init__(self):
        self.mysql = Connection()
        self.db = self.mysql.connect()
        self.list_products = []
        self.list_ch = []

    def fill_bdd(self):
        """Method to fill the database"""
        self.mysql.cur.execute(QUERY_RESET)
        self.mysql.cur.execute(QUERY_FILL_BDD)
        self.mysql.cnx.commit()

    def check_category(self):
        """Method to check if the category number exists"""
        self.mysql.cur.execute(QUERY_DISPLAY_ALL_CATEGORIES)

    def check_product(self):
        """Method to check if the product number exists"""
        self.mysql.cur.execute(QUERY_DISPLAY_ALL_PRODUCTS)

    def check_substitutes(self):
        """Method to check if there is a substitute"""
        self.mysql.cur.execute(FIND_FOOD_SUBSTITUTED)

    def get_categories(self):
        """Method to get the categories"""
        self.mysql.cur.execute(QUERY_DISPLAY_ALL_CATEGORIES)
        for row in self.mysql.cur:
            self.list_products.append(row[1])
            print('{0} - {1}'.format(row[0], row[1]))

    def get_products(self):
        """Method to get the products"""
        self.mysql.cur.execute(QUERY_DISPLAY_ALL_PRODUCTS)
        for row in self.mysql.cur:
            if not row[2]:
                print('{0} - {1}'.format(row[0], row[1]))
            else:
                print('{0} - {1} - {2}'.format(row[0], row[1], row[2]))

    def display_details(self):
        """Method to display product details"""
        for row in self.mysql.cur:
            print(('Nom : {0}' + "\n"
                   'Marque : {1}' + "\n"
                   'Score nutritionnel : {2}' + "\n"
                   'Calories : {3} kcal' + "\n"
                   'Sucres : {4} g' + "\n"
                   'Sels : {5} g' + "\n"
                   'Lipides : {6} g' + "\n"
                   'Protéines : {7} g' + "\n"
                   'Description : {8}' + "\n"
                   'Disponible en : {9}' + "\n"
                   'Image : {10}' + "\n"
                   'Lien : {11}' + "\n"
                   'Magasins : {12}' + "\n"
                   'Catégorie : {13} ({i})').format(
                row[1], row[2], row[3].upper(), row[4], row[5], row[6],
                row[7], row[8], row[9], row[10], row[11], row[12],
                row[13], row[14], i=self.list_ch[row[14] - 1]))

    def get_product_details(self):
        """Method to get product details"""
        self.mysql.cur.execute(
            QUERY_DISPLAY_PRODUCT_DETAILS.format(f1=choice_third_level))
        self.display_details()

    def insert_substitutes(self):
        """Method to insert the substitutes"""
        self.mysql.cur.execute(FIND_A_SUBSTITUTE)
        for row in self.mysql.cur:
            self.mysql.cur.execute(INSERT_A_SUBSTITUTE.format(
                table1="favourite", table2="product", substitute=row[0]))
        self.mysql.cnx.commit()

    def display_substitutes(self):
        """Method to display a substitute"""
        self.mysql.cur.execute(FIND_FOOD_SUBSTITUTED)
        self.display_details()

    def replace_characters(self):
        """Method to replace characters in category names"""
        for product in self.list_products:
            product = product.replace(' ', "-")
            product = product.replace('‘', "-")
            product = product.replace('à', "a")
            product = product.lower()
            self.list_ch.append(product)
        # reset only after the loop; clearing it inside would skip items
        self.list_products = []

    def insert_products(self):
        """Method to insert the products"""
        self.replace_characters()
        r = requests.get(
            "https://fr.openfoodfacts.org/cgi/search.pl?action=process&tagtype_0="
            "categories&tag_contains_0=contains&tag_0="
            + self.list_ch[choice_second_level - 1] +
            "&sort_by=unique_scans_n&page_size=1000&axis_x=energy&axis_y=products_n"
            "&action=display&json=1")
        result = json.loads(r.text)
        for i in range(len(result["products"])):
            self.iD = i + 1
            try:
                self.brand = result["products"][i]["brands"]
                self.nutri_score = result["products"][i]["nutrition_grade_fr"]
                self.calories = float(result["products"][i]["nutriments"]["energy_100g"])
                # Kilojoules (kJ) to kilocalories (kcal)
                self.calories /= 4.184
                self.sugars = result["products"][i]["nutriments"]["sugars_100g"]
                self.salts = result["products"][i]["nutriments"]["salt_100g"]
                self.lipids = result["products"][i]["nutriments"]["fat_100g"]
                self.proteins = result["products"][i]["nutriments"]["proteins_100g"]
                self.location_available = result["products"][i]["countries"]
                self.category_id = choice_second_level
                if result["products"][i]["product_name_fr"] == "":
                    self.name = result["products"][i]["product_name"]
                else:
                    self.name = result["products"][i]["product_name_fr"]
                self.description = result["products"][i]["generic_name"]
                self.url_image = result["products"][i]["image_small_url"]
                self.url_page = result["products"][i]["url"]
                self.stores = result["products"][i]["stores"]
                val = (self.iD, self.name, self.brand, self.nutri_score,
                       self.calories, self.sugars, self.salts, self.lipids,
                       self.proteins, self.description, self.location_available,
                       self.url_image, self.url_page, self.stores,
                       self.category_id)
                self.mysql.cur.execute(QUERY_INSERT_ALL_PRODUCTS.format(
                    table="product", f1="iD", f2="name", f3="brand",
                    f4="nutri_score", f5="calories", f6="sugars", f7="salts",
                    f8="lipids", f9="proteins", f10="description",
                    f11="location_available", f12="url_image", f13="url_page",
                    f14="stores", f15="category_id"), val)
                self.mysql.cnx.commit()
            except (KeyError, ValueError):
                # skip products missing a required field or with bad values
                pass
def get_pci(agent_id=None, offset=0, limit=common.database_limit,
            sort=None, search=None):
    """
    Get all the PCI requirements used in the rootchecks of the agent.

    :param agent_id: Agent ID.
    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string.
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    """
    fields = {}
    request = {"$and": [{'pci_dss': {'$ne': None}}]}

    # Connection
    db_url = common.database_path
    conn = Connection(db_url)
    conn.connect(conn.getDbById(str(agent_id).zfill(3)))
    if conn.getDb() is None:
        raise OssecAPIException(1600)

    # Search
    if search:
        regex = re.compile(".*{0}.*".format(int(search['value']) if search['value'].isdigit()
                                            else search['value']), re.IGNORECASE)
        search_con = {"$or": []}
        search_con["$or"].append({'pci_dss': regex})
        if bool(search['negation']):
            if search_con["$or"]:
                request["$and"].append({"$not": search_con})
        else:
            if search_con["$or"]:
                request["$and"].append(search_con)

    # Sorting
    sort_con = []
    if sort:
        if sort['fields']:
            allowed_sort_fields = set(fields.keys())
            # Check if every element in sort['fields'] is in allowed_sort_fields
            if not set(sort['fields']).issubset(allowed_sort_fields):
                uncorrect_fields = list(map(lambda x: str(x),
                                            set(sort['fields']) - set(allowed_sort_fields)))
                raise OssecAPIException(1403, 'Allowed sort fields: {0}. Fields: {1}'.format(
                    allowed_sort_fields, uncorrect_fields))
            for i in sort['fields']:
                str_order = 1 if sort['order'] == 'asc' else -1
                sort_con.append((fields[i], str_order))
        else:
            sort_con.append(('pci_dss', 1 if sort['order'] == 'asc' else -1))
    else:
        sort_con.append(('pci_dss', 1))

    if limit:
        if limit > common.maximum_database_limit:
            raise OssecAPIException(1405, str(limit))
    elif limit == 0:
        raise OssecAPIException(1406)

    if not request["$and"]:
        request = {}

    db_data = conn.getDb()['pm_event'].find(request).sort(sort_con).skip(
        offset).limit(limit).distinct('pci_dss')

    data = {'items': []}
    for pmEvent in db_data:
        data['items'].append(pmEvent)
    return data
from flask import Flask, render_template, redirect, url_for, request, session, make_response, flash
from werkzeug.utils import secure_filename
from opengraph import OpenGraph
from database import Connection  # assumed import; the original snippet uses Connection without importing it
import json
import prediction_models
import torch
from BERT import BERT

# import user as User

# toUser = "******"  # Runtime variables for development, remove before production
# fromUser = "******"  # Runtime variables for development, remove before production

app = Flask(__name__)
app.secret_key = 'somerandomvalue'
app.config['UPLOAD_FOLDER'] = 'user-content/'

db = Connection(app, '192.168.12.209', 27017)

# Importing all models
image_model = torch.load("models/model_nsfw.pt", map_location=torch.device('cpu'))
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
text_model = BERT().to(device)


def load_checkpoint(load_path, model):
    if load_path is None:
        return
    state_dict = torch.load(load_path, map_location=device)
def export_query_results(self, parent, name, tbl, sql, pw):
    """ Writes a sql query to a csv. """
    start_datetime = timestr()
    start_time = just_time()
    logger = logging.getLogger('main.sql_exporter.ExportSql.export_query_results')
    logger.debug('Pulling query ' + name)
    fso = FSO()
    fso.make_dir('output')
    csv_path = 'output\\' + name + '_' + start_datetime + '.csv'
    con = Connection(table=tbl, password=pw)

    def result_iter(cursor, chunksize=1000):
        # fetch rows in chunks to keep memory use bounded
        while True:
            results = cursor.fetchmany(chunksize)
            if not results:
                break
            for result in results:
                yield result

    def call_grandfather(status, done=False):
        if done:
            finish = just_time()
        else:
            finish = '...'
        parent.parent.callback(name, start_time, finish, status)

    call_grandfather(status='Connecting')
    if con.open():
        cursor = con.connection.cursor()
        call_grandfather(status='Executing query')
        try:
            cursor.execute(sql)
            with open(csv_path, 'w', newline='') as csv_file:
                call_grandfather(status='Writing csv')
                writer = csv.writer(csv_file, quoting=csv.QUOTE_ALL)
                writer.writerow([i[0] for i in cursor.description])  # header
                for r, row in enumerate(result_iter(cursor, 1000)):
                    if r > 100000:
                        break
                    if r % 1000 == 0:
                        logger.info('Writing row ' + str(r))
                    writer.writerow(list(row) + ['', '', '', ''])
            call_grandfather(status='Great Success!', done=True)
            fso.open_file(csv_path)
        except Exception as e:
            err = str(e)
            logger.error(err)
            call_grandfather(status=str(e), done=True)
        finally:
            con.close()
    parent.call_dad()
def conn_func(*args):
    return Connection()