Example #1
def clear(agent_id=None, all_agents=False):
    """
    Clears the database.

    :param agent_id: For an agent.
    :param all_agents: For all agents.
    :return: Message.
    """

    # Clear DB
    conn = Connection(common.database_path)

    regex = re.compile(r'^\d{,3}-\S+$')
    db_agents_list = []

    if not int(all_agents):
        raw_str = r'^' + "{}".format(int(agent_id)).zfill(3) + r'-\S+$'
        regex = re.compile(raw_str)

    for db_agent in conn.getDbsName():
        if (regex.search(db_agent) != None):
            db_agents_list.append(db_agent)

    if not db_agents_list:
        raise OssecAPIException(1600)

    for db_agent in db_agents_list:
        conn.connect(db_agent)
        if conn.getDb() != None:
            doc = conn.getDb()['pm_event']
            if doc != None:
                doc.drop()
                conn.vacuum()
            doc = conn.getDb()['pmCounterInfo']
            if doc != None:
                doc.drop()
                conn.vacuum()

    # Clear OSSEC info
    if int(all_agents):
        rootcheck_files = glob('{0}/queue/rootcheck/*'.format(
            common.ossec_path))
    else:
        if agent_id == "000":
            rootcheck_files = [
                '{0}/queue/rootcheck/rootcheck'.format(common.ossec_path)
            ]
        else:
            agent_info = Agent(agent_id).get_basic_information()
            rootcheck_files = glob(
                '{0}/queue/rootcheck/({1}) {2}->rootcheck'.format(
                    common.ossec_path, agent_info['name'], agent_info['ip']))

    for rootcheck_file in rootcheck_files:
        if path.exists(rootcheck_file):
            remove(rootcheck_file)

    return "Rootcheck database deleted"
Example #2
def last_scan(agent_id):
    """
    Gets the last scan of the agent.

    :param agent_id: Agent ID.
    :return: Dictionary with the 'start' and 'end' times of the last scan.
    """

    # Connection
    db_url = common.database_path
    conn = Connection(db_url)
    conn.connect(conn.getDbById(str(agent_id).zfill(3)))
    if (conn.getDb() == None):
        raise OssecAPIException(1600)

    data = {}

    lastSyscheckEndTime = None
    lastSyscheckEndTimeObj = list((conn.getDb()['pm_event'].find({
        "log":
        'Ending syscheck scan.'
    }).sort([('date_last', -1)]).limit(1)))[0]
    if (lastSyscheckEndTimeObj != None):
        lastSyscheckEndTime = lastSyscheckEndTimeObj.get('date_last')

    if lastSyscheckEndTime != None:
        data['end'] = (lastSyscheckEndTime +
                       timedelta(seconds=timeoffset)).__str__()
    else:
        data['end'] = lastSyscheckEndTime.__str__()

    lastSyscheckStartTime = None
    lastSyscheckStartTimeObj = list((conn.getDb()['pm_event'].find({
        "log":
        'Starting syscheck scan.'
    }).sort([('date_last', -1)]).limit(1)))[0]
    if (lastSyscheckStartTimeObj != None):
        lastSyscheckStartTime = lastSyscheckStartTimeObj.get('date_last')

    if lastSyscheckStartTime != None:
        data['start'] = (lastSyscheckStartTime +
                         timedelta(seconds=timeoffset)).__str__()
    else:
        data['start'] = lastSyscheckStartTime.__str__()

    return data
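
# Usage sketch (added for illustration; the agent ID is a placeholder):
if __name__ == '__main__':
    scan = last_scan("001")
    # Each value is a stringified datetime, or the string "None" when the
    # agent has no recorded syscheck scan.
    print(scan['start'], scan['end'])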
Example #3
def main():
    print "Connecting"
    db = Connection()
    db.connect()

    print "Compressing thumbnails"
    db.execute("""SELECT id FROM thumbnail""")
    for (id,) in db.cursor.fetchall():
        db.execute("""SELECT thumbnail FROM thumbnail WHERE id = %d""" % id)
        thumbnail, = db.cursor.fetchone()
        new_thumbnail = compress(id, thumbnail)
        if new_thumbnail:
            db.execute(
                """UPDATE thumbnail SET thumbnail = %(tn)s WHERE id = %(id)s""",
                {"tn": db.make_binary(new_thumbnail), "id": id},
            )
            db.commit()
            print "Compressed thumbnail %d from %d bytes to %d bytes" % (id, len(thumbnail), len(new_thumbnail))
Example #4
def get_pci(agent_id=None,
            offset=0,
            limit=common.database_limit,
            sort=None,
            search=None):
    """
    Get all the PCI requirements used in the rootchecks of the agent.

    :param agent_id: Agent ID.
    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string.
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    """

    fields = {}
    request = {"$and": [{'pci_dss': {'$ne': None}}]}

    # Connection
    db_url = common.database_path
    conn = Connection(db_url)
    conn.connect(conn.getDbById(str(agent_id).zfill(3)))
    if (conn.getDb() == None):
        raise OssecAPIException(1600)

    # Search
    if search:
        regex = re.compile(".*{0}.*".format(int(search['value']) if search['value'].isdigit() \
                                                                    else search['value']), re.IGNORECASE)
        search_con = {"$or": []}

        search_con["$or"].append({'pci_dss': regex})

        if bool(search['negation']):
            if search_con["$or"]:
                request["$and"].append({"$not": search_con})
        else:
            if search_con["$or"]:
                request["$and"].append(search_con)

    # Total items
    # conn.execute(query.format('COUNT(DISTINCT pci_dss)'), request)
    # data = {'totalItems': conn.fetch()[0]}

    # Sorting
    sort_con = []
    if sort:
        if sort['fields']:
            allowed_sort_fields = set(fields.keys())
            # Check if every element in sort['fields'] is in allowed_sort_fields
            if not set(sort['fields']).issubset(allowed_sort_fields):
                uncorrect_fields = list(
                    map(lambda x: str(x),
                        set(sort['fields']) - set(allowed_sort_fields)))
                raise OssecAPIException(
                    1403, 'Allowed sort fields: {0}. Fields: {1}'.format(
                        allowed_sort_fields, uncorrect_fields))

            for i in sort['fields']:
                str_order = 1 if sort['order'] == 'asc' else -1
                sort_con.append((fields[i], str_order))
        else:
            sort_con.append(('pci_dss', 1 if sort['order'] == 'asc' else -1))
    else:
        sort_con.append(('pci_dss', 1))

    if limit:
        if limit > common.maximum_database_limit:
            raise OssecAPIException(1405, str(limit))
    elif limit == 0:
        raise OssecAPIException(1406)

    if not request["$and"]:
        request = {}
    db_data = conn.getDb()['pm_event'].find(request).sort(sort_con).skip(
        offset).limit(limit).distinct('pci_dss')
    data = {}
    data['items'] = []
    for pmEvent in db_data:
        data['items'].append(pmEvent)

    return data
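
# Usage sketch (added for illustration; values are placeholders). Since
# `fields` is empty above, sort['fields'] must stay empty (otherwise a 1403
# error is raised) and the result is ordered by 'pci_dss':
if __name__ == '__main__':
    pci = get_pci(agent_id="001",
                  sort={"fields": [], "order": "asc"},
                  search={"value": "10.6", "negation": 0})
    print(pci['items'])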
Example #5
def print_db(agent_id=None,
             status='all',
             pci=None,
             offset=0,
             limit=common.database_limit,
             sort=None,
             search=None):
    """
    Returns a list of events from the database.

    :param agent_id: Agent ID.
    :param status: Filters by status: outstanding, solved, all.
    :param pci: Filters by PCI DSS requirement.
    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string.
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    """
    # Connection
    db_url = common.database_path
    conn = Connection(db_url)
    conn.connect(conn.getDbById(str(agent_id).zfill(3)))
    if (conn.getDb() == None):
        raise OssecAPIException(1600)

    request = {"$and": []}

    lastRootcheckEndTime = None
    lastRootcheckEndTimeObj = list((conn.getDb()['pm_event'].find({
        "log":
        'Ending rootcheck scan.'
    }).sort([('date_last', -1)]).limit(1)))[0]
    if (lastRootcheckEndTimeObj != None):
        lastRootcheckEndTime = lastRootcheckEndTimeObj.get(
            'date_last', datetime.now())

    fields = {
        'status': 'status',
        'event': 'log',
        'oldDay': 'date_first',
        'readDay': 'date_last'
    }

    request['$and'].append({
        'log': {
            '$nin': [
                'Starting rootcheck scan.', 'Ending rootcheck scan.',
                'Starting syscheck scan.', 'Ending syscheck scan.'
            ]
        }
    })
    if status == 'outstanding':
        if lastRootcheckEndTime != None:
            request['$and'].append({
                'date_last': {
                    '$gt': (lastRootcheckEndTime - timedelta(seconds=86400))
                }
            })
    elif status == 'solved':
        if lastRootcheckEndTime != None:
            request['$and'].append({
                'date_last': {
                    '$lte': (lastRootcheckEndTime - timedelta(seconds=86400))
                }
            })

    if pci:
        request["$and"].append({"pci_dss": pci})

    # search
    if search:
        regex = re.compile(".*{0}.*".format(int(search['value']) if search['value'].isdigit() \
                                                                    else search['value']), re.IGNORECASE)
        search_con = {"$or": []}

        for x in fields.values():
            search_con["$or"].append({x: regex})
        if bool(search['negation']):
            if search_con["$or"]:
                request["$and"].append({"$not": search_con})
        else:
            if search_con["$or"]:
                request["$and"].append(search_con)

    # Sorting
    sort_con = []
    if sort:
        if sort['fields']:
            allowed_sort_fields = set(fields.keys())
            # Check if every element in sort['fields'] is in allowed_sort_fields
            if not set(sort['fields']).issubset(allowed_sort_fields):
                uncorrect_fields = list(
                    map(lambda x: str(x),
                        set(sort['fields']) - set(allowed_sort_fields)))
                raise OssecAPIException(
                    1403, 'Allowed sort fields: {0}. Fields: {1}'.format(
                        allowed_sort_fields, uncorrect_fields))

            for i in sort['fields']:
                str_order = 1 if sort['order'] == 'asc' else -1
                sort_con.append((fields[i], str_order))
        else:
            sort_con.append(
                (fields["readDay"], 1 if sort['order'] == 'asc' else -1))
    else:
        sort_con.append((fields["readDay"], -1))

    if limit:
        if limit > common.maximum_database_limit:
            raise OssecAPIException(1405, str(limit))
    elif limit == 0:
        raise OssecAPIException(1406)

    select = ["status", "date_first", "date_last", "log", "pci_dss"]
    select_fields = {}
    for x in set(select):
        select_fields[x] = 1

    if not request["$and"]:
        request = {}

    data = {}
    db_data = conn.getDb()['pm_event'].find(request, select_fields)
    data['totalItems'] = db_data.count()
    db_data = db_data.sort(sort_con).skip(offset).limit(limit)

    # process get data
    data['items'] = []

    for pmEvent in db_data:
        pmEvent.pop('_id')
        if pmEvent.get("date_last") != None:
            if (pmEvent['date_last'] > lastRootcheckEndTime):
                pmEvent['status'] = 'outstanding'
            elif (pmEvent['date_last'] <= lastRootcheckEndTime):
                pmEvent['status'] = 'solved'

        if pmEvent.get("date_first") != None:
            pmEvent['date_first'] = (pmEvent.get("date_first") +
                                     timedelta(seconds=timeoffset)).__str__()
        else:
            pmEvent['date_first'] = pmEvent.get("date_first").__str__()

        if pmEvent.get("date_last") != None:
            pmEvent['date_last'] = (pmEvent.get("date_last") +
                                    timedelta(seconds=timeoffset)).__str__()
        else:
            pmEvent['date_last'] = pmEvent.get("date_last").__str__()

        data['items'].append(pmEvent)
    return data
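
# Usage sketch (added for illustration; values are placeholders):
if __name__ == '__main__':
    events = print_db(agent_id="001", status="outstanding",
                      sort={"fields": ["readDay"], "order": "desc"})
    print(events['totalItems'], len(events['items']))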
Example #6
def connect_bd():
    con = Connection("localhost", "5432", "tcs7", "postgres", "nadaesimposible")
    con.connect()
    return con
Example #7
import cv2
from database import Connection

username, password = '******', 'password'
conn = Connection(username, password)
conn.connect()
data = conn.select_all()
names_dict = {}
for ids, name in data:
    names_dict[str(ids)] = name
conn.close_connection()

recognizer = cv2.face.LBPHFaceRecognizer_create()

recognizer.read('trainer/trainer.yml')
cascadePath = "cascade/haarcascade_frontalface_default.xml"
faceCascade = cv2.CascadeClassifier(cascadePath)
path = 'dataSet'

cam = cv2.VideoCapture(0)

fontFace = cv2.FONT_HERSHEY_SIMPLEX
fontScale = 1
fontColor = (255, 255, 255)
##width_d, height_d = 720, 720

while True:
    ret, im = cam.read()
    gray = cv2.cvtColor(im, cv2.COLOR_BGR2GRAY)
    faces = faceCascade.detectMultiScale(gray,
                                         scaleFactor=1.1,
Example #8
def files(agent_id=None,
          event=None,
          filename=None,
          filetype='file',
          md5=None,
          sha1=None,
          hash=None,
          summary=False,
          offset=0,
          limit=common.database_limit,
          sort=None,
          search=None):
    """
    Return a list of files from the database that match the filters.

    :param agent_id: Agent ID.
    :param event: Filters by event: added, readded, modified, deleted.
    :param filename: Filters by filename.
    :param filetype: Filters by filetype: file or registry.
    :param md5: Filters by md5 hash.
    :param sha1: Filters by sha1 hash.
    :param hash: Filters by md5 or sha1 hash.
    :param summary: Returns a summary grouping by filename.
    :param offset: First item to return.
    :param limit: Maximum number of items to return.
    :param sort: Sorts the items. Format: {"fields":["field1","field2"],"order":"asc|desc"}.
    :param search: Looks for items with the specified string.
    :return: Dictionary: {'items': array of items, 'totalItems': Number of items (without applying the limit)}
    """

    # Connection
    db_url = common.database_path
    conn = Connection(db_url)
    conn.connect(conn.getDbById(str(agent_id).zfill(3)))
    if (conn.getDb() == None):
        raise OssecAPIException(1600)

    agent_info = Agent(agent_id).get_basic_information()
    if 'os' in agent_info:
        if 'windows' in agent_info['os']['name'].lower():
            windows_agent = True
        else:
            windows_agent = False
    else:
        windows_agent = False

    # if 'os' in agent_info and 'platform' in agent_info['os']:
    #     if agent_info['os']['platform'].lower() == 'windows':
    #         windows_agent = True
    #     else:
    #         windows_agent = False
    # else:
    #     # We do not know if it is a windows or linux agent.
    #     # It is set to windows agent in order to avoid wrong data (uid, gid, ...)
    #     windows_agent = True

    eventRequest = {"$and": []}
    fileRequest = {"$and": []}

    eventFields = {
        'scanDate': 'date',
        'modificationDate': 'mtime',
        'size': 'size',
        'user': '******',
        'group': 'gname'
    }
    fileFields = {'file': 'path', 'filetype': 'type'}
    # Query
    # query = "SELECT {0} FROM fim_event, fim_file WHERE fim_event.id_file = fim_file.id AND fim_file.type = :filetype"

    # fileRequest['$and'].append({
    #     'type': filetype
    # })

    # if event:
    #     # query += ' AND fim_event.type = :event'
    #     # request['event'] = event
    #     eventRequest['$and'].append({
    #         'event': event
    #     })

    # if filename:
    #     # query += ' AND path = :filename'
    #     # request['filename'] = filename
    #     fileRequest['$and'].append({
    #         'path': filename
    #     })

    # if md5:
    #     # query += ' AND md5 = :md5'
    #     # request['md5'] = md5
    #     eventRequest['$and'].append({
    #         'md5': md5
    #     })

    # if sha1:
    #     # query += ' AND sha1 = :sha1'
    #     # request['sha1'] = sha1
    #     eventRequest['$and'].append({
    #         'sha1': sha1
    #     })

    # if hash:
    #     # query += ' AND (md5 = :hash OR sha1 = :hash)'
    #     # request['hash'] = hash
    #     eventRequest['$and'].append({
    #         '$or': [
    #             {
    #                 'md5': hash
    #             },
    #             {
    #                 'sha1': hash
    #             }
    #         ]
    #     })

    # if search:
    #     query += " AND NOT" if bool(search['negation']) else ' AND'
    #     query += " (" + " OR ".join(x + ' LIKE :search' for x in ('path', "date", 'size', 'md5', 'sha1', 'uname', 'gname', 'inode', 'perm')) + " )"
    #     request['search'] = '%{0}%'.format(search['value'])

    if search:
        regex = re.compile(".*{0}.*".format(int(search['value']) if search['value'].isdigit() \
                                                                    else search['value']), re.IGNORECASE)
        event_search_con = {"$or": []}
        file_search_con = {"$or": []}
        for x in [
                'path', "date", 'size', 'md5', 'sha1', 'uname', 'gname', 'perm'
        ]:
            if x == 'path':
                file_search_con["$or"].append({x: regex})
            else:
                event_search_con["$or"].append({x: regex})
        if bool(search['negation']):
            if event_search_con["$or"]:
                eventRequest["$and"].append({"$not": event_search_con})
            if file_search_con["$or"]:
                fileRequest["$and"].append({"$not": file_search_con})
        else:
            if event_search_con["$or"]:
                eventRequest["$and"].append(event_search_con)
            if file_search_con["$or"]:
                fileRequest["$and"].append(file_search_con)

    # Total items
    db_data = None

    events = []
    if summary:
        db_data = conn.getDb()['fim_file'].aggregate([
            {
                '$lookup': {
                    'from': 'fim_event',
                    'localField': '_id',
                    'foreignField': 'file_id',
                    'as': 'fim_events'
                }
            },
        ], cursor={})
        for sysFile in db_data:
            item = sysFile
            for fEvent in item['fim_events']:
                if not item.get('fim_event'):
                    item['fim_event'] = fEvent
                else:
                    if fEvent['date'] > item['fim_event']['date']:
                        item['fim_event'] = fEvent
            item.pop('fim_events')
            if item['type'] != filetype:
                continue

            if event:
                if item['fim_event']['type'] != event:
                    continue
            if filename:
                if item['path'] != filename:
                    continue
            if md5:
                if item['fim_event']['md5'] != md5:
                    continue

            if sha1:
                if item['fim_event']['sha1'] != sha1:
                    continue

            if hash:
                if (item['fim_event']['sha1'] !=
                        hash) and (item['fim_event']['md5'] != hash):
                    continue
            if search:
                search_value = (int(search['value'])
                                if search['value'].isdigit() else search['value'])

                if (search_value not in item['path']) and (search_value not in item['fim_event']['date']) \
                and (search_value not in item['fim_event']['size']) and (search_value not in item['fim_event']['md5']) \
                and (search_value not in item['fim_event']['sha1']) and (search_value not in item['fim_event']['uname']) \
                and (search_value not in item['fim_event']['gname']) and (search_value not in item['fim_event']['perm']):
                    continue
            item['sha1'] = item['fim_event']['sha1']
            item['uid'] = item['fim_event']['uid']
            item['date'] = item['fim_event']['date']
            item['gid'] = item['fim_event']['gid']
            # item['mtime'] = item['fim_event']['mtime']
            item['perm'] = item['fim_event']['perm']
            item['md5'] = item['fim_event']['md5']
            item.pop('fim_event')
            item['fim_event.type'] = item['type']
            item.pop('type')
            events.append(item)

        # rFileRequest = fileRequest.copy()
        # if not rFileRequest['$and']:
        #     rFileRequest = {}
        # db_data = conn.getDb()['fim_file'].find_one(rFileRequest)
        # # list_db_data = list(db_data)
        # for eFile in db_data:
        #     rEventRequest = eventRequest.copy()
        #     rEventRequest['$and'].append({
        #         'file_id': eFile.get('_id')
        #     })
        #     event_data = conn.getDb()['fim_event'].find(rEventRequest).sort(('date', -1)).limit(1)
        #     if event_data.count() == 1:
        #         item = list(event_data)[0]
        #         item['type'] = eFile.get('type')
        #         item['type'] = eFile.get('path')
        #         print(item)
        #         events.append(item)

        # query += ' group by path'
        # conn.execute("SELECT COUNT(*) FROM ({0}) AS TEMP".format(query.format("max(date)")), request)
    else:
        db_data = conn.getDb()['fim_event'].aggregate([
            {
                '$lookup': {
                    'from': 'fim_file',
                    'localField': 'file_id',
                    'foreignField': '_id',
                    'as': 'fim_file'
                }
            },
        ], cursor={})
        for sysEvent in db_data:
            if sysEvent['fim_file'][0]['type'] != filetype:
                continue

            if event:
                if sysEvent['type'] != event:
                    continue
            if filename:
                if sysEvent['fim_file'][0]['path'] != filename:
                    continue
            if md5:
                if sysEvent['md5'] != md5:
                    continue

            if sha1:
                if sysEvent['sha1'] != sha1:
                    continue

            if hash:
                if (sysEvent['sha1'] != hash) and (sysEvent['md5'] != hash):
                    continue
            if search:
                search_value = (int(search['value'])
                                if search['value'].isdigit() else search['value'])

                if (search_value not in sysEvent['fim_file'][0]['path']) and (search_value not in sysEvent['date']) \
                and (search_value not in sysEvent['size']) and (search_value not in sysEvent['md5']) \
                and (search_value not in sysEvent['sha1']) and (search_value not in sysEvent['uname']) \
                and (search_value not in sysEvent['gname']) and (search_value not in sysEvent['perm']):
                    continue
            item = sysEvent
            item['fim_event.type'] = item['fim_file'][0]['type']
            item['path'] = item['fim_file'][0]['path']
            # print(item)
            item.pop('_id')
            item.pop('fim_file')
            events.append(item)

    data = {'totalItems': len(events)}
    # Sorting
    event_sort_con = []
    file_sort_con = []
    if sort:
        if sort['fields']:
            allowed_sort_fields = set(eventFields) | set(fileFields)
            # Check if every element in sort['fields'] is in allowed_sort_fields
            if not set(sort['fields']).issubset(allowed_sort_fields):
                uncorrect_fields = list(
                    map(lambda x: str(x),
                        set(sort['fields']) - set(allowed_sort_fields)))
                raise OssecAPIException(
                    1403, 'Allowed sort fields: {0}. Fields: {1}'.format(
                        allowed_sort_fields, uncorrect_fields))

            for i in sort['fields']:
                # str_order = 1 if sort['order'] == 'asc' else -1
                sort_order = False if sort['order'] == 'asc' else True
                events.sort(key=lambda e: e[i], reverse=sort_order)
                # if i in eventFields.keys():
                #     event_sort_con.append((eventFields[i], str_order))
                #     events.sort(key=lambda e: e[i], reverse=sort_order)
                # elif i in fileFields.keys():
                #     file_sort_con.append((fileFields[i], str_order))
        else:
            # event_sort_con.append((eventFields["date"], 1 if sort['order'] == 'asc' else -1))
            sort_order = False if sort['order'] == 'asc' else True
            events.sort(key=lambda e: e['date'], reverse=sort_order)
    else:
        # event_sort_con.append((eventFields["date"], -1))
        events.sort(key=lambda e: e['date'], reverse=True)

    # if sort:
    #     if sort['fields']:
    #         allowed_sort_fields = fields.keys()
    #          # Check if every element in sort['fields'] is in allowed_sort_fields
    #         if not set(sort['fields']).issubset(allowed_sort_fields):
    #             uncorrect_fields = list(map(lambda x: str(x), set(sort['fields']) - set(allowed_sort_fields)))
    #             raise OssecAPIException(1403, 'Allowed sort fields: {0}. Fields: {1}'.format(allowed_sort_fields, uncorrect_fields))

    #         query += ' ORDER BY ' + ','.join(['{0} {1}'.format(fields[i], sort['order']) for i in sort['fields']])
    #     else:
    #         query += ' ORDER BY date {0}'.format(sort['order'])
    # else:
    #     query += ' ORDER BY date DESC'
    if limit:
        if limit > common.maximum_database_limit:
            raise OssecAPIException(1405, str(limit))
        # query += ' LIMIT :offset,:limit'
        # request['offset'] = offset
        # request['limit'] = limit
        if offset >= 0:
            events = events[int(offset):(int(offset) + int(limit))]
    elif limit == 0:
        raise OssecAPIException(1406)

    # if summary:
    #     select = ["max(date)", "mtime", "fim_event.type", "path"]
    # else:
    #     select = ["date", "mtime", "fim_event.type", "path", "size", "perm", "uid", "gid", "md5", "sha1"]

    data['items'] = []
    for fEvent in events:
        data_tuple = {}
        if fEvent.get('date') != None:
            data_tuple['scanDate'] = (fEvent.get('date') +
                                      timedelta(seconds=timeoffset)).__str__()
        else:
            data_tuple['scanDate'] = fEvent.get('date').__str__()
        # if fEvent.get('mtime') != None:
        #     data_tuple['modificationDate'] = (fEvent.get('mtime') + timedelta(seconds=timeoffset)).__str__()  # modificationDate
        # else:
        #     data_tuple['modificationDate'] = data_tuple['scanDate']  # scanDate
        if fEvent.get('fim_event.type') != None:
            data_tuple['event'] = fEvent.get('fim_event.type')
        if fEvent.get('path') != None:
            data_tuple['file'] = fEvent.get('path')

        if not summary:
            try:
                permissions = filemode(int(fEvent.get('perm'), 8))
            except TypeError:
                permissions = None

            if fEvent.get('size') != None:
                data_tuple['size'] = fEvent.get('size')
            if fEvent.get('md5') != None:
                data_tuple['md5'] = fEvent.get('md5')
            if fEvent.get('sha1') != None:
                data_tuple['sha1'] = fEvent.get('sha1')

            if not windows_agent:
                if fEvent.get('uid') != None:
                    data_tuple['uid'] = fEvent.get('uid')
                if fEvent.get('gid') != None:
                    data_tuple['gid'] = fEvent.get('gid')

                if fEvent.get('perm') != None:
                    data_tuple['octalMode'] = fEvent.get('perm')
                if permissions:
                    data_tuple['permissions'] = permissions

        data['items'].append(data_tuple)
    return data
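
# Usage sketch (added for illustration; values are placeholders):
if __name__ == '__main__':
    # Per-event listing for one agent, newest scan date first (default sort).
    fim = files(agent_id="001", filetype="file", limit=10)
    print(fim['totalItems'])
    # One row per file, keeping only the latest event for each path.
    fim_summary = files(agent_id="001", summary=True)
    print(len(fim_summary['items']))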
Example #9
class Base_action():
    def __init__(self):
        self.mysql = Connection()
        self.db = self.mysql.connect()
        self.list_products = []
        self.list_ch = []

    def fill_bdd(self):
        """Method to fill the database"""
        self.mysql.cur.execute(QUERY_RESET)
        self.mysql.cur.execute(QUERY_FILL_BDD)
        self.mysql.cnx.commit()

    def check_category(self):
        """Method to check if the number category exists"""
        self.mysql.cur.execute(QUERY_DISPLAY_ALL_CATEGORIES)

    def check_product(self):
        """Method to check if the number product exists"""
        self.mysql.cur.execute(QUERY_DISPLAY_ALL_PRODUCTS)

    def check_substitutes(self):
        """Method to check if there is a subsitute"""
        self.mysql.cur.execute(FIND_FOOD_SUBSTITUTED)

    def get_categories(self):
        """Method to get the categories"""
        self.mysql.cur.execute(QUERY_DISPLAY_ALL_CATEGORIES)
        for row in self.mysql.cur:
            self.list_products.append(row[1])
            print('{0} - {1}'.format(row[0], row[1]))

    def get_products(self):
        """Method to get the products"""
        self.mysql.cur.execute(QUERY_DISPLAY_ALL_PRODUCTS)
        for row in self.mysql.cur:
            if not row[2]:
                print('{0} - {1}'.format(row[0], row[1]))
            else:
                print('{0} - {1} - {2}'.format(row[0], row[1], row[2]))

    def display_details(self):
        """Method display products details"""
        for row in self.mysql.cur:
            print(('Nom : {0}' + "\n"
                   'Marque : {1}' + "\n"
                   'Score nutritionnel : {2}' + "\n"
                   'Calories : {3} kcal' + "\n"
                   'Sucres : {4} g' + "\n"
                   'Sels : {5} g' + "\n"
                   'Lipides : {6} g' + "\n"
                   'Protéines : {7} g' + "\n"
                   'Description : {8}' + "\n"
                   'Disponible en : {9}' + "\n"
                   'Image : {10}' + "\n"
                   'Lien : {11}' + "\n"
                   'Magasins : {12}' + "\n"
                   'Catégorie : {13} ({i})').format(row[1], row[2], row[3].upper(), row[4], \
                    row[5], row[6], row[7], row[8], row[9], row[10], row[11], row[12], \
                    row[13], row[14], i = self.list_ch[row[14] - 1]))

    def get_product_details(self):
        """Method to get product details"""
        self.mysql.cur.execute(
            QUERY_DISPLAY_PRODUCT_DETAILS.format(f1=choice_third_level))
        self.display_details()

    def insert_substitutes(self):
        """Method to insert the substitutes"""
        self.mysql.cur.execute(FIND_A_SUBSTITUTE)
        # Fetch the rows first: re-executing on the same cursor inside the
        # loop would otherwise invalidate the result set being iterated.
        for row in self.mysql.cur.fetchall():
            self.mysql.cur.execute(INSERT_A_SUBSTITUTE.format(
                table1="favourite", table2="product", substitute=row[0]))
        self.mysql.cnx.commit()

    def display_substitutes(self):
        """Method to display a substitute"""
        self.mysql.cur.execute(FIND_FOOD_SUBSTITUTED)
        self.display_details()

    def replace_characters(self):
        """Method to replace characters categories"""
        for product in self.list_products:
            product = product.replace(' ', "-")
            product = product.replace('‘', "-")
            product = product.replace('à', "a")
            product = product.lower()
            self.list_products = []
            self.list_ch.append(product)

    def insert_products(self):
        """Method to insert the products"""
        self.replace_characters()
        r = requests.get("https://fr.openfoodfacts.org/cgi/search.pl?action=process&tagtype_0= \
            categories&tag_contains_0=contains&tag_0="                                                       + self.list_ch[choice_second_level - 1] + \
            "&sort_by=unique_scans_n&page_size=1000&axis_x=energy&axis_y=products_n&action= \
            display&json=1"                           )
        result = json.loads(r.text)
        for i in range(len(result["products"])):
            self.iD = i + 1
            try:
                self.brand = result["products"][i]["brands"]
                self.nutri_score = result["products"][i]["nutrition_grade_fr"]
                self.calories = float(
                    result["products"][i]["nutriments"]["energy_100g"])
                # Kilojoules(kJ) to calories(cal)
                self.calories /= 4.184
                self.sugars = result["products"][i]["nutriments"][
                    "sugars_100g"]
                self.salts = result["products"][i]["nutriments"]["salt_100g"]
                self.lipids = result["products"][i]["nutriments"]["fat_100g"]
                self.proteins = result["products"][i]["nutriments"][
                    "proteins_100g"]
                self.location_available = result["products"][i]["countries"]
                self.category_id = choice_second_level
                if result["products"][i]["product_name_fr"] == "":
                    result["products"][i]["product_name"]
                else:
                    self.name = result["products"][i]["product_name_fr"]
                self.description = result["products"][i]["generic_name"]
                self.url_image = result["products"][i]["image_small_url"]
                self.url_page = result["products"][i]["url"]
                self.stores = result["products"][i]["stores"]
                val = (
                    self.iD, self.name, self.brand, self.nutri_score, self.calories, \
                    self.sugars, self.salts, self.lipids,\
                    self.proteins, self.description, self.location_available, self.url_image, \
                    self.url_page, self.stores, self.category_id)
                self.mysql.cur.execute(
                    QUERY_INSERT_ALL_PRODUCTS.format(table="product", f1="iD", f2="name", \
                    f3="brand", f4="nutri_score", f5="calories", f6="sugars", f7="salts", \
                    f8="lipids", f9="proteins", f10="description", \
                    f11="location_available", f12="url_image", f13="url_page",\
                    f14="stores", f15="category_id"), val)
                self.mysql.cnx.commit()
            except:
                pass
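
# Usage sketch (added for illustration): the query constants (QUERY_RESET,
# QUERY_FILL_BDD, ...) and globals such as `choice_second_level` are assumed
# to be defined elsewhere in the project.
if __name__ == '__main__':
    action = Base_action()
    action.fill_bdd()        # reset and repopulate the database
    action.get_categories()  # prints "<id> - <name>" for every category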
Example #10
File: __init__.py  Project: ejrh/filesys
class Filesys(object):

    def __init__(self):
        self.db = None


    def connect(self, connstr = None):
        self.db = Connection(connstr)
        self.db.connect()
        self.prepare_queries()


    def prepare_queries(self):
        self.db.prepare("""PREPARE get_latest_rev AS SELECT MAX(rev_id) AS rev_id FROM revision""")
        
        self.db.prepare("""PREPARE get_root(INTEGER) AS SELECT rev_id, time, id, name, size, modified FROM revision JOIN file ON root_id = id WHERE rev_id = $1""")
        
        self.db.prepare("""PREPARE get_all_roots AS SELECT rev_id,time, id, name, size, modified FROM revision JOIN file ON root_id = id""")
        
        self.db.prepare("""PREPARE get_subdirs(INTEGER) AS SELECT id, name, size, modified, md5, children, descendants, free_space, total_space FROM file_in_dir JOIN file ON file_id = id NATURAL JOIN directory NATURAL LEFT JOIN drive WHERE dir_id = $1 ORDER BY LOWER(name)""")
        
        self.db.prepare("""PREPARE get_children(INTEGER) AS SELECT id, name, size, modified, md5, children, descendants, free_space, total_space FROM file_in_dir JOIN file ON file_id = id NATURAL LEFT JOIN directory NATURAL LEFT JOIN drive WHERE dir_id = $1 ORDER BY LOWER(name)""")


    def get_latest_rev(self):
        self.db.execute("""EXECUTE get_latest_rev""")
        return self.db.fetchone()['rev_id']


    def get_root(self, rev_id):
        params = {'rev_id': rev_id}
        self.db.execute("""EXECUTE get_root(%(rev_id)s)""", params)
        row = self.db.fetchone()
        
        return Item(row)


    def get_all_roots(self):
        params = {}
        self.db.execute("""EXECUTE get_all_roots""", params)
        
        rv = []
        for row in self.db.fetchall():
            rv.append(Item(row))
        
        return rv


    def get_subdirs(self, id):
        params = {'id': id}
        self.db.execute("""EXECUTE get_subdirs(%(id)s)""", params)
        
        rv = []
        for row in self.db.fetchall():
            rv.append(Item(row))
        
        return rv


    def get_children(self, id):
        params = {'id': id}
        self.db.execute("""EXECUTE get_children(%(id)s)""", params)
        
        rv = []
        for row in self.db.fetchall():
            rv.append(Item(row))
        
        return rv
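
# Usage sketch (added for illustration; the connection string is a placeholder
# and Item's interface is assumed from context):
if __name__ == '__main__':
    fs = Filesys()
    fs.connect('dbname=filesys')
    root = fs.get_root(fs.get_latest_rev())
    print(root)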