import os
import glob
import math
import yara
from config import INTEL_DIR, CUSTOM_RULES_DIR, MAL_DIR
from logger import logging, LOG_FILE, FORMATTER, TIMESTAMP, LOG_LEVEL
# Module-level logger wired to the project's shared logging settings.
logger = logging.getLogger(__name__)
logger.setLevel(LOG_LEVEL)

# FORMATTER is the record format string and TIMESTAMP the date format,
# both imported from the project's `logger` module.
formatter = logging.Formatter(FORMATTER, TIMESTAMP)

# Every record from this module is appended to the shared LOG_FILE.
file_handler = logging.FileHandler(LOG_FILE)
file_handler.setFormatter(formatter)

logger.addHandler(file_handler)


class YaraCreate:
    """Yara_create

    There are 3 defined functions to craft a Yara Rule
    - new_rule(rule_name)
    - add_meata(value, key)
    - add_strings(strings, identifier, condition=None)

    E.g Yara Rule

        rule with_attachment {
                meta:
                        author = "Antonio Sanchez <*****@*****.**>"
                        reference = "http://laboratorio.blogs.hispasec.com/"
Example #2
0
# ============================================================================
# GameMode Class
# ============================================================================

from logger import logging, log
import time
from AudioPlay import AudioPlay

# Dedicated logger for battery monitoring, writing to a fixed on-device path.
# NOTE(review): no formatter and no explicit level are set on this logger, so
# records use the logging defaults — confirm that is intentional.
battLog = logging.getLogger('BATTERY')
batt_handler = logging.FileHandler(
    '/home/pi/Documents/ovaom/logs/battery_monitor.log')
battLog.addHandler(batt_handler)

INACTIVE_THRESHOLD = 5  # in seconds


class GameMode(object):
    ''' GameMode Class: puzzleMode and jungleMode must inherit from this class '''

    # State records for four instruments (indices 0-3).
    # NOTE(review): `instrument`, `prev_offline_objects` and `prevMonitoring`
    # are CLASS attributes, so every GameMode instance (and every subclass)
    # shares the same objects — confirm this sharing is intended rather than
    # per-instance state initialised in __init__.
    instrument = [{
        'active': 0,
        'maxPreset': 1,
        'currentPreset': 0,
        'lastSeen': -1,  # presumably a last-activity timestamp; -1 = never seen — TODO confirm
        'battery': 0,
    } for i in range(0, 4)]
    prev_offline_objects = []
    prevMonitoring = 0

    def __init__(self, net):
        # Network handle supplied by the caller; its exact type is not
        # visible from this file.
        self.net = net
Example #3
0
        if incremental_run:
            write_mode = "append"
        df.write.format("mongo").mode(write_mode).option(
            "database", database).option("collection", collection).save()
        logging.info('Write to MongoDB completed successfully')

    except Exception as e:
        logging.error('Error in write_to_mongo() function: {0}'.format(e))
        raise e


if __name__ == "__main__":
    try:
        #Initializes logger
        logger = logging.getLogger()
        fhandler = logging.FileHandler(filename='user_count_by_day.log',
                                       mode='w')
        formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        fhandler.setFormatter(formatter)
        logger.addHandler(fhandler)
        logger.setLevel(logging.INFO)

        #Parses the arugment provided from the command line.
        parser = argparse.ArgumentParser()
        parser.add_argument("--cass_keyspace", help="keyspace")
        parser.add_argument("--cass_table", help="table")
        parser.add_argument("--mongo_db", help="Mongo db")
        parser.add_argument("--mongo_collection", help="Mongo collection")
        parser.add_argument("--incremental_run",
                            help="Full table load or incremental run")
Example #4
0
    df = spark.read.option("delimiter", ",").option('header','true').csv(path)

    #Conver the event_time field to timestamp
    df = df.withColumn('event_time', to_timestamp(df['event_time'], format='yyyy-MM-dd HH:mm:ss z'))

    #Derive the field 'year' and 'week' as it serves as the partition key to the cassandra table.
    df = df.withColumn('year',F.year(df.event_time))
    df = df.withColumn("week", F.date_format(F.col("event_time"), "w"))
    return df


if __name__ == "__main__":
    try:
        #Initializes logger
        logger = logging.getLogger()
        fhandler = logging.FileHandler(filename='load_data_into_cassandra.log', mode='w')
        formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        fhandler.setFormatter(formatter)
        logger.addHandler(fhandler)
        logger.setLevel(logging.INFO)
        
        parser = argparse.ArgumentParser()
        parser.add_argument("--cass_keyspace", help="keyspace")
        parser.add_argument("--cass_table", help="table")
        parser.add_argument("--incremental_run", help="Full table load or incremental run")
        parser.add_argument("--csv_file", help="input file")
        
        #Parses the arugment provided from the command line.
        args = parser.parse_args()
        if not (args.cass_keyspace and args.cass_table and args.incremental_run and args.csv_file):
            logging.error("Command line arguments are missing. Possibly --cass_keyspace --cass_table --csv_file --incremental_run ")