Example #1
    def _load(self, account):
        self.account = account

        # Create home directory
        utils.make_dir('')
        self.configfile = utils.get_root_filename('config.json')

        # Create user directory
        userfolder = "%s.%s" % (account['username'], account['api'])
        utils.make_dir(userfolder)

        self.msg.info(
            self.name, 'Trackma v{0} - using account {1}({2}).'.format(
                utils.VERSION, account['username'], account['api']))
        self.msg.info(self.name, 'Reading config files...')
        try:
            self.config = utils.parse_config(self.configfile,
                                             utils.config_defaults)
        except IOError:
            raise utils.EngineFatal("Couldn't open config file.")

        # Load hook file
        if os.path.exists(utils.get_root_filename('hook.py')):
            import sys
            sys.path[0:0] = [utils.get_root()]
            try:
                self.msg.info(self.name, "Importing user hooks (hook.py)...")
                global hook
                import hook
                self.hooks_available = True
            except ImportError:
                self.msg.warn(self.name, "Error importing hooks.")
            del sys.path[0]
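
The helpers above all resolve against a single application root. A minimal sketch of what they might look like, assuming the root lives at ~/.trackma (the exact location is an assumption, not shown in the snippet):

import os

def get_root():
    # Assumed root directory; the real helper may resolve an XDG path instead.
    return os.path.expanduser('~/.trackma')

def get_root_filename(filename):
    return os.path.join(get_root(), filename)

def make_dir(directory):
    # Note that make_dir('') creates the root directory itself.
    path = os.path.join(get_root(), directory)
    if not os.path.isdir(path):
        os.makedirs(path)
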
Example #2
def main():
    parser = _get_arg_parser()
    args = parser.parse_args()

    try:
        # Validate CLI arguments
        _validate_args(args)

        # Retrieve CLI arguments
        fn = args.file
        hash_value = args.hash

        # Parse input file
        root = utils.get_root(fn)

        # Build dicts
        _init_dicts(root)

        # Evaluate hash observables
        _evaluate(root, hash_value)

        # Find matching Indicators
        results = _get_matching_indicators()

        # Print results
        _print_results(results)

    except errors.ArgumentError as ex:
        _error(ex)
        sys.exit(EXIT_FAILURE)
    except Exception as ex:
        logging.exception(ex)
        sys.exit(EXIT_FAILURE)
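
For context, a plausible shape for _get_arg_parser, consistent with the args.file and args.hash reads in main() above (the flag names and help strings are assumptions):

import argparse

def _get_arg_parser():
    # Hypothetical parser; the real one lives elsewhere in the project.
    parser = argparse.ArgumentParser(
        description='Evaluate a hash against the observables in an input file')
    parser.add_argument('--file', required=True, help='input file to parse')
    parser.add_argument('--hash', required=True, help='hash value to evaluate')
    return parser
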
Example #3
    def test_browser(self):
        src_dir = os.path.dirname(__file__)
        dst_dir = utils.get_root()

        shutil.copyfile("%s/test_browser.html" % src_dir, "%s/test_browser.html" % dst_dir)
        shutil.copyfile("%s/test_browser.js"   % src_dir, "%s/test_browser.js"   % dst_dir)

        utils.create_dir("a-dir")

        webbrowser.open("http://localhost:%d/test_browser.html" % utils.get_port())

        # Poll for up to 5 seconds while the in-browser test writes its results file
        print("waiting for response from browser test: ")
        contents = None
        wait = 5
        while wait > 0:
            contents = utils.read_file("test_browser.results.txt")
            if contents:
                break

            print("%d" % wait)
            time.sleep(1)
            wait -= 1

        self.assertIsNotNone(contents)
        self.assertEqual("OK", contents)
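
The polling loop counts on utils.read_file returning a falsy value until the browser has written its results. A minimal sketch of that assumed contract:

import os

def read_file(name):
    # Hypothetical helper: returns the file's text, or None while the
    # browser has not yet produced it.
    path = os.path.join(get_root(), name)
    try:
        with open(path) as f:
            return f.read()
    except IOError:
        return None
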
Example #4
import os
import warnings

import numpy as np

from sklearn.model_selection import train_test_split
from sklearn.metrics import roc_auc_score

from keras.models import Model
from keras.layers import Input, Dense, Embedding, SpatialDropout1D, concatenate
from keras.layers import GRU, Bidirectional, GlobalAveragePooling1D, GlobalMaxPooling1D
from keras.preprocessing import text, sequence
from keras.callbacks import TensorBoard

from utils import get_logger, get_root

np.random.seed(42)
warnings.filterwarnings('ignore')
os.environ['OMP_NUM_THREADS'] = '4'

DIR_ROOT = get_root()
DIR_ASSETS = os.path.join(DIR_ROOT, 'assets')
MODEL_PATH = os.path.join(DIR_ASSETS, 'model')
LOG_PATH = os.path.join(DIR_ASSETS, 'tb_logs')
EMBEDDING_FILE = os.path.join(DIR_ASSETS, 'embedding',
                              'fasttext-crawl-300d-2m', 'crawl-300d-2M.vec')
DATA_FILE = os.path.join(DIR_ASSETS, 'data', 'train.csv')

MAX_FEATURES = 30000
MAXLEN = 100
EMBED_SIZE = 300
TRAIN_SIZE = 0.95
BATCH_SIZE = 32
EPOCHS = 2
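
The imports and constants above suggest a pooled bidirectional-GRU classifier over pretrained fastText embeddings. A minimal sketch of that architecture; the layer sizes, dropout rate, and six-label output are assumptions, not values from the original script:

def get_model(embedding_matrix):
    inp = Input(shape=(MAXLEN,))
    # Frozen pretrained embeddings loaded from EMBEDDING_FILE.
    x = Embedding(MAX_FEATURES, EMBED_SIZE,
                  weights=[embedding_matrix], trainable=False)(inp)
    x = SpatialDropout1D(0.2)(x)
    x = Bidirectional(GRU(80, return_sequences=True))(x)
    # Concatenate average- and max-pooled recurrent states.
    x = concatenate([GlobalAveragePooling1D()(x), GlobalMaxPooling1D()(x)])
    out = Dense(6, activation='sigmoid')(x)  # 6 output labels is an assumption
    model = Model(inputs=inp, outputs=out)
    model.compile(loss='binary_crossentropy', optimizer='adam',
                  metrics=['accuracy'])
    return model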

Example #5
def main():
    # Training settings
    parser = argparse.ArgumentParser(description='Embedding extraction module')
    parser.add_argument('--net',
                        default='lenet5',
                        help='DNN name (default=lenet5)')
    parser.add_argument('--root',
                        default='data',
                        help='rootpath (default=data)')
    parser.add_argument('--dataset',
                        default='imagenet',
                        help='dataset (default=imagenet)')
    parser.add_argument('--tensor_folder',
                        default='tensor_pub',
                        help='tensor_folder (default=tensor_pub)')
    parser.add_argument('--layer-info',
                        default='layer_info',
                        help='layer-info (default=layer_info)')
    parser.add_argument('--gpu-id',
                        default='1',
                        type=str,
                        help='id(s) for CUDA_VISIBLE_DEVICES')
    parser.add_argument('-j',
                        '--workers',
                        default=8,
                        type=int,
                        metavar='N',
                        help='number of data loading workers (default: 8)')
    parser.add_argument('-b',
                        '--batch-size',
                        default=1,
                        type=int,
                        metavar='N',
                        help='should be 1')
    args = parser.parse_args()
    use_cuda = True
    # Define what device we are using
    print("CUDA Available: ", torch.cuda.is_available())

    root = args.root
    dataset = args.dataset
    net = args.net
    tensor_folder = args.tensor_folder
    layers, cols = utils.get_layer_info(root, dataset, net, args.layer_info)
    print(dataset)
    print(root, dataset, net)

    os.environ['CUDA_VISIBLE_DEVICES'] = args.gpu_id

    if dataset.startswith('imagenet'):
        if net == 'resnet50':
            model = utils.load_resnet50_model(True)
        elif net == 'vgg16':
            model = utils.load_vgg_model(pretrained=True, net=net)
        else:
            model = utils.load_resnet_model(pretrained=True)

        sub_models = utils.load_imagenet_sub_models(
            utils.get_model_root(root, dataset, net), layers, net, cols)
        # sub_models = utils.load_resnet_sub_models(utils.get_model_root(root,
        # dataset, net), layers, net)
        test_loader = utils.load_imagenet_test(args.batch_size, args.workers)
        anatomy(model, sub_models, test_loader, root, dataset, tensor_folder,
                net, layers)

    else:  # cifar10, cifar100, mnist
        device = torch.device("cuda" if (
            use_cuda and torch.cuda.is_available()) else "cpu")
        nclass = 10
        if dataset == 'cifar100':
            nclass = 100
        model = utils.load_model(
            net, device, utils.get_pretrained_model(root, dataset, net),
            dataset)
        weight_models = utils.load_weight_models(
            net, device, utils.get_model_root(root, dataset, net), layers,
            cols, nclass)
        if dataset == 'mnist':
            train_loader, test_loader = utils.load_mnist(
                utils.get_root(root, dataset, 'data', net))
        elif dataset == 'cifar10':
            train_loader, test_loader = utils.load_cifar10(
                utils.get_root(root, dataset, 'data', net))
        elif dataset == 'cifar100':
            train_loader, test_loader = utils.load_cifar100(
                utils.get_root(root, dataset, 'data', net))
        else:  # default: mnist
            train_loader, test_loader = utils.load_mnist(
                utils.get_root(root, dataset, 'data', net))
        anatomy(model, weight_models, test_loader, root, dataset,
                tensor_folder, net, layers)
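
Unlike the zero-argument variants in the other examples, utils.get_root here takes path components. A hypothetical sketch of how it might compose the per-dataset data directory (the component ordering is an assumption):

import os

def get_root(root, dataset, folder, net):
    # e.g. get_root('data', 'mnist', 'data', 'lenet5') -> 'data/mnist/lenet5/data'
    return os.path.join(root, dataset, net, folder)
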
Example #6
def test_api_version():
    rootobject = utils.get_root()
    assert rootobject['api_version'] >= 2
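
A plausible implementation of utils.get_root for this test, assuming it fetches the API's root resource over HTTP and returns the parsed JSON (API_URL is a placeholder, not taken from the test):

import requests

API_URL = 'http://localhost:8000'

def get_root():
    # Fetch the API root document and return its JSON body.
    response = requests.get(API_URL + '/')
    response.raise_for_status()
    return response.json()
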
Example #7
# date: Dec. 3, 2018

# modules
import os
import sys

# add current directory to sys.path
# needed for Flask app to work
current_dir = os.path.dirname(os.path.realpath(__file__))
sys.path.append(current_dir)

# custom modules
from train_classifier import Preprocess  # need to import the Preprocess class as reference for unpickling
from utils import get_root, load_pipeline, get_logger

ROOT_DIR = get_root()
MODEL_PATH = os.path.join(ROOT_DIR, 'assets', 'model')
PREPROCESSOR_FILE = os.path.join(MODEL_PATH, 'preprocessor.pkl')
ARCHITECTURE_FILE = os.path.join(MODEL_PATH, 'gru_architecture.json')
WEIGHTS_FILE = os.path.join(MODEL_PATH, 'gru_weights.h5')


class PredictionPipeline(object):
    def __init__(self, preprocessor, model):
        self.preprocessor = preprocessor
        self.model = model

    def predict(self, text):
        features = self.preprocessor.transform_texts(text)
        pred = self.model.predict(features)
        return pred
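
One way the pickled preprocessor and the Keras files above could be assembled into a working pipeline, using standard pickle and Keras loading calls rather than the project's load_pipeline helper, whose signature is not shown here:

import pickle

from keras.models import model_from_json

# Unpickling works because Preprocess is imported above as a reference.
with open(PREPROCESSOR_FILE, 'rb') as f:
    preprocessor = pickle.load(f)

with open(ARCHITECTURE_FILE) as f:
    model = model_from_json(f.read())
model.load_weights(WEIGHTS_FILE)

pipeline = PredictionPipeline(preprocessor, model)
predictions = pipeline.predict(['an example comment to score'])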