Example #1
def main(configs, args):
    global net, dataloader, optimizer, lr_scheduler, writer, epochs, logger
    best_acc = 0

    torch.manual_seed(6666)
    configs = init_configs(configs)
    net = build_model(configs)
    net = init_model(net, configs)
    net = net.cuda().train()
    print(net)

    if args.debug:
        configs.log_dir = os.path.join('debug', configs.log_dir)
        configs.ckpt.save_config_path = os.path.join('debug', configs.ckpt.save_config_path)
        configs.ckpt.save_model_path = os.path.join('debug', configs.ckpt.save_model_path)
        configs.ckpt.save_optim_path = os.path.join('debug', configs.ckpt.save_optim_path)

    check_dir(configs.log_dir)
    if not configs.do_test:
        config_path = configs.ckpt.save_config_path
        torch.save({'configs': configs}, os.path.join(config_path, 'configs.pth'))

    logger = create_logger(configs.log_dir, configs.cfg_name)
    writer = SummaryWriter(configs.log_dir)

    for name, param in net.named_parameters():
        print('%s required grad is %s' % (name, param.requires_grad))

    dataloader = build_dataset(configs)
    optimizer = build_optimizer(net.parameters(), configs.optimizer)
    optimizer = init_optim(optimizer, configs)
    lr_scheduler = get_lr_scheduler(configs.training)

    max_iterations = configs.training.max_episodes
    test_every_iterations = configs.testing.test_every_episodes
    for iteration in range(1, max_iterations + 1):
        try:
            if iteration % test_every_iterations == 0 or configs.do_test or (args.debug and args.debug_test):
                epochs += 1
                acc = test('test', configs)
                optim_path = configs.ckpt.save_optim_path
                model_path = configs.ckpt.save_model_path
                z, images, labels = extract_features('test', configs)
                if not configs.do_test:
                    torch.save({'model': net.state_dict()}, os.path.join(model_path, 'model_%d.pth' % iteration))
                    torch.save({'optim': optimizer.state_dict()}, os.path.join(optim_path, 'optim_%d.pth' % iteration))
                    torch.save({'z': z.numpy(),
                                'labels': labels,
                                'images': images},
                               os.path.join(model_path, 'results_%d.pth' % iteration))
                    if acc > best_acc:
                        best_acc = acc
                        torch.save({'model': net.state_dict()}, os.path.join(model_path, 'model_best.pth'))
                        torch.save({'optim': optimizer.state_dict()}, os.path.join(optim_path, 'optim_best.pth'))
                if configs.do_test or (args.debug and args.debug_test):
                    return
            train(iteration, configs)
        except KeyboardInterrupt:
            import ipdb
            ipdb.set_trace()
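None of these examples show create_logger itself. The sketch below is an assumption about what a helper with the two-argument signature used here (a log directory plus a name) typically does; it is not the implementation used by these projects.

import logging
import os


def create_logger(log_dir, name):
    """Hypothetical sketch; the real create_logger behind these examples is not shown."""
    logger = logging.getLogger(name)
    logger.setLevel(logging.INFO)
    if not logger.handlers:  # avoid attaching duplicate handlers on repeated calls
        formatter = logging.Formatter('%(asctime)s %(name)s %(levelname)s: %(message)s')
        file_handler = logging.FileHandler(os.path.join(log_dir, '%s.log' % name))
        file_handler.setFormatter(formatter)
        console_handler = logging.StreamHandler()
        console_handler.setFormatter(formatter)
        logger.addHandler(file_handler)
        logger.addHandler(console_handler)
    return logger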
Example #2
def make_global_parameters(hparams):
    torch.manual_seed(hparams.seed)

    global logger
    main_info = hparams.main_info
    model_info = hparams.model_info
    log_dir = main_info['log_dir']
    logger = create_logger(log_dir, model_info['name'])
    logger.info(hparams._items)

    global saver
    save_path = main_info['save_dir']
    saver = Saver(1e30, 'ENTROPY', hparams, save_path)
Example #3
def main() -> None:
    parser = ArgumentParser()

    parser.add_argument(
        'applications',
        help=
        "Path to the directory, where applications are installed. This folder will be traversed recursively.",
    )
    parser.add_argument(
        'output',
        help="Path to where the results should be stored",
    )

    args = parser.parse_args()

    applications_directory = os.path.expanduser(args.applications)
    output_directory = os.path.expanduser(args.output)

    if not os.path.exists(applications_directory):
        print("Directory does not exist: {}".format(applications_directory),
              file=sys.stderr)
        exit(1)

    exit_watcher = SignalIntelligence()

    logger = create_logger('appnalyser_driver')

    logger.info("appnalyser_driver starting")

    for application_directory in iterate_applications(applications_directory):
        if exit_watcher.should_exit:
            break

        appnalyse(application_directory, applications_directory,
                  output_directory, logger)

    logger.info("appnalyser_driver stopping")
Example #4
from misc.logger import create_logger
import logging

import abc
from enum import Enum, auto


class ResultCount(Enum):
    NONE_OR_SINGLE = auto()
    SINGLE         = auto()
    MULTIPLE       = auto()


# This top-level logger is not used directly. Its settings (formatting, output file, ...)
# are used by the lower-level loggers in the individual classes (see the sketch after this example).
module_logger = create_logger("extractor")


class AbstractExtractor(abc.ABC):
    """Abstract base class for the plugin infrastructure"""
    def __init__(self):
        self.logger = logging.getLogger("extractor.{}".format(self.__class__.resource_type()))

    @classmethod
    @abc.abstractmethod
    def resource_type(cls):
        """The type of resource extracted by the particular resource.
        This should be a single word only -- meaning something like "info", "executable", ...
        This resource type is used for better log messages."""
        pass
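For illustration, a concrete plugin could look like the sketch below; the class name and method are invented, not part of the original code. Because the child logger "extractor.info" gets no handlers of its own, its records propagate up to the top-level "extractor" logger configured by create_logger above.

class InfoExtractor(AbstractExtractor):
    """Hypothetical concrete extractor, shown only to illustrate the plugin pattern."""

    @classmethod
    def resource_type(cls):
        # A single word; it becomes the child logger name "extractor.info"
        return "info"

    def extract_data(self, app_path):
        # Hypothetical method: messages emitted here flow up to the "extractor" logger
        self.logger.info("extracting info from %s", app_path)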
Example #5
import lief

from .common import extract_rpaths, resolve_library_path, load_cmd_is_weak
from .lief_extensions import macho_parse_quick

import os
import subprocess
import tempfile

from misc import plist
from misc.logger import create_logger
from extern.tools import tool_named

logger = create_logger('binary')


class Binary:
    """
    Wrapper class around lief binary object. Supports extracting libraries for an application
    and allows extracting entitlements from binary. Note that lief is rather slow when parsing
    binaries. Therefore, some operations are implemented as static methods and do not use lief.
    """
    def __init__(self, filepath, loader_path=None, executable_path=None):
        try:
            if not lief.is_macho(filepath):
                raise ValueError("Executable has wrong format")
            self.filepath = filepath
            self.containing_folder = os.path.dirname(filepath)
            self.parsed_binary = macho_parse_quick(filepath)

            # For more information about @loader_path and @executable_path,
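The example is cut off above. On the docstring's point that lief is slow to parse binaries, a lief-free check can be as cheap as reading the Mach-O magic number; the helper below is a hedged sketch, not a method of the original class.

import struct

# Mach-O magic values: thin 32/64-bit and fat headers, in both byte orders
_MACHO_MAGICS = {0xfeedface, 0xcefaedfe, 0xfeedfacf, 0xcffaedfe, 0xcafebabe, 0xbebafeca}


def looks_like_macho(filepath):
    """Hypothetical cheap check that avoids parsing the whole binary with lief."""
    with open(filepath, 'rb') as f:
        header = f.read(4)
    if len(header) < 4:
        return False
    # 0xcafebabe is also the Java class-file magic, so treat this as a heuristic only
    return struct.unpack('>I', header)[0] in _MACHO_MAGICS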
Example #6
def __init__(self, name: str) -> None:
    self.name = name
    self.logger = create_logger(name)
Example #7
def make_global_parameters(hparams):
    # logger_configs: output_path, cfg_name
    global logger
    logger_configs = hparams.logger_configs
    logger = create_logger(logger_configs['output_path'], logger_configs['cfg_name'])
Example #8
        - microphone, camera for now.
"""

import misc.logger as logging_utils
import misc.app_utils as app_utils
from bundle.bundle import Bundle
from extern.tools import tool_named

import termcolor
from typing import List
import argparse
import abc
import subprocess
import json

logger = logging_utils.create_logger('appnalyser')

COLOR_NEUTRAL = None
COLOR_POSITIVE = "green"
COLOR_NEGATIVE = "red"


class AbstractAppChecker(abc.ABC):
    def __init__(self):
        pass

    @classmethod
    @abc.abstractmethod
    def description(cls):
        """A concise description of the function of this checker"""
        pass
Example #9
(c) Jakob Rieck 2018

Tool to install (and purchase) apps from the MAS, in an automatic fashion
"""

import time
import argparse
import enum

from os import popen

from misc.logger import create_logger
from misc.os_support import os_is_compatible
import misc.itunes_api as itunes_api

logger = create_logger('appstaller')


class Operation(str, enum.Enum):
    PURCHASE = "purchase"
    INSTALL = "install"
    UPGRADE = "upgrade"

    def __str__(self) -> str:
        return self.value


class MacApp:
    """
    The MacApp class wraps common operations such as getting the
    current price.
Example #10
(c) Jakob Rieck 2018

Tool to search for updates for purchased apps and "purchase" newly released free apps.
After appdater runs, appxtractor can be run again to extract information from new apps.
"""

from misc.logger import create_logger
from misc.date_utils import Date
import os.path
import os
import argparse
import jsonlines
import datetime

logger = create_logger('appdater')


def infos_from_itunes_dump(dump_path):
    """Given a Mac App Store dump in jsonlines format (-> mas-crawl), extracts the information
    as a dictionary mapping bundleId to whole entry."""
    assert(os.path.isfile(dump_path))

    result = dict()

    with jsonlines.open(dump_path, mode='r') as reader:
        for obj in reader:
            if "bundleId" not in obj or "version" not in obj or "price" not in obj:
                logger.error("Object does not contain required keys. Skipping.")
                continue
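The function is cut off above, but its docstring already states that the result maps bundleId to the whole entry, so the missing tail presumably just stores each valid object and returns the dict. A self-contained sketch of that pattern, as an assumption about the omitted lines:

import os

import jsonlines


def infos_from_itunes_dump(dump_path):
    """Sketch of the full pattern; the tail of the original function is not shown above."""
    assert os.path.isfile(dump_path)
    result = dict()
    with jsonlines.open(dump_path, mode='r') as reader:
        for obj in reader:
            if "bundleId" not in obj or "version" not in obj or "price" not in obj:
                continue  # the original logs an error before skipping
            result[obj["bundleId"]] = obj
    return result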
Example #11
import argparse
import signal
import sys
import tempfile

from termcolor import colored

import extractors.base

from bundle.bundle import Bundle
from misc.logger import create_logger
from misc.archives import extract_zip, extract_tar, extract_gzip
import dmglib


logger = create_logger('appxtractor')
# Instantiate the extractors once
info_extractors = [ cls() for cls in extractors.base.all_extractors() ]


class SignalIntelligence:
    """A simple class to encapsulate reacting to signals (SIGINT, SIGTERM) and to exit the program
    gracefully in the event these signals are delivered."""
    should_exit = False

    def __init__(self):
        signal.signal(signal.SIGINT, self.process_signal)
        signal.signal(signal.SIGTERM, self.process_signal)

    def process_signal(self, signum, frame):
        self.should_exit = True