Example No. 1
def check_app(app):
    failedssltest = False
    badrequests = []

    # Get mitmproxy log file location
    if app.endswith('.log'):
        flowfile = app
        jsonfile = '%s.%s' % (flowfile, json_output)
        if os.path.exists(flowfile):
            sys.stdout = Logger('%s.%s' % (flowfile, report_output))
    else:
        flowfile = os.path.join('results', app, 'ssltest.log')
        jsonfile = os.path.join(os.path.dirname(flowfile), json_output)
        if os.path.exists(flowfile):
            sys.stdout = Logger(os.path.join('results', app, report_output))

    if os.path.exists(flowfile):
        badsslmsgs = []

        with open(flowfile, "rb") as logfile:
            freader = io.FlowReader(logfile)
            pp = pprint.PrettyPrinter(indent=4)
            try:
                for msg in freader.stream():
                    scheme = msg.request.scheme
                    if scheme == 'https':
                        failedssltest = True
                        badsslmsgs.append(msg)
                if failedssltest:
                    ssl_failed.append(app)
                    print(
                        color.bright(
                            '%s fails to validate SSL certificates properly' %
                            app))
                    print('Offending URIs accessed:')
                    for msg in badsslmsgs:
                        method = msg.request.method
                        uri = msg.request.pretty_url
                        request = '%s %s' % (method, uri)
                        badrequests.append(request)
                        request = color.bright(color.red((request)))
                        print(request)
                else:
                    print('No HTTPS traffic detected for app %s' % app)
                    ssl_passed.append(app)
            except FlowReadException as e:
                print("Flow file corrupted: {}".format(e))

        report = {}
        report['app'] = app
        report['testtime'] = os.path.getmtime(flowfile)
        report['failedtest'] = failedssltest
        report['ssl_failed'] = badrequests

        with open(jsonfile, 'w') as fp:
            json.dump(report, fp)

    else:
        ssl_notest.append(app)
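Several examples in this listing, starting with this one, assign a Logger instance to sys.stdout, which implies a writable, file-like object that duplicates output to a report file while still echoing it to the console. The class itself is not part of these excerpts; the following is only a minimal sketch of that tee pattern, with the constructor argument matched to the calls above.

import sys


class Logger(object):
    """File-like object that tees every write to the real stdout and a log file."""

    def __init__(self, filename):
        self.terminal = sys.__stdout__
        self.logfile = open(filename, 'w')

    def write(self, message):
        # Forward each write to both destinations
        self.terminal.write(message)
        self.logfile.write(message)

    def flush(self):
        # Needed so that print() and interpreter shutdown flush cleanly
        self.terminal.flush()
        self.logfile.flush()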
Example No. 2
def main():
    global args, searchterm

    parser = argparse.ArgumentParser(
        description='Search captured traffic for a pattern')
    parser.add_argument('app_or_capture',
                        metavar='appname',
                        help='Application name or network capture file')
    parser.add_argument('searchterm', type=str, help='String to search for')
    parser.add_argument('-v',
                        '--verbose',
                        dest='verbose',
                        action='store_true',
                        help='display packet contents')
    parser.add_argument('-m',
                        '--multi',
                        dest='multi',
                        action='store_true',
                        help='search multiple encodings')

    args = parser.parse_args()

    app = args.app_or_capture
    searchterm = args.searchterm
    appdir = os.path.join('results', app)
    search_output = get_search_outname(searchterm)
    if os.path.isdir(appdir):
        sys.stdout = Logger(os.path.join(appdir, search_output))

    if args.app_or_capture:
        # Check only one app
        # Option to use full packets perhaps specified
        if args.multi:
            check_multi(app, searchterm)
        else:
            check_app(app, searchterm)
    else:
        # Check all apps tested
        for entry in os.listdir('results'):
            if os.path.isdir(os.path.join('results', entry)):
                app = entry
                if args.multi:
                    check_multi(app, searchterm)
                else:
                    check_app(app, searchterm)
            elif os.path.isdir(os.path.join('results', entry.lower())):
                app = entry
                if args.multi:
                    check_multi(app, searchterm)
                else:
                    check_app(app, searchterm)
    print('')
    # Flush stdout log file
    sys.stdout = sys.__stdout__
    # Copy log file to universally-named one
    copy2(os.path.join(appdir, search_output),
          os.path.join(appdir, report_output))
    eprint(color.bright('Done!'))
Example No. 3
 def __init__(self, data_hash=None, uri=None, activity_archive_uri=None, name=None):
     self.class_name = self.__class__.__name__
     self.log = Logger().get_logger()
     super(Activity, self).__init__()
     if (data_hash == None and uri == None):
         self.log.info("No data provided - assuming creation of new Activity")
     elif (data_hash != None and uri != None):
         self.data_hash = data_hash
         self.uri = uri
         self.absolute_url = self._get_absolute_url()
         self.log.info("Instantiated Activity object with url=%s" % self.absolute_url)
Example No. 4
 def __init__(self, data_hash=None, uri=None):
     self.log = Logger().get_logger()
     self.class_name = self.__class__.__name__
     super(SpaceController, self).__init__()
     if data_hash == None and uri == None:
         self.log.info(
             "No data provided - assuming creation of new LiveActivity")
     else:
         self.data_hash = data_hash
         self.uri = uri
         self.absolute_url = self._get_absolute_url()
         self.log.info("Instantiated Activity object with url=%s" %
                       self.absolute_url)
Example No. 5
 def __init__(self, data_hash=None, uri=None):
     self.log = Logger().get_logger()
     self.class_name = self.__class__.__name__
     super(LiveActivityGroup, self).__init__()
     if data_hash == None and uri == None:
         self.log.info(
             "No data_hash and uri provided for LiveActivityGroup constructor, assuming creation"
         )
     else:
         self.data_hash = data_hash
         self.uri = uri
         self.absolute_url = self._get_absolute_url()
         self.log.info("Instantiated Activity object with url=%s" %
                       self.absolute_url)
Example No. 6
 def __init__(self,
              host='lg-head',
              port='8080',
              prefix='/interactivespaces',
              logfile_path='ispaces-client.log'):
     """
     :param host: default value is lg-head
     :param port: default value is 8080
     :param prefix: default value is /interactivespaces
     :todo: refactor filter_* methods because they're not DRY
     """
     self.host, self.port, self.prefix = host, port, prefix
     self.log = Logger(logfile_path=logfile_path).get_logger()
     self.uri = "http://%s:%s%s" % (self.host, self.port, prefix)
     super(Master, self).__init__()
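The Logger used in Examples No. 4 through No. 6 (and in the other interactivespaces-style constructors in this listing) is a different one: its get_logger() method returns an object with .info(), i.e. a thin wrapper around the standard logging module, and Example No. 6 additionally accepts a logfile_path. That wrapper is not included in the excerpts either; a minimal sketch consistent with those calls might look like the following, where the logger name 'ispaces' and the log format are assumptions.

import logging


class Logger(object):
    """Hands out a configured standard-library logger."""

    def __init__(self, logfile_path=None):
        # The logger name is chosen for illustration only
        self._logger = logging.getLogger('ispaces')
        if not self._logger.handlers:
            handler = (logging.FileHandler(logfile_path)
                       if logfile_path else logging.StreamHandler())
            handler.setFormatter(
                logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
            self._logger.addHandler(handler)
            self._logger.setLevel(logging.INFO)

    def get_logger(self):
        return self._logger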
Example No. 7
def deploy_logs():
    if not os.path.exists(hp.save_path):
        os.makedirs(hp.save_path)

    dir_success = False
    for sfx in range(1):  # todo legacy
        candidate_path = hp.save_path + '/' + hp.this_run_name + '_' + str(os.getpid()) + '/'
        if not os.path.exists(candidate_path):
            hp.this_run_path = candidate_path
            os.makedirs(hp.this_run_path)
            dir_success = True
            break
    if not dir_success:
        print('run name already exists!')

    sys.stdout = Logger(hp.this_run_path+'log.log')
    print('results are in:', hp.this_run_path)
    print('description: ', hp.description)
Example No. 8
def deploy_logs():
    if not os.path.exists(hp.save_path):
        os.makedirs(hp.save_path)

    dir_success = False
    for sfx in range(1):  # todo legacy
        candidate_path = hp.save_path + '/' + hp.this_run_name + '_' + str(os.getpid()) + '/'
        if not os.path.exists(candidate_path):
            hp.this_run_path = candidate_path
            os.makedirs(hp.this_run_path)
            dir_success = True
            break
    if not dir_success:
        error('run name already exists!')

    sys.stdout = Logger(hp.this_run_path+'log.log')
    print('results are in:', hp.this_run_path)
    print('description: ', hp.description)
Example No. 9
 def __init__(self,
              gan_type,
              batch_size,
              img_size,
              img_chan,
              discriminator_fn=None,
              generator_fn=None):
     self.gan_types = [
         "DCGAN", "WGAN", "WGAN-GP", "LSGAN", "SNGAN", "RSGAN", "RaSGAN"
     ]
     assert gan_type in self.gan_types, "[error] not implemented gan_type `{}` specified. choose from following.\r\n{}".format(
         gan_type, self.gan_types)
     self.gan_type = gan_type
     self.batch_size = batch_size
     self.img_size = img_size
     self.img_chan = img_chan
     self.logger = Logger()
     self.n_disc_update = 1  # number of times to update discriminator (critic)
     self._init(discriminator_fn, generator_fn)
Example No. 10
 def __init__(self, data_hash=None, uri=None):
     """
     When called with constructor_args and other vars set to None, new
     LiveActivity will be created.
     :param data_hash: should be master API liveActivity json, may be blank
     :param uri: should be a link to "view.json" of the given live activity
     """
     self.log = Logger().get_logger()
     self.class_name = self.__class__.__name__
     super(LiveActivity, self).__init__()
     if (data_hash == None and uri == None):
         self.log.info(
             "No data provided - assuming creation of new LiveActivity")
     elif (data_hash != None and uri != None):
         self.data_hash = data_hash
         self.uri = uri
         self.absolute_url = self._get_absolute_url()
         self.log.info("Instantiated LiveActivity object with url=%s" %
                       self.absolute_url)
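Given the docstring above, the two supported call patterns would look roughly like this; the JSON payload and the view.json URL below are placeholders rather than values taken from the excerpts.

# A LiveActivity that does not exist on the master yet
live_activity = LiveActivity()

# A LiveActivity instantiated from master API data
live_activity_json = {'id': '42', 'name': 'example activity'}  # placeholder data
live_activity = LiveActivity(
    data_hash=live_activity_json,
    uri='http://lg-head:8080/interactivespaces/liveactivity/42/view.json')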
Example No. 11
 def __init__(self):
     self.log = Logger().get_logger()
     super(Metadatable, self).__init__()
Example No. 12
 def __init__(self):
     self.log = Logger().get_logger()
Example No. 13
def check_app(app, fullpacket=False, force=False):
    '''
    Check application based on app name in Tapioca results
    '''

    dnscacheloaded = False
    targetscacheloaded = False
    largewarned = False

    # load local network from config
    net.set_local()

    # Get pcap file location
    if app.endswith('.pcap'):
        pcapfile = app
        if os.path.exists(pcapfile):
            sys.stdout = Logger('%s.%s' % (pcapfile, report_output))
    else:
        pcapfile = os.path.join('results', app, 'tcpdump.pcap')
        if os.path.exists(pcapfile):
            sys.stdout = Logger(os.path.join('results', app, report_output))

    if os.path.exists(pcapfile):

        pcapdir = os.path.dirname(pcapfile)
        dnspkl = os.path.join(pcapdir, '.dnsmap.pkl')
        targetspkl = os.path.join(pcapdir, '.targets.pkl')

        eprint(color.bright('Checking app %s...' % color.cyan(app)))

        if os.path.exists(dnspkl) and not force:
            eprint('Loading cached DNS info...')
            with open(dnspkl, 'rb') as pklhandle:
                try:
                    net.dnsmap = pickle.load(pklhandle)
                    net.dnsreqs = pickle.load(pklhandle)
                    dnscacheloaded = True
                except:
                    pass

        if not dnscacheloaded:
            if os.path.getsize(pcapfile) > 100000000:
                # Over 100MB
                eprint(
                    color.bright(
                        color.yellow(
                            'Warning: capture size is large. Please be patient.'
                        )))
                largewarned = True
            # Get captured DNS info for IP addresses
            eprint('Getting DNS info...')
            dnspackets = pyshark.FileCapture(pcapfile,
                                             keep_packets=False,
                                             display_filter='dns')
            dnspackets.apply_on_packets(net.get_dns_info, timeout=1000)
            with open(dnspkl, 'wb') as pklhandle:
                pickle.dump(net.dnsmap,
                            pklhandle,
                            protocol=pickle.HIGHEST_PROTOCOL)
                pickle.dump(net.dnsreqs,
                            pklhandle,
                            protocol=pickle.HIGHEST_PROTOCOL)

#        if os.path.exists(targetspkl) and not force:
#            eprint('Loading cached targets...')
#            with open(targetspkl, 'rb') as pklhandle:
#                try:
#                    net.targets = pickle.load(pklhandle)
#                    targetscacheloaded = True
#                except:
#                    pass

        if not targetscacheloaded:
            if fullpacket:
                packets = pyshark.FileCapture(pcapfile, keep_packets=False)
                # Get hosts contacted
                eprint('Getting hosts contacted...')
                packets.apply_on_packets(net.get_hosts_contacted_fullpacket,
                                         timeout=1000)
            else:
                packets = pyshark.FileCapture(pcapfile,
                                              keep_packets=False,
                                              only_summaries=True)
                # Get hosts contacted
                eprint('Getting hosts contacted...')
                packets.apply_on_packets(net.get_hosts_contacted, timeout=1000)


#                with open(targetspkl, 'wb') as pklhandle:
#                    pickle.dump(
# net.targets, pklhandle, protocol=pickle.HIGHEST_PROTOCOL)

# Print report
        generate_report(app, fullpacket=fullpacket, pcapfile=pcapfile)

        # Reset globals
        net.clear()
Example No. 14
    def __init__(self, args):

        self.input_dir = args.input_dir
        self.log_dir = args.log_dir
        self.model_dir = args.model_dir
        self.max_qst_length = args.max_qst_length
        self.max_num_ans = args.max_num_ans
        self.embed_size = args.embed_size
        self.word_embed_size = args.word_embed_size
        self.num_layers = args.num_layers
        self.hidden_size = args.hidden_size
        self.lr = args.lr
        self.step_size = args.step_size
        self.gamma = args.gamma
        self.num_epochs = args.num_epochs
        self.batch_size = args.batch_size
        self.num_workers = args.num_workers
        self.save_step = args.save_step
        self.l1_coef = args.l1_coef
        self.l2_coef = args.l2_coef
        self.save_path = args.save_path

        self.noise_dim = 100
        self.beta1 = 0.5
        self.logger = Logger('vqa-gan')
        self.checkpoints_path = 'checkpoints'

        self.data_loader = get_loader(input_dir=self.input_dir,
                                      input_vqa_train='train.npy',
                                      input_vqa_valid='valid.npy',
                                      max_qst_length=self.max_qst_length,
                                      max_num_ans=self.max_num_ans,
                                      batch_size=self.batch_size,
                                      num_workers=self.num_workers)

        qst_vocab_size = self.data_loader['train'].dataset.qst_vocab.vocab_size
        ans_vocab_size = self.data_loader['train'].dataset.ans_vocab.vocab_size
        self.ans_unk_idx = self.data_loader['train'].dataset.ans_vocab.unk2idx

        self.generator = Generator(embed_size=self.embed_size,
                                   qst_vocab_size=qst_vocab_size,
                                   ans_vocab_size=ans_vocab_size,
                                   word_embed_size=self.word_embed_size,
                                   num_layers=self.num_layers,
                                   hidden_size=self.hidden_size,
                                   img_feature_size=512).to(device)

        self.discriminator = Discriminator(
            embed_size=self.embed_size,
            ans_vocab_size=ans_vocab_size,
            word_embed_size=self.word_embed_size,
            num_layers=self.num_layers,
            hidden_size=self.hidden_size).to(device)


        paramsD = list(self.discriminator.qst_encoder.parameters()) \
                + list(self.discriminator.img_encoder.fc.parameters()) \
                + list(self.discriminator.fc1.parameters()) \
                + list(self.discriminator.fc2.parameters())

        self.optimD = torch.optim.Adam(paramsD,
                                       lr=self.lr * 2,
                                       betas=(self.beta1, 0.999))
        self.optimG = torch.optim.Adam(self.generator.parameters(),
                                       lr=self.lr,
                                       betas=(self.beta1, 0.999))
Example No. 15
 def __init__(self):
     """
        Should be responsible for communication with the API
     """
     self.log = Logger().get_logger()
Example No. 16
 def __init__(self):
     self.log = Logger().get_logger()
     super(Shutdownable, self).__init__()
Example No. 17
 def __init__(self):
     self.log = Logger().get_logger()
     super(Startupable, self).__init__()
Example No. 18
    def __init__(self,
                 generator: Union[Generator, nn.DataParallel],
                 discriminator: Union[Discriminator, nn.DataParallel],
                 training_dataset: DataLoader,
                 validation_dataset: Dataset,
                 validation_dataset_fid: DataLoader,
                 vgg16: Union[VGG16, nn.DataParallel] = VGG16(),
                 generator_optimizer: torch.optim.Optimizer = None,
                 discriminator_optimizer: torch.optim.Optimizer = None,
                 generator_loss: nn.Module = LSGANGeneratorLoss(),
                 discriminator_loss: nn.Module = LSGANDiscriminatorLoss(),
                 semantic_reconstruction_loss: nn.Module = SemanticReconstructionLoss(),
                 diversity_loss: nn.Module = DiversityLoss(),
                 save_data_path: str = 'saved_data') -> None:
        '''
        Constructor
        :param generator: (nn.Module, nn.DataParallel) Generator network
        :param discriminator: (nn.Module, nn.DataParallel) Discriminator network
        :param training_dataset: (DataLoader) Training dataset
        :param vgg16: (nn.Module, nn.DataParallel) VGG16 module
        :param generator_optimizer: (torch.optim.Optimizer) Optimizer of the generator network
        :param discriminator_optimizer: (torch.optim.Optimizer) Optimizer of the discriminator network
        :param generator_loss: (nn.Module) Generator loss function
        :param discriminator_loss: (nn.Module) Discriminator loss function
        :param semantic_reconstruction_loss: (nn.Module) Semantic reconstruction loss function
        :param diversity_loss: (nn.Module) Diversity loss function
        '''
        # Save parameters
        self.generator = generator
        self.discriminator = discriminator
        self.training_dataset = training_dataset
        self.validation_dataset = validation_dataset
        self.validation_dataset_fid = validation_dataset_fid
        self.vgg16 = vgg16
        self.generator_optimizer = generator_optimizer
        self.discriminator_optimizer = discriminator_optimizer
        self.generator_loss = generator_loss
        self.discriminator_loss = discriminator_loss
        self.semantic_reconstruction_loss = semantic_reconstruction_loss
        self.diversity_loss = diversity_loss
        self.latent_dimensions = self.generator.module.latent_dimensions \
            if isinstance(self.generator, nn.DataParallel) else self.generator.latent_dimensions
        # Init logger
        self.logger = Logger()
        # Make directories to save logs, plots and models during training
        time_and_date = str(datetime.now())
        self.path_save_models = os.path.join(save_data_path, 'models_' + time_and_date)
        if not os.path.exists(self.path_save_models):
            os.makedirs(self.path_save_models)
        self.path_save_plots = os.path.join(save_data_path, 'plots_' + time_and_date)
        if not os.path.exists(self.path_save_plots):
            os.makedirs(self.path_save_plots)
        self.path_save_metrics = os.path.join(save_data_path, 'metrics_' + time_and_date)
        if not os.path.exists(self.path_save_metrics):
            os.makedirs(self.path_save_metrics)
        # Make indexes for validation plots
        validation_plot_indexes = np.random.choice(range(len(self.validation_dataset_fid.dataset)), 49, replace=False)
        # Plot and save validation images used to plot generated images
        self.validation_images_to_plot, _, self.validation_masks = image_label_list_of_masks_collate_function(
            [self.validation_dataset_fid.dataset[index] for index in validation_plot_indexes])

        torchvision.utils.save_image(misc.normalize_0_1_batch(self.validation_images_to_plot),
                                     os.path.join(self.path_save_plots, 'validation_images.png'), nrow=7)
        # Plot masks
        torchvision.utils.save_image(self.validation_masks[0],
                                     os.path.join(self.path_save_plots, 'validation_masks.png'),
                                     nrow=7)
        # Generate latents for validation
        self.validation_latents = torch.randn(49, self.latent_dimensions, dtype=torch.float32)
        # Log hyperparameter
        self.logger.hyperparameter['generator'] = str(self.generator)
        self.logger.hyperparameter['discriminator'] = str(self.discriminator)
        self.logger.hyperparameter['vgg16'] = str(self.vgg16)
        self.logger.hyperparameter['generator_optimizer'] = str(self.generator_optimizer)
        self.logger.hyperparameter['discriminator_optimizer'] = str(self.discriminator_optimizer)
        self.logger.hyperparameter['generator_loss'] = str(self.generator_loss)
        self.logger.hyperparameter['discriminator_loss'] = str(self.discriminator_loss)
        self.logger.hyperparameter['diversity_loss'] = str(self.diversity_loss)
        self.logger.hyperparameter['semantic_reconstruction_loss'] = str(self.semantic_reconstruction_loss)
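The Logger in this constructor is touched only through its hyperparameter dictionary (the same pattern appears again in the near-identical constructor later in this listing), so at minimum it is a plain container of that shape. Below is a minimal sketch consistent with that usage; the save_hyperparameter helper and the JSON file name are assumptions added for illustration.

import json
import os


class Logger(object):
    """Collects hyperparameters recorded during training."""

    def __init__(self):
        self.hyperparameter = dict()

    def save_hyperparameter(self, path):
        # Hypothetical helper: persist the recorded hyperparameters as JSON
        with open(os.path.join(path, 'hyperparameter.json'), 'w') as file:
            json.dump(self.hyperparameter, file, indent=4)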
Example No. 19
 def __init__(self):
     self.log = Logger().get_logger()
     super(Activatable, self).__init__()
Example No. 20
def check_app(app, force=False):
    '''
    Check application based on app name in Tapioca results
    '''

    dnscacheloaded = False
    largewarned = False

    # Get pcap file location
    if app.endswith('.pcap'):
        pcapfile = app
        if os.path.exists(pcapfile):
            sys.stdout = Logger('%s.%s' % (pcapfile, report_output))
    else:
        pcapfile = os.path.join('results', app, 'tcpdump.pcap')
        if os.path.exists(pcapfile):
            sys.stdout = Logger(os.path.join('results', app, report_output))

    if os.path.exists(pcapfile):

        pcapdir = os.path.dirname(pcapfile)
        dnspkl = os.path.join(pcapdir, '.dnsmap.pkl')

        eprint(color.bright('Checking app %s...' % color.cyan(app)))

        if os.path.exists(dnspkl) and not force:
            eprint('Loading cached DNS info...')
            with open(dnspkl, 'rb') as pklhandle:
                try:
                    net.dnsmap = pickle.load(pklhandle)
                    dnscacheloaded = True
                except:
                    pass

        if not dnscacheloaded:
            if os.path.getsize(pcapfile) > 100000000:
                # Over 100MB
                eprint(
                    color.bright(
                        color.yellow(
                            'Warning: capture size is large. Please be patient.'
                        )))
                largewarned = True

            # Get captured DNS info for IP addresses
            eprint('Getting DNS info...')
            dnspackets = pyshark.FileCapture(pcapfile,
                                             keep_packets=False,
                                             display_filter='dns')
            dnspackets.apply_on_packets(net.get_dns_info, timeout=1000)
            with open(dnspkl, 'wb') as pklhandle:
                pickle.dump(net.dnsmap,
                            pklhandle,
                            protocol=pickle.HIGHEST_PROTOCOL)

        if os.path.getsize(pcapfile) > 100000000 and not largewarned:
            # Over 100MB
            eprint(
                color.bright(
                    color.yellow(
                        'Warning: capture size is large. Please be patient.')))
            largewarned = True

        sslpackets = pyshark.FileCapture(pcapfile,
                                         keep_packets=False,
                                         display_filter='ssl')

        eprint('Getting SSL info from capture...')
        # get_indexed_ssl_info(cap)
        sslpackets.apply_on_packets(net.get_ssl_info, timeout=1000)

        dtlspackets = pyshark.FileCapture(pcapfile,
                                          keep_packets=False,
                                          display_filter='dtls')

        eprint('Getting DTLS info from capture...')
        dtlspackets.apply_on_packets(net.get_dtls_info, timeout=1000)

        # Print report
        generate_report(app, pcapfile=pcapfile)

        # Reset globals
        net.clear()
Example No. 21
 def __init__(self):
     self.log = Logger().get_logger()
     super(Configable, self).__init__()
Example No. 22
 def __init__(self):
     self.log = Logger().get_logger()
     super(Deployable, self).__init__()
Example No. 23
 def __init__(self,
              generator: Union[Generator, nn.DataParallel],
              discriminator: Union[Discriminator, nn.DataParallel],
              training_dataset: DataLoader,
              validation_dataset: DataLoader,
              vgg16: Union[VGG16, nn.DataParallel] = VGG16(),
              generator_optimizer: torch.optim.Optimizer = None,
              discriminator_optimizer: torch.optim.Optimizer = None,
              generator_loss: nn.Module = LSGANGeneratorLoss(),
              discriminator_loss: nn.Module = LSGANDiscriminatorLoss(),
              semantic_reconstruction_loss: nn.Module = SemanticReconstructionLoss(),
              diversity_loss: nn.Module = DiversityLoss(),
              save_data_path: str = 'saved_data') -> None:
     '''
     Constructor
     :param generator: (nn.Module, nn.DataParallel) Generator network
     :param discriminator: (nn.Module, nn.DataParallel) Discriminator network
     :param training_dataset: (DataLoader) Training dataset
     :param vgg16: (nn.Module, nn.DataParallel) VGG16 module
     :param generator_optimizer: (torch.optim.Optimizer) Optimizer of the generator network
     :param discriminator_optimizer: (torch.optim.Optimizer) Optimizer of the discriminator network
     :param generator_loss: (nn.Module) Generator loss function
     :param discriminator_loss: (nn.Module) Discriminator loss function
     :param semantic_reconstruction_loss: (nn.Module) Semantic reconstruction loss function
     :param diversity_loss: (nn.Module) Diversity loss function
     '''
     # Save parameters
     self.generator = generator
     self.discriminator = discriminator
     self.training_dataset = training_dataset
     self.validation_dataset_fid = validation_dataset
     self.vgg16 = vgg16
     self.generator_optimizer = generator_optimizer
     self.discriminator_optimizer = discriminator_optimizer
     self.generator_loss = generator_loss
     self.discriminator_loss = discriminator_loss
     self.semantic_reconstruction_loss = semantic_reconstruction_loss
     self.diversity_loss = diversity_loss
     self.latent_dimensions = self.generator.module.latent_dimensions \
         if isinstance(self.generator, nn.DataParallel) else self.generator.latent_dimensions
     # Calc no gradients for weights of vgg16
     for parameter in self.vgg16.parameters():
         parameter.requires_grad = False
     # Init logger
     self.logger = Logger()
     # Make directories to save logs, plots and models during training
     time_and_date = str(datetime.now())
     self.path_save_models = os.path.join(save_data_path, 'models_' + time_and_date)
     if not os.path.exists(self.path_save_models):
         os.makedirs(self.path_save_models)
     self.path_save_plots = os.path.join(save_data_path, 'plots_' + time_and_date)
     if not os.path.exists(self.path_save_plots):
         os.makedirs(self.path_save_plots)
     self.path_save_metrics = os.path.join(save_data_path, 'metrics_' + time_and_date)
     if not os.path.exists(self.path_save_metrics):
         os.makedirs(self.path_save_metrics)
     # Log hyperparameter
     self.logger.hyperparameter['generator'] = str(self.generator)
     self.logger.hyperparameter['discriminator'] = str(self.discriminator)
     self.logger.hyperparameter['vgg16'] = str(self.vgg16)
     self.logger.hyperparameter['generator_optimizer'] = str(self.generator_optimizer)
     self.logger.hyperparameter['discriminator_optimizer'] = str(self.discriminator_optimizer)
     self.logger.hyperparameter['generator_loss'] = str(self.generator_loss)
     self.logger.hyperparameter['discriminator_loss'] = str(self.discriminator_loss)
     self.logger.hyperparameter['diversity_loss'] = str(self.diversity_loss)
      self.logger.hyperparameter['semantic_reconstruction_loss'] = str(self.semantic_reconstruction_loss)
Example No. 24
    def __init__(self):
        self.routes = {
                       'Master': {
                            'get_activities' : '/activity/all.json',
                            'get_live_activities' : '/liveactivity/all.json',
                            'get_live_activity_groups' : '/liveactivitygroup/all.json',
                            'get_spaces' : '/space/all.json',
                            'get_space_controllers' : '/spacecontroller/all.json',
                            'get_named_scripts' : '/admin/namedscript/all.json',
                            'new_live_activity_group' : '/liveactivitygroup/new',
                            'new_space' : '/space/new.json',
                            'new_controller' : '/spacecontroller/new.json',
                            'new_named_script' : '/admin/namedscript/new.json'
                            },
                       'Activity' : {
                            'view' : '/activity/%s/view.json',
                            'upload' : '/activity/upload',
                            'delete' : '/activity/%s/delete.html'
                            },
                       'LiveActivity' : {
                            'status' : '/liveactivity/%s/status.json',
                            'view' : '/liveactivity/%s/view.json',
                            'new' : '/liveactivity/new',
                            'delete' : '/liveactivity/%s/delete.html',
                            'shutdown' : '/liveactivity/%s/shutdown.json',
                            'startup' : '/liveactivity/%s/startup.json',
                            'activate' : '/liveactivity/%s/activate.json',
                            'deactivate' : '/liveactivity/%s/deactivate.json',
                            'deploy' : '/liveactivity/%s/deploy.json',
                            'configure' : '/liveactivity/%s/configure.json',
                            'clean_tmp' : '/liveactivity/%s/cleantmpdata.json',
                            'clean_permanent' : '/liveactivity/%s/cleanpermanentdata.json',
                            'metadata' : '/liveactivity/%s/metadata/edit',
                            'config' : '/liveactivity/%s/config/edit'
                            },
                       'LiveActivityGroup' : {
                            'view' : '/liveactivitygroup/%s/view.json',
                            'new' : '/liveactivitygroup/new',
                            'status' : '/liveactivitygroup/%s/liveactivitystatus.json',
                            'delete' : '/liveactivitygroup/%s/delete.html',
                            'shutdown' : '/liveactivitygroup/%s/shutdown.json',
                            'startup' : '/liveactivitygroup/%s/startup.json',
                            'activate' : '/liveactivitygroup/%s/activate.json',
                            'deactivate' : '/liveactivitygroup/%s/deactivate.json',
                            'deploy' : '/liveactivitygroup/%s/deploy.json',
                            'configure' : '/liveactivitygroup/%s/configure.json',
                            'metadata' : '/liveactivitygroup/%s/metadata/edit',
                            'edit' : '/liveactivitygroup/%s/edit.json'
                            },
                       'Space' : {
                            'view' : '/space/%s/view.json',
                            'status' : '/space/%s/status.json',
                            'delete' : '/space/%s/delete.html',
                            'new' : '/space/new',
                            'shutdown' : '/space/%s/shutdown.json',
                            'startup' : '/space/%s/startup.json',
                            'activate' : '/space/%s/activate.json',
                            'deactivate' : '/space/%s/deactivate.json',
                            'deploy' : '/space/%s/deploy.json',
                            'configure' : '/space/%s/configure.json',
                            'metadata' : '/space/%s/metadata/edit',
                            'edit' : '/space/%s/edit.json'
                            },
                       'SpaceController' :{
                            'new' : '/spacecontroller/new',
                            'status': '/spacecontroller/%s/status.json',
                            'delete': '/spacecontroller/%s/delete.html',
                            'shutdown': '/spacecontroller/%s/shutdown.json',
                            'deploy': '/spacecontroller/%s/deploy.json',
                            'connect' : '/spacecontroller/%s/connect.json',
                            'disconnect' : '/spacecontroller/%s/disconnect.json'
                            }
                        }

        self.log = Logger().get_logger()
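All of the route strings above are printf-style templates, so callers presumably substitute a resource id before issuing a request. A hypothetical helper on the same class (the method name and the id value are invented for illustration):

    def build_url(self, route_class, action, resource_id=None):
        # Look up a route template and fill in the resource id when the template expects one
        route = self.routes[route_class][action]
        return route % resource_id if '%s' in route else route

For example, build_url('LiveActivity', 'status', 42) would return '/liveactivity/42/status.json'.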