class Serializer(object):
    """
        Should be responsible for representing instances of classes in desired form
    """
    def __init__(self):
        self.log = Logger().get_logger()
        self.log.debug("Instantiated Serializer with data %s" % self.data_hash)
        pass

    def __repr__(self):
        return str(self.data_hash)

    def __str__(self):
        return self.data_hash

    def _assign_attributes(self):
        """
            Should change the key names from the original API keys to the ones we want
            :return: None
        """
        for new_key, old_key in self.attributes.iteritems():
            try:
                self.data[new_key] = self.data_hash[old_key]
            except SerializerException, e:
                pass  # possibly do something fancy here
            except Exception, e:
                self.log.info("Could not assign attribute %s while operating on Object: %s because %s" % (old_key, self.data_hash, e))
class Searcher(dict):
    """
         Class responsible for search methods on strings
    """
    def __init__(self):
        self.log = Logger().get_logger()

    def wrap(self, regular_expression):
        """
        Turns a windows/search-style regexp into a python re regexp.
        It adds an explicit beginning '^' and end '$' to the matcher and converts
        all wildcards to the re wildcard '(.*)'
        :rtype: str
        :param regular_expression: expression to wrap
        """
        return '^' + regular_expression.replace('*', '(.*)') + '$'

    def match(self, string, regular_expression):
        """
        Match string with a regular expression
        :rtype: bool
        :param string: given string for matching
        :param regular_expression: expression to be run against the given string
        """
        regular_expression = self.wrap(regular_expression)

        self.log.info("Trying to match regexp: %s against string: %s" % (regular_expression, string))

        regex = re.compile(regular_expression)
        return bool(regex.match(string))
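
# Standalone sketch of the wildcard conversion done by Searcher.wrap (no Logger
# dependency; the sample patterns are hypothetical). Note that this scheme does
# not escape other regex metacharacters in the input.
import re

def wrap(regular_expression):
    # Anchor the pattern and turn '*' wildcards into greedy groups
    return '^' + regular_expression.replace('*', '(.*)') + '$'

assert re.match(wrap('lg-*'), 'lg-head')        # wildcard matches the suffix
assert not re.match(wrap('lg-*'), 'xlg-head')   # '^' anchors the match
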
class Space(Fetchable, Statusable, Deletable, Shutdownable, 
            Startupable, Activatable, Configurable, Metadatable,
            Deployable, Cleanable):
    """
        @summary: Space is a LiveActivityGroup aggregator
    """
    def __init__(self, data_hash, uri, name=None, ):
        self.log = Logger().get_logger()
        self.data_hash = data_hash
        self.uri = uri
        self.absolute_url = self._get_absolute_url()
        self.class_name = self.__class__.__name__
        super(Space, self).__init__()
        self.log.info("Instantiated Activity object with url=%s" % self.absolute_url)
        
    def __repr__(self):
        return str(self.data_hash)
    
    def __str__(self):
        return str(self.data_hash)
        
    def create(self, live_activity_group_name, live_activity_names):
        """
            @summary: Should be responsible for creating space
            @param live_activity_group_name: string
            @param live_activity_names: list of existing names
        """
        raise NotImplementedError
    
    def to_json(self):
        """ 
            @summary: Should return selected attributes in JSON form defined by the template
        """
        self.serializer = SpaceSerializer(self.data_hash)
        return self.serializer.to_json()

    
    def id(self):
        return self.data_hash['id']
    
    def name(self):
        """ 
            @summary: Should return Space name
        """
        return self.data_hash['name']  
  
    def description(self):
        """ 
            @summary: Should return Space description
        """
        return self.data_hash['description']    
    
    """ Private methods below """
    
    def _get_absolute_url(self):
        live_activity_group_id = self.data_hash['id']
        url = "%s/space/%s/view.json" % (self.uri, live_activity_group_id)
        return url  
 def __init__(self, data_hash=None, uri=None):
     self.log = Logger().get_logger()
     self.class_name = self.__class__.__name__
     super(LiveActivityGroup, self).__init__()
     if data_hash is None and uri is None:
         self.log.info("No data_hash and uri provided for LiveActivityGroup constructor, assuming creation")
     else:
         self.data_hash = data_hash
         self.uri = uri
         self.absolute_url = self._get_absolute_url()
         self.log.info("Instantiated LiveActivityGroup object with url=%s" % self.absolute_url)
 def __init__(self, host='lg-head', port='8080', prefix='/interactivespaces'):
     """ 
         @param host: default value is lg-head 
         @param port: default value is 8080
         @param prefix: default value is /interactivespaces
         @todo: refactor filter_* methods because they're not DRY
     """
     self.host, self.port, self.prefix = host, port, prefix
     self.log = Logger().get_logger()
     self.uri = "http://%s:%s%s" % (self.host, self.port, prefix)
     super(Master, self).__init__()
 def __init__(self, data_hash=None, uri=None):
     self.log = Logger().get_logger()
     self.class_name = self.__class__.__name__
     super(SpaceController, self).__init__()
     if data_hash is None and uri is None:
         self.log.info("No data provided - assuming creation of new SpaceController")
     else:
         self.data_hash = data_hash
         self.uri = uri
         self.absolute_url = self._get_absolute_url()
         self.log.info("Instantiated SpaceController object with url=%s" % self.absolute_url)
 def __init__(self, data_hash=None, uri=None, activity_archive_uri=None, name=None):
     self.log = Logger().get_logger()
     super(Activity, self).__init__()
     
     if data_hash is None and uri is None:
         self.log.info("No data provided - assuming creation of new Activity")
     elif data_hash is not None and uri is not None:
         self.data_hash = data_hash
         self.uri = uri
         self.absolute_url = self._get_absolute_url()
         self.log.info("Instantiated Activity object with url=%s" % self.absolute_url)
 def __init__(self, data_hash=None, uri=None):
     self.log = Logger().get_logger()
     self.class_name = self.__class__.__name__
     super(Space, self).__init__()
     if data_hash is None and uri is None:
         self.log.info("No data provided - assuming creation of new Space")
     elif data_hash is not None and uri is not None:
         self.data_hash = data_hash
         self.uri = uri
         self.absolute_url = self._get_absolute_url()
         self.log.info("Instantiated Space object with url=%s" % self.absolute_url)
class Shutdownable(Communicable):
    """
    Should be responsible for sending "shutdown" command to live activities,
    controllers, spaces and live groups.
    """

    def __init__(self):
        self.log = Logger().get_logger()
        super(Shutdownable, self).__init__()

    def send_shutdown(self):
        shutdown_route = Path().get_route_for(self.class_name, 'shutdown') % self.data_hash['id']
        if self._send_shutdown_request(shutdown_route):
            self.log.info("Successfully sent shutdown for url=%s" % self.absolute_url) 
            return True
        else:
            return False

    def _send_shutdown_request(self, shutdown_route):
        """
        Makes a shutdown request
        """
        url = "%s%s" % (self.uri, shutdown_route)
        self.log.info("Sending 'shutdown' GET request to url=%s" %url)
        try:
            response = self._api_get_json(url)
        except urllib2.HTTPError, e:
            response = None
            self.log.error("Could not send 'shutdown' GET request because %s" % e)
            if str(e) == 'HTTP Error 500: No connection to controller in 5000 milliseconds':
                raise CommunicableException('HTTP Error 500: No connection to controller in 5000 milliseconds')
        if response:
            return True
        else:
            return False
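
# Hypothetical usage of the mixin above, assuming the rest of the module
# (Logger, Path, Communicable, LiveActivity) is importable and a live activity
# with id=12 exists on the master:
master_uri = 'http://lg-head:8080/interactivespaces'
activity = LiveActivity(data_hash={'id': 12}, uri=master_uri)
if activity.send_shutdown():
    print('shutdown request accepted')
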
class Deletable(Communicable):
    """
        @summary: Should be responsible for the deletion of an object
    """
    def __init__(self):
        self.log = Logger().get_logger()
        super(Deletable, self).__init__()
        
    def send_delete(self):
        """
            @summary: sends the "delete" GET request to a route
        """
        delete_route = Path().get_route_for(self.class_name, 'delete') % self.data_hash['id']
        if self._send_delete_request(delete_route):
            self.log.info("Successfully sent 'delete' to url=%s" % self.absolute_url)
            return True
        else:
            return False
        
    def _send_delete_request(self, delete_route):
        """
            @rtype: bool
        """
        url = "%s%s" % (self.uri, delete_route)
        self.log.info("Sending 'delete' to url=%s" %url)
        try:
            response = self._api_get_html(url)
        except urllib2.HTTPError, e:
            response = None
            self.log.error("Could not send 'delete' because %s" % e)
        if response:
            return True
        else:
            return False
class Startupable(Communicable):
    """ 
        @summary: Should be responsible for sending "startup" command to live activities,
        controllers, spaces and live groups.
    """
    
    def __init__(self):
        self.log = Logger().get_logger()
        super(Startupable, self).__init__()
      
    def send_startup(self):
        startup_route = Path().get_route_for(self.class_name, 'startup') % self.data_hash['id']
        if self._send_startup_request(startup_route):
            self.log.info("Successfully sent 'startup' for url=%s" % self.absolute_url) 
            return True
        else:
            return False
          
    def _send_startup_request(self, startup_route):
        """ 
            @summary: makes a startup request
        """
        url = "%s%s" % (self.uri, startup_route)
        self.log.info("Sending 'startup' GET request to url=%s" %url)
        try:
            response = self._api_get_json(url)
        except urllib2.HTTPError, e:
            response = None
            self.log.error("Could not send 'startup' GET request because %s" % e)
        if response:
            return True
        else:
            return False
class Configurable(Communicable):
    """
        @summary: Should be responsible for sending the "configure" action
    """
    def __init__(self):
        self.log = Logger().get_logger()
        super(Configurable, self).__init__()
        
    def send_configure(self):
        configure_route = Path().get_route_for(self.class_name, 'configure') % self.data_hash['id']
        if self._send_configure_request(configure_route):
            self.log.info("Successfully sent 'configure' for url=%s" % self.absolute_url) 
            return True
        else:
            return False        

    def _send_configure_request(self, configure_route):
        """ 
            @summary: makes a 'configure' request
        """
        url = "%s%s" % (self.uri, configure_route)
        self.log.info("Sending configure GET request to url=%s" %url)
        try:
            response = self._api_get_json(url)
        except urllib2.HTTPError, e:
            response = None
            self.log.error("Could not send configure GET request because %s" % e)
        if response:
            return True
        else:
            return False
class Deployable(Communicable):
    """
    Should be responsible for sending the "deploy" action
    """
    def __init__(self):
        self.log = Logger().get_logger()
        super(Deployable, self).__init__()

    def send_deploy(self):
        deploy_route = Path().get_route_for(self.class_name, 'deploy') % self.data_hash['id']
        if self._send_deploy_request(deploy_route):
            self.log.info("Successfully sent 'deploy' for url=%s" % self.absolute_url) 
            return True
        else:
            return False

    def _send_deploy_request(self, deploy_route):
        """
            Makes a 'deploy' request
        """
        url = "%s%s" % (self.uri, deploy_route)
        self.log.info("Sending deploy GET request to url=%s" %url)
        try:
            response = self._api_get_json(url)
        except urllib2.HTTPError, e:
            response = None
            self.log.error("Could not send deploy GET request because %s" % e)
        if response:
            return True
        else:
            return False
 def __init__(self, data_hash=None, uri=None):
     """
         @summary: when called with data_hash and uri set to None, a new
         LiveActivity will be created
         @param data_hash: should be master API liveActivity json, may be blank
         @param uri: should be a link to "view.json" of the given live activity
     """
     self.log = Logger().get_logger()
     self.class_name = self.__class__.__name__
     super(LiveActivity, self).__init__()
     if data_hash is None and uri is None:
         self.log.info("No data provided - assuming creation of new LiveActivity")
     elif data_hash is not None and uri is not None:
         self.data_hash = data_hash
         self.uri = uri
         self.absolute_url = self._get_absolute_url()
         self.log.info("Instantiated LiveActivity object with url=%s" % self.absolute_url)
class Cleanable(Communicable):
    """
    Should be responsible for the permanent clean and for cleaning the tmp directory
    """
    def __init__(self):
        self.log = Logger().get_logger()
        super(Cleanable, self).__init__()

    def send_clean_permanent(self):
        clean_permanent_route = Path().get_route_for(self.class_name, 'clean_permanent') % self.data_hash['id']
        if self._send_cleanable_request(clean_permanent_route):
            self.log.info("Successfully sent 'clean_permanent' for url=%s" % self.absolute_url)
            return True
        else:
            return False

    def send_clean_tmp(self):
        clean_tmp_route = Path().get_route_for(self.class_name, 'clean_tmp') % self.data_hash['id']
        if self._send_cleanable_request(clean_tmp_route):
            self.log.info("Successfully sent 'clean_tmp' for url=%s" % self.absolute_url)
            return True
        else:
            return False

    def _send_cleanable_request(self, cleanable_route):
        """
        Makes a cleanable request
        """
        url = "%s%s" % (self.uri, cleanable_route)
        self.log.info("Sending cleanable GET request to url=%s" %url)
        try:
            response = self._api_get_json(url)
        except urllib2.HTTPError, e:
            response = None
            self.log.error("Could not send cleanable GET request because %s" % e)
            if str(e) == 'HTTP Error 500: No connection to controller in 5000 milliseconds':
                raise CommunicableException('HTTP Error 500: No connection to controller in 5000 milliseconds')
        if response:
            return True
        else:
            return False
Example #17
def deploy_logs():
    if not os.path.exists(hp.save_path):
        os.makedirs(hp.save_path)

    dir_success = False
    for sfx in range(1):  # todo legacy
        candidate_path = hp.save_path + '/' + hp.this_run_name + '_' + str(
            os.getpid()) + '/'
        if not os.path.exists(candidate_path):
            hp.this_run_path = candidate_path
            os.makedirs(hp.this_run_path)
            dir_success = True
            break
    if not dir_success:
        print('run name already exists!')

    sys.stdout = Logger(hp.this_run_path + 'log.log')
    print('results are in:', hp.this_run_path)
    print('description: ', hp.description)
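
# The Logger used by deploy_logs is assumed to tee stdout to a log file; a
# minimal sketch of such a class ('TeeLogger' is a hypothetical name):
import sys

class TeeLogger(object):
    def __init__(self, path):
        self.terminal = sys.stdout    # keep a handle on the real stdout
        self.file = open(path, 'a')   # append log lines to the file

    def write(self, message):
        self.terminal.write(message)
        self.file.write(message)

    def flush(self):
        self.terminal.flush()
        self.file.flush()
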
class Connectable(Communicable):
    """
    Should be responsible for connecting/disconnecting space controllers
    """
    def __init__(self):
        self.log = Logger().get_logger()
        super(Connectable, self).__init__()

    def send_connect(self):
        connect_route = Path().get_route_for(self.class_name, 'connect') % self.data_hash['id']
        if self._send_connectable_request(connect_route):
            self.log.info("Successfully sent 'connect' for url=%s" % self.absolute_url)
            return True
        else:
            return False

    def send_disconnect(self):
        disconnect_route = Path().get_route_for(self.class_name, 'disconnect') % self.data_hash['id']
        if self._send_connectable_request(disconnect_route):
            self.log.info("Successfully sent 'disconnect' for url=%s" % self.absolute_url)
            return True
        else:
            return False

    def _send_connectable_request(self, connectable_route):
        """
        Makes a connectable request
        """
        url = "%s%s" % (self.uri, connectable_route)
        self.log.info("Sending connectable GET request to url=%s" %url)
        try:
            response = self._api_get_json(url)
        except urllib2.HTTPError, e:
            response = None
            self.log.error("Could not send connectable GET request because %s" % e)
        if response:
            return True
        else:
            return False
Example #19
def deploy_logs():
    if not os.path.exists(hp.save_path):
        os.makedirs(hp.save_path)

    dir_success = False
    for sfx in range(1):  # todo legacy
        candidate_path = hp.save_path + '/' + hp.this_run_name + '_' + str(
            os.getpid()) + '/'
        if not os.path.exists(candidate_path):
            hp.this_run_path = candidate_path
            os.makedirs(hp.this_run_path)
            dir_success = True
            break
    if not dir_success:
        print('run name already exists!')

    sys.stdout = Logger(hp.this_run_path + 'log.log')
    print('results are in:', hp.this_run_path)
    print('description: ', hp.description)
class Statusable(Communicable):
    """
    Should be responsible for _refreshing_ the status of the object,
    which means that it will send the "status" command to IS Controllers.
    In order to fetch the most up-to-date status you should use the
    .fetch() method on the object.
    """
    def __init__(self):
        self.log = Logger().get_logger()
        super(Statusable, self).__init__()

    def send_status_refresh(self):
        """
        Extracts self.data_hash and self.class_name from the child class,
        determines which route to send the GET request to, and sends it
        """
        refresh_route = Path().get_route_for(self.class_name,
                                             'status') % self.data_hash['id']
        if self._send_status_refresh(refresh_route):
            self.log.info("Successfully refreshed status for url=%s" %
                          self.absolute_url)
            return True
        else:
            return False

    def _send_status_refresh(self, refresh_route):
        """
        Should tell master to retrieve status info from controller
        so master has the most up to date info from the controller
        
        :param refresh_route: status.json route for specific class
        
        :rtype: bool
        """
        url = "%s%s" % (self.uri, refresh_route)
        self.log.info("Sending status refresh to url=%s" % url)
        try:
            response = self._api_get_json(url)
        except urllib2.HTTPError, e:
            response = None
            self.log.error("Could not send status refresh because %s" % e)
        if response:
            return True
        else:
            return False
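
# Sketch of the route interpolation performed by send_status_refresh, with
# hypothetical values (the real template comes from Path().get_route_for):
route_template = '/liveactivity/%s/status.json'    # assumed route template
refresh_route = route_template % 42                # id comes from data_hash['id']
url = 'http://lg-head:8080/interactivespaces' + refresh_route
# -> 'http://lg-head:8080/interactivespaces/liveactivity/42/status.json'
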
class Metadatable(Communicable):
    """
    Should be responsible for setting metadata
    """
    def __init__(self):
        self.log = Logger().get_logger()
        super(Metadatable, self).__init__()

    def set_metadata(self, metadata_dictionary):
        """
        Accepts a dictionary whose entries will be unpacked to "key=value" strings and
        makes a request overwriting any previous metadata
        :rtype: bool
        :param metadata_dictionary: Dictionary with keys and values
        """
        metadata = {"values": self._unpack_metadata(metadata_dictionary)}
        self.log.info("Updating metadata of %s with %s" %
                      (self.class_name, metadata))
        metadata_route = Path().get_route_for(
            self.class_name, 'metadata') % self.data_hash['id']
        if self._send_metadatable_request(metadata_route, metadata):
            self.log.info("Successfully sent metadata for url=%s" %
                          self.absolute_url)
            return True
        else:
            return False

    def _unpack_metadata(self, metadata_dictionary):
        """
        Accepts dictionary and converts it to string
        :rtype: string
        :param metadata_dictionary: dict containing metadata
        """
        metadata_text = ""
        try:
            for key, value in metadata_dictionary.iteritems():
                metadata_text = metadata_text + ("\r\n") + key + "=" + value
            return metadata_text
        except Exception, e:
            self.log.error(
                "Could not unpack supplied metadata dictionary because %s" % e)
            raise
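
# Illustration of the "key=value" serialisation performed by _unpack_metadata
# (the sample metadata is hypothetical):
metadata = {'mission': 'demo', 'zone': 'left'}
# _unpack_metadata would return "\r\nmission=demo\r\nzone=left"
# (pair order depends on the dict implementation)
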
class Startupable(Communicable):
    """
    Should be responsible for sending "startup" command to live activities,
    controllers, spaces and live groups.
    """
    def __init__(self):
        self.log = Logger().get_logger()
        super(Startupable, self).__init__()

    def send_startup(self):
        startup_route = Path().get_route_for(self.class_name,
                                             'startup') % self.data_hash['id']
        if self._send_startup_request(startup_route):
            self.log.info("Successfully sent 'startup' for url=%s" %
                          self.absolute_url)
            return True
        else:
            return False

    def _send_startup_request(self, startup_route):
        """
        Makes a startup request
        """
        url = "%s%s" % (self.uri, startup_route)
        self.log.info("Sending 'startup' GET request to url=%s" % url)
        try:
            response = self._api_get_json(url)
        except urllib2.HTTPError, e:
            response = None
            self.log.error("Could not send 'startup' GET request because %s" %
                           e)
            if str(e) == 'HTTP Error 500: No connection to controller in 5000 milliseconds':
                raise CommunicableException(
                    'HTTP Error 500: No connection to controller in 5000 milliseconds'
                )
        if response:
            return True
        else:
            return False
class Metadatable(Communicable):
    """
        @summary: Should be responsible for setting metadata
    """
    def __init__(self):
        self.log = Logger().get_logger()
        super(Metadatable, self).__init__()
        
    def set_metadata(self, metadata_dictionary):
        """
            @summary: Accepts a dictionary whose entries will be unpacked to "key=value" strings and
            makes a request overwriting any previous metadata
            @rtype: bool
            @param metadata_dictionary: Dictionary with keys and values
        """
        metadata = {"values" : self._unpack_metadata(metadata_dictionary)}
        self.log.info("Updating metadata of %s with %s" % (self.class_name, metadata))
        metadata_route = Path().get_route_for(self.class_name, 'metadata') % self.data_hash['id']
        if self._send_metadatable_request(metadata_route, metadata):
            self.log.info("Successfully sent metadata for url=%s" % self.absolute_url) 
            return True
        else:
            return False
    
    def _unpack_metadata(self, metadata_dictionary):
        """
            @summary: accepts dictionary and converts it to string
            @rtype: string
            @param metadata_dictionary: dict containing metadata 
        """
        metadata_text = ""
        try:
            for key, value in metadata_dictionary.iteritems():
                metadata_text = metadata_text + ("\r\n") + key + "=" + value
            return metadata_text
        except Exception, e:
            self.log.error("Could not unpack supplied metadata dictionary because %s" % e)
            raise
class Deletable(Communicable):
    """
    Should be responsible for the deletion of an object
    """
    def __init__(self):
        self.log = Logger().get_logger()
        super(Deletable, self).__init__()

    def send_delete(self):
        """
        Sends the "delete" GET request to a route
        """
        delete_route = Path().get_route_for(self.class_name,
                                            'delete') % self.data_hash['id']
        if self._send_delete_request(delete_route):
            self.log.info("Successfully sent 'delete' to url=%s" %
                          self.absolute_url)
            return True
        else:
            return False

    def _send_delete_request(self, delete_route):
        """
        :rtype: bool
        """
        url = "%s%s" % (self.uri, delete_route)
        self.log.info("Sending 'delete' to url=%s" % url)
        try:
            response = self._api_get_html(url)
        except urllib2.HTTPError, e:
            response = None
            self.log.error("Could not send 'delete' because %s" % e)
        if response:
            return True
        else:
            return False
 def __init__(self):
     self.log = Logger().get_logger()
     super(Activatable, self).__init__()
 def __init__(self):
     self.log = Logger().get_logger()
     super(Deployable, self).__init__()
class Configable(Communicable):
    """
    Should be responsible for setting the 'Edit config' section.
    This mixin is named 'Configable' because there is already a "configure" action for live activities
    """
    def __init__(self):
        self.log = Logger().get_logger()
        super(Configable, self).__init__()

    def get_config(self):
        config_route = Path().get_route_for(self.class_name,
                                            'config') % self.data_hash['id']
        self.log.info("Getting config of %s" % (self.class_name))
        response = self._send_configable_get_request(config_route)
        if response:
            self.log.info("Successfully got config from url=%s" %
                          self.absolute_url)
            return self._scrap_config(response)
        else:
            return False

    def set_config(self, config_dictionary):
        """
        Accepts dictionary of keys that will be unpacked to "key=value" strings and
        makes a request overwriting any previous config
        :rtype: bool
        :param config_dictionary: Dictionary with keys and values
        """
        config = {"values": self._unpack_config(config_dictionary)}
        self.log.info("Updating config of %s with %s" %
                      (self.class_name, config))
        config_route = Path().get_route_for(self.class_name,
                                            'config') % self.data_hash['id']
        if self._send_configable_set_request(config_route, config):
            self.log.info("Successfully sent config for url=%s" %
                          self.absolute_url)
            return True
        else:
            return False

    def _scrap_config(self, html):
        import BeautifulSoup
        soup = BeautifulSoup.BeautifulSoup(html)
        self.log.info("Received config response: %s" % soup)
        textarea = soup.findAll('textarea')[0].text.split('\n')
        config_dict = self._pack_config(textarea)
        self.log.info("Scrapped config: %s" % config_dict)
        return config_dict

    def _pack_config(self, config_list):
        """
        Accepts list of strings and converts it into dictionary
        :rtype: dict
        :param config_list: list of strings containing the scraped config
        """
        config_dict = {}
        self.log.info("Textarea: %s" % config_list)
        try:
            for config_item in config_list:
                key, value = config_item.split('=')
                self.log.info("Assigning %s to %s" % (key, value))
                config_dict[key] = value
        except Exception:
            self.log.info(
                "Could not do a _pack_config for scraped config on item: %s for config_list %s"
                % (config_item, config_list))
            config_dict = {}
        return config_dict

    def _unpack_config(self, config_dictionary):
        """
        Accepts dictionary and converts it to string
        :rtype: string
        :param config_dictionary: dict containing config
        """
        config_text = ""
        try:
            for key, value in config_dictionary.iteritems():
                config_text = config_text + ("\r\n") + key + "=" + value
            return config_text
        except Exception, e:
            self.log.error(
                "Could not unpack supplied config dictionary because %s" % e)
            raise
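
# Standalone round-trip sketch of the pack/unpack helpers above (the config
# values are hypothetical; values must not contain '='):
config = {'space.viewport': 'center', 'space.width': '1080'}
text = ''
for key, value in config.items():
    text += '\r\n' + key + '=' + value           # what _unpack_config builds
parsed = dict(line.split('=') for line in text.split('\r\n') if line)
assert parsed == config                          # what _pack_config recovers
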
Example #30
class LiveActivity(Fetchable, Statusable, Deletable, Shutdownable, Startupable,
                   Activatable, Configurable, Cleanable, Metadatable,
                   Deployable, Configable):
    """
    Should be responsible for managing a single LiveActivity
    :todo: .new() should return instance of fetched live activity
    """
    def __init__(self, data_hash=None, uri=None):
        """
        When called with data_hash and uri set to None, a new
        LiveActivity will be created.
        :param data_hash: should be master API liveActivity json, may be blank
        :param uri: should be a link to "view.json" of the given live activity
        """
        self.log = Logger().get_logger()
        self.class_name = self.__class__.__name__
        super(LiveActivity, self).__init__()
        if data_hash is None and uri is None:
            self.log.info(
                "No data provided - assuming creation of new LiveActivity")
        elif data_hash is not None and uri is not None:
            self.data_hash = data_hash
            self.uri = uri
            self.absolute_url = self._get_absolute_url()
            self.log.info("Instantiated LiveActivity object with url=%s" %
                          self.absolute_url)

    def __repr__(self):
        return str(self.data_hash)

    def new(self, uri, new_data_hash):
        """
        Used to create a new live activity through the API and set the "uri" so that we
        can operate on this instance of LiveActivity right away after .new() returns

        :param new_data_hash: dictionary of the following structure::

            {"live_activity_name" : "",\
            "live_activity_description" : "",\
            "activity_id" : "",\
            "controller_id" : ""}

        :param uri: "http://some_server/prefix" (passed by master)

        :rtype: new LiveActivity object or False
        """
        self.log.info("Creating new Live Activity with arguments: %s" %
                      new_data_hash)
        route = Path().get_route_for('LiveActivity', 'new')
        url = "%s%s" % (uri, route)
        request_response = self._api_post_json(url, new_data_hash)
        if request_response.url:
            self.absolute_url = request_response.url.replace(
                "view.html", "view.json")
            self.fetch()
            self.log.info(
                "Created new LiveActivity with url=%s, data_hash is now %s" %
                (self.absolute_url, self.data_hash))
            return self
        else:
            self.log.info("Created new LiveActivity %s but returned False" %
                          self)
            return False

    def to_json(self):
        """
        Should return selected attributes in JSON form defined by the template
        """
        self.serializer = LiveActivitySerializer(self.data_hash)
        return self.serializer.to_json()

    def name(self):
        """
        Should return live activity name
        """
        return self.data_hash['name']

    def status(self):
        """
        Should return status that is currently held in the object instance
        """
        try:
            status_data = self.data_hash['active']['runtimeState']
            return status_data
        except (LiveActivityException, KeyError):
            # Activity not running or non-existent
            return "UNKNOWN"

    def identifying_name(self):
        """
        Should return LiveActivity identifying name
        """
        return self.data_hash['activity']['identifyingName']

    def version(self):
        """
        Should return LiveActivity version
        """
        return self.data_hash['activity']['version']

    def metadata(self):
        """
        Should return LiveActivity metadata
        """
        return self.data_hash['metadata']

    def config(self):
        """
        Should return LiveActivity config
        """
        return self.get_config()

    def id(self):
        """
        Should return LiveActivity id

        :rtype: string
        """
        return self.data_hash['id']

    def controller(self):
        """
        Should return LiveActivity controller data

        :rtype: string
        """
        return self.data_hash['controller']['name']

    """ Private methods below this text """

    def _get_absolute_url(self):
        """
        :rtype: string
        """
        route = Path().get_route_for(self.class_name,
                                     'view') % self.data_hash['id']
        url = "%s%s" % (self.uri, route)
        return url
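
# Hedged usage sketch for LiveActivity.new(); the master URI and all field
# values below are hypothetical:
master_uri = 'http://lg-head:8080/interactivespaces'
payload = {'live_activity_name': 'browser',
           'live_activity_description': 'kiosk browser',
           'activity_id': '7',
           'controller_id': '3'}
activity = LiveActivity().new(master_uri, payload)
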
Example #31
def check_app(app, fullpacket=False, force=False):
    '''
    Check application based on app name in Tapioca results
    '''

    dnscacheloaded = False
    targetscacheloaded = False
    largewarned = False

    # load local network from config
    net.set_local()

    # Get pcap file location
    if app.endswith('.pcap'):
        pcapfile = app
        if os.path.exists(pcapfile):
            sys.stdout = Logger('%s.%s' % (pcapfile, report_output))
    else:
        pcapfile = os.path.join('results', app, 'tcpdump.pcap')
        if os.path.exists(pcapfile):
            sys.stdout = Logger(os.path.join('results', app, report_output))

    if os.path.exists(pcapfile):

        pcapdir = os.path.dirname(pcapfile)
        dnspkl = os.path.join(pcapdir, '.dnsmap.pkl')
        targetspkl = os.path.join(pcapdir, '.targets.pkl')

        eprint(color.bright('Checking app %s...' % color.cyan(app)))

        if os.path.exists(dnspkl) and not force:
            eprint('Loading cached DNS info...')
            with open(dnspkl, 'rb') as pklhandle:
                try:
                    net.dnsmap = pickle.load(pklhandle)
                    net.dnsreqs = pickle.load(pklhandle)
                    dnscacheloaded = True
                except:
                    pass

        if not dnscacheloaded:
            if os.path.getsize(pcapfile) > 100000000:
                # Over 100MB
                eprint(
                    color.bright(
                        color.yellow(
                            'Warning: capture size is large. Please be patient.'
                        )))
                largewarned = True
            # Get captured DNS info for IP addresses
            eprint('Getting DNS info...')
            dnspackets = pyshark.FileCapture(pcapfile,
                                             keep_packets=False,
                                             display_filter='dns')
            dnspackets.apply_on_packets(net.get_dns_info, timeout=1000)
            with open(dnspkl, 'wb') as pklhandle:
                pickle.dump(net.dnsmap,
                            pklhandle,
                            protocol=pickle.HIGHEST_PROTOCOL)
                pickle.dump(net.dnsreqs,
                            pklhandle,
                            protocol=pickle.HIGHEST_PROTOCOL)

#        if os.path.exists(targetspkl) and not force:
#            eprint('Loading cached targets...')
#            with open(targetspkl, 'rb') as pklhandle:
#                try:
#                    net.targets = pickle.load(pklhandle)
#                    targetscacheloaded = True
#                except:
#                    pass

        if not targetscacheloaded:
            if fullpacket:
                packets = pyshark.FileCapture(pcapfile, keep_packets=False)
                # Get hosts contacted
                eprint('Getting hosts contacted...')
                packets.apply_on_packets(net.get_hosts_contacted_fullpacket,
                                         timeout=1000)
            else:
                packets = pyshark.FileCapture(pcapfile,
                                              keep_packets=False,
                                              only_summaries=True)
                # Get hosts contacted
                eprint('Getting hosts contacted...')
                packets.apply_on_packets(net.get_hosts_contacted, timeout=1000)


#                with open(targetspkl, 'wb') as pklhandle:
#                    pickle.dump(
# net.targets, pklhandle, protocol=pickle.HIGHEST_PROTOCOL)

# Print report
        generate_report(app, fullpacket=fullpacket, pcapfile=pcapfile)

        # Reset globals
        net.clear()
 def __init__(self):
     self.log = Logger().get_logger()
     super(Metadatable, self).__init__()
 def __init__(self):
     self.log = Logger().get_logger()
     super(Connectable, self).__init__()
Example #34
class Space(Fetchable, Statusable, Deletable, Shutdownable,
            Startupable, Activatable, Configurable, Metadatable,
            Deployable, Cleanable):
    """
    Space is a LiveActivityGroup container
    """
    def __init__(self, data_hash=None, uri=None):
        self.log = Logger().get_logger()
        self.class_name = self.__class__.__name__
        super(Space, self).__init__()
        if data_hash is None and uri is None:
            self.log.info("No data provided - assuming creation of new Space")
        elif data_hash is not None and uri is not None:
            self.data_hash = data_hash
            self.uri = uri
            self.absolute_url = self._get_absolute_url()
            self.log.info("Instantiated Space object with url=%s" % self.absolute_url)

    def __repr__(self):
        return str(self.data_hash)

    def __str__(self):
        return str(self.data_hash)

    def new(self, uri, constructor_args):
        """
        Used to create a new space through the API and set the "uri" so that we
        can operate on this instance of Space right away after .new() returns

        :param constructor_args: dictionary with the following structure::

            {\
            'space.name' : 'space_name',\
            'space.description' : 'space_description',\
            '_eventId_save' : 'Save',\
            'liveActivityGroupIds' : [1,2,666]\
            }

        :param uri: "http://some_server/prefix" (passed by master)

        :rtype: new Space object or False
        """

        self.log.info("Creating new Space with arguments: %s" % constructor_args)
        route = Path().get_route_for('Space', 'new')
        url = "%s%s" % (uri, route)
        request_response = self._api_post_json(url, constructor_args)
        if request_response.url:
            self.absolute_url = request_response.url.replace("view.html", "view.json")
            self.fetch()
            self.log.info("Created new Space with url=%s, data_hash is now %s" % (self.absolute_url, self.data_hash))
            return self
        else:
            self.log.info("Created new Space %s but returned False" % self)
            return False

    def to_json(self):
        """
        Should return selected attributes in JSON form defined by the template
        """
        self.serializer = SpaceSerializer(self.data_hash)
        return self.serializer.to_json()

    def id(self):
        return self.data_hash['id']

    def name(self):
        """
        Should return Space name
        """
        return self.data_hash['name']

    def description(self):
        """
        Should return Space description
        """
        return self.data_hash['description']

    def metadata(self):
        """
        Should return Space metadata
        """
        return self.data_hash['metadata']

    def live_activity_groups(self):
        """
        Should return Space live activity groups
        """
        return self.data_hash['liveActivityGroups']

    def set_live_activity_groups(self, live_activity_groups_list):
        """
        Used to set a new live activity groups list for the space

        :param live_activity_groups_list: list of live activity group ids, e.g. [1, 2, 666]

        :rtype: updated Space object or False
        """
        params = { 'space.name' : self.name(),
                   'liveActivityGroupIds' : live_activity_groups_list,
                   'space.description' : self.description()
                 }

        self.log.info("Updating Space with arguments: %s" % params)
        route = Path().get_route_for('Space', 'edit') % self.id()
        url = "%s%s" % (self.uri, route)
        request_response = self._api_post_json_no_cookies(url, params)
        if request_response.url:
            self.absolute_url = request_response.url.replace("view.html", "view.json")
            self.fetch()
            self.log.info("Updated Space with url=%s, data_hash is now %s" % (self.absolute_url, self.data_hash))
            return self
        else:
            self.log.info("Updated Space %s but returned False" % self)
            return False

    """ Private methods below """

    def _get_absolute_url(self):
        live_activity_group_id = self.data_hash['id']
        url = "%s/space/%s/view.json" % (self.uri, live_activity_group_id)
        return url
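
# Hypothetical usage of set_live_activity_groups, assuming the group ids exist
# on the master; the data_hash below is a minimal illustrative stub:
space = Space(data_hash={'id': 5, 'name': 'demo', 'description': 'demo space'},
              uri='http://lg-head:8080/interactivespaces')
space.set_live_activity_groups([1, 2, 666])
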
 def __init__(self):
     self.log = Logger().get_logger()
     super(Metadatable, self).__init__()
class ModelWrapper(object):
    '''
    Model wrapper implements training, validation and inference of the whole adversarial architecture
    '''

    def __init__(self,
                 generator: Union[Generator, nn.DataParallel],
                 discriminator: Union[Discriminator, nn.DataParallel],
                 training_dataset: DataLoader,
                 validation_dataset: DataLoader,
                 vgg16: Union[VGG16, nn.DataParallel] = VGG16(),
                 generator_optimizer: torch.optim.Optimizer = None,
                 discriminator_optimizer: torch.optim.Optimizer = None,
                 generator_loss: nn.Module = LSGANGeneratorLoss(),
                 discriminator_loss: nn.Module = LSGANDiscriminatorLoss(),
                 semantic_reconstruction_loss: nn.Module = SemanticReconstructionLoss(),
                 diversity_loss: nn.Module = DiversityLoss(),
                 save_data_path: str = 'saved_data') -> None:
        '''
        Constructor
        :param generator: (nn.Module, nn.DataParallel) Generator network
        :param discriminator: (nn.Module, nn.DataParallel) Discriminator network
        :param training_dataset: (DataLoader) Training dataset
        :param validation_dataset: (DataLoader) Validation dataset used for FID estimation
        :param vgg16: (nn.Module, nn.DataParallel) VGG16 module
        :param generator_optimizer: (torch.optim.Optimizer) Optimizer of the generator network
        :param discriminator_optimizer: (torch.optim.Optimizer) Optimizer of the discriminator network
        :param generator_loss: (nn.Module) Generator loss function
        :param discriminator_loss: (nn.Module) Discriminator loss function
        :param semantic_reconstruction_loss: (nn.Module) Semantic reconstruction loss function
        :param diversity_loss: (nn.Module) Diversity loss function
        :param save_data_path: (str) Path where logs, plots and models are saved
        '''
        # Save parameters
        self.generator = generator
        self.discriminator = discriminator
        self.training_dataset = training_dataset
        self.validation_dataset_fid = validation_dataset
        self.vgg16 = vgg16
        self.generator_optimizer = generator_optimizer
        self.discriminator_optimizer = discriminator_optimizer
        self.generator_loss = generator_loss
        self.discriminator_loss = discriminator_loss
        self.semantic_reconstruction_loss = semantic_reconstruction_loss
        self.diversity_loss = diversity_loss
        self.latent_dimensions = self.generator.module.latent_dimensions \
            if isinstance(self.generator, nn.DataParallel) else self.generator.latent_dimensions
        # Freeze the weights of vgg16 (no gradients needed)
        for parameter in self.vgg16.parameters():
            parameter.requires_grad = False
        # Init logger
        self.logger = Logger()
        # Make directories to save logs, plots and models during training
        time_and_date = str(datetime.now())
        self.path_save_models = os.path.join(save_data_path, 'models_' + time_and_date)
        if not os.path.exists(self.path_save_models):
            os.makedirs(self.path_save_models)
        self.path_save_plots = os.path.join(save_data_path, 'plots_' + time_and_date)
        if not os.path.exists(self.path_save_plots):
            os.makedirs(self.path_save_plots)
        self.path_save_metrics = os.path.join(save_data_path, 'metrics_' + time_and_date)
        if not os.path.exists(self.path_save_metrics):
            os.makedirs(self.path_save_metrics)
        # Log hyperparameter
        self.logger.hyperparameter['generator'] = str(self.generator)
        self.logger.hyperparameter['discriminator'] = str(self.discriminator)
        self.logger.hyperparameter['vgg16'] = str(self.vgg16)
        self.logger.hyperparameter['generator_optimizer'] = str(self.generator_optimizer)
        self.logger.hyperparameter['discriminator_optimizer'] = str(self.discriminator_optimizer)
        self.logger.hyperparameter['generator_loss'] = str(self.generator_loss)
        self.logger.hyperparameter['discriminator_loss'] = str(self.discriminator_loss)
        self.logger.hyperparameter['diversity_loss'] = str(self.diversity_loss)
        self.logger.hyperparameter['semantic_reconstruction_loss'] = str(self.semantic_reconstruction_loss)

    def train(self, epochs: int = 20, validate_after_n_iterations: int = 100000, device: str = 'cuda',
              save_model_after_n_epochs: int = 1, w_rec: float = 0.1, w_div: float = 0.1) -> None:
        """
        Training method
        :param epochs: (int) Number of epochs to perform
        :param validate_after_n_iterations: (int) Number of iterations after the model gets validated
        :param device: (str) Device to be used
        :param save_model_after_n_epochs: (int) Epochs to perform after model gets saved
        :param w_rec: (float) Weight factor for the reconstruction loss
        :param w_div: (float) Weight factor for the diversity loss
        """
        # Save weights factors
        self.logger.hyperparameter['w_rec'] = str(w_rec)
        self.logger.hyperparameter['w_div'] = str(w_div)
        # Adapt to the batch size
        validate_after_n_iterations = (validate_after_n_iterations // self.training_dataset.batch_size) \
                                      * self.training_dataset.batch_size
        # Models into training mode
        self.generator.train()
        self.discriminator.train()
        # Vgg16 into eval mode
        self.vgg16.eval()
        # Models to device
        self.generator.to(device)
        self.discriminator.to(device)
        self.vgg16.to(device)
        # Init progress bar
        self.progress_bar = tqdm(total=epochs * len(self.training_dataset.dataset), dynamic_ncols=True)
        # Initial validation
        self.progress_bar.set_description('Validation')
        self.inference(device=device)
        fid = self.validate()
        # Main loop
        for epoch in range(epochs):
            # Ensure models are in the right mode
            self.generator.train()
            self.discriminator.train()
            self.vgg16.eval()
            for images_real, labels, masks in self.training_dataset:
                ############ Discriminator training ############
                # Update progress bar with batch size
                self.progress_bar.update(n=images_real.shape[0])
                # Reset gradients
                self.generator.zero_grad()
                self.discriminator.zero_grad()
                # Data to device
                images_real = images_real.to(device)
                labels = labels.to(device)
                for index in range(len(masks)):
                    masks[index] = masks[index].to(device)
                # Get features of images from vgg16 model
                with torch.no_grad():
                    features_real = self.vgg16(images_real)
                    # Generate random noise vector
                    noise_vector = torch.randn((images_real.shape[0], self.latent_dimensions),
                                               dtype=torch.float32, device=device, requires_grad=True)
                    # Generate fake images
                    images_fake = self.generator(input=noise_vector, features=features_real, masks=masks,
                                                 class_id=labels.float())
                # Discriminator prediction real
                prediction_real = self.discriminator(images_real, labels)
                # Discriminator prediction fake
                prediction_fake = self.discriminator(images_fake, labels)
                # Get discriminator loss
                loss_discriminator_real, loss_discriminator_fake = self.discriminator_loss(prediction_real,
                                                                                           prediction_fake)
                # Calc gradients
                (loss_discriminator_real + loss_discriminator_fake).backward()
                # Optimize discriminator
                self.discriminator_optimizer.step()
                ############ Generator training ############
                # Reset gradients of generator and discriminator
                self.generator.zero_grad()
                self.discriminator.zero_grad()
                # Init new noise vector
                noise_vector = torch.randn((images_real.shape[0], self.latent_dimensions),
                                           dtype=torch.float32, device=device, requires_grad=True)
                # Generate new fake images
                images_fake = self.generator(input=noise_vector, features=features_real, masks=masks,
                                             class_id=labels.float())
                # Discriminator prediction fake
                prediction_fake = self.discriminator(images_fake, labels)
                # Get generator loss
                loss_generator = self.generator_loss(prediction_fake)
                # Get diversity loss
                loss_generator_diversity = w_div * self.diversity_loss(images_fake, noise_vector)
                # Get features of fake images
                features_fake = self.vgg16(images_fake)
                # Calc semantic reconstruction loss
                loss_generator_semantic_reconstruction = \
                    w_rec * self.semantic_reconstruction_loss(features_real, features_fake, masks)
                # Calc combined loss
                loss_generator_combined = loss_generator + loss_generator_semantic_reconstruction \
                                          + loss_generator_diversity
                # Calc gradients
                loss_generator_combined.backward()
                # Optimize generator
                self.generator_optimizer.step()
                # Show losses in progress bar description
                self.progress_bar.set_description(
                    'FID={:.4f}, Loss Div={:.4f}, Loss Rec={:.4f}, Loss G={:.4f}, Loss D={:.4f}'.format(
                        fid, loss_generator_diversity.item(), loss_generator_semantic_reconstruction.item(),
                        loss_generator.item(), (loss_discriminator_fake + loss_discriminator_real).item()))
                # Log losses
                self.logger.log(metric_name='loss_discriminator_real', value=loss_discriminator_real.item())
                self.logger.log(metric_name='loss_discriminator_fake', value=loss_discriminator_fake.item())
                self.logger.log(metric_name='loss_generator', value=loss_generator.item())
                self.logger.log(metric_name='loss_generator_semantic_reconstruction',
                                value=loss_generator_semantic_reconstruction.item())
                self.logger.log(metric_name='loss_generator_diversity', value=loss_generator_diversity.item())
                self.logger.log(metric_name='iterations', value=self.progress_bar.n)
                self.logger.log(metric_name='epoch', value=epoch)
                # Validate model
                if self.progress_bar.n % validate_after_n_iterations == 0:
                    self.progress_bar.set_description('Validation')
                    fid = self.validate()
                    self.inference(device=device)
                    # Log fid
                    self.logger.log(metric_name='fid', value=fid)
                    self.logger.log(metric_name='iterations_fid', value=self.progress_bar.n)
                    # Save all logs
                    self.logger.save_metrics(self.path_save_metrics)
            if epoch % save_model_after_n_epochs == 0:
                torch.save(
                    {"generator": self.generator.module.state_dict()
                    if isinstance(self.generator, nn.DataParallel) else self.generator.state_dict(),
                     "discriminator": self.discriminator.module.state_dict()
                     if isinstance(self.discriminator, nn.DataParallel) else self.discriminator.state_dict(),
                     "generator_optimizer": self.generator_optimizer.state_dict(),
                     "discriminator_optimizer": self.discriminator_optimizer.state_dict()},
                    os.path.join(self.path_save_models, 'checkpoint_{}.pt'.format(str(epoch).zfill(3))))
            self.inference(device=device)
            # Save all logs
            self.logger.save_metrics(self.path_save_metrics)
        # Close progress bar
        self.progress_bar.close()

    @torch.no_grad()
    def validate(self) -> float:
        '''
        Estimates the FID score on the validation dataset
        :return: (float) FID score
        '''
        # Generator into validation mode
        self.generator.eval()
        self.vgg16.eval()
        # Calc FID score
        fid = frechet_inception_distance(dataset_real=self.validation_dataset_fid,
                                         generator=self.generator, vgg16=self.vgg16)
        # Back into train mode
        self.generator.train()
        return fid
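
    # For reference, the standard FID definition compares the Gaussian
    # statistics (mu, C) of real and generated feature distributions:
    #   FID = ||mu_r - mu_f||^2 + Tr(C_r + C_f - 2 * (C_r C_f)^(1/2))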

    @torch.no_grad()
    def inference(self, device: str = 'cuda') -> None:
        '''
        Generates and saves a grid of random images for the different feature levels
        :param device: (str) device on which inference is performed
        '''
        # Models to device
        self.generator.to(device)
        self.vgg16.to(device)
        # Generator into eval mode
        self.generator.eval()
        # Get random images from the validation dataset
        images, labels, _ = image_label_list_of_masks_collate_function(
            [self.validation_dataset_fid.dataset[index] for index in
             np.random.choice(range(len(self.validation_dataset_fid)), replace=False, size=7)])
        # Get list of masks for different layers
        masks_levels = [get_masks_for_inference(layer, add_batch_size=True, device=device) for layer in range(7)]
        # Init tensor of fake images to store all fake images
        fake_images = torch.empty(7 ** 2, images.shape[1], images.shape[2], images.shape[3],
                                  dtype=torch.float32, device=device)
        # Init counter
        counter = 0
        # Loop over all image and masks
        for image, label in zip(images, labels):
            # Data to device
            image = image.to(device)[None]
            label = label.to(device)[None]
            for masks in masks_levels:
                # Generate fake images
                if isinstance(self.generator, nn.DataParallel):
                    fake_image = self.generator.module(
                        input=torch.randn(1, self.latent_dimensions, dtype=torch.float32, device=device),
                        features=self.vgg16(image),
                        masks=masks,
                        class_id=label.float())
                else:
                    fake_image = self.generator(
                        input=torch.randn(1, self.latent_dimensions, dtype=torch.float32, device=device),
                        features=self.vgg16(image),
                        masks=masks,
                        class_id=label.float())
                # Save fake images
                fake_images[counter] = fake_image.squeeze(dim=0)
                # Increment counter
                counter += 1
        # Save tensor as image
        torchvision.utils.save_image(
            misc.normalize_0_1_batch(fake_images),
            os.path.join(self.path_save_plots, 'predictions_{}.png'.format(self.progress_bar.n)), nrow=7)
        # Back into training mode
        self.generator.train()
class Activity(Fetchable, Deletable):
    """
       Should be responsible for managing a single activity
    """
    def __init__(self, data_hash=None, uri=None, activity_archive_uri=None, name=None):
        self.class_name = self.__class__.__name__
        self.log = Logger().get_logger()
        super(Activity, self).__init__()
        if data_hash is None and uri is None:
            self.log.info("No data provided - assuming creation of new Activity")
        elif data_hash is not None and uri is not None:
            self.data_hash = data_hash
            self.uri = uri
            self.absolute_url = self._get_absolute_url()
            self.log.info("Instantiated Activity object with url=%s" % self.absolute_url)

    def __repr__(self):
        return str(self.data_hash)

    def new(self, uri, constructor_args):
        """
            Method to keep naming convention of .new() methods
        """
        new_activity = self.upload(uri, constructor_args['zip_file_handler'])
        return new_activity

    def upload(self, uri, zip_file_handler):
        """
        Should deploy the activity with the following steps:
            - receive a handler to a local zipfile
            - upload it to the API
            - save it
            - set instance variables on the object

        :param uri: string
        :param zip_file_handler: 'file' class instance
        :rtype: new Activity object or False
        """
        self.log.info("Uploading new Activity from file %s" % zip_file_handler)
        route = Path().get_route_for('Activity', 'upload')
        url = "%s%s" % (uri, route)
        payload = {"_eventId_save" : "Save"}
        request_response = self._api_post_json(url, payload, zip_file_handler)
        return self.check_upload_response(request_response)

    def check_upload_response(self, request_response):
        """
            Dirty workaround for nasty html redirect
        """
        if request_response.url:
            self.absolute_url = request_response.url.replace("view.html", "view.json")
            self.fetch()
            self.log.info("Created new Activity with url=%s, data_hash is now %s" % (self.absolute_url, self.data_hash))
            return self
        else:
            self.log.info("Created new Activity %s but returned False" % self)
            return False
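
    # For illustration (hypothetical activity id): a successful upload redirects to
    #   http://lg-head:8080/interactivespaces/activity/123/view.html
    # which check_upload_response() rewrites to the JSON endpoint
    #   http://lg-head:8080/interactivespaces/activity/123/view.json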

    def to_json(self):
        """
            Should return selected attributes in JSON form, as defined by the template
        """
        self.serializer = ActivitySerializer(self.data_hash)
        return self.serializer.to_json()

    def fetch(self):
        """ Should retrieve data from Master API"""
        self.data_hash = self._refresh_object(self.absolute_url)
        return self

    def name(self):
        """ Should return live activity name"""
        return self.data_hash['activity']['name']

    def identifying_name(self):
        """ Should return identifying name """
        return self.data_hash['activity']['identifyingName']

    def version(self):
        """ Should return Activity version """
        return self.data_hash['activity']['version']

    def id(self):
        """ Should return Activity id """
        return self.data_hash['activity']['id']

    def description(self):
        """ Should return Activity description """
        return self.data_hash['activity']['description']

    """ Private methods below"""

    def _get_absolute_url(self):
        """
            Initial data hash without subattributes that comes
            from the all.json method
        """
        activity_id = self.data_hash['id']
        url = "%s/activity/%s/view.json" % (self.uri, activity_id)
        return url
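
# Usage sketch (assumed master URI and zip file name): uploading a new
# Activity and reading back its metadata.
#
#   with open('my_activity.zip', 'rb') as zip_file_handler:
#       activity = Activity().new('http://lg-head:8080/interactivespaces',
#                                 {'zip_file_handler': zip_file_handler})
#   if activity:
#       print activity.name(), activity.version()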
class Activatable(object):
    def __init__(self):
        self.log = Logger().get_logger()
        super(Activatable, self).__init__()
class Trainer(object):
    def __init__(self, args):

        self.input_dir = args.input_dir
        self.log_dir = args.log_dir
        self.model_dir = args.model_dir
        self.max_qst_length = args.max_qst_length
        self.max_num_ans = args.max_num_ans
        self.embed_size = args.embed_size
        self.word_embed_size = args.word_embed_size
        self.num_layers = args.num_layers
        self.hidden_size = args.hidden_size
        self.lr = args.lr
        self.step_size = args.step_size
        self.gamma = args.gamma
        self.num_epochs = args.num_epochs
        self.batch_size = args.batch_size
        self.num_workers = args.num_workers
        self.save_step = args.save_step
        self.l1_coef = args.l1_coef
        self.l2_coef = args.l2_coef
        self.save_path = args.save_path

        self.noise_dim = 100
        self.beta1 = 0.5
        self.logger = Logger('vqa-gan')
        self.checkpoints_path = 'checkpoints'

        self.data_loader = get_loader(input_dir=self.input_dir,
                                      input_vqa_train='train.npy',
                                      input_vqa_valid='valid.npy',
                                      max_qst_length=self.max_qst_length,
                                      max_num_ans=self.max_num_ans,
                                      batch_size=self.batch_size,
                                      num_workers=self.num_workers)

        qst_vocab_size = self.data_loader['train'].dataset.qst_vocab.vocab_size
        ans_vocab_size = self.data_loader['train'].dataset.ans_vocab.vocab_size
        self.ans_unk_idx = self.data_loader['train'].dataset.ans_vocab.unk2idx

        self.generator = Generator(embed_size=self.embed_size,
                                   qst_vocab_size=qst_vocab_size,
                                   ans_vocab_size=ans_vocab_size,
                                   word_embed_size=self.word_embed_size,
                                   num_layers=self.num_layers,
                                   hidden_size=self.hidden_size,
                                   img_feature_size=512).to(device)

        self.discriminator = Discriminator(
            embed_size=self.embed_size,
            ans_vocab_size=ans_vocab_size,
            word_embed_size=self.word_embed_size,
            num_layers=self.num_layers,
            hidden_size=self.hidden_size).to(device)


        paramsD = list(self.discriminator.qst_encoder.parameters()) \
                + list(self.discriminator.img_encoder.fc.parameters()) \
                + list(self.discriminator.fc1.parameters()) \
                + list(self.discriminator.fc2.parameters())

        self.optimD = torch.optim.Adam(paramsD,
                                       lr=self.lr * 2,
                                       betas=(self.beta1, 0.999))
        self.optimG = torch.optim.Adam(self.generator.parameters(),
                                       lr=self.lr,
                                       betas=(self.beta1, 0.999))

    def train(self):
        criterion = nn.CrossEntropyLoss()
        l2_loss = nn.MSELoss()
        l1_loss = nn.L1Loss()
        iteration = 0

        for epoch in range(self.num_epochs):

            running_loss = 0.0
            running_corr_exp1 = 0
            running_corr_exp2 = 0

            #training phase
            self.generator.train()
            self.discriminator.train()

            for batch_sample in tqdm(self.data_loader['train']):

                iteration += 1

                image = batch_sample['image'].to(device)
                #wrong_image = batch_sample['wrong_image'].to(device)
                question = batch_sample['question'].to(device)
                label = batch_sample['answer_label'].to(device)
                multi_choice = batch_sample[
                    'answer_multi_choice']  # not tensor, list.
                '''
                self.logger.draw(image, wrong_image)

                self.optimD.zero_grad()
                self.optimG.zero_grad()

                noise = Variable(torch.randn(image.size(0), 100)).to(device)
                noise = noise.view(noise.size(0), 100, 1, 1)

                output = self.generator(question, label, noise)
                qst_emb = self.generator.gen_qst_emb(question)
                intermediate, prediction = self.discriminator(output, qst_emb)

                loss = criterion(prediction, label)
                '''

                # Train the discriminator
                # add a new loss to discriminator to identify real and fake
                self.generator.zero_grad()
                self.discriminator.zero_grad()
                self.optimG.zero_grad()
                self.optimD.zero_grad()

                qst_emb = self.generator.gen_qst_emb(question)
                activation_real, activation_real2, outputs = self.discriminator(
                    image, qst_emb)
                real_loss = criterion(outputs, label)
                real_score = outputs

                _, pred_exp1 = torch.max(outputs, 1)  # [batch_size]
                _, pred_exp2 = torch.max(outputs, 1)  # [batch_size]
                pred_exp2[pred_exp2 == self.ans_unk_idx] = -9999
                running_loss += real_loss.item()
                running_corr_exp1 += torch.stack([
                    (ans == pred_exp1.cpu()) for ans in multi_choice
                ]).any(dim=0).sum()
                running_corr_exp2 += torch.stack([
                    (ans == pred_exp2.cpu()) for ans in multi_choice
                ]).any(dim=0).sum()

                noise = Variable(torch.randn(image.size(0), 100)).to(device)
                noise = noise.view(noise.size(0), 100, 1, 1)

                fake_images = self.generator(question, label, noise,
                                             activation_real, activation_real2)
                _, _, outputs = self.discriminator(fake_images, qst_emb)
                fake_loss = criterion(outputs, label)
                fake_score = outputs

                d_loss = real_loss + fake_loss

                d_loss.backward()
                self.optimD.step()

                # Train the generator
                self.generator.zero_grad()
                self.discriminator.zero_grad()
                self.optimG.zero_grad()
                self.optimD.zero_grad()

                qst_emb = self.generator.gen_qst_emb(question)
                noise = Variable(torch.randn(image.size(0), 100)).to(device)
                noise = noise.view(noise.size(0), 100, 1, 1)

                activation_real, activation_real2, _ = self.discriminator(
                    image, qst_emb)
                fake_images = self.generator(question, label, noise,
                                             activation_real, activation_real2)
                activation_fake, _, outputs = self.discriminator(
                    fake_images, qst_emb)

                activation_fake = torch.mean(activation_fake, 0)
                activation_real = torch.mean(activation_real, 0)

                #======= Generator Loss function============
                # This is a customized loss function, the first term is the regular cross entropy loss
                # The second term is feature matching loss, this measure the distance between the real and generated
                # images statistics by comparing intermediate layers activations
                # The third term is L1 distance between the generated and real images, this is helpful for the conditional case
                # because it links the embedding feature vector directly to certain pixel values.
                #===========================================
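                # In symbols, with f the discriminator's intermediate
                # activations (mean-reduced over the batch above):
                #   g_loss = CE(D(G(z)), y)
                #            + l2_coef * MSE(mean f(G(z)), mean f(x))
                #            + l1_coef * L1(G(z), x)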
                g_loss = criterion(outputs, label) \
                         + self.l2_coef * l2_loss(activation_fake, activation_real.detach()) \
                         + self.l1_coef * l1_loss(fake_images, image)

                g_loss.backward()
                self.optimG.step()

                if iteration % 5 == 0:
                    self.logger.log_iteration_gan(epoch, d_loss, g_loss,
                                                  real_score, fake_score)
                    self.logger.draw(image, fake_images)

            self.logger.plot_epoch_w_scores(epoch)

            if (epoch + 1) % 5 == 0:
                Utils.save_checkpoint(self.discriminator, self.generator,
                                      self.checkpoints_path, self.save_path,
                                      epoch)

            # Print the average loss and accuracy in an epoch.
            batch_step_size = len(
                self.data_loader['train'].dataset) / self.batch_size
            epoch_loss = running_loss / batch_step_size
            epoch_acc_exp1 = running_corr_exp1.double() / len(
                self.data_loader['train'].dataset)  # multiple choice
            epoch_acc_exp2 = running_corr_exp2.double() / len(
                self.data_loader['train'].dataset)  # multiple choice

            print(
                '| {} SET | Epoch [{:02d}/{:02d}], Loss: {:.4f}, Acc(Exp1): {:.4f}, Acc(Exp2): {:.4f} \n'
                .format('train', epoch, self.num_epochs - 1, epoch_loss,
                        epoch_acc_exp1, epoch_acc_exp2))

            # Log the loss and accuracy in an epoch.
            with open(
                    os.path.join(self.log_dir,
                                 '{}-log-epoch-{:02}.txt').format(
                                     'train', epoch + 1), 'w') as f:
                f.write(
                    str(epoch + 1) + '\t' + str(epoch_loss) + '\t' +
                    str(epoch_acc_exp1.item()) + '\t' +
                    str(epoch_acc_exp2.item()))

            #validation phase
            self.generator.eval()
            self.discriminator.eval()

            running_loss = 0.0
            running_corr_exp1 = 0
            running_corr_exp2 = 0

            for batch_sample in tqdm(self.data_loader['valid']):

                iteration += 1

                image = batch_sample['image'].to(device)
                #wrong_image = batch_sample['wrong_image'].to(device)
                question = batch_sample['question'].to(device)
                label = batch_sample['answer_label'].to(device)
                multi_choice = batch_sample[
                    'answer_multi_choice']  # not tensor, list.

                with torch.no_grad():
                    qst_emb = self.generator.gen_qst_emb(question)
                    _, _, outputs = self.discriminator(image, qst_emb)
                    _, pred_exp1 = torch.max(outputs, 1)  # [batch_size]
                    _, pred_exp2 = torch.max(outputs, 1)  # [batch_size]
                    loss = criterion(outputs, label)

                # Evaluation metric of 'multiple choice'
                # Exp1: our model prediction to '<unk>' IS accepted as the answer.
                # Exp2: our model prediction to '<unk>' is NOT accepted as the answer.
                pred_exp2[pred_exp2 == self.ans_unk_idx] = -9999
                running_loss += loss.item()
                running_corr_exp1 += torch.stack([
                    (ans == pred_exp1.cpu()) for ans in multi_choice
                ]).any(dim=0).sum()
                running_corr_exp2 += torch.stack([
                    (ans == pred_exp2.cpu()) for ans in multi_choice
                ]).any(dim=0).sum()

            # Print the average loss and accuracy in an epoch.
            batch_step_size = len(
                self.data_loader['valid'].dataset) / self.batch_size
            epoch_loss = running_loss / batch_step_size
            epoch_acc_exp1 = running_corr_exp1.double() / len(
                self.data_loader['valid'].dataset)  # multiple choice
            epoch_acc_exp2 = running_corr_exp2.double() / len(
                self.data_loader['valid'].dataset)  # multiple choice

            print(
                '| {} SET | Epoch [{:02d}/{:02d}], Loss: {:.4f}, Acc(Exp1): {:.4f}, Acc(Exp2): {:.4f} \n'
                .format('valid', epoch, self.num_epochs - 1, epoch_loss,
                        epoch_acc_exp1, epoch_acc_exp2))

            # Log the loss and accuracy in an epoch.
            with open(
                    os.path.join(self.log_dir,
                                 '{}-log-epoch-{:02}.txt').format(
                                     'valid', epoch + 1), 'w') as f:
                f.write(
                    str(epoch + 1) + '\t' + str(epoch_loss) + '\t' +
                    str(epoch_acc_exp1.item()) + '\t' +
                    str(epoch_acc_exp2.item()))
            '''    
                iteration += 1
                right_images = sample['right_images']
                right_embed = sample['right_embed']
                wrong_images = sample['wrong_images']

                right_images = Variable(right_images.float()).to(device)
                right_embed = Variable(right_embed.float()).to(device)
                wrong_images = Variable(wrong_images.float()).to(device)

                real_labels = torch.ones(right_images.size(0))
                fake_labels = torch.zeros(right_images.size(0))

                # ======== One sided label smoothing ==========
                # Helps preventing the discriminator from overpowering the
                # generator adding penalty when the discriminator is too confident
                # =============================================
                smoothed_real_labels = torch.FloatTensor(Utils.smooth_label(real_labels.numpy(), -0.1))

                real_labels = Variable(real_labels).to(device)
                smoothed_real_labels = Variable(smoothed_real_labels).to(device)
                fake_labels = Variable(fake_labels).to(device)

                # Train the discriminator
                self.discriminator.zero_grad()
                outputs, activation_real = self.discriminator(right_images, right_embed)
                real_loss = criterion(outputs, smoothed_real_labels)
                real_score = outputs

                noise = Variable(torch.randn(right_images.size(0), 100)).to(device)
                noise = noise.view(noise.size(0), 100, 1, 1)
                fake_images = self.generator(right_embed, noise)
                outputs, _ = self.discriminator(fake_images, right_embed)
                fake_loss = criterion(outputs, fake_labels)
                fake_score = outputs

                d_loss = real_loss + fake_loss

                d_loss.backward()
                self.optimD.step()

                # Train the generator
                self.generator.zero_grad()
                noise = Variable(torch.randn(right_images.size(0), 100)).to(device)
                noise = noise.view(noise.size(0), 100, 1, 1)
                fake_images = self.generator(right_embed, noise)
                outputs, activation_fake = self.discriminator(fake_images, right_embed)
                _, activation_real = self.discriminator(right_images, right_embed)

                activation_fake = torch.mean(activation_fake, 0)
                activation_real = torch.mean(activation_real, 0)


                #======= Generator Loss function============
                # This is a customized loss function, the first term is the regular cross entropy loss
                # The second term is feature matching loss, this measure the distance between the real and generated
                # images statistics by comparing intermediate layers activations
                # The third term is L1 distance between the generated and real images, this is helpful for the conditional case
                # because it links the embedding feature vector directly to certain pixel values.
                #===========================================
                g_loss = criterion(outputs, real_labels) \
                         + self.l2_coef * l2_loss(activation_fake, activation_real.detach()) \
                         + self.l1_coef * l1_loss(fake_images, right_images)

                g_loss.backward()
                self.optimG.step()

                if iteration % 5 == 0:
                    self.logger.log_iteration_gan(epoch,d_loss, g_loss, real_score, fake_score)
                    self.logger.draw(right_images, fake_images)

            self.logger.plot_epoch_w_scores(epoch)

            if (epoch) % 10 == 0:
                Utils.save_checkpoint(self.discriminator, self.generator, self.checkpoints_path, self.save_path, epoch)
            '''

    def demo(self):
        self.generator.load_state_dict(torch.load('./checkpoints/gen_29.pth'))
        self.discriminator.load_state_dict(
            torch.load('./checkpoints/disc_29.pth'))
        self.generator.eval()
        self.discriminator.eval()

        dataiter = iter(self.data_loader['valid'])
        batch_sample = next(dataiter)

        image = batch_sample['image'].to(device)
        question = batch_sample['question'].to(device)
        label = batch_sample['answer_label'].to(device)
        multi_choice = batch_sample['answer_multi_choice']  # not tensor, list.

        noise = Variable(torch.randn(image.size(0), 100)).to(device)
        noise = noise.view(noise.size(0), 100, 1, 1)

        with torch.no_grad():
            qst_emb = self.generator.gen_qst_emb(question)
            activation_real, activation_real2, outputs = self.discriminator(
                image, qst_emb)
            fake_images = self.generator(question, label, noise,
                                         activation_real, activation_real2)
            _, _, outputs = self.discriminator(fake_images, qst_emb)
            _, pred_exp1 = torch.max(outputs, 1)  # [batch_size]

        for i in range(8):
            print([
                self.data_loader['valid'].dataset.qst_vocab.idx2word(idx)
                for idx in question[i].tolist()
            ])
            print('ground truth: ', [
                self.data_loader['valid'].dataset.ans_vocab.idx2word(
                    label[i].tolist())
            ])
            print('fake answer: ', [
                self.data_loader['valid'].dataset.ans_vocab.idx2word(
                    pred_exp1[i].tolist())
            ])
        #print([self.data_loader['valid'].dataset.ans_vocab.idx2word(idx) for idx in label[0].tolist()])

        self.logger.draw(image, fake_images)
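
# Usage sketch (argparse namespace assumed to provide the fields read in
# Trainer.__init__ above):
#
#   args = parser.parse_args()
#   trainer = Trainer(args)
#   trainer.train()  # or trainer.demo() once checkpoints exist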
class Cleanable(object):
    def __init__(self):
        self.log = Logger().get_logger()
        super(Cleanable, self).__init__()
class Communicable(object):
    def __init__(self):
        """
           Should be responsible for communication with the API
        """
        self.log = Logger().get_logger()

    def _compose_url(self,
                     uri,
                     class_name=None,
                     method_name=None,
                     context=None,
                     action=None):
        """
        Should compose the URL in two steps:
            1. return the statically defined route if the requesting
               object already has one
            2. otherwise try to compose a custom route from the URL data

        :rtype: string
        """
        if class_name and method_name:
            self.log.info(
                "Composing url for class_name '%s' and method name '%s'" %
                (class_name, method_name))
            static_route = Path().get_route_for(class_name, method_name)
            if static_route:
                self.log.info("Returned auto url %s" % (static_route))
                url = "%s%s" % (uri, static_route)
                return url

        elif context and action:
            url = "%s%s%s" % (uri, context, action)
            self.log.info("Composed url %s" % (url))
            return url
        else:
            self.log.info("Could not compose an url.")
            raise CommunicableException

    def _urlopen(self, url, data=None):
        """Helper for opening urls."""
        return urllib2.urlopen(url, data)

    def _api_get_json(self, url):
        """
        Sends a JSON request to the master. Returns only the ['data'] part of the JSON response.

        :rtype: dict or bool
        """
        try:
            response = urllib2.urlopen(url)
            data = json.loads(response.read())
        except urllib2.URLError, e:
            self.log.error(
                "Could not communicate with Master API because: %s" % e)
            print "Could not communicate with Master API because %s" % e
            sys.exit(1)

        try:
            out_data = data['data']
        except Exception:
            out_data = None

        if data['result'] != 'success':
            self.log.info("Could not retrieve data for URL=%s" % url)
            return {}

        if out_data:
            return out_data
        else:
            return {}
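
# Sketch of the two composition paths in Communicable._compose_url()
# (hypothetical route values):
#
#   _compose_url(uri, class_name='Master', method_name='get_activities')
#       -> uri + static route returned by Path().get_route_for(...)
#   _compose_url(uri, context='/activity/', action='new')
#       -> uri + '/activity/' + 'new'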
class Shutdownable(object):
    def __init__(self):
        self.log = Logger().get_logger()
        super(Shutdownable, self).__init__()
class Startupable(object):
    def __init__(self):
        self.log = Logger().get_logger()
        super(Startupable, self).__init__()
class LiveActivityGroup(Fetchable, Statusable, Deletable, Shutdownable,
                        Startupable, Activatable, Configurable, Metadatable,
                        Deployable):
    """
    Should be responsible for managing a single live activity group
    """
    def __init__(self, data_hash=None, uri=None):
        self.log = Logger().get_logger()
        self.class_name = self.__class__.__name__
        super(LiveActivityGroup, self).__init__()
        if data_hash is None and uri is None:
            self.log.info(
                "No data_hash and uri provided for LiveActivityGroup constructor, assuming creation"
            )
        else:
            self.data_hash = data_hash
            self.uri = uri
            self.absolute_url = self._get_absolute_url()
            self.log.info("Instantiated Activity object with url=%s" %
                          self.absolute_url)

    def __repr__(self):
        return str(self.data_hash)

    def new(self, uri, constructor_args):
        """
        Used to create a new live activity group through the API and set the "uri" so that
        this instance of LiveActivityGroup can be operated on right away after .new() returns

        :param constructor_args: dictionary with following structure::

            {\
            'liveActivityGroup.name' : 'live_activity_group_name',\
            'liveActivityGroup.description' : 'live_activity_group_description',\
            '_eventId_save' : 'Save',\
            'liveActivityIds' : [1,2,666]\
            }

        :param uri: "http://some_server/prefix" (passed by master)

        :rtype: new LiveActivityGroup object or False
        """

        self.log.info("Creating new LiveActivityGroup with arguments: %s" %
                      constructor_args)
        route = Path().get_route_for('LiveActivityGroup', 'new')
        url = "%s%s" % (uri, route)
        request_response = self._api_post_json(url, constructor_args)
        if request_response.url:
            self.absolute_url = request_response.url.replace(
                "view.html", "view.json")
            self.fetch()
            self.log.info(
                "Created new LiveActivityGroup with url=%s, data_hash is now %s"
                % (self.absolute_url, self.data_hash))
            return self
        else:
            self.log.info(
                "Could not create new LiveActivityGroup %s - returning False" % self)
            return False

    def set_live_activities(self, live_activities_list):
        """
        Used to set a new list of live activities

        :param live_activities_list: list of live activity ids, e.g. [1, 2, 666]

        :rtype: updated LiveActivityGroup object or False
        """
        params = {
            'liveActivityGroup.name': self.name(),
            'liveActivityIds': live_activities_list,
            'liveActivityGroup.description': self.description()
        }
        self.log.info("Updating LiveActivityGroup with arguments: %s" % params)
        route = Path().get_route_for('LiveActivityGroup', 'edit') % self.id()
        url = "%s%s" % (self.uri, route)
        request_response = self._api_post_json_no_cookies(url, params)
        if request_response.url:
            self.absolute_url = request_response.url.replace(
                "view.html", "view.json")
            self.fetch()
            self.log.info(
                "Updated LiveActivityGroup with url=%s, data_hash is now %s" %
                (self.absolute_url, self.data_hash))
            return self
        else:
            self.log.info("Updated LiveActivityGroup %s but returned False" %
                          self)
            return False

    def to_json(self):
        """
        Should return selected attributes in JSON form, as defined by the template
        """
        self.serializer = LiveActivityGroupSerializer(self.data_hash)
        return self.serializer.to_json()

    def id(self):
        return self.data_hash['id']

    def name(self):
        """ Should return live activity group name"""
        return self.data_hash['name']

    def live_activities(self):
        """ Should return list of live LiveActivity instances"""
        live_activities = []
        data = self.data_hash['liveActivities']
        for live_activity_data in data:
            try:
                status = live_activity_data['active']['runtimeState']
            except Exception:
                status = 'UNKNOWN'
            live_activity_group_live_activity = LiveActivity(
                data_hash=live_activity_data, uri=self.uri)
            live_activities.append(live_activity_group_live_activity)
        return live_activities

    def description(self):
        """ Should return Live Activity Group description """
        return self.data_hash['description']

    """ Private methods below """

    def metadata(self):
        """ Should return Live Activity Group metadata """
        return self.data_hash['metadata']

    """ Private methods below """

    def _get_absolute_url(self):
        live_activity_group_id = self.data_hash['id']
        url = "%s/liveactivitygroup/%s/view.json" % (self.uri,
                                                     live_activity_group_id)
        return url
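
# Usage sketch (hypothetical group name and live activity ids): replacing the
# live activities of an existing group fetched through Master.
#
#   group = master.get_live_activity_group({'live_activity_group_name': 'My Group'})
#   group.set_live_activities([1, 2, 666])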
class GAN:
    def __init__(self,
                 gan_type,
                 batch_size,
                 img_size,
                 img_chan,
                 discriminator_fn=None,
                 generator_fn=None):
        self.gan_types = [
            "DCGAN", "WGAN", "WGAN-GP", "LSGAN", "SNGAN", "RSGAN", "RaSGAN"
        ]
        assert gan_type in self.gan_types, "[error] unimplemented gan_type `{}` specified. Choose from:\n{}".format(
            gan_type, self.gan_types)
        self.gan_type = gan_type
        self.batch_size = batch_size
        self.img_size = img_size
        self.img_chan = img_chan
        self.logger = Logger()
        self.n_disc_update = 1  # number of times to update discriminator (critic)
        self._init(discriminator_fn, generator_fn)

    def _init(self, discriminator_fn, generator_fn):
        self.Z = tf.placeholder(tf.float32, shape=[self.batch_size, 100])
        self.img = tf.placeholder(tf.float32,
                                  shape=[
                                      self.batch_size, self.img_size,
                                      self.img_size, self.img_chan
                                  ])
        D = Discriminator("Discriminator", self.batch_size, self.img_size,
                          self.img_chan, discriminator_fn)
        G = Generator("Generator", self.batch_size, self.img_size,
                      self.img_chan, generator_fn)

        # with tf.variable_scope(self.gan_type):
        self.fake_img = G(self.Z)
        eps = 1e-14
        self.summaries = []
        if self.gan_type == "DCGAN":
            # paper: Unsupervised Representation Learning with Deep Convolutional Generative Adversarial Networks
            # https://arxiv.org/abs/1511.06434
            self.fake_logit = tf.nn.sigmoid(D(self.fake_img))
            self.real_logit = tf.nn.sigmoid(D(self.img, reuse=True))
            self.d_loss = -(tf.reduce_mean(tf.log(self.real_logit + eps)) +
                            tf.reduce_mean(tf.log(1 - self.fake_logit + eps)))
            self.g_loss = -tf.reduce_mean(tf.log(self.fake_logit + eps))
            self.opt_D = tf.train.AdamOptimizer(2e-4, beta1=0.5).minimize(
                self.d_loss, var_list=D.var)
            self.opt_G = tf.train.AdamOptimizer(2e-4, beta1=0.5).minimize(
                self.g_loss, var_list=G.var)
        elif self.gan_type == "WGAN":
            # paper: Wasserstein GAN
            # https://arxiv.org/abs/1701.07875
            self.fake_logit = D(self.fake_img)
            self.real_logit = D(self.img, reuse=True)
            self.d_loss = -(tf.reduce_mean(self.real_logit) -
                            tf.reduce_mean(self.fake_logit))
            self.g_loss = -tf.reduce_mean(self.fake_logit)
            self.clip = []
            for _, var in enumerate(D.var):
                self.clip.append(var.assign(tf.clip_by_value(var, -0.01,
                                                             0.01)))
            self.opt_D = tf.train.RMSPropOptimizer(5e-5).minimize(
                self.d_loss, var_list=D.var)
            self.opt_G = tf.train.RMSPropOptimizer(5e-5).minimize(
                self.g_loss, var_list=G.var)
            self.n_disc_update = 5
        elif self.gan_type == "WGAN-GP":
            # paper: Improved Training of Wasserstein GANs
            # https://arxiv.org/abs/1704.00028
            self.fake_logit = D(self.fake_img)
            self.real_logit = D(self.img, reuse=True)
            e = tf.random_uniform([self.batch_size, 1, 1, 1], 0, 1)
            x_hat = e * self.img + (1 - e) * self.fake_img
            grad = tf.gradients(D(x_hat, reuse=True), x_hat)[0]
            self.d_loss = tf.reduce_mean(
                self.fake_logit - self.real_logit) + 10 * tf.reduce_mean(
                    tf.square(
                        tf.sqrt(tf.reduce_sum(tf.square(grad), axis=[1, 2, 3]))
                        - 1))
            self.g_loss = tf.reduce_mean(-self.fake_logit)
            self.opt_D = tf.train.AdamOptimizer(
                1e-4, beta1=0., beta2=0.9).minimize(self.d_loss,
                                                    var_list=D.var)
            self.opt_G = tf.train.AdamOptimizer(
                1e-4, beta1=0., beta2=0.9).minimize(self.g_loss,
                                                    var_list=G.var)
            self.n_disc_update = 5
        elif self.gan_type == "LSGAN":
            # paper: Least Squares Generative Adversarial Networks
            # https://arxiv.org/abs/1611.04076
            self.fake_logit = D(self.fake_img)
            self.real_logit = D(self.img, reuse=True)
            self.d_loss = tf.reduce_mean(0.5 * tf.square(self.real_logit - 1) +
                                         0.5 * tf.square(self.fake_logit))
            self.g_loss = tf.reduce_mean(0.5 * tf.square(self.fake_logit - 1))
            self.opt_D = tf.train.AdamOptimizer(2e-4, beta1=0.5).minimize(
                self.d_loss, var_list=D.var)
            self.opt_G = tf.train.AdamOptimizer(2e-4, beta1=0.5).minimize(
                self.g_loss, var_list=G.var)
        elif self.gan_type == "SNGAN":
            # paper: Spectral Normalization for Generative Adversarial Networks
            # https://arxiv.org/abs/1802.05957
            self.fake_logit = tf.nn.sigmoid(D(self.fake_img, enable_sn=True))
            self.real_logit = tf.nn.sigmoid(
                D(self.img, reuse=True, enable_sn=True))
            self.d_loss = -(tf.reduce_mean(
                tf.log(self.real_logit + eps) +
                tf.log(1 - self.fake_logit + eps)))
            self.g_loss = -tf.reduce_mean(tf.log(self.fake_logit + eps))
            self.opt_D = tf.train.AdamOptimizer(2e-4, beta1=0.5).minimize(
                self.d_loss, var_list=D.var)
            self.opt_G = tf.train.AdamOptimizer(2e-4, beta1=0.5).minimize(
                self.g_loss, var_list=G.var)
        else:
            raise NotImplementedError
        # statistics
        with tf.variable_scope("statictics"):
            if self.gan_type in ["DCGAN", "SNGAN"]:
                self.summaries.append(
                    tf.summary.scalar(
                        "accuracy",
                        (tf.reduce_mean(
                            tf.cast(self.fake_logit < 0.5, tf.float32)) +
                         tf.reduce_mean(
                             tf.cast(self.real_logit > 0.5, tf.float32))) /
                        2.))
                self.summaries.append(
                    tf.summary.scalar("kl_divergence",
                                      self.calc_kl_divergence()))
                self.summaries.append(
                    tf.summary.scalar("js_divergence",
                                      self.calc_js_divergence()))
            elif self.gan_type in ["WGAN", "WGAN-GP"]:
                self.summaries.append(
                    tf.summary.scalar("wasserstein_estimate",
                                      tf.abs(self.d_loss)))
            self.summaries.append(tf.summary.scalar("d_loss", self.d_loss))
            self.summaries.append(tf.summary.scalar("g_loss", self.g_loss))

        self.sess = tf.Session()
        self.sess.run(tf.global_variables_initializer())
        self.logger.log_graph(sess=self.sess)

    def __call__(self, dataset, n_epoch, test_batch_interval):
        saver = tf.train.Saver()

        print("[info] start training")
        n_trained_step = 0
        for epoch in range(n_epoch):
            for _ in tqdm(
                    range(dataset.shape[0] //
                          (self.batch_size * self.n_disc_update) - 1)):
                idx = n_trained_step % dataset.shape[0]  # wrap around the dataset

                # update discriminator
                average_d_loss = 0.
                for _ in range(self.n_disc_update):
                    batch = dataset[idx:idx + self.batch_size]
                    idx += self.batch_size
                    n_trained_step += self.batch_size
                    d_loss, _, *summaries = self.sess.run(
                        [self.d_loss, self.opt_D] + self.summaries,
                        feed_dict={
                            self.img:
                            batch,
                            self.Z:
                            np.random.standard_normal([self.batch_size, 100])
                        })
                    if self.gan_type == "WGAN":
                        self.sess.run(self.clip)
                    average_d_loss += d_loss
                average_d_loss /= self.n_disc_update

                # update generator
                g_loss, _ = self.sess.run(
                    [self.g_loss, self.opt_G],
                    feed_dict={
                        self.img: batch,
                        self.Z:
                        np.random.standard_normal([self.batch_size, 100])
                    })

                self.logger.write_tf_summary(summaries, n_trained_step)

                # test
                if (n_trained_step //
                        self.batch_size) % test_batch_interval == 0:
                    self.test(batch, n_trained_step)

            print(
                "[info] epoch: {0: 4}, step: {1: 7}, d_loss: {2: 8.4f}, g_loss: {3: 8.4f}"
                .format(epoch, n_trained_step, average_d_loss, g_loss))

        self.logger.generate_animation()
        saver.save(
            self.sess,
            self.logger.dir + "/{0:07}_model.ckpt".format(n_trained_step))

    def test(self, batch, n_trained_step):
        z = np.random.standard_normal([self.batch_size, 100])
        imgs = self.sess.run(self.fake_img,
                             feed_dict={
                                 self.img: batch,
                                 self.Z: z
                             })
        self.logger.save_img(imgs, n_trained_step)

    def calc_js_divergence(self):
        m = (self.fake_logit + self.real_logit) / 2.
        return tf.reduce_mean(
            (self.fake_logit * tf.log(self.fake_logit / m) +
             self.real_logit * tf.log(self.real_logit / m)) / 2.)

    def calc_kl_divergence(self):
        return tf.reduce_mean(self.fake_logit * tf.log(self.fake_logit / 0.5) +
                              (1. - self.fake_logit) *
                              tf.log((1. - self.fake_logit) / 0.5))
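
# Usage sketch (assumed dataset: float32 numpy array shaped
# [N, img_size, img_size, img_chan], matching the self.img placeholder):
#
#   gan = GAN(gan_type="DCGAN", batch_size=64, img_size=32, img_chan=3)
#   gan(dataset, n_epoch=20, test_batch_interval=500)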
class Master(Communicable):
    """
        @summary: This is the main class with all the logic needed for
        high-level operations. You will typically use an instance of Master in all your scripts.
    """
    def __init__(self, host='lg-head', port='8080', prefix='/interactivespaces'):
        """ 
            @param host: default value is lg-head 
            @param port: default value is 8080
            @param prefix: default value is /interactivespaces
            @todo: refactor filter_* methods because they're not DRY
        """
        self.host, self.port, self.prefix = host, port, prefix
        self.log = Logger().get_logger()
        self.uri = "http://%s:%s%s" % (self.host, self.port, prefix)
        super(Master, self).__init__()
        
    def get_activities(self, search_pattern=None):
        """
            Retrieves a list of Activity objects
            @rtype: list
            @param search_pattern: dictionary of regexps used for searching through Activities
                - example regexp dict: {
                                        "activity_name" : "regexp",
                                        "activity_version" : "regexp" 
                                        }
                - every search_pattern dictionary key may be blank/null
        """
        url = self._compose_url(class_name='Master', method_name='get_activities', uri=self.uri)
        self.log.info("Trying to retrieve url=%s" % url)
        response = self._api_get_json(url)
        self.log.info('Got response for "get_activities" %s ' % str(response))
        self.log.info('get_activities returned %s objects' % str(len(response)))
        activities = self._filter_activities(response, search_pattern)
        return activities
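
    # Usage sketch (hypothetical pattern): every search_pattern value is a
    # wildcard-style regexp matched against the corresponding field.
    #
    #   master = Master(host='lg-head', port='8080')
    #   browsers = master.get_activities({'activity_name': 'browser*'})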

    def get_activity(self, search_pattern=None):
        """
            Retrieves a single Activity object
            @rtype: Activity
            @param search_pattern: dictionary of regexps used for searching through Activities
                - example regexp dict: {
                                        "activity_name" : "regexp",
                                        "activity_version" : "regexp" 
                                        }
                - every search_pattern dictionary key may be blank/null
        """
        url = self._compose_url(class_name='Master', method_name='get_activities', uri=self.uri)
        self.log.info("Trying to retrieve url=%s" % url)
        response = self._api_get_json(url)
        self.log.info('Got response for "get_activities" %s ' % str(response))
        self.log.info('get_activities returned %s objects' % str(len(response)))
        activity = self._filter_activities(response, search_pattern)
        if len(activity) > 1:
            raise MasterException("get_activity returned more than one row (%s)" % len(activity))
        elif isinstance(activity[0], Activity):
            activity[0].fetch()
            self.log.info("get_activity returned Activity:%s" % str(activity[0]))
            return activity[0]
        else:
            raise MasterException("Could not get specific activity for given search pattern")
    
    def get_live_activities(self, search_pattern=None):
        """
            Retrieves a list of LiveActivity objects
            @rtype: list
            @param search_pattern: dictionary of regexps used for searching through LiveActivity names
                - example regexp dict: {
                                        "live_activity_name" : "regexp",
                                        "space_controller_name" : "regexp"
                                        }
                - each search_pattern dictionary key may be blank/null
        """
        url = self._compose_url(class_name='Master', method_name='get_live_activities', uri=self.uri)
        self.log.info("Trying to retrieve url=%s" % url)
        response = self._api_get_json(url)
        self.log.debug('Got response for "get_live_activities" %s ' % str(response))
        self.log.info('get_live_activities returned %s objects' % str(len(response)))
        live_activities = self._filter_live_activities(response, search_pattern)
        return live_activities
    
    def get_live_activity(self, search_pattern=None):
        """
            Retrieves a single LiveActivity object
            @rtype: LiveActivity or False
            @param search_pattern: dictionary of regexps used for searching through LiveActivity names
                - example regexp dict: {
                                        "live_activity_name" : "GE ViewSync Master on Node A",
                                        "space_controller_name" : "ISCtlDispAScreen00"
                                        }
                - each search_pattern dictionary key may be blank/null
        """
        url = self._compose_url(class_name='Master', method_name='get_live_activities', uri=self.uri)
        self.log.info("Trying to retrieve url=%s" % url)
        response = self._api_get_json(url)
        self.log.debug('Got response for "get_live_activities" %s ' % str(response))
        self.log.info('get_live_activities returned %s objects' % str(len(response)))
        live_activity = self._filter_live_activities(response, search_pattern)
        if len(live_activity) > 1:
            raise MasterException("get_live_activity returned more than one row (%s)" % len(live_activity))
        elif isinstance(live_activity[0], LiveActivity):
            live_activity[0].fetch()
            self.log.info("get_live_activity returned LiveActivity:%s" % live_activity)
            return live_activity[0]
        else:
            raise MasterException("Could not get specific live activity for given search pattern")

    
    def get_live_activity_groups(self, search_pattern=None):
        """
            Retrieves a list of live activity groups.
            @rtype: list
            @param search_pattern: dictionary of regexps used for searching through LiveActivity names
                - example regexp dict: {
                                        "live_activity_group_name" : "regexp"
                                        }
        """
        url = self._compose_url(class_name='Master', method_name='get_live_activity_groups', uri=self.uri)
        self.log.info("Trying to retrieve url=%s" % url)
        response = self._api_get_json(url)
        self.log.debug('Got response for "get_live_activity_groups" %s ' % str(response))
        self.log.info('get_live_activity_groups returned %s objects' % str(len(response)))
        live_activity_groups = self._filter_live_activity_groups(response, search_pattern)
        return live_activity_groups
    
    def get_live_activity_group(self, search_pattern=None):
        """
            Retrieves a single LiveActivityGroup object
            @rtype: LiveActivityGroup
            @param search_pattern: dictionary of regexps used for searching through LiveActivity names
                - example regexp dict: {
                                        "live_activity_group_name" : "regexp"
                                        }
        """
        url = self._compose_url(class_name='Master', method_name='get_live_activity_groups', uri=self.uri)
        self.log.info("Trying to retrieve url=%s" % url)
        response = self._api_get_json(url)
        self.log.debug('Got response for "get_live_activity_groups" %s ' % str(response))
        self.log.info('get_live_activity_groups returned %s objects' % str(len(response)))
        live_activity_group = self._filter_live_activity_groups(response, search_pattern)
        if len(live_activity_group) > 1:
            raise MasterException("get_live_activity_group returned more than one row (%s)" % len(live_activity_group))
        elif isinstance(live_activity_group[0], LiveActivityGroup):
            live_activity_group[0].fetch()
            self.log.info("get_live_activity_group returned LiveActivityGroup:%s" % str(live_activity_group))
            return live_activity_group[0]
        else:
            raise MasterException("Could not get specific live activity group for given search pattern")

    def get_spaces(self, search_pattern=None):
        """
            @summary: Retrieves a list of Space objects.
            @rtype: list
            @param search_pattern: dictionary containing space name regexp
                - example regexp dict: {"space_name" : "regexp"}
        """
        url = self._compose_url(class_name='Master', method_name='get_spaces', uri=self.uri)
        self.log.info("Trying to retrieve url=%s" % url)
        response = self._api_get_json(url)
        self.log.debug('Got response for "get_spaces" %s ' % str(response))
        spaces = self._filter_spaces(response, search_pattern)
        return spaces
    
    def get_space(self, search_pattern=None):
        """
            @summary: Retrieves a Space
            @rtype: Space
            @param search_pattern: dictionary containing space name regexp
                - example regexp dict: {"space_name" : "regexp"}
        """
        url = self._compose_url(class_name='Master', method_name='get_spaces', uri=self.uri)
        self.log.info("Trying to retrieve url=%s" % url)
        response = self._api_get_json(url)
        self.log.debug('Got response for "get_spaces" %s ' % str(response))
        space = self._filter_spaces(response, search_pattern)
        if len(space) > 1:
            raise MasterException("get_space returned more than one row (%s)" % len(space))
        elif isinstance(space[0], Space):
            space[0].fetch()
            self.log.info("get_space returned Space:%s" % str(space))
            return space[0]
        else:
            raise MasterException("Could not get specific space for given search pattern")

    def get_space_controllers(self, search_pattern=None):
        """
            Retrieves a list of space controllers.
            @rtype: list
            @param search_pattern: dictionary containing regexps and strings
                - example regexp dict: {
                                        "state" : "STRING",
                                        "mode" : "STRING",
                                        "name" : "regexp",
                                        "uuid" : "STRING"
                                        }
        """
        url = self._compose_url(class_name='Master', method_name='get_space_controllers', uri=self.uri)
        self.log.info("Trying to retrieve url=%s" % url)
        response = self._api_get_json(url)
        self.log.debug('Got response for "get_controllers" %s ' % str(response))
        space_controllers = self._filter_space_controllers(response, search_pattern)
        return space_controllers
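
    # Usage sketch (hypothetical values), assuming `master` is a Master
    # instance; the keys mirror the search pattern documented above:
    #
    #   controllers = master.get_space_controllers({"space_controller_name": "ISCtlDisp.*"})
    #   for controller in controllers:
    #       print controller.name()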

    def get_space_controller(self, search_pattern=None):
        """
            Retrieves a single space controller.
            @rtype: SpaceController
            @param search_pattern: dictionary containing regexps and strings
                - example regexp dict: {
                                        "space_controller_state" : "STRING",
                                        "space_controller_mode" : "STRING",
                                        "space_controller_name" : "regexp",
                                        "space_controller_uuid" : "STRING"
                                        }
        """
        url = self._compose_url(class_name='Master', method_name='get_space_controllers', uri=self.uri)
        self.log.info("Trying to retrieve url=%s" % url)
        response = self._api_get_json(url)
        self.log.debug('Got response for "get_controllers" %s ' % str(response))
        space_controller = self._filter_space_controllers(response, search_pattern)
        if len(space_controller) > 1:
            raise MasterException("get_space_controller returned more than one row")
        elif not space_controller:
            raise MasterException("Could not get specific space controller for given search pattern")
        elif isinstance(space_controller[0], SpaceController):
            space_controller[0].fetch()
            self.log.info("get_space_controller returned SpaceController:%s" % str(space_controller))
            return space_controller[0]
        else:
            raise MasterException("Could not get specific space controller for given search pattern")
        
    def get_named_scripts(self, pattern=None):
        """Retrieves a list of named scripts."""
        raise NotImplementedError

    def new_live_activity(self, constructor_args):
        """
            @summary: creates a new live activity and returns it
            @param constructor_args: - dictionary containing all of below keys:
                {
                "live_activity_name" : "string containing name of a new live activity (mandatory)",
                "live_activity_description" : "string containing description",
                "activity_name" : "string containing activity name",
                "space_controller_name" : "string containing controller name"
                }
            @rtype: LiveActivity
        """
         
        unpacked_arguments = {}
        unpacked_arguments['activityId'] = self.get_activity({"activity_name" : constructor_args['activity_name']}).id()
        unpacked_arguments['controllerId'] = self.get_space_controller({"space_controller_name" : constructor_args['space_controller_name']}).id()
        unpacked_arguments['liveActivity.description'] = constructor_args['live_activity_description']
        unpacked_arguments['liveActivity.name'] = constructor_args['live_activity_name']
        unpacked_arguments['_eventId_save'] = 'Save'
        
        activity = LiveActivity().new(self.uri, unpacked_arguments)
        self.log.info("Master:new_live_activity returned activity:%s" % activity)
        return activity
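
    # Usage sketch, assuming `master` is a Master instance and the referenced
    # activity and controller already exist (the activity name is illustrative):
    #
    #   live_activity = master.new_live_activity({
    #       "live_activity_name": "SV Master on Node A",
    #       "live_activity_description": "created by example.py",
    #       "activity_name": "some existing activity name",
    #       "space_controller_name": "ISCtlDispAScreen00"
    #   })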
    
    def new_activity(self, constructor_args):
        """
            @summary: creates a new activity and returns it
            @param constructor_args: - dictionary containing all of below keys:
                {
                "zip_file_handler": "zipfile object (mandatory)"
                }
            @rtype: Activity or False
        """ 
        
        activity = Activity().new(self.uri, constructor_args)
        self.log.info("Master:new_activity returned activity:%s" % activity)
        return activity
        
    def new_space_controller(self, constructor_args):
        """
            @summary: creates new controller
            @param constructor_args: dictionary containing all of below keys:
                {
                "space_controller_name" : "mandatory string",
                "space_controller_description" : "non mandatory string",
                "space_controller_host_id" : "mandatory string"
                }
            
        """
        space_controller = SpaceController().new(self.uri, constructor_args)
        return space_controller

    def new_live_activity_group(self, constructor_args):
        """
            @summary: Creates a new live activity group.
            @param constructor_args: dictionary with following structure:
                {
                "live_activity_group_name" : "example.py live_activity_group_name",
                "live_activity_group_description" : "created by example.py",
                "live_activities" : [{"live_activity_name" : "SV Master on Node A",
                "space_controller_name" : "ISCtlDispAScreen00"},
                {"live_activity_name" : "SV Slave 01 on Node A",
                "space_controller_name" : "ISCtlDispAScreen00"}]
                }
        """
        live_activity_ids = self._translate_live_activities_names_to_ids(constructor_args['live_activities'])
        unpacked_arguments = {}
        unpacked_arguments['liveActivityGroup.name'] = constructor_args['live_activity_group_name']
        unpacked_arguments['liveActivityGroup.description'] = constructor_args['live_activity_group_description']
        unpacked_arguments['_eventId_save'] = 'Save'
        unpacked_arguments['liveActivityIds'] = live_activity_ids

        live_activity_group = LiveActivityGroup().new(self.uri, unpacked_arguments)
        return live_activity_group
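
    # Usage sketch: the constructor_args structure from the docstring above,
    # reusing this document's own example names:
    #
    #   group = master.new_live_activity_group({
    #       "live_activity_group_name": "example.py live_activity_group_name",
    #       "live_activity_group_description": "created by example.py",
    #       "live_activities": [{"live_activity_name": "SV Master on Node A",
    #                            "space_controller_name": "ISCtlDispAScreen00"}]
    #   })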
    
    def new_space(self, name, description, live_activity_groups, spaces):
        """Creates a new space."""
        raise NotImplementedError

    def new_controller(self, name, description, host_id):
        """Creates a new controller."""
        raise NotImplementedError

    def new_named_script(self, name, description, language, content, scheduled=None):
        """Creates a new named script."""
        raise NotImplementedError
    
    """ Private methods below """
    
    def _filter_live_activities(self, response, search_pattern):
        """
        @summary: Should iterate over response from Master API and filter
        live activities with regards to their name
        @param response: response['data'] from master API
        @param search_pattern: dictionary where values may be regexps
        @todo: refactor filtering because it looks ugly and make it global for all classes
        """
        live_activities = []
        """ Make a search pattern with default values set to None"""
        if isinstance(search_pattern, dict):
            search_pattern = SearchPattern(search_pattern)
        else:
            search_pattern = SearchPattern()
        
        live_activity_name = search_pattern['live_activity_name']
        
        """ Nested values are returning exception so do it manually here """
        try:
            space_controller_name = search_pattern['controller']['name']
        except Exception:
            space_controller_name = None
        
        self.log.debug("Filtering activities with pattern=%s" % search_pattern)
        
        for live_activity_data in response:
            do_filter = True
            """ Var for holding the state of filtering """
            current_live_activity_name = live_activity_data['name']
            current_space_controller_name = live_activity_data['controller']['name']
            if space_controller_name and do_filter:
                if not Searcher().match(current_space_controller_name, space_controller_name):
                    do_filter = False
            if live_activity_name and do_filter:
                if not Searcher().match(current_live_activity_name, live_activity_name):
                    do_filter = False
            if do_filter:
                live_activities.append(LiveActivity(live_activity_data, self.uri))
        self.log.info("Filtered live_activities and returned %s object(s)" % str(len(live_activities)))
        return live_activities
    
    def _filter_activities(self, response, search_pattern):
        """
        @summary: Should iterate over response from Master API and filter
        activities with regards to their name
        @param response: response['data'] from master API
        @param search_pattern: dictionary where values may be regexps
        @rtype: list of Activity objects
        
        """
        activities = []
        """ Make a search pattern with default values set to None"""
        if isinstance(search_pattern, dict):
            search_pattern = SearchPattern(search_pattern)
        else:
            search_pattern = SearchPattern()
            
        activity_name = search_pattern['activity_name']
        activity_version = search_pattern['activity_version']
        
        self.log.debug("Filtering activities with pattern=%s" % search_pattern)
        
        for activity_data in response:
            do_filter = True
            """ Var for holding the state of filtering """
            current_activity_name = activity_data['name']
            current_activity_version = activity_data['version']
            if activity_version and do_filter:
                if not Searcher().match(current_activity_version, activity_version):
                    do_filter = False
            if activity_name and do_filter:
                if not Searcher().match(current_activity_name, activity_name):
                    do_filter = False
            if do_filter:
                activities.append(Activity(activity_data, self.uri))
        self.log.info("Filtered activities and returned %s object(s) : %s" % (str(len(activities)), activities))
        return activities
    
    def _filter_live_activity_groups(self, response, search_pattern):
        """
        @summary: Should iterate over response from Master API and filter
        live activity groups with regards to their name
        @param response: response['data'] from master API
        @param search_pattern: dictionary where values may be regexps
        @rtype: list of LiveActivityGroup objects
        
        """
        live_activity_groups = []
        """ Make a search pattern with default values set to None"""
        if isinstance(search_pattern, dict):
            search_pattern = SearchPattern(search_pattern)
        else:
            search_pattern = SearchPattern()
            
        live_activity_group_name = search_pattern['live_activity_group_name']
        
        self.log.debug("Filtering activities with pattern=%s" % search_pattern)
        
        for live_activity_group_data in response:
            do_filter = True
            """ Var for holding the state of filtering """
            current_live_activity_group_name = live_activity_group_data['name']
            if live_activity_group_name and do_filter:
                if not Searcher().match(current_live_activity_group_name, live_activity_group_name):
                    do_filter = False
            if do_filter:
                live_activity_groups.append(LiveActivityGroup(live_activity_group_data, self.uri))
        self.log.info("Filtered live_activity_groups and returned %s object(s)" % str(len(live_activity_groups)))
        return live_activity_groups
    
    def _filter_spaces(self, response, search_pattern):
        """
        @summary: Should iterate over response from Master API and filter
        spaces with regards to their name
        @param response: response['data'] from master API
        @param search_pattern: dictionary where values may be regexps
        @rtype: list of Space objects
        
        """
        spaces = []
        """ Make a search pattern with default values set to None"""
        if isinstance(search_pattern, dict):
            search_pattern = SearchPattern(search_pattern)
        else:
            search_pattern = SearchPattern()
            
        space_name = search_pattern['space_name']
        
        self.log.debug("Filtering spaces with pattern=%s" % search_pattern)
        
        for space_data in response:
            do_filter = True
            """ Var for holding the state of filtering """
            current_space_name = space_data['name']
            if space_name and do_filter:
                if not Searcher().match(current_space_name, space_name):
                    do_filter = False
            if do_filter:
                spaces.append(Space(space_data, self.uri))
        self.log.info("Filtered spaces and returned %s object(s)" % str(len(spaces)))
        return spaces

    def _filter_space_controllers(self, response, search_pattern):
        """
        @summary: Should iterate over response from Master API and filter
            space controllers with regards to the given search dictionary
            consisting of name, uuid, mode and state (none of them are 
            mandatory)
        @param response: response['data'] from master API
        @param search_pattern: dictionary where values may be regexps
        @rtype: list of SpaceController objects
        
        """
        space_controllers = []
        """ Make a search pattern with default values set to None"""
        if isinstance(search_pattern, dict):
            search_pattern = SearchPattern(search_pattern)
        else:
            search_pattern = SearchPattern()
            
        space_controller_name = search_pattern['space_controller_name']
        space_controller_uuid = search_pattern['space_controller_uuid']
        space_controller_state = search_pattern['space_controller_state']
        space_controller_mode = search_pattern['space_controller_mode']
        
        self.log.debug("Filtering space controllers with pattern=%s" % search_pattern)
        
        for space_controller_data in response:
            do_filter = True
            current_space_controller_name = space_controller_data['name']
            current_space_controller_uuid = space_controller_data['uuid']
            current_space_controller_mode = space_controller_data['mode']
            current_space_controller_state = space_controller_data['state']
            if space_controller_name and do_filter:
                if not Searcher().match(current_space_controller_name, space_controller_name):
                    do_filter = False
            if space_controller_uuid and do_filter:
                if current_space_controller_uuid != space_controller_uuid:
                    do_filter = False
            if space_controller_mode and do_filter:
                if current_space_controller_mode != space_controller_mode:
                    do_filter = False
            if space_controller_state and do_filter:
                if current_space_controller_state != space_controller_state:
                    do_filter = False
            if do_filter:
                space_controllers.append(SpaceController(space_controller_data, self.uri))
        self.log.info("Filtered space_controllers and returned %s object(s)" % str(len(space_controllers)))
        return space_controllers
    
    def _translate_live_activities_names_to_ids(self, live_activities):
        """
            @param live_activities: list of dictionaries containing following keys:
                { 
                "live_activity_name" : "some_name",
                "space_controller_name" : "some controller name"
                }
            @rtype: list
        """
        live_activity_ids = []
        for la_data in live_activities:
            live_activity = self.get_live_activity(la_data)
            live_activity_ids.append(live_activity.id())
        self.log.info("Translated %s live_activity_names to ids with a result of %s" % (len(live_activity_ids), live_activity_ids) )
        return live_activity_ids
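
    # For illustration (hypothetical data), one matched live activity yields
    # one id:
    #
    #   _translate_live_activities_names_to_ids(
    #       [{"live_activity_name": "SV Master on Node A",
    #         "space_controller_name": "ISCtlDispAScreen00"}])  # -> e.g. ['110']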
Example #52
    def __init__(self,
                 generator: Union[Generator, nn.DataParallel],
                 discriminator: Union[Discriminator, nn.DataParallel],
                 training_dataset: DataLoader,
                 validation_dataset: Dataset,
                 validation_dataset_fid: DataLoader,
                 vgg16: Union[VGG16, nn.DataParallel] = VGG16(),
                 generator_optimizer: torch.optim.Optimizer = None,
                 discriminator_optimizer: torch.optim.Optimizer = None,
                 generator_loss: nn.Module = LSGANGeneratorLoss(),
                 discriminator_loss: nn.Module = LSGANDiscriminatorLoss(),
                 semantic_reconstruction_loss: nn.Module = SemanticReconstructionLoss(),
                 diversity_loss: nn.Module = DiversityLoss(),
                 save_data_path: str = 'saved_data') -> None:
        '''
        Constructor
        :param generator: (nn.Module, nn.DataParallel) Generator network
        :param discriminator: (nn.Module, nn.DataParallel) Discriminator network
        :param training_dataset: (DataLoader) Training dataset
        :param validation_dataset: (Dataset) Validation dataset
        :param validation_dataset_fid: (DataLoader) Validation dataset used for FID and plots
        :param vgg16: (nn.Module, nn.DataParallel) VGG16 module
        :param generator_optimizer: (torch.optim.Optimizer) Optimizer of the generator network
        :param discriminator_optimizer: (torch.optim.Optimizer) Optimizer of the discriminator network
        :param generator_loss: (nn.Module) Generator loss function
        :param discriminator_loss: (nn.Module) Discriminator loss function
        :param semantic_reconstruction_loss: (nn.Module) Semantic reconstruction loss function
        :param diversity_loss: (nn.Module) Diversity loss function
        :param save_data_path: (str) Path to save logs, models and plots to
        '''
        # Save parameters
        self.generator = generator
        self.discriminator = discriminator
        self.training_dataset = training_dataset
        self.validation_dataset = validation_dataset
        self.validation_dataset_fid = validation_dataset_fid
        self.vgg16 = vgg16
        self.generator_optimizer = generator_optimizer
        self.discriminator_optimizer = discriminator_optimizer
        self.generator_loss = generator_loss
        self.discriminator_loss = discriminator_loss
        self.semantic_reconstruction_loss = semantic_reconstruction_loss
        self.diversity_loss = diversity_loss
        self.latent_dimensions = self.generator.module.latent_dimensions \
            if isinstance(self.generator, nn.DataParallel) else self.generator.latent_dimensions
        # Init logger
        self.logger = Logger()
        # Make directories to save logs, plots and models during training
        time_and_date = str(datetime.now())
        self.path_save_models = os.path.join(save_data_path, 'models_' + time_and_date)
        if not os.path.exists(self.path_save_models):
            os.makedirs(self.path_save_models)
        self.path_save_plots = os.path.join(save_data_path, 'plots_' + time_and_date)
        if not os.path.exists(self.path_save_plots):
            os.makedirs(self.path_save_plots)
        self.path_save_metrics = os.path.join(save_data_path, 'metrics_' + time_and_date)
        if not os.path.exists(self.path_save_metrics):
            os.makedirs(self.path_save_metrics)
        # Make indexes for validation plots
        validation_plot_indexes = np.random.choice(range(len(self.validation_dataset_fid.dataset)), 49, replace=False)
        # Plot and save validation images used to plot generated images
        self.validation_images_to_plot, _, self.validation_masks = image_label_list_of_masks_collate_function(
            [self.validation_dataset_fid.dataset[index] for index in validation_plot_indexes])

        torchvision.utils.save_image(misc.normalize_0_1_batch(self.validation_images_to_plot),
                                     os.path.join(self.path_save_plots, 'validation_images.png'), nrow=7)
        # Plot masks
        torchvision.utils.save_image(self.validation_masks[0],
                                     os.path.join(self.path_save_plots, 'validation_masks.png'),
                                     nrow=7)
        # Generate latents for validation
        self.validation_latents = torch.randn(49, self.latent_dimensions, dtype=torch.float32)
        # Log hyperparameter
        self.logger.hyperparameter['generator'] = str(self.generator)
        self.logger.hyperparameter['discriminator'] = str(self.discriminator)
        self.logger.hyperparameter['vgg16'] = str(self.vgg16)
        self.logger.hyperparameter['generator_optimizer'] = str(self.generator_optimizer)
        self.logger.hyperparameter['discriminator_optimizer'] = str(self.discriminator_optimizer)
        self.logger.hyperparameter['generator_loss'] = str(self.generator_loss)
        self.logger.hyperparameter['discriminator_loss'] = str(self.discriminator_loss)
        self.logger.hyperparameter['diversity_loss'] = str(self.diversity_loss)
        self.logger.hyperparameter['semantic_reconstruction_loss'] = str(self.semantic_reconstruction_loss)
Example #53
def check_app(app, force=False):
    '''
    Check application based on app name in Tapioca results
    '''

    dnscacheloaded = False
    largewarned = False

    # Get pcap file location
    if app.endswith('.pcap'):
        pcapfile = app
        if os.path.exists(pcapfile):
            sys.stdout = Logger('%s.%s' % (pcapfile, report_output))
    else:
        pcapfile = os.path.join('results', app, 'tcpdump.pcap')
        if os.path.exists(pcapfile):
            sys.stdout = Logger(os.path.join('results', app, report_output))

    if os.path.exists(pcapfile):

        pcapdir = os.path.dirname(pcapfile)
        dnspkl = os.path.join(pcapdir, '.dnsmap.pkl')

        eprint(color.bright('Checking app %s...' % color.cyan(app)))

        if os.path.exists(dnspkl) and not force:
            eprint('Loading cached DNS info...')
            with open(dnspkl, 'rb') as pklhandle:
                try:
                    net.dnsmap = pickle.load(pklhandle)
                    dnscacheloaded = True
                except Exception:
                    pass

        if not dnscacheloaded:
            if os.path.getsize(pcapfile) > 100000000:
                # Over 100MB
                eprint(
                    color.bright(
                        color.yellow(
                            'Warning: capture size is large. Please be patient.'
                        )))
                largewarned = True

            # Get captured DNS info for IP addresses
            eprint('Getting DNS info...')
            dnspackets = pyshark.FileCapture(pcapfile,
                                             keep_packets=False,
                                             display_filter='dns')
            dnspackets.apply_on_packets(net.get_dns_info, timeout=1000)
            with open(dnspkl, 'wb') as pklhandle:
                pickle.dump(net.dnsmap,
                            pklhandle,
                            protocol=pickle.HIGHEST_PROTOCOL)

        if os.path.getsize(pcapfile) > 100000000 and not largewarned:
            # Over 100MB
            eprint(
                color.bright(
                    color.yellow(
                        'Warning: capture size is large. Please be patient.')))
            largewarned = True

        sslpackets = pyshark.FileCapture(pcapfile,
                                         keep_packets=False,
                                         display_filter='ssl')

        eprint('Getting SSL info from capture...')
        # get_indexed_ssl_info(cap)
        sslpackets.apply_on_packets(net.get_ssl_info, timeout=1000)

        dtlspackets = pyshark.FileCapture(pcapfile,
                                          keep_packets=False,
                                          display_filter='dtls')

        eprint('Getting DTLS info from capture...')
        dtlspackets.apply_on_packets(net.get_dtls_info, timeout=1000)

        # Print report
        generate_report(app, pcapfile=pcapfile)

        # Reset globals
        net.clear()
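
# Usage sketch (hypothetical paths): check_app accepts either a raw pcap path
# or an app name whose capture lives under results/<app>/tcpdump.pcap;
# force=True rebuilds the cached DNS map instead of loading .dnsmap.pkl:
#
#   check_app('capture.pcap')
#   check_app('com.example.app', force=True)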
# Orphaned mixin constructor; the class wrapper and base class (object)
# are assumed for completeness.
class Shutdownable(object):
    def __init__(self):
        self.log = Logger().get_logger()
        super(Shutdownable, self).__init__()
Example #55
class Master(Communicable):
    """
    This is the main class with all the logic needed for
    high level stuff. You will typically use instance of
    Master for all your scripts.
    """
    def __init__(self,
                 host='lg-head',
                 port='8080',
                 prefix='/interactivespaces',
                 logfile_path='ispaces-client.log'):
        """
        :param host: default value is lg-head
        :param port: default value is 8080
        :param prefix: default value is /interactivespaces
        :todo: refactor filter_* methods because they're not DRY
        """
        self.host, self.port, self.prefix = host, port, prefix
        self.log = Logger(logfile_path=logfile_path).get_logger()
        self.uri = "http://%s:%s%s" % (self.host, self.port, prefix)
        super(Master, self).__init__()

    def get_activities(self, search_pattern=None):
        """
        Retrieves a list of Activity objects

        :rtype: list

        :param search_pattern: dictionary of regexps used for searching through Activities

        example regexp dict::

            {\
            "activity_name" : "regexp"\
            "activity_version" : "regexp"\
            }

        every search_pattern dictionary key may be blank/null
        """
        url = self._compose_url(class_name='Master',
                                method_name='get_activities',
                                uri=self.uri)
        self.log.info("Trying to retrieve url=%s" % url)
        response = self._api_get_json(url)
        self.log.info('Got response for "get_activities" %s ' % str(response))
        self.log.info('get_activities returned %s objects' %
                      str(len(response)))
        activities = self._filter_activities(response, search_pattern)
        return activities

    def get_activity(self, search_pattern=None):
        """
        Retrieves a single Activity object

        :rtype: Activity or None

        :param search_pattern: dictionary of regexps used for searching through Activities

        example regexp dict::

            {\
            "activity_name" : "regexp",\
            "activity_version" : "regexp"\
            }

        every search_pattern dictionary key may be blank/null
        """
        url = self._compose_url(class_name='Master',
                                method_name='get_activities',
                                uri=self.uri)
        self.log.info("Trying to retrieve url=%s" % url)
        response = self._api_get_json(url)
        self.log.info('Got response for "get_activities" %s ' % str(response))
        self.log.info('get_activities returned %s objects' %
                      str(len(response)))
        activity = self._filter_activities(response, search_pattern)

        return self._validate_single_getter_results(activity, Activity,
                                                    ActivityNotFoundException)

    def get_live_activities(self, search_pattern=None):
        """
        Retrieves a list of LiveActivity objects

        :rtype: list

        :param search_pattern: dictionary of regexps used for searching through LiveActivity names

        example regexp dict::

            {\
            "live_activity_name" : "regexp",\
            "space_controller_name" : "regexp"\
            }

        - each search_pattern dictionary key may be blank/null
        """
        url = self._compose_url(class_name='Master',
                                method_name='get_live_activities',
                                uri=self.uri)
        self.log.info("Trying to retrieve url=%s" % url)
        response = self._api_get_json(url)
        self.log.debug('Got response for "get_live_activities" %s ' %
                       str(response))
        self.log.info('get_live_activities returned %s objects' %
                      str(len(response)))
        live_activities = self._filter_live_activities(response,
                                                       search_pattern)
        return live_activities
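
    # Usage sketch, assuming `master` is a Master instance; both keys take the
    # regexps documented above and either may be omitted:
    #
    #   live_activities = master.get_live_activities({
    #       "live_activity_name": "GE ViewSync Master on Node A",
    #       "space_controller_name": "ISCtlDispAScreen00"
    #   })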

    def get_live_activity(self, search_pattern=None):
        """
        Retrieves a single LiveActivity object

        :rtype: LiveActivity or None

        :param search_pattern: dictionary of regexps used for searching through LiveActivity names

        example regexp dict::

            {\
            "live_activity_name" : "GE ViewSync Master on Node A",\
            "space_controller_name" : "ISCtlDispAScreen00"\
            }

        each search_pattern dictionary key may be blank/null
        """
        url = self._compose_url(class_name='Master',
                                method_name='get_live_activities',
                                uri=self.uri)
        self.log.info("Trying to retrieve url=%s" % url)
        response = self._api_get_json(url)
        self.log.debug('Got response for "get_live_activities" %s ' %
                       str(response))
        self.log.info('get_live_activities returned %s objects' %
                      str(len(response)))
        live_activity = self._filter_live_activities(response, search_pattern)

        return self._validate_single_getter_results(
            live_activity, LiveActivity, LiveActivityNotFoundException)

    def get_live_activity_groups(self, search_pattern=None):
        """
        Retrieves a list of live activity groups.

        :rtype: list

        :param search_pattern: dictionary of regexps used for searching through LiveActivity names

        example regexp dict::

            {\
            "live_activity_group_name" : "regexp"\
            }

        """
        url = self._compose_url(class_name='Master',
                                method_name='get_live_activity_groups',
                                uri=self.uri)
        self.log.info("Trying to retrieve url=%s" % url)
        response = self._api_get_json(url)
        self.log.debug('Got response for "get_live_activity_groups" %s ' %
                       str(response))
        self.log.info(
            'get_live_activity_groups returned %s objects - filtering with %s'
            % (str(len(response)), search_pattern))
        live_activity_groups = self._filter_live_activity_groups(
            response, search_pattern)
        return live_activity_groups

    def get_live_activity_group(self, search_pattern=None):
        """
        Retrieves a single live activity group.

        :rtype: LiveActivityGroup or None

        :param search_pattern: dictionary of regexps used for searching through LiveActivity names

        example regexp dict::

            {\
            "live_activity_group_name" : "regexp"\
            }

        """
        url = self._compose_url(class_name='Master',
                                method_name='get_live_activity_groups',
                                uri=self.uri)
        self.log.info("Trying to retrieve url=%s" % url)
        response = self._api_get_json(url)
        self.log.debug('Got response for "get_live_activity_groups" %s ' %
                       str(response))
        self.log.info('get_live_activity_groups returned %s objects' %
                      str(len(response)))
        live_activity_group = self._filter_live_activity_groups(
            response, search_pattern)

        return self._validate_single_getter_results(
            live_activity_group, LiveActivityGroup,
            LiveActivityGroupNotFoundException)

    def get_spaces(self, search_pattern=None):
        """
        Retrieves a list of spaces.

        :rtype: list

        :param search_pattern: dictionary containing space name regexp

        example regexp dict::

            {"space_name" : "regexp"}

        """
        url = self._compose_url(class_name='Master',
                                method_name='get_spaces',
                                uri=self.uri)
        self.log.info("Trying to retrieve url=%s" % url)
        response = self._api_get_json(url)
        self.log.debug('Got response for "get_spaces" %s ' % str(response))
        spaces = self._filter_spaces(response, search_pattern)
        return spaces

    def get_space(self, search_pattern=None):
        """
        Retrieves a Space

        :rtype: Space or None

        :param search_pattern: dictionary containing space name regexp

        example regexp dict::

            {"space_name" : "regexp"}

        """
        url = self._compose_url(class_name='Master',
                                method_name='get_spaces',
                                uri=self.uri)
        self.log.info("Trying to retrieve url=%s" % url)
        response = self._api_get_json(url)
        self.log.debug('Got response for "get_spaces" %s ' % str(response))
        space = self._filter_spaces(response, search_pattern)

        return self._validate_single_getter_results(space, Space,
                                                    SpaceNotFoundException)

    def get_space_controllers(self, search_pattern=None):
        """
        Retrieves a list of space controllers.

        :rtype: list

        :param search_pattern: dictionary containing regexps and strings

        example regexp dict::

            {\
            "space_controller_state" : "STRING",\
            "space_controller_mode" : "STRING",\
            "space_controller_name" : "regexp",\
            "space_controller_uuid" : "STRING"\
            }

        """
        url = self._compose_url(class_name='Master',
                                method_name='get_space_controllers',
                                uri=self.uri)
        self.log.info("Trying to retrieve url=%s" % url)
        response = self._api_get_json(url)
        self.log.debug('Got response for "get_controllers" %s ' %
                       str(response))
        space_controllers = self._filter_space_controllers(
            response, search_pattern)
        return space_controllers

    def get_space_controller(self, search_pattern=None):
        """
        Retrieves a single space controller.

        :rtype: SpaceController or None

        :param search_pattern: dictionary containing regexps and strings

        example regexp dict::

            {\
            "space_controller_state" : "STRING",\
            "space_controller_mode" : "STRING",\
            "space_controller_name" : "regexp",\
            "space_controller_uuid" : "STRING"\
            }

        """
        url = self._compose_url(class_name='Master',
                                method_name='get_space_controllers',
                                uri=self.uri)
        self.log.info("Trying to retrieve url=%s" % url)
        response = self._api_get_json(url)
        self.log.debug('Got response for "get_controllers" %s ' %
                       str(response))
        space_controller = self._filter_space_controllers(
            response, search_pattern)
        return self._validate_single_getter_results(
            space_controller, SpaceController, ControllerNotFoundException)

    def get_named_scripts(self, pattern=None):
        """Retrieves a list of named scripts."""
        raise NotImplementedError

    def new_live_activity(self, constructor_args):
        """
        Creates a new live activity and returns it
        :param constructor_args: dictionary containing all of below keys::

            {"live_activity_name": "string containing name of a new live activity (mandatory)",\
            "live_activity_description" : "string containing description",\
            "activity_name" : "string containing activity name",\
            "space_controller_name" : "string containing controller name"}

        :rtype: LiveActivity
        """

        unpacked_arguments = {}
        unpacked_arguments['activityId'] = self.get_activity({
            "activity_name":
            constructor_args['activity_name']
        }).id()
        unpacked_arguments['controllerId'] = self.get_space_controller({
            "space_controller_name":
            constructor_args['space_controller_name']
        }).id()
        unpacked_arguments['liveActivity.description'] = constructor_args[
            'live_activity_description']
        unpacked_arguments['liveActivity.name'] = constructor_args[
            'live_activity_name']
        unpacked_arguments['_eventId_save'] = 'Save'

        if not self._api_object_exists(LiveActivity, constructor_args,
                                       self.get_live_activity):
            activity = LiveActivity().new(self.uri, unpacked_arguments)
            self.log.info("Master:new_live_activity returned activity:%s" %
                          activity)
            return activity
        else:
            return []

    def new_activity(self, constructor_args):
        """
        Creates a new activity and returns it
        :param constructor_args: dictionary containing all of below keys::

            {\
            "zip_file_handler": "zipfile object (mandatory)",\
            "activity_name" : "some name",\
            "activity_version": "some version"\
            }

        :rtype: Activity or False
        """
        self.log.info("Going to create new activity with arguments: %s" %
                      constructor_args)

        if not self._api_object_exists(Activity, constructor_args,
                                       self.get_activity):
            activity = Activity().new(self.uri, constructor_args)
            self.log.info("Master:new_activity returned activity:%s" %
                          activity)
            return activity
        else:
            return []

    def new_space_controller(self, constructor_args):
        """
        Creates new controller
        :param constructor_args: dictionary containing all of below keys::

            {\
            "space_controller_name" : "mandatory string",\
            "space_controller_description" : "non mandatory string",\
            "space_controller_host_id" : "mandatory string"\
            }

        """
        if not self._api_object_exists(SpaceController, constructor_args,
                                       self.get_space_controller):
            space_controller = SpaceController().new(self.uri,
                                                     constructor_args)
            self.log.info("Master:new_space_controller:%s" % space_controller)
            return space_controller
        else:
            return []

    def new_live_activity_group(self, constructor_args):
        """
        Creates a new live activity group.
        :param constructor_args: dictionary with following structure::

            {\
            "live_activity_group_name" : "example.py live_activity_group_name",\
            "live_activity_group_description" : "created by example.py",\
            "live_activities" : [{"live_activity_name" : "SV Master on Node A",\
            "space_controller_name" : "ISCtlDispAScreen00"},\
            {"live_activity_name" : "SV Slave 01 on Node A",\
            "space_controller_name" : "ISCtlDispAScreen00"}]\
            }

        """
        self.log.info(
            "Live activities that will comprise new live activity group: %s" %
            constructor_args['live_activities'])
        live_activity_ids = self._translate_live_activities_names_to_ids(
            constructor_args['live_activities'])
        unpacked_arguments = {}
        unpacked_arguments['liveActivityGroup.name'] = constructor_args[
            'live_activity_group_name']
        unpacked_arguments['liveActivityGroup.description'] = constructor_args[
            'live_activity_group_description']
        unpacked_arguments['_eventId_save'] = 'Save'
        unpacked_arguments['liveActivityIds'] = live_activity_ids

        if not self._api_object_exists(LiveActivityGroup, constructor_args,
                                       self.get_live_activity_group):
            live_activity_group = LiveActivityGroup().new(
                self.uri, unpacked_arguments)
            self.log.info("Master:new_live_activity_group:%s" %
                          live_activity_group)
            return live_activity_group
        else:
            return []

    def new_space(self, constructor_args):
        """
        Creates a new Space.
        :param constructor_args: dictionary with following structure::

            {\
            "space_name" : "example.py space_name",\
            "space_description" : "created by example.py",\
            "live_activity_groups" : [{"live_activity_group_name" : "Media Services"}]\
            }

        """
        live_activity_group_ids = self._translate_live_activity_groups_names_to_ids(
            constructor_args['live_activity_groups'])
        unpacked_arguments = {}
        unpacked_arguments['space.name'] = constructor_args['space_name']
        unpacked_arguments['space.description'] = constructor_args[
            'space_description']
        unpacked_arguments['_eventId_save'] = 'Save'
        unpacked_arguments['liveActivityGroupIds'] = live_activity_group_ids
        if not self._api_object_exists(Space, constructor_args,
                                       self.get_space):
            space = Space().new(self.uri, unpacked_arguments)
            self.log.info("Master:new_space:%s" % space)
            return space
        else:
            return []
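
    # Usage sketch: constructor_args as documented above; "Media Services" is
    # this document's own example group name:
    #
    #   space = master.new_space({
    #       "space_name": "example.py space_name",
    #       "space_description": "created by example.py",
    #       "live_activity_groups": [{"live_activity_group_name": "Media Services"}]
    #   })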

    def new_named_script(self,
                         name,
                         description,
                         language,
                         content,
                         scheduled=None):
        """Creates a new named script."""
        raise NotImplementedError

    """ Private methods below """

    def _translate_live_activity_groups_names_to_ids(self,
                                                     live_activity_groups):
        """
        Converts live activity groups dicts to list of ids

        :param live_activities: list of dictionaries containing following keys::

            {\
            "live_activity_group_name" : "some_name",\
            }

        :rtype: list
        """
        live_activity_groups_ids = []
        for lag_data in live_activity_groups:
            live_activity_group = self.get_live_activity_group(lag_data)
            live_activity_groups_ids.append(live_activity_group.id())
        self.log.info(
            "Translated %s live_activity_groups_names to ids with a result of %s"
            % (len(live_activity_groups_ids), live_activity_groups_ids))
        return live_activity_groups_ids

    def _translate_live_activities_names_to_ids(self, live_activities):
        """
        Converts live activities dicts to list of ids

        :param live_activities: list of dictionaries containing following keys::

            {\
            "live_activity_name" : "some_name",\
            "space_controller_name" : "some controller name"\
            }

        :rtype: list
        """
        live_activity_ids = []
        for la_data in live_activities:
            self.log.info("Getting Live Activity for data: %s" % la_data)
            live_activity = self.get_live_activity(la_data)
            live_activity_ids.append(live_activity.id())
        self.log.info(
            "Translated %s live_activity_names to ids with a result of %s" %
            (len(live_activity_ids), live_activity_ids))
        return live_activity_ids

    """ Private methods below """

    def _api_object_exists(self, object_type, constructor_args, getter_method):
        self.log.info(
            "Checking whether object %s with following attributes %s exists in the API"
            % (object_type, constructor_args))

        api_object = getter_method(constructor_args)

        if api_object:
            self.log.warn("Object already exists: %s" % api_object)
            return True
        else:
            self.log.info("Object does not exist yet")
            return False

    def _validate_single_getter_results(self, response, expected_type,
                                        exception):
        """
        Validates response from the API. Runs type and other simple checks.

        :param response: list of objects returned from api

        :param expected_type: expected type of the object

        :param exception: exception to throw if response is invalid

        :rtype: interactivespaces object
        """

        if len(response) > 1:
            raise exception("API query returned more than one row")
        elif len(response) == 0:
            return None
        elif isinstance(response[0], expected_type):
            api_object = response[0]
            api_object.fetch()
            self.log.info("Getter method returned Object:%s" % str(api_object))
            return api_object
        else:
            raise exception("API query returned an object of unexpected type")

# Orphaned mixin constructor; the class wrapper and base class (object)
# are assumed for completeness.
class Startupable(object):
    def __init__(self):
        self.log = Logger().get_logger()
        super(Startupable, self).__init__()
class Activity(Fetchable, Deletable):
    """ 
        @summary: Should be responsible for managing a single activity
    """
    def __init__(self, data_hash=None, uri=None, activity_archive_uri=None, name=None):
        self.log = Logger().get_logger()
        super(Activity, self).__init__()
        
        if data_hash is None and uri is None:
            self.log.info("No data provided - assuming creation of new Activity")
        elif data_hash is not None and uri is not None:
            self.data_hash = data_hash
            self.uri = uri
            self.absolute_url = self._get_absolute_url()
            self.log.info("Instantiated Activity object with url=%s" % self.absolute_url)
        
    def __repr__(self):
        return str(self.data_hash)
    
    def new(self, uri, constructor_args):
        """
            @summary: method to keep naming convention of .new() methods
        """
        
        new_activity = self.upload(uri, constructor_args['zip_file_handler'])
        return new_activity
    
    def upload(self, uri, zip_file_handler):
        """ 
            @summary: Should make a deployment of the activity with following steps:
                - receive handler to a local zipfile
                - upload it to the API
                - save
                - set instance variables for the object
            @return: new Activity object or False
            @param uri: string
            @param zip_file_handler: 'file' class instance
            @rtype: new Activity object or False
        """
        self.log.info("Uploading new Activity from file %s" % zip_file_handler)
        route = Path().get_route_for('Activity', 'upload')
        url = "%s%s" % (uri, route)
        payload = {"_eventId_save" : "Save"}
        request_response = self._api_post_json(url, payload, zip_file_handler)
        
        if request_response.url:
            self.absolute_url = request_response.url.replace("view.html", "view.json")
            self.fetch()
            self.log.info("Created new Activity with url=%s, data_hash is now %s" % (self.absolute_url, self.data_hash))
            return self
        else:
            self.log.info("Created new Activity %s but returned False" % self)
            return False
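
    # Usage sketch (hypothetical path), assuming a local activity zip; new()
    # delegates to upload(), which posts the file and fetches the new object:
    #
    #   with open('/tmp/activity.zip', 'rb') as zip_file_handler:
    #       activity = Activity().new('http://lg-head:8080/interactivespaces',
    #                                 {'zip_file_handler': zip_file_handler})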
    
        
    def to_json(self):
        """ 
            Should return selected attributes in JSON form defined by the template
        """
        self.serializer = ActivitySerializer(self.data_hash)
        return self.serializer.to_json()
    
    def fetch(self):
        """ Should retrieve data from Master API"""
        self.data_hash = self._refresh_object(self.absolute_url)
    
    def name(self):
        """ Should return live activity name"""
        return self.data_hash['activity']['name']  
    
    def identifying_name(self):
        """ Should return identifying name """
        return self.data_hash['activity']['identifyingName']
    
    def version(self):
        """ Should return Activity version """
        return self.data_hash['activity']['version']
    
    def id(self):
        """ Should return Activity id """
        return self.data_hash['activity']['id']
  
    def description(self):
        """ Should return Activity description """
        return self.data_hash['activity']['description']
    
    """ Private methods below"""
    
    def _get_absolute_url(self):
        activity_id = self.data_hash['id']
        url = "%s/activity/%s/view.json" % (self.uri, activity_id)
        return url  
# Orphaned mixin constructor; the class wrapper and base class (object)
# are assumed for completeness.
class Configable(object):
    def __init__(self):
        self.log = Logger().get_logger()
        super(Configable, self).__init__()
 def __init__(self,
              generator: Union[Generator, nn.DataParallel],
              discriminator: Union[Discriminator, nn.DataParallel],
              training_dataset: DataLoader,
              validation_dataset: DataLoader,
              vgg16: Union[VGG16, nn.DataParallel] = VGG16(),
              generator_optimizer: torch.optim.Optimizer = None,
              discriminator_optimizer: torch.optim.Optimizer = None,
              generator_loss: nn.Module = LSGANGeneratorLoss(),
              discriminator_loss: nn.Module = LSGANDiscriminatorLoss(),
              semantic_reconstruction_loss: nn.Module = SemanticReconstructionLoss(),
              diversity_loss: nn.Module = DiversityLoss(),
              save_data_path: str = 'saved_data') -> None:
     '''
     Constructor
     :param generator: (nn.Module, nn.DataParallel) Generator network
     :param discriminator: (nn.Module, nn.DataParallel) Discriminator network
     :param training_dataset: (DataLoader) Training dataset
     :param validation_dataset: (DataLoader) Validation dataset, also used for FID
     :param vgg16: (nn.Module, nn.DataParallel) VGG16 module
     :param generator_optimizer: (torch.optim.Optimizer) Optimizer of the generator network
     :param discriminator_optimizer: (torch.optim.Optimizer) Optimizer of the discriminator network
     :param generator_loss: (nn.Module) Generator loss function
     :param discriminator_loss: (nn.Module) Discriminator loss function
     :param semantic_reconstruction_loss: (nn.Module) Semantic reconstruction loss function
     :param diversity_loss: (nn.Module) Diversity loss function
     :param save_data_path: (str) Path to save logs, models and plots to
     '''
     # Save parameters
     self.generator = generator
     self.discriminator = discriminator
     self.training_dataset = training_dataset
     self.validation_dataset_fid = validation_dataset
     self.vgg16 = vgg16
     self.generator_optimizer = generator_optimizer
     self.discriminator_optimizer = discriminator_optimizer
     self.generator_loss = generator_loss
     self.discriminator_loss = discriminator_loss
     self.semantic_reconstruction_loss = semantic_reconstruction_loss
     self.diversity_loss = diversity_loss
     self.latent_dimensions = self.generator.module.latent_dimensions \
         if isinstance(self.generator, nn.DataParallel) else self.generator.latent_dimensions
     # Calc no gradients for weights of vgg16
     for parameter in self.vgg16.parameters():
         parameter.requires_grad = False
     # Init logger
     self.logger = Logger()
     # Make directories to save logs, plots and models during training
     time_and_date = str(datetime.now())
     self.path_save_models = os.path.join(save_data_path, 'models_' + time_and_date)
     if not os.path.exists(self.path_save_models):
         os.makedirs(self.path_save_models)
     self.path_save_plots = os.path.join(save_data_path, 'plots_' + time_and_date)
     if not os.path.exists(self.path_save_plots):
         os.makedirs(self.path_save_plots)
     self.path_save_metrics = os.path.join(save_data_path, 'metrics_' + time_and_date)
     if not os.path.exists(self.path_save_metrics):
         os.makedirs(self.path_save_metrics)
     # Log hyperparameter
     self.logger.hyperparameter['generator'] = str(self.generator)
     self.logger.hyperparameter['discriminator'] = str(self.discriminator)
     self.logger.hyperparameter['vgg16'] = str(self.vgg16)
     self.logger.hyperparameter['generator_optimizer'] = str(self.generator_optimizer)
     self.logger.hyperparameter['discriminator_optimizer'] = str(self.discriminator_optimizer)
     self.logger.hyperparameter['generator_loss'] = str(self.generator_loss)
     self.logger.hyperparameter['discriminator_loss'] = str(self.discriminator_loss)
     self.logger.hyperparameter['diversity_loss'] = str(self.diversity_loss)
     self.logger.hyperparameter['semantic_reconstruction_loss'] = str(self.semantic_reconstruction_loss)
Example #60
class SpaceController(Fetchable, Statusable, Deletable, Shutdownable,
                      Connectable):
    """
         Should be responsible for managing a space controller
    """
    def __init__(self, data_hash=None, uri=None):
        self.log = Logger().get_logger()
        self.class_name = self.__class__.__name__
        super(SpaceController, self).__init__()
        if data_hash is None and uri is None:
            self.log.info(
                "No data provided - assuming creation of new SpaceController")
        else:
            self.data_hash = data_hash
            self.uri = uri
            self.absolute_url = self._get_absolute_url()
            self.log.info("Instantiated Activity object with url=%s" %
                          self.absolute_url)

    def __repr__(self):
        return str(self.data_hash)

    def new(self, uri, constructor_args):
        """
        Used to create new space controller through API and set the "uri" so that we
        can operate on this instance of SpaceController right away after .new() returns True

        :param constructor_args: dictionary with following structure::

            {"space_controller_name" : "mandatory string",\
            "space_controller_description" : "non mandatory string",\
            "space_controller_host_id" : "mandatory string"}

        :param uri: "http://some_server/prefix" (passed by master)

        :rtype: new SpaceController object or False

        """

        unpacked_arguments = {}
        unpacked_arguments['name'] = constructor_args['space_controller_name']
        unpacked_arguments['description'] = constructor_args[
            'space_controller_description']
        unpacked_arguments['hostId'] = constructor_args[
            'space_controller_host_id']
        unpacked_arguments['_eventId_save'] = 'Save'

        self.log.info("Creating new SpaceController with arguments: %s" %
                      unpacked_arguments)
        route = Path().get_route_for('SpaceController', 'new')
        url = "%s%s" % (uri, route)
        request_response = self._api_post_json(url, unpacked_arguments)

        if request_response.url:
            self.absolute_url = request_response.url.replace(
                "view.html", "view.json")
            self.fetch()
            self.log.info(
                "Created new SpaceController with url=%s, data_hash is now %s"
                % (self.absolute_url, self.data_hash))
            return self
        else:
            self.log.info("Created new SpaceController %s but returned False" %
                          self)
            return False
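
    # Usage sketch: the constructor_args structure from the docstring above
    # (the host id below is hypothetical):
    #
    #   controller = SpaceController().new('http://lg-head:8080/interactivespaces',
    #                                      {'space_controller_name': 'ISCtlDispAScreen00',
    #                                       'space_controller_description': 'created by example.py',
    #                                       'space_controller_host_id': 'lg-a'})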

    def to_json(self):
        """
        Should return selected attributes in JSON form defined by the template
        """
        self.serializer = SpaceControllerSerializer(self.data_hash)
        return self.serializer.to_json()

    def id(self):
        return self.data_hash['id']

    def uuid(self):
        return self.data_hash['uuid']

    def name(self):
        """  Should return space controller name"""
        return self.data_hash['name']

    def description(self):
        """  Should return space controller description """
        return self.data_hash['description']

    def mode(self):
        """  Should return status of the controller  """
        return self.data_hash['mode']

    def state(self):
        """ Should return state of the controller """
        return self.data_hash['state']

    """ Private methods below """

    def _get_absolute_url(self):
        space_controller_id = self.data_hash['id']
        url = "%s/spacecontroller/%s/view.json" % (self.uri, space_controller_id)
        return url