def queryDpm(args, config):
    """ Download the policies from the DB and execute the related B2SAFE workflow

    @type  args:       Namespace object
    @param args:       The input parameters
    @type  config:     ConfigLoader object
    @param config:     The loaded configuration parameters
    """
    debug = args.verbose
    # manage the checksum verification
    chk_veri = False
    chk_verify = config.SectionMap('Integrity')['checksum_verify']
    if chk_verify.lower() == 'true':
        chk_veri = True
    # load the policy schema path/url
    policySchemaUrl = None
    policySchema = config.SectionMap('Schemas')['policies']
    if policySchema.startswith('http://'):
        policySchemaUrl = policySchema
    # get the list of policies matching the input criteria
    conn = ServerConnector(args.config, args.test, "PolicyManager", debug)
    policy_files = getInfoPolicies(args, logger)
    if policy_files is not None:
        for url in policy_files:
            logger.info('Processing policy: %s', url)
            # load the XML policy schema
            pParser = PolicyParser(None, args.test, 'PolicyManager', debug)
            policySchemaDoc = pParser.parseXmlSchema([policySchemaUrl], 
                                                     [policySchema])
            if chk_veri:
                # get the id from the policy on the DB
                policyId = conn.getDocumentByUrl(url, '//*:policy/@uniqueid/data()')
                # get the status doc from the DB for the checksum
                status, doc = conn.getStatus(policyId)
                # parse the policy and validate against schema and checksum
                errMsg = pParser.parseFromUrl(url, policySchemaDoc, conn, 
                         status[staNs+':policy'][staNs+':checksum']['@method'],
                         status[staNs+':policy'][staNs+':checksum']['#text'])
            else:
                # parse the policy and validate against schema
                errMsg = pParser.parseFromUrl(url, policySchemaDoc, conn)
            if errMsg is not None:
                print 'ERROR: ' + errMsg
                exit(1)            
            if pParser.policy is not None:
                # load user mapping
                mapFilename = config.SectionMap('AccountMapping')['file']
                usermap = loadUserMap(mapFilename)
                # schedule the policy
                runPolicy(pParser.policy, usermap, args.test, 'PolicyManager',
                          debug)
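# A minimal usage sketch, not taken from the source: the attribute accesses in
# queryDpm and getInfoPolicies suggest an argparse-style namespace with at least
# the fields below; the file name 'dpm.conf' is a placeholder.
from argparse import Namespace

args = Namespace(config='dpm.conf', test=False, verbose=True,
                 subcmd='exec', id=None, filter=None,
                 start=None, end=None, ext=False, all=False)
config = ConfigLoader(args.config)   # ConfigLoader from the surrounding module
queryDpm(args, config)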
def updatePolicyStatus(args):
    """
    Update the status of all the policies in the central DB

    @type  args: Namespace object
    @param args: The input parameters
    @rtype:      string
    @return:     The response of the update operation on the DB
    """
    response = None
    debug = args.verbose
    config = ConfigLoader(args.config)
    setLoggingSystem(config, debug)
    logger.info('Start to update the status of the policies')
    loggerName = 'PolicyManager'
    # update the status related to policies
    conn = ServerConnector(args.config, args.test, loggerName, debug)
    if args.id:
        response = statusManagement(args.suspended, args.rejected, args.show, 
                                    args.id, debug, logger, conn)
        print ('Updated the status of the policy {}, response: {}'
              ).format(args.id, response)
    else:
        # get the list of the policies to be updated
        policies = conn.listPolicies()
        if policies is not None:
            for url in policies:
                logger.info('Processing policy: %s', url)
                # get the policy id from the DB
                policyId = conn.getDocumentByUrl(url, '//*:policy/@uniqueid/data()')
                if policyId is not None:
                    response = statusManagement(args.suspended, args.rejected, 
                                                args.show, policyId, debug, 
                                                logger, conn)
                    print ('Updated the status of the policy {}, response: {}'
                          ).format(policyId, response)
                else:
                    logger.info('policy id not found')
                    print 'Policy id not found for policy {}'.format(url)
Example #4
def updatePolicyStatus(args):
    """
    Update the status of all the policies in the central DB

    @type  args: Namespace object
    @param args: The input parameters
    @rtype:      string
    @return:     The response of the update operation on the DB
    """
    response = None
    debug = args.verbose
    config = ConfigLoader(args.config)
    setLoggingSystem(config, debug)
    st_pre = config.SectionMap('DpmServer')['status_prefix']
    loggerName = 'DBCommander'
    logger.info('Start to update the status of the policies')
    # update the status related to policies
    conn = ServerConnector(args.config, args.test, loggerName, debug)

    policies = conn.listPolicies()
    if policies is not None:
        for url in policies:
            logger.info('Search status of policy: ' + url)
            # get the policy id from the DB
            policyId = conn.getDocumentByUrl(url, '//*:policy/@uniqueid/data()')
            states = conn.getStates(policyId)
            if states is None:
                print 'Status of policy {} not found'.format(policyId)
            else:
                community_name = (url.rsplit('/', 2)[-2]).split('_',2)[2]
                # get the status doc from the DB
                dbname = st_pre + community_name
                response = conn.updateStatus(policyId, states, dbname)
                print ('Status of policy {}, update response: {}'
                      ).format(policyId, response)
    else:
        print 'No policies to update'
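# A quick illustration of the community_name extraction above, using a purely
# hypothetical policy URL; the parent collection is assumed to be named
# '<db>_<prefix>_<community>' and 'status_' stands in for the configured status_prefix.
url = 'http://dpm.example.org/exist/rest/db/policies_eudat_myCommunity/policy_42.xml'
collection = url.rsplit('/', 2)[-2]            # 'policies_eudat_myCommunity'
community_name = collection.split('_', 2)[2]   # 'myCommunity'
dbname = 'status_' + community_name            # 'status_myCommunity'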
Example #5
def addPolicyStatus(args, mylogger=None):
    """ Check if a status document exist for each policy 
        and, if not, it creates a new one.

    @type  args:       list of objects
    @param args:       The list of input parameters
                       considered to search for policies 
    @type logger:      loggin.logger object
    @param logger:     the logger
    """
    debug = args.verbose
    config = ConfigLoader(args.config)
    st_pre = config.SectionMap('DpmServer')['status_prefix']
    if mylogger is None:
        logger = setLoggingSystem(config, debug)
    else:
        logger = mylogger

    logger.info('Start to list the policies')
    conn = ServerConnector(args.config, args.test, "DBCommander", debug)

    attributes = {}
    # loading the default from config
    if len(config.SectionMap('PolicyFilters')) > 0:
        logger.debug('Loading the filter parameters from the config file')
        for par in config.SectionMap('PolicyFilters'):
            attributes[par] = config.SectionMap('PolicyFilters')[par]
    # loading the filter parameters from the input
    if args.filter:
        logger.debug('Loading the filter parameters from the input')
        pairs = args.filter.split(',')
        for pair in pairs:
            try:
                key, value = pair.split(':')
            except ValueError:
                print 'Wrong filter value [{}], expected key:value'.format(pair)
                sys.exit(1)
            attributes[key] = value
    # filter policies according to input time interval
    if args.start is not None:
        sdate = datetime.strptime(args.start, "%d-%m-%Y %H:%M")
        start = int(time.mktime(sdate.timetuple()))
        policies = conn.listPolicies(attributes, start)
        if args.end is not None:
            edate = datetime.strptime(args.end, "%d-%m-%Y %H:%M")
            end = int(time.mktime(edate.timetuple()))
            policies = conn.listPolicies(attributes, start, end)
    elif args.end is not None:
        edate = datetime.strptime(args.end, "%d-%m-%Y %H:%M")
        end = int(time.mktime(edate.timetuple()))
        policies = conn.listPolicies(attributes, 0, end)
    else:
        policies = conn.listPolicies(attributes)

    # listing of the policies matching the criteria of the dict "attributes"
    if policies is not None:
        for url in policies:
            logger.debug('Checking status for policy with URL: ' + url)
            community_name = (url.rsplit('/', 2)[-2]).split('_',2)[2]
            uniqueid = conn.getDocumentByUrl(url, "//*:policy/@uniqueid/data()")
            # get the status doc from the DB
            dbname = st_pre + community_name
            status, doc = conn.getStatus(uniqueid, dbname)
            if status is None:
                logger.debug('Status doc not found, creating a new one')
                response = conn.createStatus(uniqueid, 'NEW', dbname)
                print 'Added new status for policy {}, response: {}'.format(
                                                                     uniqueid,
                                                                     response)
            else:
                logger.debug('Status doc already available')
                print ('Status already available, nothing to add for policy {}'
                      ).format(uniqueid)
    else:
        print 'Policies not found'
    
    return policies
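# A minimal invocation sketch, not from the source: it illustrates the filter
# syntax ('key:value' pairs separated by commas) and the '%d-%m-%Y %H:%M' date
# format that addPolicyStatus expects; all names and values below are placeholders.
from argparse import Namespace

args = Namespace(config='dpm.conf', test=False, verbose=True,
                 filter='community:eudat,author:someUser',
                 start='01-01-2018 00:00', end='31-12-2018 23:59')
addPolicyStatus(args)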
def getInfoPolicies(args, mylogger=None):
    """ Download the policies from the DB and execute the related B2SAFE workflow

    @type  args:       list of objects
    @param args:       The list of input parameters
                       considered to search for policies 
    @type logger:      loggin.logger object
    @param logger:     the logger
    """
    debug = args.verbose
    config = ConfigLoader(args.config)
    st_pre = config.SectionMap('DpmServer')['status_prefix']
    if mylogger is None:
        logger = setLoggingSystem(config, debug)
    else:
        logger = mylogger
    logger.info('Start to list the policies')
    conn = ServerConnector(args.config, args.test, "PolicyManager", debug)
    # if a policy id is provided the whole policy doc is shown
    if args.subcmd == 'list' and args.id:
        logger.debug('Policy with id [{}] is downloaded'.format(args.id))
        polDict, polDoc = conn.getPolicy(args.id)
        print polDoc
        return None
    attributes = {}
    # loading the default from config
    if len(config.SectionMap('PolicyFilters')) > 0:
        logger.debug('Loading the filter parameters from the config file')
        for par in config.SectionMap('PolicyFilters'):
            attributes[par] = config.SectionMap('PolicyFilters')[par]
    # loading the filter parameters from the input
    if args.filter:
        logger.debug('Loading the filter parameters from the input')
        pairs = args.filter.split(',')
        for pair in pairs:
            try:
                key, value = pair.split(':')
            except ValueError:
                print 'Wrong filter value [{}], expected key:value'.format(pair)
                sys.exit(1)
            attributes[key] = value
    # filter policies according to input time interval
    if args.start is not None:
        sdate = datetime.strptime(args.start, "%d-%m-%Y %H:%M")
        start = int(time.mktime(sdate.timetuple()))
        policies = conn.listPolicies(attributes, start)
        if args.end is not None:
            edate = datetime.strptime(args.end, "%d-%m-%Y %H:%M")
            end = int(time.mktime(edate.timetuple()))
            policies = conn.listPolicies(attributes, start, end)
    elif args.end is not None:
        edate = datetime.strptime(args.end, "%d-%m-%Y %H:%M")
        end = int(time.mktime(edate.timetuple()))
        policies = conn.listPolicies(attributes, 0, end)
    else:
        policies = conn.listPolicies(attributes)
    # listing of the policies matching the criteria of the dict "attributes"
    if policies is not None:
        for url in policies:
            print url
            community_name = (url.rsplit('/', 2)[-2]).split('_',2)[2]
            if args.subcmd == 'list' and args.ext:
                # listing policies with extended attributes
                xmldoc = conn.getDocumentByUrl(url)
                pol = xmltodict.parse(xmldoc, process_namespaces=True)
                for key in pol[polNs+':policy']:
                    if isinstance(key, basestring) and key.startswith('@'):
                        print '{} = {}'.format(key[1:], 
                                               pol[polNs+':policy'][key])
                # get the status doc from the DB
                dbname = None
                if args.all:
                    dbname = st_pre + community_name
                status, doc = conn.getStatus(pol[polNs+':policy']['@uniqueid'], dbname)
                if status is None:
                    print 'status = '
                    print 'checksum = '
                else:
                    st = status[staNs+':policy'][staNs+':status']
                    if args.all:
                        print 'overall status = {}'.format(st[staNs+':overall'])
                        if (st[staNs+':details'] is not None
                            and st[staNs+':details'][staNs+':site'] is not None):
                            if isinstance(st[staNs+':details'][staNs+':site'], list):
                                for line in st[staNs+':details'][staNs+':site']:
                                    print '{: ^4}status[{}] = {}'.format('', 
                                                                  line['@name'],
                                                                  line['#text'])
                            else:
                                print '{: ^4}status[{}] = {}'.format('',
                                   st[staNs+':details'][staNs+':site']['@name'],
                                   st[staNs+':details'][staNs+':site']['#text'])
                    else:
                        print 'status = {}'.format(
                            status[staNs+':policy'][staNs+':status']
                                                   [staNs+':overall'])
                    print 'checksum = {}'.format(
                          status[staNs+':policy'][staNs+':checksum']['#text'])
            print '{: ^40}'.format('')
    else:
        print 'Nothing found'
    
    return policies
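# A sketch of the status-document shape implied by the namespaced dict accesses
# above; the namespace URI, the field values and the assumption that staNs holds
# the full URI used by xmltodict as key prefix are illustrative, not from the source.
import xmltodict

staNs = 'http://eudat.eu/2016/policy-status'   # assumed namespace URI
status_xml = """
<policy xmlns="http://eudat.eu/2016/policy-status" uniqueid="42">
  <checksum method="MD5">d41d8cd98f00b204e9800998ecf8427e</checksum>
  <status>
    <overall>RUNNING</overall>
    <details>
      <site name="site-a">DONE</site>
    </details>
  </status>
</policy>
"""

status = xmltodict.parse(status_xml, process_namespaces=True)
st = status[staNs + ':policy'][staNs + ':status']
print st[staNs + ':overall']                                    # RUNNING
print st[staNs + ':details'][staNs + ':site']['@name']          # site-a
print status[staNs + ':policy'][staNs + ':checksum']['#text']   # the checksum value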
Example #7
class SCSClient(object):
    """docstring for SCSClient"""
    def __init__(self, path):
        super(SCSClient, self).__init__()
        self.path = path

        self.tree = FileTreeMap()

    def run(self):
        print "running SCSClient"
        self.connector = ServerConnector(hostname="localhost", port=5000)
        self.db = ClientDbConnector("../client.db")

        if not os.path.exists(self.path):
            self.initializeCloudStorage()
        else:
            # load the file info from the database
            infos = self.db.getFiles()
            infoMap = { info["id"]: info for info in infos }

            # print infoMap
            for info in infos:
                parts = [info["name"]]

                while info["parent"]:
                    info = infoMap[info["parent"]]
                    parts.append(info["name"])

                path = "/".join(parts)
                fullpath = os.path.join(self.path, path)
                self.tree.index(info["id"], fullpath)

        self.observeChanges()

    def observeChanges(self):
        self.observer = SCSFileObserver(self.path, self.tree)
        self.observer.run()

    def stop(self):
        self.observer.stop()
        self.observer.join()

        print "\nstopped SCSClient"

    def downloadFile(self, id):
        r = self.connector.getFile(id, 0, True)
        data = r["response"]["data"]

        path = data["filename"]
        fullpath = os.path.join(self.path, path)

        with open(fullpath, "wb") as fh:
            fh.write(data["content"])

        # store the information in our file list
        self.tree.index(id, fullpath)


    def initializeCloudStorage(self):
        print "initializing cloud storage in '%s'" % (self.path)
        os.makedirs(self.path)

        # get all files that currently exist on the remote host
        response = self.connector.getHierarchy()
        data = response["response"]["data"]

        #store files to local db
        self.db.truncateFiles()
        self.db.addFiles(data)

        for info in data:
            self.downloadFile(info["id"])


    def sync(self):
        print "synchronizing..."
        # replay the changes on our file tree
        # to identify the exact changes
        local_changes = copy.copy(self.observer.getChanges())
        local_changes = sorted(local_changes)
        new_list = self.tree.clone()

        for timestamp, event in local_changes:
            new_list.applyFileSystemEvent(event)

        # print self.tree.files, self.tree.deleted_files
        # print new_list.files, new_list.deleted_files

        # synching
        # iterate over a copy of the keys: entries may be deleted and re-added below
        for fileId in list(new_list.files):
            if fileId < 0 and fileId not in new_list.deleted_files:
                # CREATE NEW FILE
                print "New File Created: %d : %s" % (fileId, new_list.getPath(fileId))

                path = new_list.getPath(fileId)
                with open(path, "rb") as fh:
                    data = {
                        "filename": os.path.basename(path),
                        "parent": None,
                        "type": "file",
                        "hash": "abc",
                        "content": fh.read()
                    }
                    response = self.connector.pushFile(data, 0, True)
                    id = response["response"]["data"]["id"]

                    self.db.addFile(id, data["filename"], data["parent"], data["type"], data["hash"])

                    del new_list.files[fileId]
                    new_list.files[id] = path
                    new_list.path_index[path] = id



            if new_list.getPath(fileId) != self.tree.getPath(fileId):
                print "File Moved: %d : %s (from %s)" % (fileId, new_list.getPath(fileId), self.tree.getPath(fileId))

        for fileId in new_list.deleted_files:
            if fileId > 0: # only delete files that already have been synched
                print "File deleted: %d : %s" % (fileId, new_list.getPath(fileId))

        for fileId in new_list.modified_files:
            print "File modified: %d : %s" % (fileId, new_list.getPath(fileId))

            path = new_list.getPath(fileId)
            with open(path, "rb") as fh:
                data = {
                    "id": fileId,
                    "filename": os.path.basename(path),
                    "parent": None,
                    "type": "file",
                    "hash": "abc",
                    "content": fh.read()
                }

                response = self.connector.pushFile(data, 0, True)
                # todo: update db File

        new_list.modified_files.clear()
        new_list.deleted_files.clear()
        self.observer.changelog.clear()

        self.tree = new_list
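# A minimal usage sketch (assumptions: run() blocks inside the file observer until
# stop() is called from another thread, and an SCS server listens on localhost:5000
# as hard-coded in run(); the storage path below is a placeholder).
import threading
import time

client = SCSClient("./scs-storage")
worker = threading.Thread(target=client.run)
worker.start()

try:
    while True:
        time.sleep(30)
        client.sync()      # periodically push local changes to the server
except KeyboardInterrupt:
    client.stop()
    worker.join()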