def getMachineId(self):

        WINDOW = xbmcgui.Window(10000)

        clientId = WINDOW.getProperty("client_id")
        if clientId:
            return clientId

        # we need to load or generate a client machine id
        __addon__ = self.addon
        __addondir__ = xbmc.translatePath(__addon__.getAddonInfo('path'))
        machine_guid_lock_path = os.path.join(__addondir__,
                                              "machine_guid.lock")
        machine_guid_path = os.path.join(__addondir__, "machine_guid")
        clientId = ""

        # create the lock before the try block so the finally clause can
        # always call release() safely
        lock = Lock(machine_guid_lock_path)
        try:
            locked = lock.acquire()

            if locked:

                fd = os.open(machine_guid_path, os.O_CREAT | os.O_RDWR)
                clientId = os.read(fd, 256)

                if len(clientId) == 0:
                    # generate a new id; format the UUID's integer value
                    # explicitly instead of relying on implicit conversion
                    clientId = "%012X" % uuid4().int
                    self.logMsg("ClientId saved to FILE: %s" % clientId, 2)
                    os.write(fd, clientId)
                    os.fsync(fd)

                os.close(fd)

                self.logMsg("ClientId saved to WINDOW: %s" % clientId, 1)
                WINDOW.setProperty("client_id", clientId)

        finally:
            lock.release()

        return clientId
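A small context-manager wrapper keeps the acquire/release pairing above in one place. This is a minimal sketch assuming only the acquire() and release() methods used by the example; the LockGuard name is hypothetical and not part of the original addon.

# Minimal sketch of a context-manager wrapper around the file lock used
# above. Only acquire() and release() are assumed; "LockGuard" is a
# hypothetical helper, not part of the original addon.
class LockGuard(object):
    def __init__(self, lock_path):
        self.lock = Lock(lock_path)
        self.locked = False

    def __enter__(self):
        self.locked = self.lock.acquire()
        return self.locked

    def __exit__(self, exc_type, exc_value, traceback):
        self.lock.release()
        return False  # never suppress exceptions

# usage:
# with LockGuard(machine_guid_lock_path) as locked:
#     if locked:
#         ... read or create the machine_guid file ...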
Example #4
class Scheduler(object):
    ## constructor
    def __init__(self):
        ## set private values
        self.config = Config(workpath)
        self.pid = os.getpid()
        self.pname = 'Scheduler.py'

        ## logger setup
        self.loggerInit()

        ## lock setup
        self.lockObj = Lock(self.pname, self.pid, self.config.LOCK_DIR,
                            self.config.LOCK_FILE, self.logger)

        ## debug output
        self.logger.debug('Scheduler Initial Start')
        self.logger.debug('[SYS_CFG_DIR][%s]' % (self.config.SYS_CFG_DIR))
        self.logger.debug('[LOCK_DIR][%s]' % (self.config.LOCK_DIR))
        self.logger.debug('[LOCK_FILE][%s]' % (self.config.LOCK_FILE))
        self.logger.debug('[LOG_DIR][%s]' % (self.config.LOG_DIR))
        self.logger.debug('[LOG_FILE][%s]' % (self.config.LOG_FILE))
        self.logger.debug('[LOG_LEVEL][%s]' % (self.config.LOG_LEVEL))
        self.logger.debug('[LOG_MAX_SIZE][%s]' % (self.config.LOG_MAX_SIZE))
        self.logger.debug('[LOG_BACKUP_COUNT][%s]' %
                          (self.config.LOG_BACKUP_COUNT))
        self.logger.debug('Scheduler Initial Done')

    ## initialize the logger
    def loggerInit(self):
        self.logger = logging.getLogger("Scheduler")

        try:
            log_level = getattr(logging, self.config.LOG_LEVEL)

        except (AttributeError, TypeError):
            # unknown or invalid LOG_LEVEL name; fall back to NOTSET
            log_level = logging.NOTSET

        self.logger.setLevel(log_level)

        fh = RotatingFileHandler(self.config.LOG_FILE,
                                 mode='a',
                                 maxBytes=self.config.LOG_MAX_SIZE,
                                 backupCount=self.config.LOG_BACKUP_COUNT)
        fh.setLevel(log_level)

        ch = logging.StreamHandler()
        ch.setLevel(log_level)

        formatter = logging.Formatter(
            '[%(asctime)s][%(name)s][%(levelname)s] %(message)s')
        fh.setFormatter(formatter)
        ch.setFormatter(formatter)

        self.logger.addHandler(fh)
        self.logger.addHandler(ch)

        return (True)

    ## run scheduler function
    def run(self):
        self.logger.debug('Scheduler Start')

        ## load and run tasks
        taskObj = Task(self.logger, self.config)
        taskObj.run()

        ## release lock
        self.lockObj.release()

        return (True)
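The classes in these examples only define __init__() and run(); the entry point that drives them is not shown. A minimal, hypothetical sketch of how the Scheduler above is presumably invoked, assuming the surrounding module already defines workpath and the imports used in the class:

# Hypothetical entry point for the Scheduler class above; the project's
# actual __main__ block is not part of this example.
if __name__ == '__main__':
    scheduler = Scheduler()   # sets up config, logging and the process lock
    scheduler.run()           # runs the tasks, then releases the lock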
Example #5
#!/usr/bin/env python3

from Lock import Lock
from time import sleep

# Usage
# create the lock outside the try block so the finally clause can always
# release it safely
lock = Lock("Worker")
try:
    lock.acquire()
    print("fun1 starting")
    for loop in range(1, 5):
        print("fun1 working {}".format(loop))
        sleep(1)
    print("fun1 finished")
finally:
    print("Releasing Lock")
    lock.release()
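This snippet never checks what acquire() returns, so the loop runs even if another process already holds the lock. A sketch of the same usage with the result honoured, assuming acquire() returns a truthy value on success as in the getMachineId example above:

# Variant of the usage above that only does the work (and only releases)
# when the lock was actually acquired. The acquire() return convention is
# an assumption carried over from the getMachineId example.
lock = Lock("Worker")
acquired = False
try:
    acquired = lock.acquire()
    if acquired:
        for loop in range(1, 5):
            print("worker iteration {}".format(loop))
            sleep(1)
    else:
        print("another worker already holds the lock; exiting")
finally:
    if acquired:
        lock.release()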
Example #6
class Asset(object):
    ## constructor
    def __init__(self):
        ## set private values
        self.config = Config(workpath)
        self.pid = os.getpid()
        self.pname = 'Asset.py'

        ## logger setup
        self.loggerInit()

        ## lock setup
        self.lockObj = Lock(self.pname, self.pid, self.config.LOCK_DIR,
                            self.config.LOCK_FILE, self.logger)

        ## debug output
        self.logger.debug('Asset Initial Start')
        self.logger.debug('[SYS_CIS][%s]' % (self.config.SYS_CIS))
        self.logger.debug('[SYS_SAVE_CSV][%s]' % (self.config.SYS_SAVE_CSV))
        self.logger.debug('[SYS_CSV_DIR][%s]' % (self.config.SYS_CSV_DIR))
        self.logger.debug('[MQ_SERVERS][%s]' % (self.config.MQ_SERVERS))
        self.logger.debug('[MQ_PORT][%s]' % (self.config.MQ_PORT))
        self.logger.debug('[MQ_QUEUE][%s]' % (self.config.MQ_QUEUE))
        self.logger.debug('[SUBPROC_SCRIPTSDIR][%s]' %
                          (self.config.SUBPROC_SCRIPTSDIR))
        self.logger.debug('[SUBPROC_TIMEOUT][%s]' %
                          (self.config.SUBPROC_TIMEOUT))
        self.logger.debug('[LOCK_DIR][%s]' % (self.config.LOCK_DIR))
        self.logger.debug('[LOCK_FILE][%s]' % (self.config.LOCK_FILE))
        self.logger.debug('[LOG_DIR][%s]' % (self.config.LOG_DIR))
        self.logger.debug('[LOG_FILE][%s]' % (self.config.LOG_FILE))
        self.logger.debug('[LOG_LEVEL][%s]' % (self.config.LOG_LEVEL))
        self.logger.debug('[LOG_MAX_SIZE][%s]' % (self.config.LOG_MAX_SIZE))
        self.logger.debug('[LOG_BACKUP_COUNT][%s]' %
                          (self.config.LOG_BACKUP_COUNT))
        self.logger.debug('Asset Initial Done')

    ## initialize the logger
    def loggerInit(self):
        self.logger = logging.getLogger("Asset")

        try:
            log_level = getattr(logging, self.config.LOG_LEVEL)

        except (AttributeError, TypeError):
            # unknown or invalid LOG_LEVEL name; fall back to NOTSET
            log_level = logging.NOTSET

        self.logger.setLevel(log_level)

        fh = RotatingFileHandler(self.config.LOG_FILE,
                                 mode='a',
                                 maxBytes=self.config.LOG_MAX_SIZE,
                                 backupCount=self.config.LOG_BACKUP_COUNT)
        fh.setLevel(log_level)

        ch = logging.StreamHandler()
        ch.setLevel(log_level)

        formatter = logging.Formatter(
            '[%(asctime)s][%(name)s][%(levelname)s] %(message)s')
        fh.setFormatter(formatter)
        ch.setFormatter(formatter)

        self.logger.addHandler(fh)
        self.logger.addHandler(ch)

        return (True)

    ## build an object from a module name and class name
    def getObj(self, module_name, class_name, *args, **kwargs):
        module_meta = __import__(module_name, globals(), locals(),
                                 [class_name])
        class_meta = getattr(module_meta, class_name)
        obj = class_meta(*args, **kwargs)

        return (obj)

    ## convert a list of rows (header row first) into a DataFrame
    def list2df(self, data):
        cols = data[0]
        data = data[1:]
        df = pd.DataFrame(data, columns=cols)

        return (df)

    ## save to csv file
    def saveCSV(self, ci_name, data):
        df = self.list2df(data)
        df.to_csv('{}/{}.csv'.format(self.config.SYS_CSV_DIR, ci_name),
                  index=False,
                  sep='|')
        return (True)

    ## send data to MQ
    def sendData(self):
        sendDataObj = SendData(self.logger, self.config, self.config.SYS_CIS)
        sendDataObj.run()
        return (True)

    ## run asset function
    def run(self):
        self.logger.debug('Getting Asset Data Start')

        ## auto-import the CI modules listed in SYS_CIS
        CIObj_dict = {}
        for ci_name in self.config.SYS_CIS:
            CIObj_dict[ci_name] = self.getObj(ci_name, ci_name, self.logger,
                                              self.config)
            self.logger.debug('[CIObj_dict][{}]'.format(ci_name))

        ## get CIs data and save them in csv
        for ci_name in CIObj_dict:
            self.logger.debug('[call][{}]'.format(ci_name))
            csv_data = CIObj_dict[ci_name].getData()
            self.saveCSV(ci_name, csv_data)

        self.logger.debug('Getting Asset Data Done')

        ## send data to MQ
        self.sendData()

        ## release lock
        self.lockObj.release()

        return (True)
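getObj() above builds instances from module and class names with the low-level __import__ call. The standard library's importlib.import_module does the same job with a simpler signature; a minimal sketch of an equivalent helper (the get_obj name is hypothetical, not part of the project):

# Equivalent of Asset.getObj() written with importlib instead of __import__.
import importlib

def get_obj(module_name, class_name, *args, **kwargs):
    module = importlib.import_module(module_name)   # import the module by name
    cls = getattr(module, class_name)               # look up the class
    return cls(*args, **kwargs)                     # instantiate and return it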
Example #7
class ETL(object):
    ## constructor
    def __init__(self):
        ## set private values
        self.config = Config(workpath)
        self.pid = os.getpid()
        self.pname = 'ETL.py'

        ## logger setup
        self.loggerInit()

        ## lock setup
        self.lockObj = Lock(
            self.pname,
            self.pid,
            self.config.LOCK_DIR,
            self.config.LOCK_FILE,
            self.logger)

        ## debug output
        self.logger.debug('ETL Initial Start')
        self.logger.debug('[SYS_BUFFER_SIZE][%s]' % (self.config.SYS_BUFFER_SIZE))
        self.logger.debug('[SYS_BUFFER_WAIT][%s]' % (self.config.SYS_BUFFER_WAIT))
        self.logger.debug('[MQ_SERVER][%s]' % (self.config.MQ_SERVER))
        self.logger.debug('[MQ_PORT][%s]' % (self.config.MQ_PORT))
        self.logger.debug('[MQ_QUEUE][%s]' % (self.config.MQ_QUEUE))
        self.logger.debug('[MARIADB_HOST][%s]' % (self.config.MARIADB_HOST))
        self.logger.debug('[MARIADB_PORT][%s]' % (self.config.MARIADB_PORT))
        self.logger.debug('[MARIADB_USER][%s]' % (self.config.MARIADB_USER))
        self.logger.debug('[MARIADB_PASSWORD][%s]' % (self.config.MARIADB_PASSWORD))
        self.logger.debug('[MARIADB_DATABASE][%s]' % (self.config.MARIADB_DATABASE))
        self.logger.debug('[LOCK_DIR][%s]' % (self.config.LOCK_DIR))
        self.logger.debug('[LOCK_FILE][%s]' % (self.config.LOCK_FILE))
        self.logger.debug('[LOG_DIR][%s]' % (self.config.LOG_DIR))
        self.logger.debug('[LOG_FILE][%s]' % (self.config.LOG_FILE))
        self.logger.debug('[LOG_LEVEL][%s]' % (self.config.LOG_LEVEL))
        self.logger.debug('[LOG_MAX_SIZE][%s]' % (self.config.LOG_MAX_SIZE))
        self.logger.debug(
            '[LOG_BACKUP_COUNT][%s]' %
            (self.config.LOG_BACKUP_COUNT))
        self.logger.debug('ETL Initial Done')

    ## initialize the logger
    def loggerInit(self):
        self.logger = logging.getLogger("ETL")

        try:
            log_level = getattr(logging, self.config.LOG_LEVEL)

        except (AttributeError, TypeError):
            # unknown or invalid LOG_LEVEL name; fall back to NOTSET
            log_level = logging.NOTSET

        self.logger.setLevel(log_level)

        fh = RotatingFileHandler(
            self.config.LOG_FILE,
            mode='a',
            maxBytes=self.config.LOG_MAX_SIZE,
            backupCount=self.config.LOG_BACKUP_COUNT)
        fh.setLevel(log_level)

        ch = logging.StreamHandler()
        ch.setLevel(log_level)

        formatter = logging.Formatter(
            '[%(asctime)s][%(name)s][%(levelname)s] %(message)s')
        fh.setFormatter(formatter)
        ch.setFormatter(formatter)

        self.logger.addHandler(fh)
        self.logger.addHandler(ch)

        return (True)

    ## run ETL function
    def run(self):
        ## get data from RabbitMQ
        getDataObj = Connector(self.logger, self.config)
        data = getDataObj.run()

        ## release lock
        self.lockObj.release()

        return (True)
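In all three classes above, run() only releases the lock when the work succeeds; an exception from Task, the CI collection, or the Connector would leave the lock held. A sketch of the same run() with the release moved into a finally clause (shown for ETL; the other classes would follow the same shape, and this is not the project's actual code):

    ## run ETL function (sketch: release the lock even if the connector fails)
    def run(self):
        try:
            ## get data from RabbitMQ
            getDataObj = Connector(self.logger, self.config)
            data = getDataObj.run()
        finally:
            ## release lock
            self.lockObj.release()

        return (True)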