def _request(self, socktype, sockopt={}):
    socket = self.socket(socktype, sockopt)
    socket.connect(self._broker.requestAddr)
    sockware = Note({
        'socket': socket,
        'address': self._broker.requestAddr})
    return sockware
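# Illustrative sketch only, not the project's actual Note implementation: across
# these snippets Note appears to wrap a dict so its keys read as attributes
# (sockware.socket, repo.sysPath, caller.typeKey). A minimal stand-in, with
# hypothetical names, would behave like this:
class _NoteSketch:
    def __init__(self, data):
        self.__dict__.update(data)

_sockware = _NoteSketch({'socket': None, 'address': 'tcp://127.0.0.1:5555'})
assert _sockware.address == 'tcp://127.0.0.1:5555'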
def makeSock(self, socktype, sockopt={}):
    socket = self.socket(socktype, sockopt)
    socket.connect(self.responseAddr)
    sockware = Note({
        'socket': socket,
        'address': self.responseAddr})
    return sockware
def prepareDownload(self):
    # the itemKey context is reversed here, used only to check that the applied
    # category exists in the registered consumerCategories
    jpacket = {'eventKey': f'REPO|{self.jobId}', 'itemKey': 'csvToJson'}
    repo = self.query(Note(jpacket))

    apiBase = self._leveldb['apiBase']
    sysPath = f'{apiBase}/{repo.sysPath}'
    if not os.path.exists(sysPath):
        errmsg = f'output repo path does not exist : {sysPath}'
        raise TaskError(errmsg)

    catPath = self.jmeta.category
    if catPath not in repo.consumerCategories:
        errmsg = f'consumer category branch {catPath} does not exist in {repo.consumerCategories}'
        raise TaskError(errmsg)

    repoPath = f'{sysPath}/{catPath}/{self.jobId}'
    logger.info(f'output json file repo path : {repoPath}')

    try:
        self.sysCmd(['mkdir', '-p', repoPath])
    except TaskError as ex:
        logger.error('output repo path creation failed')
        raise

    csvGZipfile = f'{self.jobId}.{self.jmeta.fileExt}'
    logger.info(f'output csv tar gzipfile : {csvGZipfile}')

    dbKey = f'{self.jobId}|datastream|workspace'
    self._leveldb[dbKey] = repoPath
    dbKey = f'{self.jobId}|datastream|outfile'
    self._leveldb[dbKey] = csvGZipfile
def get(self, socktype, sockAddr, sockopt={}):
    socket = self.socket(socktype, sockopt)
    socket.connect(sockAddr)
    sockware = Note({
        'socket': socket,
        'address': sockAddr})
    return sockware
def run(self, jobMeta):
    gitUser, owner, product, releaseTag = self.releaseInfo
    logger.info(f'Code deployment, using release package : {gitUser} {owner} {product} {releaseTag}')
    archivePath = f'{self.apiBase}/project/archive/{owner}-{releaseTag}'
    self.method = 'copy' if os.path.exists(archivePath) else 'extract'
    self.deploy()
    self.loadMeta(Note(jobMeta))
def downloadJsonFile(self):
    # the itemKey context is reversed here, used only to check that the applied
    # category exists in the registered consumerCategories
    jpacket = {'eventKey': f'REPO|{self.jobId}', 'itemKey': 'jsonToCsv'}
    repo = self.query(Note(jpacket))

    apiBase = self._leveldb['apiBase']
    sysPath = f'{apiBase}/{repo.sysPath}'
    if not os.path.exists(sysPath):
        errmsg = f'output repo path does not exist : {sysPath}'
        raise TaskError(errmsg)

    catPath = self.jmeta.category
    if catPath not in repo.consumerCategories:
        errmsg = f'consumer category branch {catPath} does not exist in {repo.consumerCategories}'
        raise TaskError(errmsg)

    repoPath = f'{sysPath}/{catPath}'
    logger.info(f'output json gzipfile repo path : {repoPath}')

    jsonZipfile = f'{self.jobId}.{self.jmeta.fileExt}'
    logger.info(f'output json gzipfile : {jsonZipfile}')

    dbKey = f'{self.jobId}|datastream|workspace'
    self._leveldb[dbKey] = repoPath
    dbKey = f'{self.jobId}|datastream|outfile'
    self._leveldb[dbKey] = jsonZipfile
def makeSock(self, socktype, sockopt={}):
    socket = self.socket(socktype, sockopt)
    port = socket.bind_to_random_port(self.hostAddr)
    sockAddr = f'{self.hostAddr}:{port}'
    sockware = Note({
        'socket': socket,
        'address': sockAddr})
    return sockware
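# Hedged sketch of the bind-to-random-port pattern above, assuming the self.socket
# factory wraps pyzmq (an assumption; only bind_to_random_port is visible here).
# pyzmq's Socket.bind_to_random_port binds within an ephemeral port range and
# returns the chosen port, which is then appended to the host address.
import zmq

_ctx = zmq.Context.instance()
_sock = _ctx.socket(zmq.PULL)
_port = _sock.bind_to_random_port('tcp://127.0.0.1')
_sockAddr = f'tcp://127.0.0.1:{_port}'
_sock.close()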
def query(self, packet, render='Note'):
    result = self.select(packet)
    if render == 'Note':
        return Note(result)
    elif render == 'Article':
        return Article(result)
    else:
        return result
def extended(self):
    eventKey = self.dependency[-1]
    params = {'eventKey': eventKey, 'itemKey': 'dependency'}
    nextDep = self.query(Note(params))
    logger.info(f'next dependency : {nextDep}')
    if nextDep:
        self.dependency.extend(nextDep)
        self.depth += 1
    return nextDep != []
def __start__(cls, metaFile):
    if not cls._nodeTree:
        with open(metaFile, 'r') as fhr:
            try:
                schema = Note(json.load(fhr))
                provider = cls.make(schema)
                logger.debug(f'########## schema is loaded, maxLevel : {provider.maxLevel}')
                del provider
            except ValueError as ex:
                errmsg = 'json load error: ' + str(ex)
                raise Exception(errmsg)
def _getNode(self, nodeName, config, MemberKlass):
    treeNode = Note(config)
    moduleName = type(self).__module__
    logger.debug(f'{self.name}, {nodeName} classTag : {treeNode.classTag}')
    try:
        className = 'TreeNode' + treeNode.classTag
        klass = getattr(sys.modules[moduleName], className)
        return klass.make(nodeName, config, MemberKlass)
    except AttributeError:
        errMsg = f'{nodeName} classTag {treeNode.classTag} does not exist in {__name__}'
        raise Exception(errMsg)
async def resume(self, result, jpacket):
    caller = Note(jpacket.caller)
    logmsg = f'resuming {caller.typeKey}.{caller.actor} with signal {result.signal} ...'
    logger.info(f'{self.name}, {self.jobId}, {logmsg}')
    rpacket = {
        'jobId': caller.jobId,
        'typeKey': caller.typeKey,
        'caller': jpacket.actor,
        'actor': caller.actor,
        'fromKey': jpacket.typeKey,
        'synchronous': False,
        'signal': result.signal
    }
    await self.request('resume', rpacket)
async def resume(self, signal):
    jpacket = self.monitor.jpacket
    caller = Note(jpacket.caller)
    logmsg = f'resuming {caller.typeKey}.{caller.actor} with signal {signal} ...'
    logger.info(f'{self.name}, {self.jobId}, {logmsg}')
    rpacket = {
        'jobId': caller.jobId,
        'typeKey': caller.typeKey,
        'caller': jpacket.actor,
        'actor': caller.actor,
        'fromKey': jpacket.typeKey,
        'synchronous': False,
        'signal': signal
    }
    connector = ApiRequest.connector(caller.jobId)
    await self.request('resume', rpacket, connector)
def evalSysStatus(self):
    jpacket = {'eventKey': f'REPO|{self.jobId}', 'itemKey': 'csvToJson'}
    repo = self.query(Note(jpacket))

    apiBase = self._leveldb['apiBase']
    sysPath = f'{apiBase}/{repo.sysPath}'
    if not os.path.exists(sysPath):
        errmsg = f'xform input path does not exist : {sysPath}'
        raise TaskError(errmsg)

    catPath = self.jmeta.category
    if catPath not in repo.consumerCategories:
        errmsg = f'consumer category branch {catPath} does not exist under {repo.consumerCategories}'
        raise TaskError(errmsg)

    repoPath = f'{sysPath}/{catPath}'
    logger.info(f'input zipfile repo path : {repoPath}')

    inputZipFile = f'{self.jobId}.{self.jmeta.fileExt}'
    logger.info(f'input zipfile : {inputZipFile}')

    zipFilePath = f'{repoPath}/{inputZipFile}'
    if not os.path.exists(zipFilePath):
        errmsg = 'xform input zipfile does not exist in source repo'
        raise TaskError(errmsg)

    workbase = f'{apiBase}/{self.jmeta.workspace}'
    if not os.path.exists(workbase):
        errmsg = f'xform workspace path does not exist : {workbase}'
        raise TaskError(errmsg)

    tsXref = datetime.now().strftime('%y%m%d%H%M%S')
    workspace = f'{workbase}/{tsXref}'
    logger.info(f'session workspace : {workspace}')
    logger.info('creating session workspace ... ')

    try:
        cmdArgs = ['mkdir', '-p', workspace]
        self.sysCmd(cmdArgs)
    except TaskError as ex:
        logger.error(f'{self.jobId}, workspace creation failed')
        raise

    try:
        self.sysCmd(['cp', zipFilePath, workspace])
    except TaskError as ex:
        logger.error(f'zipfile copy to workspace failed : {zipFilePath}')
        raise

    try:
        cmdArgs = ['tar', '-xzf', inputZipFile]
        self.sysCmd(cmdArgs, cwd=workspace)
    except TaskError as ex:
        logger.error(f'{inputZipFile}, gunzip tar extract command failed')
        raise

    # put workspace path in storage for micro-service access
    dbKey = f'{self.jobId}|workspace'
    self._leveldb[dbKey] = workspace
    self.workspace = workspace
def evalSysStatus(self):
    jpacket = {'eventKey': f'REPO|{self.jobId}', 'itemKey': 'jsonToCsv'}
    repo = self.query(Note(jpacket))

    apiBase = self._leveldb['apiBase']
    sysPath = f'{apiBase}/{repo.sysPath}'
    if not os.path.exists(sysPath):
        errmsg = f'xform input path does not exist : {sysPath}'
        raise TaskError(errmsg)

    catPath = self.jmeta.category
    if catPath not in repo.consumerCategories:
        errmsg = f'consumer category branch {catPath} does not exist in {repo.consumerCategories}'
        raise TaskError(errmsg)

    repoPath = f'{sysPath}/{catPath}'
    logger.info(f'json input file repo path : {repoPath}')

    inputJsonFile = f'{self.jobId}.{self.jmeta.fileExt}'
    logger.info(f'json input file : {inputJsonFile}')

    jsonFilePath = f'{repoPath}/{inputJsonFile}'
    if not os.path.exists(jsonFilePath):
        errmsg = 'xform json input file does not exist in source repo'
        raise TaskError(errmsg)

    workbase = f'{apiBase}/{self.jmeta.workspace}'
    if not os.path.exists(workbase):
        errmsg = f'xform workspace path does not exist : {workbase}'
        raise TaskError(errmsg)

    tsXref = datetime.now().strftime('%y%m%d%H%M%S')
    workspace = f'{workbase}/{tsXref}'
    logger.info(f'session workspace : {workspace}')
    logger.info('creating session workspace ... ')

    try:
        self.sysCmd(['mkdir', '-p', workspace])
    except TaskError as ex:
        logger.error(f'{self.jobId}, workspace creation failed')
        raise

    try:
        cmdArgs = ['cp', jsonFilePath, workspace]
        self.sysCmd(cmdArgs)
    except TaskError as ex:
        logger.error(f'copy to workspace failed : {inputJsonFile}')
        raise

    jsonFilePath = f'{workspace}/{inputJsonFile}'
    lineCount = getLineCount(jsonFilePath)
    if lineCount <= 2000:
        logMsg = f'file split not required, line count : {lineCount} <= 2000'
        logger.info(f'{self.jobId}, {logMsg}')
        self.jobRange = 1
        dbKey = f'{self.jobId}|XFORM|input|1|jsonFile'
        self._leveldb[dbKey] = inputJsonFile
    else:
        self.jobRange = 2
        splitSize = int(math.ceil(lineCount / self.jobRange))
        # round up to the nearest 50
        #splitSize = int(math.ceil(splitSize / 50.0)) * 50
        logger.info(f'{self.jobId}, line count, split size : {lineCount}, {splitSize}')
        try:
            splitFileName = self.jobId
            cmdArgs = ['split', '-l', str(splitSize), inputJsonFile, splitFileName]
            self.sysCmd(cmdArgs, cwd=workspace)
        except TaskError as ex:
            logger.error(f'{inputJsonFile}, split command failed')
            raise
        for i in range(1, self.jobRange + 1):
            self.putSplitFilename(i)

    # put workspace path in storage for micro-service access
    dbKey = f'{self.jobId}|workspace'
    self._leveldb[dbKey] = workspace
    self.workspace = workspace
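# Hedged illustration of the split step above, assuming GNU coreutils' split is on
# PATH; file and variable names below are hypothetical. 'split -l <n> <infile> <prefix>'
# writes <prefix>aa, <prefix>ab, ... each holding at most n lines, which is the kind
# of output the sysCmd call produces and putSplitFilename presumably records.
import math
import os
import subprocess
import tempfile

with tempfile.TemporaryDirectory() as _workspace:
    _infile = os.path.join(_workspace, 'sample.json')
    with open(_infile, 'w') as _fhw:
        _fhw.writelines(f'{{"row": {i}}}\n' for i in range(10))
    _splitSize = int(math.ceil(10 / 2))
    subprocess.run(['split', '-l', str(_splitSize), 'sample.json', 'sample'],
                   cwd=_workspace, check=True)
    # expect sample.json plus the chunks sampleaa and sampleab
    print(sorted(os.listdir(_workspace)))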
def next(self):
    eventKey = self.dependency.pop()
    params = Note({'eventKey': eventKey})
    jobMeta = self.query(params)
    logger.info(f'next generate meta : {jobMeta}')
    return JobPacket(jobMeta)
def make(cls, jobId, peerNote):
    metaKey = f'{peerNote.jobId}|hardhash|makeResult'
    result = Note(LeveldbHash.db[metaKey])
    context = HHClientContext(result.hhId, HHRequest)
    return cls(jobId, peerNote, context)
def __init__(self):
    self.level = 1
    self.treeLevel = Note({'curr': None, 'prev': None})
    self.nodeset = {}
    self.maxLevel = 1