async def _PREPARE(self, jobId, taskId):
    self.jobId = jobId
    logger.info(f'{self.name}, job {jobId}, preparing {taskId} data stream ...')
    hardhash = HardhashContext.connector(jobId)
    try:
        # resolve the job workspace and input file name from the Hardhash datastore
        dbKey = f'{jobId}|workspace'
        workspace = hardhash[dbKey]
        dbKey = f'{jobId}|datastream|infile'
        self.infileName = hardhash[dbKey]
    except KeyError as ex:
        errmsg = f'{jobId}, failed to get job article from datastorage'
        await self.sock.send_json([500, {'error': errmsg}])
        raise TaskError(errmsg) from ex
    logger.info(
        f'{self.name}, datastream workspace, infile : {workspace}, {self.infileName}')
    self.infilePath = f'{workspace}/{self.infileName}'
    if not os.path.exists(self.infilePath):
        errmsg = f'source file {self.infileName} does not exist in workspace'
        await self.sock.send_json([500, {'error': errmsg}])
    else:
        await self.sock.send_json([200, {'status': 'ready', 'infile': self.infileName}])
def loadMeta(self, jobMeta):
    hardhash = HardhashContext.connector(jobMeta.jobId)
    hardhash['apiBase'] = self.apiBase
    subPath = self.assets['subPath']
    assetPath = f'{self.productPath}/{subPath}'
    logger.info(f'loading {jobMeta.jobId} meta assets in {assetPath}')
    # load each meta asset file and store its document under its event key
    for metaFile in self.assets['metaFiles']:
        metaPath = f'{assetPath}/{metaFile}'
        packet = FilePacket.open(metaPath)
        logger.info(f'{self.name}, loading item : {packet.eventKey}')
        hardhash[packet.eventKey] = packet.metaDoc
@classmethod
def make(cls, jobId):
    context = HardhashContext.get(jobId)
    return cls(jobId, context)
def __call__(self, jobId, taskNum, *args, **kwargs):
    self._hh = HardhashContext.connector(contextId=jobId)
    self.runActor(jobId, taskNum, *args, **kwargs)
async def __call__(self, jobId, taskNum, *args, **kwargs):
    self._leveldb = HardhashContext.connector(contextId=jobId)
    await self.runActor(jobId, taskNum, *args, **kwargs)
    await self._subscriber.notify(taskNum, self.name)