def execute(self, connector, process_action):
    logger = Logger(connector, self)
    job_name = process_action['job_name']

    file_repository = FileRepository(connector)
    process_repository = ProcessRepository(connector)
    process_property_repository = ProcessPropertyRepository(connector)
    process_action_property_repository = ProcessActionPropertyRepository(
        connector)

    # retrieve the process properties
    process = process_repository.find_by_id(process_action['pcs_id'])
    file_id = process_property_repository.get_property(process, 'file_id')
    message = process_property_repository.get_property(process, 'message')
    backtrace = process_property_repository.get_property(process, 'backtrace')

    file = file_repository.find_by_id(file_id)
    filename = file['filename']

    state = process_action_property_repository.get_property(
        process_action, 'state')
    if not state:
        state = process_property_repository.get_property(process, 'state')
    if not state:
        state = 'ERROR_NO_STATE'

    file['state'] = state
    file['message'] = message
    file['backtrace'] = backtrace
    file_repository.state(file)

    logger.info(job_name,
                "filename: " + filename + ", state: " + file['state'])
def execute(self, connector, process_action):
    job_name = process_action['job_name']
    logger = Logger(connector, self)

    chunk_repository = ChunkRepository(connector)
    process_repository = ProcessRepository(connector)
    process_property_repository = ProcessPropertyRepository(connector)
    process_action_property_repository = ProcessActionPropertyRepository(
        connector)

    # retrieve the chunk properties
    process = process_repository.find_by_id(process_action['pcs_id'])
    chunk_id = process_property_repository.get_property(process, 'chunk_id')
    message = process_property_repository.get_property(process, 'message')
    backtrace = process_property_repository.get_property(process, 'backtrace')

    chunk = chunk_repository.find_by_id(chunk_id)
    state = process_action_property_repository.get_property(
        process_action, 'state')

    # retrieve the datasource of the payload and apply the state to it
    datasource = chunk['datasource']
    payload_ds = DatasourceBuilder.find(connector, datasource)
    payload_c = ConnectorFactory.create_connector(payload_ds)
    payload_repository = PayloadRepository(payload_c)
    payload = json.loads(chunk['payload'])
    payload_repository.state(chunk, payload, state)
    payload_c.close()

    logger.info(job_name, 'job_name: ' + job_name + ", state: " + state)
def execute(self, connector, process_action):
    job_name = process_action['job_name']
    logger = Logger(connector, self)

    event_repository = EventRepository(connector)
    process_repository = ProcessRepository(connector)
    process_property_repository = ProcessPropertyRepository(connector)
    process_action_property_repository = ProcessActionPropertyRepository(
        connector)

    # retrieve the event properties
    process = process_repository.find_by_id(process_action['pcs_id'])
    event_id = process_property_repository.get_property(process, 'event_id')
    message = process_property_repository.get_property(process, 'message')
    backtrace = process_property_repository.get_property(process, 'backtrace')

    event = event_repository.find_by_id(event_id)
    state = process_action_property_repository.get_property(
        process_action, 'state')

    event['state'] = state
    event['message'] = message
    event['backtrace'] = backtrace
    event_repository.state(event)

    logger.info(job_name,
                'job_name: ' + job_name + ", state: " + event['state'])
def execute(self, connector, process_action):
    logger = Logger(connector, self)
    job_name = process_action['job_name']

    file_property_repository = FilePropertyRepository(connector)
    file_repository = FileRepository(connector)
    process_repository = ProcessRepository(connector)
    process_property_repository = ProcessPropertyRepository(connector)
    process_action_property_repository = ProcessActionPropertyRepository(
        connector)

    # retrieve the required properties
    process = process_repository.find_by_id(process_action['pcs_id'])
    file_id = process_property_repository.get_property(process, 'file_id')
    name = process_action_property_repository.get_property(
        process_action, 'name')
    value = process_action_property_repository.get_property(
        process_action, 'value')

    # get the file using the file_id we collected and set the property on it
    file = file_repository.find_by_id(file_id)
    filename = file['filename']
    file_property_repository.set_property(file, name, value)

    logger.info(
        job_name,
        "filename: " + filename + ", name: " + name + ", value: " + value)
def execute(self, connector, process_action):
    logger = Logger(connector, self)
    job_name = process_action['job_name']

    process_repository = ProcessRepository(connector)
    process_property_repository = ProcessPropertyRepository(connector)
    process_action_property_repository = ProcessActionPropertyRepository(
        connector)

    # retrieve the process properties
    process = process_repository.find_by_id(process_action['pcs_id'])
    identifier = process_property_repository.get_property(
        process, 'identifier')
    datasource = process_action_property_repository.get_property(
        process_action, 'datasource')

    oracle_ds = DatasourceBuilder.find(connector, datasource)
    oracle_c = ConnectorFactory.create_connector(oracle_ds)
    scheduler = OracleScheduler(oracle_c)

    # still running: return the action so the consumer re-enqueues it after
    # its configured sleep
    if scheduler.is_running(identifier):
        oracle_c.close()
        return process_action

    # the job has finished, so a run result should be available
    created_at = process['created_at']
    run_result = scheduler.run_result(identifier, job_name, created_at)
    oracle_c.close()

    process_property_repository.set_property(process, 'message',
                                             run_result['message'])
    process_property_repository.set_property(process, 'state',
                                             run_result['state'])
    process_property_repository.set_property(process, 'backtrace',
                                             run_result['backtrace'])
def execute(self, connector, process_action):
    logger = Logger(connector, self)
    job_name = process_action['job_name']

    process_repository = ProcessRepository(connector)
    process_property_repository = ProcessPropertyRepository(connector)
    process_action_property_repository = ProcessActionPropertyRepository(
        connector)

    process = process_repository.find_by_id(process_action['pcs_id'])

    # retrieve the payload if present; the expressions below can reference
    # it through locals()
    payload = JsonExt.loads(
        process_property_repository.get_property(process, 'payload'))

    twitter_token = process_action_property_repository.get_property(
        process_action, 'twitter_token')
    twitter_token = JsonExt.loads(twitter_token)
    tweet_content = process_action_property_repository.get_property(
        process_action, 'tweet_content')
    tweet_content = ExpressionParser.parse(tweet_content, locals())
    tweet_hashtags = process_action_property_repository.get_property(
        process_action, 'tweet_hashtags')
    tweet_hashtags = ExpressionParser.parse(tweet_hashtags, locals())

    headers = json.loads(
        process_action_property_repository.get_property(
            process_action, 'headers'))
    certificate = JsonExt.loads(
        process_action_property_repository.get_property(
            process_action, 'certificate'))
    proxies = JsonExt.loads(
        process_action_property_repository.get_property(
            process_action, 'proxies'))
    url = process_action_property_repository.get_property(
        process_action, 'url')

    message = {
        'twitter_token': twitter_token,
        'tweet_content': tweet_content,
        'tweet_hashtags': tweet_hashtags
    }
    logger.info(job_name, json.dumps(message))

    response = RestRequest.post(headers, url, message, certificate, proxies)
    logger.info(
        job_name, "status_code: " + str(response.status_code) + ", reason: " +
        response.reason + ", content: " + response.content)
    if response.status_code != 200:
        raise ProcessException(
            "status_code: " + str(response.status_code) + ", reason: " +
            response.reason + ", content: " + response.content)
def execute(self, connector, process_action):
    logger = Logger(connector, self)
    job_name = process_action['job_name']

    properties = PropertyRepository(connector)
    process_repository = ProcessRepository(connector)
    process_property_repository = ProcessPropertyRepository(connector)
    process_action_property_repository = ProcessActionPropertyRepository(
        connector)
    file_repository = FileRepository(connector)

    process = process_repository.find_by_id(process_action['pcs_id'])
    file_id = process_property_repository.get_property(process, 'file_id')
    file = file_repository.find_by_id(file_id)
    tmp = properties.get_property('scanner.tmp')

    # retrieve the file properties
    pickup_location = process_property_repository.get_property(
        process, 'pickup_location')
    pickup_filename = file['filename']
    pickup_path = process_property_repository.get_property(process, 'path')
    drop_location = process_action_property_repository.get_property(
        process_action, 'drop_location')

    logger.info(
        job_name, 'filename: ' + pickup_filename + ", pickup_location: " +
        pickup_location + ", drop_location: " + drop_location)

    # copy the file from the pickup folder to the tmp folder
    pickup_client = VfsFactory.create_client(pickup_location)
    pickup_client.connect()
    pickup_path = pickup_path + os.sep + pickup_filename
    tmp_path = tmp + os.sep + pickup_filename
    pickup_client.get(pickup_path, tmp_path)
    pickup_client.close()

    # copy the file from the tmp folder to the drop folder
    drop_client = VfsFactory.create_client(drop_location)
    drop_client.connect()
    property_path = drop_client.get_path()
    drop_path = property_path + os.sep + pickup_filename
    drop_client.put(tmp_path, drop_path)
    drop_client.close()

    # set the process properties for the next process_action
    process_property_repository.set_property(process, 'pickup_location',
                                             drop_location)
    process_property_repository.set_property(process, 'path', property_path)
def test_call(self):
    paprika_ds = DatasourceBuilder.build('paprika-ds.json')
    connector = ConnectorFactory.create_connector(paprika_ds)

    job_repository = JobRepository(connector)
    job_name = job_repository.job()

    process_repository = ProcessRepository(connector)
    process = dict()
    process['job_name'] = job_name['job_name']
    process['pdn_id'] = None
    process['state'] = None
    process['e_pdn_id'] = None
    process['name'] = None
    process['queue'] = None
    process_repository.insert(process)

    event_repository = EventRepository(connector)
    event = dict()
    event['job_name'] = job_name['job_name']
    event['state'] = None
    event['repetition'] = 'DAYS'
    event['intermission'] = '1'
    event['pcs_id'] = process['id']
    event_repository.insert(event)

    process_property_repository = ProcessPropertyRepository(connector)
    process_property_repository.set_property(process, 'event_id', event['id'])

    process_action_repository = ProcessActionRepository(connector)
    process_action = dict()
    process_action['job_name'] = job_name['job_name']
    process_action['pcs_id'] = process['id']
    process_action['dan_id'] = None
    process_action['name'] = 'copy'
    process_action['state'] = 'processed'
    process_action_repository.insert(process_action)

    process_action_property_repository = ProcessActionPropertyRepository(
        connector)
    process_action_property = dict()
    process_action_property['name'] = 'file_id'
    process_action_property['value'] = 1
    process_action_property['pan_id'] = process_action['id']
    process_action_property_repository.insert(process_action_property)

    copy = Copy()
    copy.execute(connector, process_action)

    connector.close()
def execute(self, connector, process_action):
    job_name = process_action['job_name']
    logger = Logger(connector, self)

    process_action_property_repository = ProcessActionPropertyRepository(
        connector)

    # retrieve the properties
    days = process_action_property_repository.get_property(
        process_action, 'days')

    process_property_repository = ProcessPropertyRepository(connector)
    count = process_property_repository.clean(days)

    logger.info(job_name,
                str(count) + ' process_properties record(s) purged.')
def action(self, connector, message):
    try:
        process = json.loads(message['payload'])

        process_repository = ProcessRepository(connector)
        process['state'] = 'PROCESSING'
        process['message'] = ''
        process['backtrace'] = ''
        process_repository.state(process)

        process_definition_repository = ProcessDefinitionRepository(connector)
        process_definition_action_repository = ProcessDefinitionActionRepository(
            connector)
        process_definition = process_definition_repository.find_by_id(
            process['pdn_id'])
        process_definition_action = process_definition_action_repository.find_first_by_process_definition(
            process_definition)

        process_action_repository = ProcessActionRepository(connector)
        process_action = dict()
        process_action['job_name'] = process['job_name']
        process_action['pcs_id'] = process['id']
        process_action['dan_id'] = process_definition_action['id']
        process_action['name'] = process_definition_action['name']
        process_action['state'] = 'READY'
        process_action = process_action_repository.insert(process_action)

        process_action_property_repository = ProcessActionPropertyRepository(
            connector)
        process_definition_action_property_repository = ProcessDefinitionActionPropertyRepository(
            connector)
        process_definition_action_properties = process_definition_action_property_repository.list_by_process_definition_action(
            process_definition_action)
        for process_definition_action_property in process_definition_action_properties:
            process_action_property_repository.set_property(
                process_action, process_definition_action_property['name'],
                process_definition_action_property['value'])

        payload = process_action
        Message.enqueue(connector, process['queue'], payload, 'message',
                        'paprika.consumers.ProcessAction.ProcessAction')
    except:
        process = json.loads(message['payload'])
        paprika_ds = DatasourceBuilder.build('paprika-ds.json')
        process_repository = ProcessRepository(paprika_ds)

        result = Traceback.build()
        result['id'] = process['id']
        result['state'] = 'FAILED'
        process_repository.state(result)

        logger = Logger(connector, self)
        logger.fatal(process['job_name'], result['message'],
                     result['backtrace'])
def execute(self, connector, process_action):
    logger = Logger(connector, self)
    job_name = process_action['job_name']

    process_repository = ProcessRepository(connector)
    process_property_repository = ProcessPropertyRepository(connector)
    process_action_property_repository = ProcessActionPropertyRepository(
        connector)

    # retrieve the process properties
    process = process_repository.find_by_id(process_action['pcs_id'])

    # retrieve the payload if present; expressions can reference it
    # through locals()
    payload = process_property_repository.get_property(process, 'payload')
    if payload:
        payload = json.loads(payload)

    datasource = process_action_property_repository.get_property(
        process_action, 'datasource')
    method_name = process_action_property_repository.get_property(
        process_action, 'method_name')
    params = process_action_property_repository.get_property(
        process_action, 'params')
    if params:
        params = json.loads(params)
        params = ExpressionParser.parse(params, locals())

    oracle_ds = DatasourceBuilder.find(connector, datasource)
    oracle_c = ConnectorFactory.create_connector(oracle_ds)
    oracle_call = OracleCall(oracle_c)

    call = dict()
    call['method_name'] = method_name
    call['params'] = params
    oracle_call.execute(call)
    logger.info(job_name, json.dumps(call))
    oracle_c.close()

    process_property_repository.set_property(process, 'call',
                                             json.dumps(call))
def execute(self, connector, process_action):
    logger = Logger(connector, self)
    job_name = process_action['job_name']

    file_repository = FileRepository(connector)
    process_repository = ProcessRepository(connector)
    process_property_repository = ProcessPropertyRepository(connector)
    process_action_property_repository = ProcessActionPropertyRepository(
        connector)

    # retrieve the process properties
    process = process_repository.find_by_id(process_action['pcs_id'])
    payload = process_property_repository.get_property(process, 'payload')
    if payload:
        payload = json.loads(payload)

    datasource = process_action_property_repository.get_property(
        process_action, 'datasource')
    method_name = process_action_property_repository.get_property(
        process_action, 'method_name')
    params = process_action_property_repository.get_property(
        process_action, 'params')
    test_result_params = process_action_property_repository.get_property(
        process_action, 'test_result_params')
    if params:
        params = json.loads(params)
        params = ExpressionParser.parse(params, locals())

    oracle_ds = DatasourceBuilder.find(connector, datasource)
    oracle_c = ConnectorFactory.create_connector(oracle_ds)
    scheduler = OracleScheduler(oracle_c)

    # schedule the job under a unique identifier so its result can be
    # polled later
    identifier = job_name + '_' + Strings.identifier(10)
    message = dict()
    message['method_name'] = method_name
    message['identifier'] = identifier
    message['params'] = params
    if test_result_params:
        message['test_result_params'] = json.loads(test_result_params)
    scheduler.create_job(message)
    logger.info(job_name, json.dumps(message))

    process_property_repository.set_property(process, 'identifier',
                                             identifier)
    oracle_c.close()
def action(self, connector, message):
    try:
        process_action = json.loads(message['payload'])
        process_action_repository = ProcessActionRepository(connector)

        # set the state of the process action
        process_action['state'] = 'PROCESSING'
        process_action['message'] = ''
        process_action['backtrace'] = ''
        process_action_repository.state(process_action)

        process_action_property_repository = ProcessActionPropertyRepository(
            connector)
        action = ClassLoader.find(
            process_action_property_repository.get_property(
                process_action, 'action'))
        payload = action.execute(connector, process_action)
        if payload:
            # the action asked to be retried: re-enqueue it after its
            # configured sleep
            process_repository = ProcessRepository(connector)
            process = process_repository.find_by_id(payload['pcs_id'])
            sleep = float(
                process_action_property_repository.get_property(
                    process_action, 'sleep'))
            now = datetime.now()
            delay = now + timedelta(seconds=int(sleep))
            delay = delay.strftime('%Y-%m-%d %H:%M:%S')
            Message.enqueue_wait(
                connector, process['queue'], delay, payload, 'message',
                'paprika.consumers.ProcessAction.ProcessAction')
        else:
            process_repository = ProcessRepository(connector)
            process = process_repository.find_by_id(process_action['pcs_id'])
            process_definition_action_repository = ProcessDefinitionActionRepository(
                connector)
            process_definition_action = process_definition_action_repository.find_next_by_process_action(
                process_action, process)
            if process_definition_action:
                # enqueue the next action of the process definition
                next_process_action = dict()
                next_process_action['job_name'] = process_action['job_name']
                next_process_action['pcs_id'] = process_action['pcs_id']
                next_process_action['dan_id'] = process_definition_action['id']
                next_process_action['name'] = process_definition_action['name']
                next_process_action['state'] = 'READY'
                next_process_action = process_action_repository.insert(
                    next_process_action)

                process_definition_action_property_repository = ProcessDefinitionActionPropertyRepository(
                    connector)
                process_definition_action_properties = process_definition_action_property_repository.list_by_process_definition_action(
                    process_definition_action)
                for process_definition_action_property in process_definition_action_properties:
                    process_action_property_repository.set_property(
                        next_process_action,
                        process_definition_action_property['name'],
                        process_definition_action_property['value'])

                payload = next_process_action
                Message.enqueue(
                    connector, process['queue'], payload, 'message',
                    'paprika.consumers.ProcessAction.ProcessAction')
            else:
                # no next action: finish the process
                payload = process_repository.find_by_id(
                    process_action['pcs_id'])
                Message.enqueue(
                    connector, process['queue'], payload, 'message',
                    'paprika.consumers.ProcessFinish.ProcessFinish')

        process_action['state'] = 'PROCESSED'
        process_action['message'] = ''
        process_action['backtrace'] = ''
        process_action_repository.state(process_action)
    except:
        # set the process_action to failed
        process_action = json.loads(message['payload'])
        process_action_repository = ProcessActionRepository(connector)
        result = Traceback.build()
        result['id'] = process_action['id']
        result['state'] = 'FAILED'
        process_action_repository.state(result)

        # set the process to failed
        process_repository = ProcessRepository(connector)
        process = process_repository.find_by_id(process_action['pcs_id'])
        result['id'] = process['id']
        result['state'] = 'FAILED'
        process_repository.state(result)

        # log a fatal of the process
        logger = Logger(connector, self)
        logger.fatal(process['job_name'], result['message'],
                     result['backtrace'])

        # start the exception process if present
        if process['e_pdn_id']:
            e_process = ProcessService.create_process(
                connector, process['e_pdn_id'], process['job_name'])
            process_property_repository = ProcessPropertyRepository(connector)
            process_property_repository.copy(process, e_process)
            process_property_repository.set_property(
                e_process, 'message', result['message'])
            process_property_repository.set_property(
                e_process, 'backtrace', result['backtrace'])
            ProcessService.execute_process(connector, e_process)
def execute(self, connector, process_action):
    job_name = process_action['job_name']
    logger = Logger(connector, self)

    process_action_property_repository = ProcessActionPropertyRepository(
        connector)

    # retrieve the properties
    uuid = process_action_property_repository.get_property(
        process_action, 'uuid')
    page_size = process_action_property_repository.get_property(
        process_action, 'page_size')
    datasource = process_action_property_repository.get_property(
        process_action, 'datasource')

    # create the scraper and request the quickmail resource
    scraper = Clang()
    response = scraper.mailing_get_quickmails(uuid)
    resource_id = response.msg

    # poll until the resource is ready
    resource_status = 'BUSY'
    while resource_status != 'READY':
        response = scraper.resource_get_by_id(uuid, resource_id)
        resource_status = response.msg.status
        time.sleep(1)

    # get the mailing ids
    resource_size = response.msg.size
    mailing_ids = []
    for i in xrange(0, resource_size):
        response = scraper.mailing_set_get_mailing_ids(
            uuid, resource_id, i, 2)
        mailing_ids.append(response.msg.integer[0])
    scraper.resource_free(uuid, resource_id)

    # get the summaries
    mail_summaries = []
    for mailing_id in mailing_ids:
        response = scraper.mailing_get_by_id(uuid, mailing_id)
        campaign_name = response.msg.campaignName
        content_name = response.msg.contentName
        started_at = response.msg.startedAt
        ended_at = response.msg.endedAt
        description = response.msg.description
        received = response.msg.received
        unique_clicks = response.msg.uniqueClicks
        unique_opens = response.msg.uniqueOpens
        bounces = response.msg.bounces

        message = dict()
        message['mailing_id'] = mailing_id
        message['campaign_name'] = Strings.encode(campaign_name, 'utf-8')
        message['started_at'] = Strings.encode(started_at, 'utf-8')
        message['ended_at'] = Strings.encode(ended_at, 'utf-8')
        message['content_name'] = Strings.encode(content_name, 'utf-8')
        message['description'] = Strings.encode(description, 'utf-8')
        message['received'] = received
        message['unique_clicks'] = unique_clicks
        message['unique_opens'] = unique_opens
        message['bounces'] = bounces
        message['cor'] = MathHelper.divide(unique_opens, received) * 100.0
        message['cto'] = MathHelper.divide(unique_clicks, unique_opens) * 100.0
        message['ctr'] = MathHelper.divide(unique_clicks, received) * 100.0
        mail_summaries.append(message)

    # delete all the mailings and insert the new ones
    mi_ds = DatasourceBuilder.find(connector, datasource)
    clang_mail_summary_repository = ClangMailSummaryRepository(mi_ds)
    clang_mail_summary_repository.clean()
    for summary in mail_summaries:
        clang_mail_summary_repository.insert(summary)

    logger.info(job_name, 'job_name: ' + job_name)
def execute(self, connector, process_action):
    logger = Logger(connector, self)
    job_name = process_action['job_name']

    process_repository = ProcessRepository(connector)
    process_property_repository = ProcessPropertyRepository(connector)
    process_action_property_repository = ProcessActionPropertyRepository(
        connector)

    print(json.dumps(process_action))

    # retrieve the process properties
    process = process_repository.find_by_id(process_action['pcs_id'])
    print(json.dumps(process))

    # retrieve the payload if present
    payload = JsonExt.loads(
        process_property_repository.get_property(process, 'payload'))
    mailjob_id = process_property_repository.get_property(
        process, 'mailjob_id')

    auth_info = json.loads(
        process_action_property_repository.get_property(
            process_action, 'auth_info'))
    headers = json.loads(
        process_action_property_repository.get_property(
            process_action, 'headers'))
    certificate = JsonExt.loads(
        process_action_property_repository.get_property(
            process_action, 'certificate'))
    proxies = JsonExt.loads(
        process_action_property_repository.get_property(
            process_action, 'proxies'))
    url = process_action_property_repository.get_property(
        process_action, 'url')

    message = {"auth_info": auth_info, "mailjob_id": mailjob_id}
    logger.info(job_name, json.dumps(message))

    response = RestRequest.post(headers, url, message, certificate, proxies)
    logger.info(
        job_name, "status_code: " + str(response.status_code) + ", reason: " +
        response.reason + ", content: " + response.content)
    if response.status_code != 200:
        message = ("status_code: " + str(response.status_code) +
                   ", reason: " + response.reason +
                   ", content: " + response.content)
        raise ProcessException(message)

    content = json.loads(response.content)
    status = content['status']
    numberOfSkipped = content['numberOfSkipped']
    if status in ['ENDED_WITH_ERRORS', 'FAILED']:
        message = "status: " + status + ", reason: " + content['error']
        raise ProcessException(message)
    if numberOfSkipped != 0:
        message = ("numberOfSkipped: " + str(numberOfSkipped) +
                   ", reason: the email was not sent but skipped. "
                   "Possibly an empty email address?")
        raise ProcessException(message)

    # the mail job has not ended yet: return the action so the consumer
    # re-enqueues it after its configured sleep
    if status != 'ENDED':
        return process_action
def execute(self, process_action):
    job_name = process_action['job_name']
    paprika_ds = DatasourceBuilder.build('paprika-ds.json')
    logger = Logger(self)

    file_repository = FileRepository(paprika_ds)
    process_repository = ProcessRepository(paprika_ds)
    process_property_repository = ProcessPropertyRepository(paprika_ds)
    process_action_property_repository = ProcessActionPropertyRepository(
        paprika_ds)

    # retrieve the file properties
    process = process_repository.find_by_id(process_action['pcs_id'])
    file_id = process_property_repository.get_property(process, 'file_id')
    file = file_repository.find_by_id(file_id)
    datasource = process_action_property_repository.get_property(
        process_action, 'datasource')
    drop_location = process_action_property_repository.get_property(
        process_action, 'drop_location')
    filename = drop_location + '/' + file['filename']

    # normalize the file and convert it to utf8
    CsvFile.normalize(filename, filename + '.norm')
    source_encoding = CsvFile.guess_encoding(filename + '.norm')
    CsvFile.iconv(filename + '.norm', source_encoding, filename + '.utf8',
                  'utf8')
    delimiter = CsvFile.guess_delimiter(filename + '.utf8')

    header = None  # assumed: no header supplied upstream, so read it below
    skip_header = False
    if not header:
        header = CsvFile.read_header(filename + '.utf8', delimiter)
        skip_header = True

    ds = DatasourceBuilder.find(datasource)
    connector = ConnectorFactory.create_connector(ds)

    # register the staged file in the target datasource
    file_repository = FileRepository(ds)
    file = dict()
    file['pcs_id'] = 0
    file['job_name'] = job_name
    file['filename'] = filename
    file['state'] = 'READY'
    file['rule'] = ''
    file['hashcode'] = ''
    file['pickup_location'] = ''
    file['path'] = ''
    file['filesize'] = 0
    file['pattern'] = ''
    file = file_repository.insert(file)

    statics = dict()
    statics['job_name'] = job_name
    statics['fle_id'] = file['id']

    # stage the utf8 file into the tripolis_mailings table
    mapping = 'id.eeid;notification.action;job_name.job_name;fle_id.fle_id'
    stager = Stager(ds)
    stager.stage(filename + '.utf8', header, delimiter, 'tripolis_mailings',
                 mapping, skip_header, statics)
    stager.close()

    logger.info(job_name,
                'job_name: ' + job_name + ', file: ' + filename + ' staged')
def execute(self, connector, process_action):
    job_name = process_action['job_name']
    logger = Logger(connector, self)

    properties = PropertyRepository(connector)
    process_repository = ProcessRepository(connector)
    process_property_repository = ProcessPropertyRepository(connector)
    process_action_property_repository = ProcessActionPropertyRepository(
        connector)
    file_repository = FileRepository(connector)

    process = process_repository.find_by_id(process_action['pcs_id'])
    file_id = process_property_repository.get_property(process, 'file_id')
    file = file_repository.find_by_id(file_id)
    tmp = properties.get_property('scanner.tmp')

    # retrieve the file properties
    pickup_location = process_property_repository.get_property(
        process, 'pickup_location')
    pickup_filename = file['filename']
    pickup_path = process_property_repository.get_property(process, 'path')
    drop_location = process_action_property_repository.get_property(
        process_action, 'drop_location')

    # copy the file from the pickup folder to the tmp folder
    logger.info(job_name, pickup_location + os.sep + pickup_filename)
    pickup_client = VfsFactory.create_client(pickup_location)
    pickup_client.connect()
    pickup_path = pickup_path + os.sep + pickup_filename
    tmp_path = tmp + os.sep + pickup_filename
    pickup_client.get(pickup_path, tmp_path)
    pickup_client.close()

    # unzip the file and copy the unzipped files to the drop folder
    logger.info(job_name,
                'unzip ' + pickup_location + os.sep + pickup_filename)
    zip_client = VfsFactory.create_client("zip://" + tmp_path)
    zip_client.connect()
    zip_files = zip_client.list('')
    for zip_file in zip_files:
        logger.info(job_name,
                    'unzipping ' + zip_file['path'] + zip_file['filename'])
        unzipped_path = tmp + os.sep + zip_file['path'] + zip_file['filename']
        zip_client.get(zip_file['path'] + zip_file['filename'], unzipped_path)

        # copy the unzipped file from the tmp folder to the drop folder
        logger.info(job_name,
                    'copying ' + unzipped_path + ' to ' + drop_location)
        drop_client = VfsFactory.create_client(drop_location)
        drop_client.connect()
        drop_path = drop_client.get_path() + os.sep + zip_file['filename']
        drop_client.put(unzipped_path, drop_path)
        drop_client.close()

    logger.info(job_name,
                pickup_location + os.sep + pickup_filename + " unzipped")