def output(self):
    """Wire the component under test between its test input and output,
    run the job, then compare the captured output against the expected data.

    Returns ``self.datas`` when no expected output was supplied; otherwise
    returns ``None`` after all checks pass.

    Raises:
        etl_test_exception: on missing output channel, or any mismatch in
            record count, key count, key order, or values.
    """
    # The transitions only need to be constructed; holding references is enough.
    tran = transition(self.test_input, self.component)
    tran1 = transition(self.component, self.test_output)
    job1 = job([self.test_output])
    job1.run()
    if self.output_channel not in self.datas:
        raise etl_test_exception(
            'expected output channel does not has actual data.')
    act_datas = self.datas[self.output_channel]
    if not self.output_data:
        # Nothing to compare against: hand back everything that was captured.
        return self.datas
    if len(act_datas) != len(self.output_data):
        raise etl_test_exception(
            'lengths of actual output and expected output are different')
    for exp_r, act_r in zip(self.output_data, act_datas):
        # list() so the key/value views are positionally comparable on both
        # Python 2 and 3 — the original indexed dict.keys() directly, which
        # raises TypeError on Python 3 where keys() is a view, not a list.
        exp_keys = list(exp_r.keys())
        act_keys = list(act_r.keys())
        if len(exp_keys) != len(act_keys):
            raise etl_test_exception(
                'key length of actual output and expected output are different')
        for exp_key, act_key in zip(exp_keys, act_keys):
            if exp_key != act_key:
                raise etl_test_exception(
                    'keys of actual output and expected output are different.')
        # Key counts are equal at this point, so the value lists align.
        for exp_value, act_value in zip(list(exp_r.values()), list(act_r.values())):
            if exp_value != act_value:
                raise etl_test_exception(
                    'values of actual output and expected output are different')
def create_instance(self, cr, uid, id, context=None, data=None):
    """Build an ``etl.job`` instance from the job record ``id``.

    Collects the record's components plus every component reachable through
    their in/out transitions (two traversal passes, matching the original
    logic), de-duplicates the ids, instantiates each component and wraps the
    instances in an ``etl.job`` named after the record.

    :param cr: database cursor
    :param uid: user id performing the operation
    :param id: id of the job record to instantiate
    :param context: optional context dict passed to ``get_instance``
    :param data: optional data dict passed to ``get_instance``
    :return: a runnable ``etl.job``
    """
    # Avoid the shared-mutable-default-argument pitfall: the original
    # ``context={}, data={}`` defaults are shared across calls.
    if context is None:
        context = {}
    if data is None:
        data = {}
    obj_component = self.pool.get('etl.component')
    res = self.read(cr, uid, id, ['component_ids', 'name'])
    # First pass: the job's own components and the endpoints of their
    # transitions.
    components = []
    for comp in obj_component.browse(cr, uid, res['component_ids']):
        components.append(comp)
        for trans in comp.trans_in_ids + comp.trans_out_ids:
            components.append(trans.source_component_id)
            components.append(trans.destination_component_id)
    # Second pass: collect ids, following one more level of transitions
    # (the endpoints gathered above contribute their own neighbours too).
    # A set replaces the original list + list(set(...)) de-duplication.
    comps = set()
    for comp in components:
        comps.add(comp.id)
        for trans in comp.trans_in_ids + comp.trans_out_ids:
            comps.add(trans.source_component_id.id)
            comps.add(trans.destination_component_id.id)
    component_instance = [
        obj_component.get_instance(cr, uid, cmp_id, context, data)
        for cmp_id in comps]
    return etl.job(component_instance, res['name'])
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import etl

# Inner pipeline: logger -> sort -> sleep -> logger, wrapped below as a subjob.
sorter = etl.component.transform.sort('name')
read_logger = etl.component.transform.logger(name='Read Partner File')
sorted_logger = etl.component.transform.logger(name='After Sort')
pause = etl.component.control.sleep()
link_a = etl.transition(read_logger, sorter)
link_b = etl.transition(sorter, pause)
link_c = etl.transition(pause, sorted_logger)
inner_job = etl.job([read_logger, sorter, pause, sorted_logger])

# Outer pipeline: CSV reader -> subjob(inner pipeline) -> CSV writer.
partner_file = etl.connector.localfile('input/partner.csv')
partner_reader = etl.component.input.csv_in(partner_file, name='Partner Data')
result_file = etl.connector.localfile('output/subjob_partner.csv', 'w+')
partner_writer = etl.component.output.csv_out(result_file,
                                              name='Partner OUT Data1')
wrapped = etl.component.transform.subjob(inner_job)
link_a = etl.transition(partner_reader, wrapped)
link_b = etl.transition(wrapped, partner_writer)
        'id': 'category_id_main',
        'name': 'category_name'
    })
# Partner writer: 'category_id:id' links each partner to the category
# created above through its external id.
oo_out_partner = etl.component.output.openobject_out(
    ooconnector, 'res.partner', {
        'id': 'id',
        'name': 'name',
        'category_id:id': 'category_id_main',
        'ref': 'ref'
    })
# Flow: csv_in1 -> log1 -> csv_in2 -> map -> (category writer, partner writer)
tran = etl.transition(csv_in1, log1)
tran = etl.transition(log1, csv_in2)
#tran=etl.transition(log1,csv_in2)
tran = etl.transition(csv_in2, map)
tran = etl.transition(map, oo_out_cat)
tran = etl.transition(map, oo_out_partner)
# Leftover alternative wirings from a related facebook example, kept disabled.
#tran=etl.transition(facebook_in_events, map2)
##,channel_source='friends'
#tran=etl.transition(facebook_in_friends, log)
#tran=etl.transition(map, oo_out_partner)
#tran=etl.transition(map, oo_out_address)
#tran=etl.transition(map, oo_out_address1)
#tran=etl.transition(map1, oo_out_event)
#tran=etl.transition(map1, oo_out_event1)
#tran=etl.transition(oo_out_address,log1)
job1 = etl.job([map, log1, oo_out_cat, oo_out_partner])
job1.run()
#! /usr/bin/python
import etl

# Read partners, sort them by name, write the sorted rows and log them.
reader = etl.operator.etl_csv_input('data/partner.csv', is_start=True)
writer = etl.operator.etl_csv_output('output/partner.csv')
name_sort = etl.operator.etl_operator_sort('name')
logger = etl.operator.etl_operator_log(name='PartnerLogger')

etl.transition(reader, name_sort)
etl.transition(name_sort, writer)
etl.transition(name_sort, logger)

etl.job([reader, writer]).run()
    'admin', con_type='xmlrpc')
# Writer that pushes id/name rows into res.partner on the OpenObject server.
oo_out_partner = etl.component.output.openobject_out(ooconnector,
                                                     'res.partner', {
                                                         'id': 'id',
                                                         'name': 'name'
                                                     })
xmlrpc_conn = etl.connector.xmlrpc_connector('localhost', 5000)
xmlrpc_in = etl.component.input.xmlrpc_in(xmlrpc_conn)
log2 = etl.component.transform.logger(name='Partner File')
# Job 2: records received over XML-RPC are logged then written to OpenObject.
tran = etl.transition(log2, xmlrpc_in)
tran = etl.transition(xmlrpc_in, oo_out_partner)
#tran=etl.transition(xmlrpc_in,)
job2 = etl.job([oo_out_partner])
# Job 1: CSV rows are logged then sent out over the same XML-RPC connection.
fileconnector_partner = etl.connector.localfile('input/partner.csv')
csv_in1 = etl.component.input.csv_in(fileconnector_partner,
                                     name='Partner Data')
log1 = etl.component.transform.logger(name='Read Partner File')
xmlrpc_out = etl.component.output.xmlrpc_out(xmlrpc_conn)
tran = etl.transition(csv_in1, log1)
tran = etl.transition(log1, xmlrpc_out)
job1 = etl.job([xmlrpc_out])
# The sender job runs first, then the receiver job consumes what was sent.
job1.run()
job2.run()
# Partner writer: one res.partner record per mapped vCard row.
partner_writer = etl.component.output.openobject_out(ooconnector, 'res.partner', {
    'id': 'id',
    'name': 'name'
})
# Address writer: res.partner.address rows linked back to the partner above
# through the 'partner_id:id' external-id reference.
address_writer = etl.component.output.openobject_out(
    ooconnector, 'res.partner.address', {
        'name': 'contact_name',
        'id': 'address_id',
        'partner_id:id': 'id',
        'email': 'email'
    })
flow_logger = etl.component.transform.logger(name='vCard->Oo')

link = etl.transition(vcard_in1, map)
link = etl.transition(map, flow_logger)
link = etl.transition(flow_logger, partner_writer)
link = etl.transition(partner_writer, address_writer)

# Tally how many records pass through each stage, on named channels.
count_logger = etl.component.transform.logger(name='Count')
tally = etl.component.control.data_count()
link = etl.transition(map, tally, channel_destination='gmail')
link = etl.transition(partner_writer, tally, channel_destination='partner')
link = etl.transition(address_writer, tally, channel_destination='address')
link = etl.transition(tally, count_logger)

main_job = etl.job([vcard_in1, partner_writer, address_writer, count_logger, tally])
main_job.run()
    'test', 'admin', 'admin', con_type='xmlrpc')
# Facebook connection; the account string is a masked placeholder.
facebook_conn = etl.connector.facebook_connector('http://facebook.com',
                                                 '*****@*****.**')
facebook_out_friends = etl.component.output.facebook_out(facebook_conn,
                                                         'set_events',
                                                         fields=['name'])
# Map each partner row to the fields pushed to Facebook; the values are
# expressions evaluated per row with 'main' bound to the incoming record.
map = etl.component.transform.map({
    'main': {
        'id': "tools.uniq_id(main.get('name', 'anonymous'), prefix='partner1_')",
        'name': " 'ERPOPEM'",
    }
})
# Read id/name of every res.partner from the OpenObject server.
oo_in_partner = etl.component.input.openobject_in(ooconnector,
                                                  'res.partner',
                                                  fields=['id', 'name']
                                                  # {'id':'id','name':'name'}
                                                  )
tran = etl.transition(oo_in_partner, map)
tran = etl.transition(map, facebook_out_friends)
job1 = etl.job([facebook_out_friends])
job1.run()
#! /usr/bin/python
import etl

# Merge two partner CSV sources into one stream, logging input and result.
source_a = etl.operator.etl_csv_input('data/partner3.csv', is_start=True)
source_b = etl.operator.etl_csv_input('intermediate/add.csv', is_start=True)
merger = etl.operator.etl_merge()
original_log = etl.operator.etl_operator_log_bloc(name="Original Data")
merged_log = etl.operator.etl_operator_log_bloc(name="Final Data")

etl.transition(source_a, merger)
etl.transition(source_a, original_log)
etl.transition(source_b, merger)
etl.transition(merger, merged_log)

etl.job([source_a, source_b, merger]).run()
    {'id': 1, 'name': 'Fabien', 'country_id': 3},
    {'id': 2, 'name': 'Luc', 'country_id': 3},
    {'id': 3, 'name': 'Henry', 'country_id': 1}
])
# Static country rows used as the lookup side of the join.
input_cty = etl.component.input.data([
    {'id': 1, 'name': 'Belgium'},
    {'id': 3, 'name': 'France'}
])
# Mapping expressions evaluated per row on the 'main' channel;
# 'country_var' is injected by preprocess() below.
map_keys = {'main': {
    'id': "main['id']",
    'name': "main['name'].upper()",
    'country': "country_var[main['country_id']]['name']"
}}
def preprocess(self, channels):
    """Index the rows of the 'country' channel by id and expose the
    resulting dict to the mapping expressions as ``country_var``."""
    cdict = {}
    for trans in channels['country']:
        for d in trans:
            cdict[d['id']] = d
    return {'country_var': cdict}
map = etl.component.transform.map(map_keys, preprocess)
log = etl.component.transform.logger(name='Read Partner File')
# Partners feed 'main', countries feed the 'country' lookup channel.
tran = etl.transition(input_part, map, channel_destination='main')
tran1 = etl.transition(input_cty, map, channel_destination='country')
tran4 = etl.transition(map, log)
job = etl.job([log])
job.run()
    ooconnector,
    'res.partner.address',
    # Home address fields; 'partner_id:id' links back via the partner's
    # external id.
    {'name': 'contact_name', 'id': 'home_address_id', 'partner_id:id': 'id',
     'city': 'home_city', 'state_id': 'home_state_id', 'zip': 'home_zip',
     'country_id': 'home_country_id', 'type': 'home_type'
    })
# Events are written into crm.case records.
oo_out_event = etl.component.output.openobject_out(
    ooconnector,
    'crm.case',
    {'id': 'event_id', 'name': 'event_name', 'section_id': 'section_id',
     'partner_id': 'partner_id'
    })
# Alternative event.event writer, kept disabled.
#oo_out_event1= etl.component.output.openobject_out(
#    ooconnector,
#    'event.event',
#    { 'id': 'event_id', 'name':'event_name', 'section_id':'section_id', 'date_begin':'date_begin', 'date_end':'date_end', 'product_id':'product_id'
#    })
# Friends go through `map` to partner/address writers; events through `map1`.
tran = etl.transition(facebook_in_friends, map)
tran = etl.transition(facebook_in_events, map1)
#tran=etl.transition(facebook_in_events, map2)
#,channel_source='friends'
tran = etl.transition(facebook_in_friends, log)
tran = etl.transition(map, oo_out_partner)
tran = etl.transition(map, oo_out_address)
tran = etl.transition(map, oo_out_address1)
tran = etl.transition(map1, oo_out_event)
#tran=etl.transition(map1, oo_out_event1)
tran = etl.transition(oo_out_address, log1)
job1 = etl.job([facebook_in_friends, facebook_in_events, map, map1, log1,
                oo_out_partner, oo_out_address, oo_out_address1, oo_out_event,
                log])
job1.run()
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import etl

# Send CSV rows to an XML-RPC endpoint and log what comes back out.
rpc_conn = etl.connector.xmlrpc_connector('localhost', 5000)
rpc_writer = etl.component.output.xmlrpc_out(rpc_conn)
partner_file = etl.connector.localfile('input/partner.csv')
partner_reader = etl.component.input.csv_in(partner_file, name='Partner Data')
out_logger = etl.component.transform.logger(name='File')

link = etl.transition(partner_reader, rpc_writer)
link = etl.transition(rpc_writer, out_logger)

etl.job([out_logger]).run()
        'Is_NULL': False
    },
    'tel': {
        'type': 'unicode',
        'Is_NULL': False
    },
}
# Validate each row against the schema; failing rows leave the validator on
# dedicated channels — one logger per failure kind, one for the clean 'main'
# flow.
schema_valid = etl.component.transform.schema_validator(schema)
log1 = etl.component.transform.logger(name='invalid_field')
log2 = etl.component.transform.logger(name='invalid_name')
log3 = etl.component.transform.logger(name='invalid_type')
log4 = etl.component.transform.logger(name='invalid_key')
log5 = etl.component.transform.logger(name='invalid_null')
log6 = etl.component.transform.logger(name='invalid_size')
log7 = etl.component.transform.logger(name='invalid_format')
log8 = etl.component.transform.logger(name='main')
tran = etl.transition(csv_in1, schema_valid)
tran1 = etl.transition(schema_valid, log1, channel_source='invalid_field')
tran2 = etl.transition(schema_valid, log2, channel_source='invalid_name')
tran3 = etl.transition(schema_valid, log3, channel_source='invalid_type')
tran4 = etl.transition(schema_valid, log4, channel_source='invalid_key')
tran5 = etl.transition(schema_valid, log5, channel_source='invalid_null')
tran6 = etl.transition(schema_valid, log6, channel_source='invalid_size')
tran7 = etl.transition(schema_valid, log7, channel_source='invalid_format')
tran8 = etl.transition(schema_valid, log8, channel_source='main')
job1 = etl.job([log1, log2, log3, log4, log5, log6, log7, log8])
job1.run()
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import sys
sys.path.append('..')
import etl

# Minimal smoke test: read a vCard file and dump each record to the logger.
vcard_file = etl.connector.localfile('input/input.vcf')
vcard_reader = etl.component.input.vcard_in(vcard_file)
vcard_logger = etl.component.transform.logger(name='Read Vcard File')

link = etl.transition(vcard_reader, vcard_logger)
etl.job([vcard_logger]).run()
# Sort two partner CSV inputs together, pausing before the final logger,
# then run a copy of the assembled job.
partner_conn = etl.connector.localfile('input/partner.csv')
partner1_conn = etl.connector.localfile('input/partner1.csv')
partner3_conn = etl.connector.localfile('input/partner3.csv')
output_conn = etl.connector.localfile('output/test1_partner.csv', 'w+')

reader_a = etl.component.input.csv_in(partner_conn, name='Partner Data')
reader_b = etl.component.input.csv_in(partner1_conn, name='Partner Data1')
writer = etl.component.output.csv_out(output_conn, name='Partner OUT Data1')
name_sort = etl.component.transform.sort('name')
read_log = etl.component.transform.logger(name='Read Partner File')
sorted_log = etl.component.transform.logger(name='After Sort')
pause = etl.component.control.sleep()

link1 = etl.transition(reader_a, name_sort)
link2 = etl.transition(reader_b, name_sort)
link3 = etl.transition(name_sort, pause)
link4 = etl.transition(pause, sorted_log)
# optional, disabled: etl.etl.transition(pause, read_log, channel_source="statistics")
link5 = etl.transition(name_sort, writer)

base_job = etl.job([reader_a, reader_b, writer, name_sort, read_log,
                    sorted_log, pause])
# Run a copy so the original wiring stays reusable.
base_job.copy().run()
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import etl

# Read vCards, project each record onto three fields, push the whole block
# over XML-RPC and log the result.
rpc_conn = etl.connector.xmlrpc_connector('localhost', 5000)
rpc_block_writer = etl.component.output.xmlrpc_out_block(rpc_conn)
contacts_file = etl.connector.localfile('input/contacts.vcf')
contacts_reader = etl.component.input.vcard_in(contacts_file)
field_map = etl.component.transform.map({
    'main': {
        'org': "main.get('org',['anonymous'])",
        'fn': "main.get('fn','anonymous')",
        'email': "main.get('email','')"
    }
})
trace_logger = etl.component.transform.logger(name='File')

link = etl.transition(contacts_reader, field_map)
link = etl.transition(field_map, rpc_block_writer)
link = etl.transition(rpc_block_writer, trace_logger)

etl.job([contacts_reader, field_map, rpc_block_writer, trace_logger]).run()
    'main': {
        'id': "tools.uniq_id(main.get('name', 'anonymous'), prefix='event_')",
        'name': "main.get('name','anonymous')",
        # Earlier date-parsing variants, kept disabled:
        # 'date_begin':"main.get(datetime.datetime.fromtimestamp(time.mktime(time.strptime('date_begin','%Y-%m-%dT%H:%M:%S.000Z'))).strftime('%Y-%m-%d %H:%M:%S')) ",
        # 'date_end':"main.get(datetime.datetime.fromtimestamp(time.mktime(time.strptime('date_end','%Y-%m-%dT%H:%M:%S.000Z'))).strftime('%Y-%m-%d %H:%M:%S'))",
        'date_begin': "main.get('date_begin') ",
        'date_end': "main.get('date_end')",
        'product_id': "main.get('product_id', 'Advance Product')",
    }
})
# Target OpenERP server; con_type selects the XML-RPC transport.
ooconnector = etl.connector.openobject_connector('http://localhost:8069',
                                                 'trunk_mra', 'admin', 'admin',
                                                 con_type='xmlrpc')
# Writer that stores each mapped calendar entry as an event.event record.
oo_out_event = etl.component.output.openobject_out(
    ooconnector, 'event.event', {
        'id': 'id',
        'name': 'name',
        'date_begin': 'date_begin',
        'date_end': 'date_end',
        'product_id': 'product_id'
    })
tran = etl.transition(gcalendar_in_events, map)
tran = etl.transition(map, oo_out_event)
job1 = etl.job([gcalendar_in_events, oo_out_event])
job1.run()
import etl

# Diff two snapshots of the partner CSV on 'id' and route each change kind
# to its own logger; added rows are also written out for later merging.
old_rows = etl.operator.etl_csv_input('data/partner.csv', is_start=True)
new_rows = etl.operator.etl_csv_input('data/partner2.csv', is_start=True)
differ = etl.operator.etl_operator_diff(['id'])
old_bloc_log = etl.operator.etl_operator_log_bloc(name="Original Data")
new_bloc_log = etl.operator.etl_operator_log_bloc(name="Modified Data")
same_log = etl.operator.etl_operator_log(name="Log Same")
add_log = etl.operator.etl_operator_log(name="Log Add")
remove_log = etl.operator.etl_operator_log(name="Log Remove")
update_log = etl.operator.etl_operator_log(name="Log Update")
add_writer = etl.operator.etl_csv_output('intermediate/add.csv')

etl.transition(old_rows, old_bloc_log)
etl.transition(new_rows, new_bloc_log)
etl.transition(old_rows, differ, 'original')
etl.transition(new_rows, differ, 'modified')
etl.transition(differ, same_log, channel_source="same")
etl.transition(differ, remove_log, channel_source="remove")
etl.transition(differ, add_log, channel_source="add")
etl.transition(differ, add_writer, channel_source="add")
etl.transition(differ, update_log, channel_source="update")

etl.job([old_rows, new_rows, differ]).run()
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import sys
sys.path.append('..')
import etl
from etl import transformer

# Coerce each CSV column to its declared type while reading the invoices.
invoice_file = etl.connector.localfile('input/invoice.csv')
column_types = transformer({
    'id': transformer.LONG,
    'name': transformer.STRING,
    'invoice_date': transformer.DATE,
    'invoice_amount': transformer.FLOAT,
    'is_paid': transformer.BOOLEAN
})
invoice_reader = etl.component.input.csv_in(fileconnector=invoice_file,
                                            transformer=column_types)
invoice_logger = etl.component.transform.logger(name='Read Invoice File')

link = etl.transition(invoice_reader, invoice_logger)
etl.job([invoice_reader, invoice_logger]).run()
        'user_id': "tools.uniq_id(main.get('login', 'anonymous'), prefix='user_')",
        'user_name': "main.get('name', 'anonymous')",
        'login': "******",
        'context_lang': "main.get('context_lang','en_US')",
        'groups_id:id': "main.get('groups_id:id','False')",
    }
}
map = etl.component.transform.map(map_keys)
# Postgres lookup used to enrich each row: resolves the external id created
# above to its database res_id.
# NOTE(review): the query interpolates 'user_id' via %s — assumes sql_join
# escapes the value; verify before using with untrusted input.
sqlconnector_partner = etl.connector.sql_connector('localhost', 5432, 'etl',
                                                   'qdp', 'qdp')
sql_in1 = etl.component.transform.sql_join(
    sqlconnector_partner,
    "select res_id from ir_model_data where name = '%s'",
    'user_id',
    outputkey='unique_res_id')
#definition of the transitions
tran0 = etl.transition(csv_in_groups, oo_out_groups)
tran1 = etl.transition(csv_in_users, map)
tran2 = etl.transition(map, oo_out_users)
tran3 = etl.transition(oo_out_users, sql_in1)
tran4 = etl.transition(sql_in1, log1)
job1 = etl.job([oo_out_groups, oo_out_users, log1])
#print job1
job1.run()
#print job1.get_statitic_info()
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import etl

open_conn = etl.connector.openobject_connector('http://localhost:8069',
                                               'trunk', 'admin', 'a',
                                               con_type='xmlrpc')
# Derive partner/contact fields (with stable external ids) from each record.
field_map = etl.component.transform.map({'main': {
    'id': "tools.uniq_id(main.get('org', 'anonymous'), prefix='partner_')",
    'address_id': "tools.uniq_id(main.get('fn', 'anonymous'), prefix='contact_')",
    'name': "main.get('org',['anonymous'])[0]",
    'contact_name': "main.get('fn','anonymous')",
    'email': "main.get('email','').upper()"
}})
partner_writer = etl.component.output.openobject_out_create(
    open_conn,
    'res.partner',
    {'name': 'name'}
)
link = etl.transition(field_map, partner_writer)
inner_job = etl.job([field_map, partner_writer])

# Expose the inner job over XML-RPC: the block input drives it remotely.
rpc_conn = etl.connector.xmlrpc_connector('localhost', 5000)
rpc_reader = etl.component.input.xmlrpc_in_block(rpc_conn, inner_job)
outer_job = etl.job([rpc_reader])
outer_job.run()
# Log both inputs, feed them to the diff, and wrap that wiring as a subjob
# whose output channels are consumed below.
original_bloc_log = etl.component.transform.logger_bloc(name="Original Data")
modified_bloc_log = etl.component.transform.logger_bloc(name="Modified Data")
same_log = etl.component.transform.logger(name="Log Same")
add_log = etl.component.transform.logger(name="Log Add")
remove_log = etl.component.transform.logger(name="Log Remove")
update_log = etl.component.transform.logger(name="Log Update")
add_file = etl.connector.localfile('output/subjob2_add.csv', 'w+')
add_writer = etl.component.output.csv_out(add_file, name='Output')

etl.transition(in1, original_bloc_log)
etl.transition(in2, modified_bloc_log)
etl.transition(in1, diff1, channel_destination='original')
etl.transition(in2, diff1, channel_destination='modified')
inner_job = etl.job([in1, in2, original_bloc_log, modified_bloc_log, diff1])

wrapped = etl.component.transform.subjob(inner_job)
etl.transition(wrapped, same_log, channel_source="same")
etl.transition(wrapped, remove_log, channel_source="remove")
etl.transition(wrapped, add_log, channel_source="add")
etl.transition(wrapped, add_writer, channel_source="add")
etl.transition(wrapped, update_log, channel_source="update")

etl.job([wrapped, same_log, add_log, remove_log, update_log,
         add_writer]).run()
# # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import threading import sys sys.path.append('..') import etl gdoc_connector = etl.connector.gdoc_connector(user, password) # gmail gdoc_in1 = etl.component.input.gdoc_in(gdoc_connector, file_path='/home/tiny/Desktop/') #fileconnector_partner=etl.connector.localfile('/home/tiny/Desktop/partner1.csv') fileconnector_output = etl.connector.localfile('output/gdoc.csv', 'r+') #csv_in1= etl.component.input.csv_in(fileconnector_partner,name='Partner Data') csv_out1 = etl.component.output.csv_out(fileconnector_output, name='Partner OUT Data1') log1 = etl.component.transform.logger(name='Read Partner File') tran = etl.transition(gdoc_in1, log1) #tran=etl.transition(log1, csv_in1) tran1 = etl.transition(log1, csv_out1) job1 = etl.job([gdoc_in1, csv_out1], name="dd") job1.run()
# You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import sys sys.path.append('..') import etl sugarcrm_conn = etl.connector.sugarcrm_connector( 'admin', 'sugarpasswd', url='http://192.168.0.7/sugarcrm/soap.php') sugarcrm_in1 = etl.component.input.sugarcrm_in(sugarcrm_conn, 'Contacts') log = etl.component.transform.logger(name='After map') tran = etl.transition(sugarcrm_in1, log, channel_source='Contacts') job1 = etl.job([sugarcrm_in1, log]) job1.run() print job1.get_statitic_info() # ## sugarcrm -> logger ##facebook -> mapping -> schema_valodator -> openobject_out ('main') ## -> logger1 ('invalid_field') ## -> logger2 invalid_name ## -> logger3 invalid_key # -> logger4 invalid_null # -> logger5 invalid_type # -> logger6 invalid_size # -> logger7 invalid_format
# Compare two partner files row-by-row on 'id' and log/store the outcome.
original_rows = etl.component.input.csv_in(fileconnector_partner,
                                           name='Partner Data')
modified_rows = etl.component.input.csv_in(fileconnector_partner2,
                                           name='Partner Data2')
differ = etl.component.transform.diff(['id'])
original_bloc_log = etl.component.transform.logger_bloc(name="Original Data")
modified_bloc_log = etl.component.transform.logger_bloc(name="Modified Data")
same_log = etl.component.transform.logger(name="Log Same")
add_log = etl.component.transform.logger(name="Log Add")
remove_log = etl.component.transform.logger(name="Log Remove")
update_log = etl.component.transform.logger(name="Log Update")
add_file = etl.connector.localfile('output/test2_add.csv', 'w+')
add_writer = etl.component.output.csv_out(add_file, name='Output')

etl.transition(original_rows, original_bloc_log)
etl.transition(modified_rows, modified_bloc_log)
etl.transition(original_rows, differ, channel_destination='original')
etl.transition(modified_rows, differ, channel_destination='modified')
etl.transition(differ, same_log, channel_source="same")
etl.transition(differ, remove_log, channel_source="remove")
etl.transition(differ, add_log, channel_source="add")
etl.transition(differ, add_writer, channel_source="add")
etl.transition(differ, update_log, channel_source="update")

etl.job([original_rows, modified_rows, original_bloc_log, modified_bloc_log,
         differ, same_log, add_log, remove_log, update_log,
         add_writer]).run()