Example #1
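    # Test-harness method: wires self.test_input -> self.component -> self.test_output,
    # builds a job on the output component and runs it, then compares the records
    # captured on self.output_channel with self.output_data one by one (same length,
    # same keys, same values), raising etl_test_exception on any mismatch. When no
    # expected output is given, the captured data is simply returned.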
    def output(self):
        tran = transition(self.test_input, self.component)
        tran1 = transition(self.component, self.test_output)
        job1 = job([self.test_output])
        job1.run()
        if self.output_channel not in self.datas:
            raise etl_test_exception(
                'expected output channel does not have actual data.')
        act_datas = self.datas[self.output_channel]
        if self.output_data:
            if len(act_datas) != len(self.output_data):
                raise etl_test_exception(
                    'lengths of actual output and expected output are different'
                )
        else:
            return self.datas
        count = 0
        while count < len(act_datas):
            exp_r = self.output_data[count]
            act_r = act_datas[count]
            exp_keys = exp_r.keys()
            act_keys = act_r.keys()
            if len(exp_keys) != len(act_keys):
                raise etl_test_exception(
                    'key length of actual output and expected output are different'
                )
            key_count = 0

            while key_count < len(act_keys):
                exp_key = exp_keys[key_count]
                act_key = act_keys[key_count]
                if exp_key != act_key:
                    raise etl_test_exception(
                        'keys of actual output and expected output are different.'
                    )
                key_count += 1

            value_count = 0
            exp_values = exp_r.values()
            act_values = act_r.values()
            while value_count < len(act_values):
                exp_value = exp_values[value_count]
                act_value = act_values[value_count]
                if exp_value != act_value:
                    raise etl_test_exception(
                        'values of actual output and expected output are different'
                    )
                value_count += 1
            count += 1
Example #4
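    # Subjob wrapper: a dummy source component is wired in front of every start
    # component of self.sub_job and a dummy sink behind every end component. The
    # wrapper buffers its own input channels, replays them into the sub-job through
    # dummy_iterator and collects results through dummy2_iterator (both defined
    # elsewhere on this component), runs the sub-job, and yields whatever was
    # gathered in self.result, channel by channel.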
    def process(self):
        dummy = etl.component.component(name='dummy')
        start_coms = []
        for com in self.sub_job.get_components():
            if com.is_start():
                start_coms.append(com)
            com.generator = False

        dummy2 = etl.component.component(name='dummy')
        end_coms = []
        for com in self.sub_job.get_components():
            if com.is_end():
                end_coms.append(com)
            com.generator = False
        self.sub_job.add_component(dummy)
        self.sub_job.add_component(dummy2)

        for start_com in start_coms:
            new_in_tran = etl.transition(dummy, start_com)
        for end_com in end_coms:
            new_out_tran = etl.transition(end_com,
                                          dummy2,
                                          channel_source='',
                                          channel_destination='')

        self.dummy2 = dummy2
        self.dummy2.generator = self.dummy2_iterator()

        self.input = {}
        for channel, trans in self.input_get().items():
            for iterator in trans:
                self.input.setdefault(channel, [])
                for d in iterator:
                    self.input[channel].append(d)
        self.result = {}
        dummy.generator = self.dummy_iterator()
        self.sub_job.run()
        for channel, iterator in self.result.items():
            for d in iterator:
                yield d, channel
 def process(self):
     start_coms = []
     end_coms = []
     start_dummy = etl.component.component(name='start dummy')
     end_dummy = etl.component.component(name='end dummy')
     for com in self.rel_job.get_components():
         if com.is_start():
             start_coms.append(com)
         if com.is_end():
             end_coms.append(com)
     self.rel_job.add_component(start_dummy)
     self.rel_job.add_component(end_dummy)
     for start_com in start_coms:
         tran = etl.transition(start_dummy, start_com)
     for end_com in end_coms:
         tran = etl.transition(end_com, end_dummy, '', '')
     self.start_dummy = start_dummy
     self.end_dummy = end_dummy
     self.end_dummy.datas = []
     self.connector.start(self.import_data)
     for d in self.datas:
         yield d, 'main'
 def process(self):
     start_com = False
     dummy = etl.component.component(name='dummy')
     for com in self.rel_job.get_components():
         if com.is_start():
             start_com = com
     self.rel_job.add_component(dummy)
     if start_com:
         tran = etl.transition(dummy,start_com)
     self.dummy = dummy
     self.connector.start(self.import_data)
     for d in self.datas:
         yield d, 'main'
 def create_instance(self, cr, uid, id, context={}, data={}):
     obj_component = self.pool.get('etl.component')
     trans = self.browse(cr, uid, id)
     cmp_in = obj_component.get_instance(cr, uid,
                                         trans.source_component_id.id,
                                         context, data)
     cmp_out = obj_component.get_instance(cr, uid,
                                          trans.destination_component_id.id,
                                          context, data)
     if (cr.dbname, uid, data.get('process_id', False), id) in self._cache:
         return self._cache[(cr.dbname, uid, data.get('process_id',
                                                      False), id)]
     val = etl.transition(cmp_in, cmp_out, channel_source=trans.channel_source or 'main',
                          channel_destination=trans.channel_destination or 'main', type=trans.type)
     return val
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.     
#
##############################################################################
import sys
sys.path.append('..')
import etl
from etl import transformer
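# Routes the rows of partner.csv through the unique transform, logging its 'main'
# channel and its 'duplicate' channel through separate loggers.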
fileconnector_partner=etl.connector.localfile('input/partner.csv')
csv_in1= etl.component.input.csv_in(fileconnector_partner,name='Partner Data')


unique = etl.component.transform.unique()
log1=etl.component.transform.logger(name='main')
log2=etl.component.transform.logger(name='duplicate')

tran=etl.transition(csv_in1,unique)

tran1=etl.transition(unique,log2,channel_source='duplicate')
tran1=etl.transition(unique,log1,channel_source='main')

job1=etl.job([log2,log1])
job1.run()



Example #12
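# Partial snippet: the schema dict opens earlier in the original script, so only its
# tail (an 'Is_NULL' flag and the 'tel' field) is visible here, and csv_in1 is assumed
# to be defined in the omitted part. schema_validator checks incoming records against
# the schema and routes them onto per-error channels (invalid_field, invalid_name,
# invalid_type, invalid_key, invalid_null, invalid_size, invalid_format) plus 'main',
# each wired to its own logger.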
        'Is_NULL': False
    },
    'tel': {
        'type': 'unicode',
        'Is_NULL': False
    },
}
schema_valid = etl.component.transform.schema_validator(schema)
log1 = etl.component.transform.logger(name='invalid_field')
log2 = etl.component.transform.logger(name='invalid_name')
log3 = etl.component.transform.logger(name='invalid_type')
log4 = etl.component.transform.logger(name='invalid_key')
log5 = etl.component.transform.logger(name='invalid_null')
log6 = etl.component.transform.logger(name='invalid_size')
log7 = etl.component.transform.logger(name='invalid_format')
log8 = etl.component.transform.logger(name='main')

tran = etl.transition(csv_in1, schema_valid)
tran1 = etl.transition(schema_valid, log1, channel_source='invalid_field')
tran2 = etl.transition(schema_valid, log2, channel_source='invalid_name')
tran3 = etl.transition(schema_valid, log3, channel_source='invalid_type')
tran4 = etl.transition(schema_valid, log4, channel_source='invalid_key')
tran5 = etl.transition(schema_valid, log5, channel_source='invalid_null')
tran6 = etl.transition(schema_valid, log6, channel_source='invalid_size')
tran7 = etl.transition(schema_valid, log7, channel_source='invalid_format')
tran8 = etl.transition(schema_valid, log8, channel_source='main')

job1 = etl.job([log1, log2, log3, log4, log5, log6, log7, log8])

job1.run()
    'http://mra.tinyerp.co.in:8069',
    'etl',
    'admin',
    'admin',
    con_type='xmlrpc')
oo_out_partner = etl.component.output.openobject_out(ooconnector,
                                                     'res.partner', {
                                                         'id': 'id',
                                                         'name': 'name'
                                                     })

xmlrpc_conn = etl.connector.xmlrpc_connector('localhost', 5000)

xmlrpc_in = etl.component.input.xmlrpc_in(xmlrpc_conn)
log2 = etl.component.transform.logger(name='Partner File')
tran = etl.transition(log2, xmlrpc_in)
tran = etl.transition(xmlrpc_in, oo_out_partner)
#tran=etl.transition(xmlrpc_in,)
job2 = etl.job([oo_out_partner])

fileconnector_partner = etl.connector.localfile('input/partner.csv')
csv_in1 = etl.component.input.csv_in(fileconnector_partner,
                                     name='Partner Data')
log1 = etl.component.transform.logger(name='Read Partner File')
xmlrpc_out = etl.component.output.xmlrpc_out(xmlrpc_conn)

tran = etl.transition(csv_in1, log1)
tran = etl.transition(log1, xmlrpc_out)

job1 = etl.job([xmlrpc_out])
job1.run()
oo_out_cat = etl.component.output.openobject_out(ooconnector,
                                                 'res.partner.category', {
                                                     'id': 'category_id_main',
                                                     'name': 'category_name'
                                                 })

oo_out_partner = etl.component.output.openobject_out(
    ooconnector, 'res.partner', {
        'id': 'id',
        'name': 'name',
        'category_id:id': 'category_id_main',
        'ref': 'ref'
    })

tran = etl.transition(csv_in1, log1)
tran = etl.transition(log1, csv_in2)
#tran=etl.transition(log1,csv_in2)
tran = etl.transition(csv_in2, map)

tran = etl.transition(map, oo_out_cat)
tran = etl.transition(map, oo_out_partner)
#tran=etl.transition(facebook_in_events, map2)
##,channel_source='friends'
#tran=etl.transition(facebook_in_friends, log)
#tran=etl.transition(map, oo_out_partner)
#tran=etl.transition(map, oo_out_address)
#tran=etl.transition(map, oo_out_address1)
#tran=etl.transition(map1, oo_out_event)
#tran=etl.transition(map1, oo_out_event1)
#tran=etl.transition(oo_out_address,log1)
Example #15
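# Partial snippet: map_criteria opens earlier in the original script, where
# openobject_in1 and transformer are also defined. The map component renames and
# derives fields according to map_criteria, data_filter applies filter_criteria
# (lower-cased Partner equals 'leclerc', or Address Name equals 'Fabien Pinckaers'),
# and the result is logged.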
    'name': "name",
    'destination': 'Address Name'
}]

data_map_component = etl.component.transform.map(map_criteria,
                                                 transformer=transformer)

filter_criteria = [{
    'name': 'Partner',
    'filter': '"%(Partner)s".lower() or ""',
    'operator': '==',
    'operand': "'leclerc'",
    'condition': 'or'
}, {
    'name': 'Address Name',
    'operator': '==',
    'operand': "'Fabien Pinckaers'"
}]

data_filter_component = etl.component.transform.data_filter(
    filter_criteria, transformer=transformer)

log1 = etl.component.transform.logger(name='Read Partner')

tran1 = etl.transition(openobject_in1, data_map_component)
tran2 = etl.transition(data_map_component, data_filter_component)
tran3 = etl.transition(data_filter_component, log1)

job1 = etl.job([log1])
job1.run()
Example #16
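# vCard -> OpenObject pipeline: vcard_in1 and the map component are defined earlier in
# the original script. Mapped records are logged, written to res.partner and then to
# res.partner.address, and a data_count component gathers counts on its 'gmail',
# 'partner' and 'address' channels before passing them to a final logger.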
oo_out = etl.component.output.openobject_out(ooconnector, 'res.partner', {
    'id': 'id',
    'name': 'name'
})

oo_out2 = etl.component.output.openobject_out(
    ooconnector, 'res.partner.address', {
        'name': 'contact_name',
        'id': 'address_id',
        'partner_id:id': 'id',
        'email': 'email'
    })
log1 = etl.component.transform.logger(name='vCard->Oo')

tran = etl.transition(vcard_in1, map)
tran = etl.transition(map, log1)
tran = etl.transition(log1, oo_out)
tran = etl.transition(oo_out, oo_out2)

log2 = etl.component.transform.logger(name='Count')

count = etl.component.control.data_count()
tran = etl.transition(map, count, channel_destination='gmail')
tran = etl.transition(oo_out, count, channel_destination='partner')
tran = etl.transition(oo_out2, count, channel_destination='address')
tran = etl.transition(count, log2)

job1 = etl.job([vcard_in1, oo_out, oo_out2, log2, count])
job1.run()
Example #17
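# Reads the Contacts module from a SugarCRM instance over its SOAP endpoint, logs the
# records, and prints the job statistics afterwards (Python 2 print syntax, as in the
# rest of these examples).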
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import sys
sys.path.append('..')

import etl

sugarcrm_conn = etl.connector.sugarcrm_connector(
    'admin', 'sugarpasswd', url='http://192.168.0.7/sugarcrm/soap.php')
sugarcrm_in1 = etl.component.input.sugarcrm_in(sugarcrm_conn, 'Contacts')

log = etl.component.transform.logger(name='After map')

tran = etl.transition(sugarcrm_in1, log, channel_source='Contacts')

job1 = etl.job([sugarcrm_in1, log])
job1.run()
print job1.get_statitic_info()
#
## sugarcrm -> logger
##facebook -> mapping -> schema_validator   -> openobject_out ('main')
##                                          -> logger1 ('invalid_field')
##                                                               -> logger2 invalid_name
##                                                               -> logger3 invalid_key
#                                                               -> logger4 invalid_null
#                                                               -> logger5 invalid_type
#                                                               -> logger6 invalid_size
#                                                               -> logger7 invalid_format
Example #19
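# Subjob example: job1 (logger -> sort -> sleep -> logger) is wrapped as a subjob
# component and placed between a CSV reader (input/partner.csv) and a CSV writer
# (output/subjob_partner.csv).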
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.     
#
##############################################################################
import etl

sort1=etl.component.transform.sort('name')
log1=etl.component.transform.logger(name='Read Partner File')
log2=etl.component.transform.logger(name='After Sort')
sleep1=etl.component.control.sleep()

tran0=etl.transition(log1,sort1)
tran1=etl.transition(sort1,sleep1)
tran2=etl.transition(sleep1,log2)
job1=etl.job([log1,sort1,sleep1,log2])


fileconnector_partner=etl.connector.localfile('input/partner.csv')
csv_in1= etl.component.input.csv_in(fileconnector_partner,name='Partner Data')

fileconnector_output=etl.connector.localfile('output/subjob_partner.csv','w+')
csv_out1= etl.component.output.csv_out(fileconnector_output,name='Partner OUT Data1')

subjob = etl.component.transform.subjob(job1)

tran0=etl.transition(csv_in1,subjob)
tran1=etl.transition(subjob,csv_out1)
Example #20
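# Partial snippet: the opening of the partner data list (input_part) is cut off here.
# The map component uses a preprocess hook that builds a country_id -> record lookup
# from the 'country' channel, so each partner row can be enriched with its country name.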
    {'id': 1, 'name': 'Fabien', 'country_id': 3},
    {'id': 2, 'name': 'Luc', 'country_id': 3},
    {'id': 3, 'name': 'Henry', 'country_id': 1}
])
input_cty = etl.component.input.data([
    {'id': 1, 'name': 'Belgium'},
    {'id': 3, 'name': 'France'}
])
map_keys = {'main': {
    'id': "main['id']",
    'name': "main['name'].upper()",
    'country': "country_var[main['country_id']]['name']"
}}
def preprocess(self, channels):
    cdict = {}
    for trans in channels['country']:
        for d in trans:
            cdict[d['id']] = d
    return {'country_var': cdict}

map=etl.component.transform.map(map_keys,preprocess)
log=etl.component.transform.logger(name='Read Partner File')

tran=etl.transition(input_part,map, channel_destination='main')
tran1=etl.transition(input_cty,map, channel_destination='country')
tran4=etl.transition(map,log)

job=etl.job([log])
job.run()

#! /usr/bin/python

import etl

in1 = etl.operator.etl_csv_input('data/partner3.csv', is_start=True)
in2 = etl.operator.etl_csv_input('intermediate/add.csv', is_start=True)

merge1 = etl.operator.etl_merge()

log1 = etl.operator.etl_operator_log_bloc(name="Original Data")
log2 = etl.operator.etl_operator_log_bloc(name="Final Data")

etl.transition(in1, merge1)
etl.transition(in1, log1)

etl.transition(in2, merge1)

etl.transition(merge1, log2)

job = etl.job([in1,in2,merge1])
job.run()

Example #24
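# Two CSV inputs feed a sort component; the sorted rows pass through a sleep control
# into a logger and are also written to output/test1_partner.csv. The job is then
# duplicated with job1.copy() and the copy is run.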
fileconnector_partner = etl.connector.localfile('input/partner.csv')

fileconnector_partner1 = etl.connector.localfile('input/partner1.csv')
fileconnector_partner3 = etl.connector.localfile('input/partner3.csv')
fileconnector_output = etl.connector.localfile('output/test1_partner.csv',
                                               'w+')

csv_in1 = etl.component.input.csv_in(fileconnector_partner,
                                     name='Partner Data')
csv_in2 = etl.component.input.csv_in(fileconnector_partner1,
                                     name='Partner Data1')
csv_out1 = etl.component.output.csv_out(fileconnector_output,
                                        name='Partner OUT Data1')
sort1 = etl.component.transform.sort('name')
log1 = etl.component.transform.logger(name='Read Partner File')
log2 = etl.component.transform.logger(name='After Sort')
sleep1 = etl.component.control.sleep()

tran = etl.transition(csv_in1, sort1)
tran1 = etl.transition(csv_in2, sort1)
tran4 = etl.transition(sort1, sleep1)
tran4 = etl.transition(sleep1, log2)
#tran6=etl.etl.transition(sleep1,log1,channel_source="statistics")
tran5 = etl.transition(sort1, csv_out1)

job1 = etl.job([csv_in1, csv_in2, csv_out1, sort1, log1, log2, sleep1])

job2 = job1.copy()
job2.run()
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.     
#
##############################################################################
import etl

xmlrpc_conn=etl.connector.xmlrpc_connector('localhost',5000)

xmlrpc_out = etl.component.output.xmlrpc_out(xmlrpc_conn)
fileconnector_partner=etl.connector.localfile('input/partner.csv')
csv_in1= etl.component.input.csv_in(fileconnector_partner,name='Partner Data')

log2=etl.component.transform.logger(name='File')

tran=etl.transition(csv_in1,xmlrpc_out)
tran=etl.transition(xmlrpc_out,log2)
job1=etl.job([log2])

job1.run()
Example #26
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import etl

sort1 = etl.component.transform.sort('name')
log1 = etl.component.transform.logger(name='Read Partner File')
log2 = etl.component.transform.logger(name='After Sort')
sleep1 = etl.component.control.sleep()

tran0 = etl.transition(log1, sort1)
tran1 = etl.transition(sort1, sleep1)
tran2 = etl.transition(sleep1, log2)
job1 = etl.job([log1, sort1, sleep1, log2])

fileconnector_partner = etl.connector.localfile('input/partner.csv')
csv_in1 = etl.component.input.csv_in(fileconnector_partner,
                                     name='Partner Data')

fileconnector_output = etl.connector.localfile('output/subjob_partner.csv',
                                               'w+')
csv_out1 = etl.component.output.csv_out(fileconnector_output,
                                        name='Partner OUT Data1')

subjob = etl.component.transform.subjob(job1)
Example #27
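# Partial snippet: the assignment opening this first openobject_out call (a home-address
# mapping onto res.partner.address) is cut off, and facebook_in_friends,
# facebook_in_events, map, map1, log, log1, oo_out_partner and the other address outputs
# are defined earlier in the original script. Facebook friends and events are mapped and
# written to res.partner, res.partner.address and crm.case.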
     ooconnector,
     'res.partner.address',
     {'name': 'contact_name', 'id':'home_address_id', 'partner_id:id':'id','city':'home_city','state_id':'home_state_id','zip':'home_zip','country_id':'home_country_id', 'type':'home_type'
        })

oo_out_event= etl.component.output.openobject_out(
     ooconnector,
     'crm.case',
     { 'id': 'event_id', 'name':'event_name', 'section_id':'section_id', 'partner_id':'partner_id'
        })

#oo_out_event1= etl.component.output.openobject_out(
 #    ooconnector,
  #   'event.event',
   #  { 'id': 'event_id', 'name':'event_name', 'section_id':'section_id', 'date_begin':'date_begin', 'date_end':'date_end', 'product_id':'product_id'
    #    })

tran=etl.transition(facebook_in_friends, map)
tran=etl.transition(facebook_in_events, map1)
#tran=etl.transition(facebook_in_events, map2)
#,channel_source='friends'
tran=etl.transition(facebook_in_friends, log)
tran=etl.transition(map, oo_out_partner)
tran=etl.transition(map, oo_out_address)
tran=etl.transition(map, oo_out_address1)
tran=etl.transition(map1, oo_out_event)
#tran=etl.transition(map1, oo_out_event1)
tran=etl.transition(oo_out_address,log1)
job1=etl.job([facebook_in_friends,facebook_in_events,map,map1,log1,oo_out_partner, oo_out_address, oo_out_address1, oo_out_event, log])
job1.run()
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import sys

sys.path.append('..')

import etl

fileconnector_partner = etl.connector.localfile('input/input.vcf')
vcard_in1 = etl.component.input.vcard_in(fileconnector_partner)
log1 = etl.component.transform.logger(name='Read Vcard File')

tran = etl.transition(vcard_in1, log1)
job1 = etl.job([log1])
job1.run()
ooconnector = etl.connector.openobject_connector('http://localhost:8069', 'etl', 'admin', 'admin', con_type='xmlrpc')

oo_out_cat= etl.component.output.openobject_out(
     ooconnector,
     'res.partner.category',
     {'id':'category_id_main','name':'category_name'}
            )

oo_out_partner= etl.component.output.openobject_out(
     ooconnector,
     'res.partner',
     {'id':'id','name':'name', 'category_id:id':'category_id_main', 'ref':'ref'}
            )

tran=etl.transition(csv_in1, log1)
tran=etl.transition(log1, csv_in2)
#tran=etl.transition(log1,csv_in2)
tran=etl.transition(csv_in2, map)

tran=etl.transition(map, oo_out_cat)
tran=etl.transition(map, oo_out_partner)
#tran=etl.transition(facebook_in_events, map2)
##,channel_source='friends'
#tran=etl.transition(facebook_in_friends, log)
#tran=etl.transition(map, oo_out_partner)
#tran=etl.transition(map, oo_out_address)
#tran=etl.transition(map, oo_out_address1)
#tran=etl.transition(map1, oo_out_event)
#tran=etl.transition(map1, oo_out_event1)
#tran=etl.transition(oo_out_address,log1)
Example #30
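# Partial snippet: the openobject_connector call opens earlier in the original script.
# Partner ids and names are read from res.partner, mapped (the name is replaced by a
# fixed literal), and pushed to Facebook through a facebook_out component bound to
# 'set_events'.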
    'test',
    'admin',
    'admin',
    con_type='xmlrpc')

facebook_conn = etl.connector.facebook_connector('http://facebook.com',
                                                 '*****@*****.**')

facebook_out_friends = etl.component.output.facebook_out(facebook_conn,
                                                         'set_events',
                                                         fields=['name'])

map = etl.component.transform.map({
    'main': {
        'id':
        "tools.uniq_id(main.get('name', 'anonymous'), prefix='partner1_')",
        'name': " 'ERPOPEM'",
    }
})

oo_in_partner = etl.component.input.openobject_in(ooconnector,
                                                  'res.partner',
                                                  fields=['id', 'name']
                                                  # {'id':'id','name':'name'}
                                                  )

tran = etl.transition(oo_in_partner, map)
tran = etl.transition(map, facebook_out_friends)
job1 = etl.job([facebook_out_friends])
job1.run()
    "main": {
        "user_id": "tools.uniq_id(main.get('login', 'anonymous'), prefix='user_')",
        "user_name": "main.get('name', 'anonymous')",
        "login": "******",
        "context_lang": "main.get('context_lang','en_US')",
        "groups_id:id": "main.get('groups_id:id','False')",
    }
}
map = etl.component.transform.map(map_keys)


sqlconnector_partner = etl.connector.sql_connector("localhost", 5432, "etl", "qdp", "qdp")

sql_in1 = etl.component.transform.sql_join(
    sqlconnector_partner, "select res_id from ir_model_data where name = '%s'", "user_id", outputkey="unique_res_id"
)


# definition of the transitions
tran0 = etl.transition(csv_in_groups, oo_out_groups)
tran1 = etl.transition(csv_in_users, map)
tran2 = etl.transition(map, oo_out_users)
tran3 = etl.transition(oo_out_users, sql_in1)
tran4 = etl.transition(sql_in1, log1)


job1 = etl.job([oo_out_groups, oo_out_users, log1])
# print job1
job1.run()
# print job1.get_statitic_info()
Example #32
}, {
    'id': 3,
    'name': 'Henry',
    'country_id': 1
}])
input_cty = etl.component.input.data([{
    'id': 1,
    'name': 'Belgium'
}, {
    'id': 3,
    'name': 'France'
}])
map_keys = {
    'main': {
        'id': "main['id']",
        'name': "main['name'].upper()",
        'country': "country[main['country_id']]['name']"
    }
}
join_keys = {'country': 'id'}

join = etl.component.transform.join(map_keys, join_keys)
log = etl.component.transform.logger(name='Read Partner File')

tran = etl.transition(input_part, join, channel_destination='main')
tran1 = etl.transition(input_cty, join, channel_destination='country')
tran4 = etl.transition(join, log)

job = etl.job([log])
job.run()
fileconnector_partner=etl.connector.localfile('input/partner.csv')

fileconnector_partner1=etl.connector.localfile('input/partner1.csv')
fileconnector_partner3=etl.connector.localfile('input/partner3.csv')
fileconnector_output=etl.connector.localfile('output/test1_partner11.csv','r+')

csv_in1= etl.component.input.csv_in(fileconnector_partner,name='Partner Data')
csv_in2= etl.component.input.csv_in(fileconnector_partner1,name='Partner Data1')
csv_out1= etl.component.output.csv_out(fileconnector_output,name='Partner OUT Data1')
sort1=etl.component.transform.sort('name')
log1=etl.component.transform.logger(name='Read Partner File')
log2=etl.component.transform.logger(name='After Sort')
sleep1=etl.component.control.sleep()

tran=etl.transition(csv_in1,sort1)
tran1=etl.transition(csv_in2,sort1)
tran4=etl.transition(sort1,sleep1)
tran4=etl.transition(sleep1,log2)
#tran6=etl.etl.transition(sleep1,log1,channel_source="statistics")
tran5=etl.transition(sort1,csv_out1)


job1=etl.job([csv_in1,csv_in2,csv_out1,sort1,log1,log2,sleep1], name="job_test")



import pickle
class etl_server(threading.Thread):
    path = 'pickle.txt'
    job = False
import etl

from etl.component import component
from etl.connector import connector

#facebook_conn=etl.connector.facebook_connector('http://facebook.com', '*****@*****.**')
ooconnector = etl.connector.openobject_connector('http://mra.tinyerp.co.in:8069', 'test', 'admin', 'admin', con_type='xmlrpc')


facebook_conn=etl.connector.facebook_connector('http://facebook.com', '*****@*****.**')

facebook_out_friends= etl.component.output.facebook_out(facebook_conn,'set_events',fields=['name'])

map = etl.component.transform.map({'main':{
    'id': "tools.uniq_id(main.get('name', 'anonymous'), prefix='partner1_')",
    'name': " 'ERPOPEM'",
}})

oo_in_partner= etl.component.input.openobject_in(
     ooconnector,
     'res.partner',
      fields = ['id','name']
    # {'id':'id','name':'name'}
            )

tran=etl.transition(oo_in_partner, map)
tran=etl.transition(map, facebook_out_friends)
job1=etl.job([facebook_out_friends])
job1.run()

Example #35
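# vCard records are mapped down to plain org/fn/email keys and sent to an XML-RPC
# endpoint on localhost:5000 through xmlrpc_out_block; the block output is then logged.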
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import etl

xmlrpc_conn = etl.connector.xmlrpc_connector('localhost', 5000)

xmlrpc_out = etl.component.output.xmlrpc_out_block(xmlrpc_conn)
filevcard = etl.connector.localfile('input/contacts.vcf')
vcard_in1 = etl.component.input.vcard_in(filevcard)
map = etl.component.transform.map({
    'main': {
        'org': "main.get('org',['anonymous'])",
        'fn': "main.get('fn','anonymous')",
        'email': "main.get('email','')"
    }
})
log2 = etl.component.transform.logger(name='File')

tran = etl.transition(vcard_in1, map)
tran = etl.transition(map, xmlrpc_out)
tran = etl.transition(xmlrpc_out, log2)
job1 = etl.job([vcard_in1, map, xmlrpc_out, log2])

job1.run()
    'main': {
        'id': "tools.uniq_id(main.get('name', 'anonymous'), prefix='event_')",
        'name': "main.get('name','anonymous')",
        #    'date_begin':"main.get(datetime.datetime.fromtimestamp(time.mktime(time.strptime('date_begin','%Y-%m-%dT%H:%M:%S.000Z'))).strftime('%Y-%m-%d %H:%M:%S')) ",
        #    'date_end':"main.get(datetime.datetime.fromtimestamp(time.mktime(time.strptime('date_end','%Y-%m-%dT%H:%M:%S.000Z'))).strftime('%Y-%m-%d %H:%M:%S'))",
        'date_begin': "main.get('date_begin') ",
        'date_end': "main.get('date_end')",
        'product_id': "main.get('product_id', 'Advance Product')",
    }
})

ooconnector = etl.connector.openobject_connector('http://localhost:8069',
                                                 'trunk_mra',
                                                 'admin',
                                                 'admin',
                                                 con_type='xmlrpc')

oo_out_event = etl.component.output.openobject_out(
    ooconnector, 'event.event', {
        'id': 'id',
        'name': 'name',
        'date_begin': 'date_begin',
        'date_end': 'date_end',
        'product_id': 'product_id'
    })

tran = etl.transition(gcalendar_in_events, map)
tran = etl.transition(map, oo_out_event)
job1 = etl.job([gcalendar_in_events, oo_out_event])
job1.run()
import etl

in1 = etl.operator.etl_csv_input('data/partner.csv', is_start=True)
in2 = etl.operator.etl_csv_input('data/partner2.csv', is_start=True)
diff1 = etl.operator.etl_operator_diff(['id'])

log_1 = etl.operator.etl_operator_log_bloc(name="Original Data")
log_2 = etl.operator.etl_operator_log_bloc(name="Modified Data")

log1 = etl.operator.etl_operator_log(name="Log Same")
log2 = etl.operator.etl_operator_log(name="Log Add")
log3 = etl.operator.etl_operator_log(name="Log Remove")
log4 = etl.operator.etl_operator_log(name="Log Update")

csv1 = etl.operator.etl_csv_output('intermediate/add.csv')

etl.transition(in1, log_1)
etl.transition(in2, log_2)

etl.transition(in1, diff1, 'original')
etl.transition(in2, diff1, 'modified')

etl.transition(diff1, log1, channel_source="same")
etl.transition(diff1, log3, channel_source="remove")
etl.transition(diff1, log2, channel_source="add")
etl.transition(diff1, csv1, channel_source="add")
etl.transition(diff1, log4, channel_source="update")

job = etl.job([in1, in2, diff1])
job.run()
Example #38
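# CSV input with type coercion: a transformer declares the column types (LONG, STRING,
# DATE, FLOAT, BOOLEAN) for invoice.csv so csv_in can convert the values before the
# rows are logged.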
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.     
#
##############################################################################
import sys
sys.path.append('..')

import etl
from etl import transformer

fileconnector=etl.connector.localfile('input/invoice.csv')
trans=transformer(
    {
        'id':transformer.LONG,
        'name':transformer.STRING,
        'invoice_date':transformer.DATE,
        'invoice_amount':transformer.FLOAT,
        'is_paid':transformer.BOOLEAN
    }
)
csv_in1= etl.component.input.csv_in(fileconnector=fileconnector,transformer=trans)
log1=etl.component.transform.logger(name='Read Invoice File')
tran=etl.transition(csv_in1,log1)
job1=etl.job([csv_in1,log1])
job1.run()
        'user_id':
        "tools.uniq_id(main.get('login', 'anonymous'), prefix='user_')",
        'user_name': "main.get('name', 'anonymous')",
        'login': "******",
        'context_lang': "main.get('context_lang','en_US')",
        'groups_id:id': "main.get('groups_id:id','False')",
    }
}
map = etl.component.transform.map(map_keys)

sqlconnector_partner = etl.connector.sql_connector('localhost', 5432, 'etl',
                                                   'qdp', 'qdp')

sql_in1 = etl.component.transform.sql_join(
    sqlconnector_partner,
    "select res_id from ir_model_data where name = '%s'",
    'user_id',
    outputkey='unique_res_id')

#definition of the transitions
tran0 = etl.transition(csv_in_groups, oo_out_groups)
tran1 = etl.transition(csv_in_users, map)
tran2 = etl.transition(map, oo_out_users)
tran3 = etl.transition(oo_out_users, sql_in1)
tran4 = etl.transition(sql_in1, log1)

job1 = etl.job([oo_out_groups, oo_out_users, log1])
#print job1
job1.run()
#print job1.get_statitic_info()
Example #40
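# Diff-as-subjob: two CSV inputs (their file connectors are defined earlier in the
# original script) are compared on 'id' by the diff component. That comparison job is
# wrapped as a subjob whose 'same', 'add', 'remove' and 'update' channels are each
# logged, with the 'add' channel also written to output/subjob2_add.csv.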
in1 = etl.component.input.csv_in(fileconnector_partner, name='Partner Data')
in2 = etl.component.input.csv_in(fileconnector_partner2, name='Partner Data2')
diff1 = etl.component.transform.diff(['id'])

log_1 = etl.component.transform.logger_bloc(name="Original Data")
log_2 = etl.component.transform.logger_bloc(name="Modified Data")

log1 = etl.component.transform.logger(name="Log Same")
log2 = etl.component.transform.logger(name="Log Add")
log3 = etl.component.transform.logger(name="Log Remove")
log4 = etl.component.transform.logger(name="Log Update")

fileconnector_output = etl.connector.localfile('output/subjob2_add.csv', 'w+')
csv_out1 = etl.component.output.csv_out(fileconnector_output, name='Output')

etl.transition(in1, log_1)
etl.transition(in2, log_2)

etl.transition(in1, diff1, channel_destination='original')
etl.transition(in2, diff1, channel_destination='modified')

job1 = etl.job([in1, in2, log_1, log_2, diff1])

subjob = etl.component.transform.subjob(job1)

etl.transition(subjob, log1, channel_source="same")
etl.transition(subjob, log3, channel_source="remove")
etl.transition(subjob, log2, channel_source="add")
etl.transition(subjob, csv_out1, channel_source="add")
etl.transition(subjob, log4, channel_source="update")
Example #42
#! /usr/bin/python

import etl

in1 = etl.operator.etl_csv_input('data/partner.csv', is_start=True)
out1 = etl.operator.etl_csv_output('output/partner.csv')
sort1 = etl.operator.etl_operator_sort('name')
log1 = etl.operator.etl_operator_log(name='PartnerLogger')
log2 = etl.operator.etl_operator_log(name='OutputLogger')

etl.transition(in1, log1)
etl.transition(log1, sort1)
etl.transition(sort1, out1)
etl.transition(out1, log2)

job = etl.job([in1,out1])
job.run()

#import sys
import etl


ooconnector = etl.connector.openobject_connector('http://mra.tinyerp.co.in:8069', 'etl', 'admin', 'admin', con_type='xmlrpc')
oo_out_partner= etl.component.output.openobject_out(
     ooconnector,
     'res.partner',
     {'id':'id','name':'name'}
            )

xmlrpc_conn=etl.connector.xmlrpc_connector('localhost',5000)

xmlrpc_in= etl.component.input.xmlrpc_in(xmlrpc_conn)
log2=etl.component.transform.logger(name='Partner File')
tran=etl.transition(log2,xmlrpc_in)
tran=etl.transition(xmlrpc_in,oo_out_partner)
#tran=etl.transition(xmlrpc_in,)
job2=etl.job([oo_out_partner])



fileconnector_partner=etl.connector.localfile('input/partner.csv')
csv_in1= etl.component.input.csv_in(fileconnector_partner,name='Partner Data')
log1=etl.component.transform.logger(name='Read Partner File')
xmlrpc_out = etl.component.output.xmlrpc_out(xmlrpc_conn)

tran=etl.transition(csv_in1,log1)
tran=etl.transition(log1,xmlrpc_out)

job1=etl.job([xmlrpc_out])
Example #44
#! /usr/bin/python

import etl

in1 = etl.operator.etl_csv_input('data/partner.csv', is_start=True)
out1 = etl.operator.etl_csv_output('output/partner.csv')
sort1 = etl.operator.etl_operator_sort('name')
log1 = etl.operator.etl_operator_log(name='PartnerLogger')
etl.transition(in1, sort1)
etl.transition(sort1, out1)
etl.transition(sort1, log1)

job = etl.job([in1, out1])
job.run()
Example #45
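# XML-RPC block server example: job1 maps incoming vCard-style records (org/fn/email)
# and creates res.partner rows via openobject_out_create; xmlrpc_in_block is bound to
# localhost:5000 and to job1, so that blocks received over XML-RPC can be replayed
# through that job when job2 runs.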
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.     
#
##############################################################################
import etl

ooconnector = etl.connector.openobject_connector('http://localhost:8069', 'trunk', 'admin', 'a', con_type='xmlrpc')
map = etl.component.transform.map({'main':{
    'id': "tools.uniq_id(main.get('org', 'anonymous'), prefix='partner_')",
    'address_id': "tools.uniq_id(main.get('fn', 'anonymous'), prefix='contact_')",
    'name': "main.get('org',['anonymous'])[0]",
    'contact_name': "main.get('fn','anonymous')",
    'email': "main.get('email','').upper()"
}})

oo_out= etl.component.output.openobject_out_create(
     ooconnector,
     'res.partner',
     {'name':'name'}
)



tran=etl.transition(map,oo_out)
job1=etl.job([map,oo_out])
xmlrpc_conn=etl.connector.xmlrpc_connector('localhost',5000)
xmlrpc_in= etl.component.input.xmlrpc_in_block(xmlrpc_conn, job1)
job2=etl.job([xmlrpc_in])
job2.run()
Example #46
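# Reads res.partner.address records from OpenObject, maps partner, state and country
# ids to display names via map_criteria, filters them with data_filter
# (Partner == 'leclerc' or Address Name == 'Fabien Pinckaers') and logs the result.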
openobject_in1= etl.component.input.openobject_in(
                 openobject_partner,'res.partner.address',
                 fields=['partner_id','title', 'name', 'street', 'street2' , 'phone' , 'city' ,  'zip' ,'state_id' , 'country_id' , 'mobile', 'birthdate'],
                 transformer=transformer)
map_criteria=[
        {'name':'country_id','map':"%(country_id)s and %(country_id)s[1].upper() or ''",'destination':'Country Name'},
        {'name':'state_id','map':"%(state_id)s and %(state_id)s[1].upper() or ''",'destination':'State Name'},
        {'name':'partner_id','map':"%(partner_id)s and %(partner_id)s[1] or ''",'destination':'Partner'},
        {'name':"name",'destination':'Address Name'}
        ]

data_map_component=etl.component.transform.map(map_criteria,transformer=transformer)

filter_criteria=[
        {'name':'Partner','filter':'"%(Partner)s".lower() or ""','operator':'==','operand':"'leclerc'",'condition':'or'},     
        {'name':'Address Name','operator':'==','operand':"'Fabien Pinckaers'"}
        ]

data_filter_component=etl.component.transform.data_filter(filter_criteria,transformer=transformer)

log1=etl.component.transform.logger(name='Read Partner')

tran1=etl.transition(openobject_in1,data_map_component)
tran2=etl.transition(data_map_component,data_filter_component)
tran3=etl.transition(data_filter_component,log1)

job1=etl.job([log1])
job1.run()

Example #47
import etl

input_part = etl.component.input.data([
    {'id': 1, 'name': 'Fabien', 'country_id': 3},
    {'id': 2, 'name': 'Luc', 'country_id': 3},
    {'id': 3, 'name': 'Henry', 'country_id': 1}
])
input_cty = etl.component.input.data([
    {'id': 1, 'name': 'Belgium'},
    {'id': 3, 'name': 'France'}
])
map_keys = {'main': {
    'id': "main['id']",
    'name': "main['name'].upper()",
    'country': "country[main['country_id']]['name']"
}}
join_keys = {
         'country': 'id'
    }    

join=etl.component.transform.join(map_keys,join_keys)
log=etl.component.transform.logger(name='Read Partner File')

tran=etl.transition(input_part,join, channel_destination='main')
tran1=etl.transition(input_cty,join, channel_destination='country')
tran4=etl.transition(join,log)

job=etl.job([log])
job.run()

#log1=etl.component.transform.logger(name='After write')
#log=etl.component.transform.logger(name='After map')

map = etl.component.transform.map({'main':{
    'id': "tools.uniq_id(main.get('name', 'anonymous'), prefix='event_')",
    'name': "main.get('name','anonymous')",
#    'date_begin':"main.get(datetime.datetime.fromtimestamp(time.mktime(time.strptime('date_begin','%Y-%m-%dT%H:%M:%S.000Z'))).strftime('%Y-%m-%d %H:%M:%S')) ",
#    'date_end':"main.get(datetime.datetime.fromtimestamp(time.mktime(time.strptime('date_end','%Y-%m-%dT%H:%M:%S.000Z'))).strftime('%Y-%m-%d %H:%M:%S'))",

    'date_begin':"main.get('date_begin') ",
    'date_end':"main.get('date_end')",


    'product_id':"main.get('product_id', 'Advance Product')",
}})


ooconnector = etl.connector.openobject_connector('http://localhost:8069', 'trunk_mra', 'admin', 'admin', con_type='xmlrpc')

oo_out_event= etl.component.output.openobject_out(
     ooconnector,
     'event.event',
     { 'id':'id', 'name':'name', 'date_begin':'date_begin', 'date_end':'date_end','product_id':'product_id'
        })

tran=etl.transition(gcalendar_in_events, map)
tran=etl.transition(map, oo_out_event)
job1=etl.job([gcalendar_in_events,oo_out_event])
job1.run()
Example #50
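# Google Docs input: gdoc_in reads documents for the given account (user and password
# are assumed to be defined earlier in the original script) and the records are logged
# and written to output/gdoc.csv (opened 'r+').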
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import threading
import sys
sys.path.append('..')

import etl

gdoc_connector = etl.connector.gdoc_connector(user, password)  # gmail
gdoc_in1 = etl.component.input.gdoc_in(gdoc_connector,
                                       file_path='/home/tiny/Desktop/')

#fileconnector_partner=etl.connector.localfile('/home/tiny/Desktop/partner1.csv')

fileconnector_output = etl.connector.localfile('output/gdoc.csv', 'r+')

#csv_in1= etl.component.input.csv_in(fileconnector_partner,name='Partner Data')
csv_out1 = etl.component.output.csv_out(fileconnector_output,
                                        name='Partner OUT Data1')
log1 = etl.component.transform.logger(name='Read Partner File')

tran = etl.transition(gdoc_in1, log1)
#tran=etl.transition(log1, csv_in1)
tran1 = etl.transition(log1, csv_out1)

job1 = etl.job([gdoc_in1, csv_out1], name="dd")

job1.run()
# -*- encoding: utf-8 -*-
##############################################################################
#    
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.     
#
##############################################################################
import etl

sort1=etl.component.transform.sort('name')
log1=etl.component.transform.logger(name='After Sort')
tran1=etl.transition(sort1, log1)
job1=etl.job([sort1,log1])
xmlrpc_conn=etl.connector.xmlrpc_connector('localhost',5000)
xmlrpc_in= etl.component.input.xmlrpc_in(xmlrpc_conn, job1)
job2=etl.job([xmlrpc_in])
job2.run()
Example #52
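# SQL input example: sql_in runs a SELECT against a local PostgreSQL database (the
# first ten res_partner rows, ordered by id) and the result set is logged.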
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import sys
sys.path.append('..')

import etl

sqlconnector_partner = etl.connector.sql_connector('localhost', 5432, 'trunk',
                                                   'fp', 'fp')

sql_in1 = etl.component.input.sql_in(
    sqlconnector_partner, 'select * from res_partner where id<=10 order by id')

log1 = etl.component.transform.logger(name='Read Partner')

tran = etl.transition(sql_in1, log1)

job1 = etl.job([log1])
job1.run()
Example #53
in1 = etl.component.input.csv_in(fileconnector_partner,name='Partner Data')
in2 = etl.component.input.csv_in(fileconnector_partner2,name='Partner Data2')
diff1 = etl.component.transform.diff(['id'])

log_1 = etl.component.transform.logger_bloc(name="Original Data")
log_2 = etl.component.transform.logger_bloc(name="Modified Data")

log1 = etl.component.transform.logger(name="Log Same")
log2 = etl.component.transform.logger(name="Log Add")
log3 = etl.component.transform.logger(name="Log Remove")
log4 = etl.component.transform.logger(name="Log Update")

fileconnector_output=etl.connector.localfile('output/test2_add.csv', 'w+')
csv_out1 = etl.component.output.csv_out(fileconnector_output,name='Output')

etl.transition(in1, log_1)
etl.transition(in2, log_2)

etl.transition(in1, diff1, channel_destination='original')
etl.transition(in2, diff1, channel_destination='modified')

etl.transition(diff1, log1, channel_source="same")
etl.transition(diff1, log3, channel_source="remove")
etl.transition(diff1, log2, channel_source="add")
etl.transition(diff1, csv_out1, channel_source="add")
etl.transition(diff1, log4, channel_source="update")

job = etl.job([in1,in2,log_1,log_2,diff1,log1,log2,log3,log4,csv_out1])
job.run()
         'name':{'type':'unicode','size':'20','Is_NULL':False},
         'tel':{'type':'unicode','Is_NULL':False},
         
      }
schema_valid=etl.component.transform.schema_validator(schema)
log1=etl.component.transform.logger(name='invalid_field')
log2=etl.component.transform.logger(name='invalid_name')
log3=etl.component.transform.logger(name='invalid_type')
log4=etl.component.transform.logger(name='invalid_key')
log5=etl.component.transform.logger(name='invalid_null')
log6=etl.component.transform.logger(name='invalid_size')
log7=etl.component.transform.logger(name='invalid_format')
log8=etl.component.transform.logger(name='main')


tran=etl.transition(csv_in1,schema_valid)
tran1=etl.transition(schema_valid,log1,channel_source='invalid_field')
tran2=etl.transition(schema_valid,log2,channel_source='invalid_name')
tran3=etl.transition(schema_valid,log3,channel_source='invalid_type')
tran4=etl.transition(schema_valid,log4,channel_source='invalid_key')
tran5=etl.transition(schema_valid,log5,channel_source='invalid_null')
tran6=etl.transition(schema_valid,log6,channel_source='invalid_size')
tran7=etl.transition(schema_valid,log7,channel_source='invalid_format')
tran8=etl.transition(schema_valid,log8,channel_source='main')


job1=etl.job([log1,log2,log3,log4,log5,log6,log7,log8])


job1.run()