def cron_pipedrive_activity_type_exec(self):
    """Cron entry point: sync Pipedrive activity types into Odoo.

    Reads the Pipedrive domain and API token from ``ir.config_parameter``,
    fetches all activity types from the Pipedrive REST API and delegates
    each returned record to :meth:`action_item`.
    """
    _logger.info('cron_pipedrive_activity_type_exec')
    # params
    # NOTE(review): get_param returns False when a key is unset, so str()
    # would produce the literal string 'False' — confirm both parameters
    # are always configured before this cron is scheduled.
    pipedrive_domain = str(
        self.env['ir.config_parameter'].sudo().get_param(
            'pipedrive_domain'))
    pipedrive_api_token = str(
        self.env['ir.config_parameter'].sudo().get_param(
            'pipedrive_api_token'))
    # api client
    client = Client(domain=pipedrive_domain)
    client.set_api_token(pipedrive_api_token)
    # get_info
    response = client.get(client.BASE_URL + 'activityTypes')
    # Flattened the original nested membership + truthiness checks into a
    # single .get(); additionally guard against a successful response whose
    # 'data' key is absent or None, which previously raised
    # KeyError/TypeError.
    if response.get('success'):
        for data_item in response.get('data') or []:
            self.action_item(data_item)
# Bucket each row of `table` into a labelled group using the pre-built
# boolean `conditions` list (defined elsewhere in the file); rows matching
# no condition get the fallback string 'null'.
choices = [
    'above 2k',
    'between 1k and 2k',
    'between 500 and 1000',
    'below 500'
]
table['Group'] = np.select(conditions, choices, default='null')

# Example usage of a Pipedrive client library.
from pipedrive.client import Client

# Fetch deals for a saved filter. The API returns at most 100 deals per
# call, so paginate by advancing `start` on each request to choose where
# the next page begins.
filtered_deals = client.get_deals(filter_id=id_number, start=pagination)

# Example usage of the Close.io API client.
# NOTE: this rebinds `Client` — the Pipedrive class imported above is no
# longer reachable under that name from here on.
from closeio_api import Client

api = Client('api_key')
# Query leads; the query string can be tested in the Close.io UI, or a
# saved smartview can be referenced instead.
data_lead = api.get(
    'lead',
    params={
        'query': 'your query(you can test it at close io) or you can call a smartview'
    })
# Same idea for opportunities, filtered via a lead query.
data_opportunity = api.get(
    'opportunity',
    params={
        'lead_query': 'your query(you can test it at close io) or you can call a smartview'
    })

# Crawler example — remember a Selenium webdriver binary must be available.
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
# Selenium can be used in many ways; a simple example: log in to a page
# with a username and password, then read values located by XPath.