def __init__(self, config, plugin_config):
    """Build the ticket-listing path for the configured Freshdesk view.

    An empty view setting falls back to the 'all_tickets' view.
    """
    Connector.__init__(self, config, plugin_config)
    FreshdeskConnector.__init__(self, config, plugin_config)
    view = config.get("view", '')
    view = 'all_tickets' if view == '' else view
    self.path = '/helpdesk/tickets/filter/' + view + '?format=json&wf_order=created_at&page='
def __init__(self, config):
    """Load the Google service-account credentials and authorize gspread.

    The 'credentials' setting is either a path to a JSON key file (first
    line of the value) or the JSON key itself.

    Raises:
        ValueError: when the key file cannot be read or parsed.
        Exception: when the inline JSON cannot be parsed.
    """
    Connector.__init__(self, config)  # pass the parameters to the base class
    self.credentials = self.config.get("credentials")
    self.doc_id = self.config.get("doc_id")
    self.tab_id = self.config.get("tab_id")
    self.result_format = self.config.get("result_format")
    self.list_unique_slugs = []
    # Renamed from 'file' (shadowed the builtin); only the first line is
    # considered as a potential path.
    credentials_path = self.credentials.splitlines()[0]
    if os.path.isfile(credentials_path):
        try:
            # 'with' closes the handle; the explicit close() was redundant.
            with open(credentials_path, 'r') as f:
                self.credentials = json.load(f)
        except Exception as e:
            raise ValueError("Unable to read the JSON Service Account from file '%s'.\n%s" % (credentials_path, e))
    else:
        try:
            self.credentials = json.loads(self.credentials)
        except Exception as e:
            raise Exception("Unable to read the JSON Service Account.\n%s" % e)
    scope = [
        'https://www.googleapis.com/auth/spreadsheets',
        #'https://www.googleapis.com/auth/drive'
    ]
    credentials = ServiceAccountCredentials.from_json_keyfile_dict(self.credentials, scope)
    self.gspread_client = gspread.authorize(credentials)
def __init__(self, config):
    """Remember the report settings, then build the Salesforce client."""
    Connector.__init__(self, config)
    self.REPORT = self.config.get("report", "")
    self.RESULT_FORMAT = self.config.get("result_format")
    self.client = SalesforceClient(self.config)
def __init__(self, config, plugin_config):
    """Read the Splunk connection settings and open a client session.

    Raises:
        Exception: when the Splunk instance URL or login details are missing.
    """
    logger.info("SplunkIndexConnector:init")
    Connector.__init__(self, config, plugin_config)
    try:
        # Look the preset up once instead of three separate times.
        splunk_login = config.get('splunk_login')
        self.splunk_instance = splunk_login['splunk_instance']
        self.parse_url()
        self.splunk_username = splunk_login['splunk_username']
        self.splunk_password = splunk_login['splunk_password']
    except Exception as err:
        raise Exception("The Splunk instance URL or login details are not filled in. ({})".format(err))
    self.splunk_app = config.get('splunk_app')
    self.index_name = config.get('index_name')
    self.search_string = config.get('search_string')
    # Normalize empty or missing time bounds to None. The original called
    # len() on the raw value, which raised TypeError when the key was absent.
    self.earliest_time = config.get('earliest_time') or None
    self.latest_time = config.get('latest_time') or None
    logger.info('init:splunk_instance={}, index_name={}, search_string="{}", earliest_time={}, latest_time={}'.format(
        self.splunk_instance, self.index_name, self.search_string, self.earliest_time, self.latest_time
    ))
    args = {
        "host": self.splunk_host,
        "port": self.splunk_port,
        "username": self.splunk_username,
        "password": self.splunk_password
    }
    # Only scope the session to an app when one was selected.
    if self.splunk_app != "":
        args["app"] = self.splunk_app
    self.client = connect(**args)
    logger.info("SplunkIndexConnector:Connected to Splunk")
def __init__(self, config):
    """Read the Forecast.io settings and prepare the on-disk cache file."""
    Connector.__init__(self, config)
    self.api_key = str(self.config.get("api_key"))
    self.latitude = str(self.config.get("latitude"))
    self.longitude = str(self.config.get("longitude"))
    self.from_date = str(self.config.get("from_date"))
    self.to_date = str(self.config.get("to_date"))
    self.cache_folder = str(self.config.get("cache_folder"))
    # -1 means "no limit on the number of API calls".
    self.api_limit = int(self.config.get("api_limit", -1))
    # One cache file per coordinate pair (latitude/longitude are already str,
    # the extra str() wraps were redundant).
    # NOTE(review): urlsafe_b64encode requires bytes on Python 3 -- confirm
    # this plugin still targets Python 2 before migrating.
    filename = "cache-forecastio-history-%s.json" % base64.urlsafe_b64encode(
        self.latitude + '-' + self.longitude)
    self.cache_file = os.path.join(self.cache_folder, filename)
    # Ensure the cache directory exists.
    if not os.path.isdir(self.cache_folder):
        os.makedirs(self.cache_folder)
    # Create an empty cache file on first run; 'with' closes the handle,
    # the explicit close() was redundant.
    if not os.path.exists(self.cache_file):
        with open(self.cache_file, 'w') as f:
            json.dump({}, f)
    # The API returns the number of calls made today; we keep it to
    # optionally limit the number of calls. Assume 0 before the first call.
    self.api_calls = 0
def __init__(self, config):
    """Read the Forecast.io settings and prepare the on-disk cache file."""
    Connector.__init__(self, config)
    self.api_key = str(self.config.get("api_key"))
    self.latitude = str(self.config.get("latitude"))
    self.longitude = str(self.config.get("longitude"))
    self.from_date = str(self.config.get("from_date"))
    self.to_date = str(self.config.get("to_date"))
    self.cache_folder = str(self.config.get("cache_folder"))
    # -1 means "no limit on the number of API calls".
    self.api_limit = int(self.config.get("api_limit", -1))
    # One cache file per coordinate pair (latitude/longitude are already str,
    # the extra str() wraps were redundant).
    # NOTE(review): urlsafe_b64encode requires bytes on Python 3 -- confirm
    # this plugin still targets Python 2 before migrating.
    filename = "cache-forecastio-history-%s.json" % base64.urlsafe_b64encode(
        self.latitude + '-' + self.longitude)
    self.cache_file = os.path.join(self.cache_folder, filename)
    # Ensure the cache directory exists.
    if not os.path.isdir(self.cache_folder):
        os.makedirs(self.cache_folder)
    # Create an empty cache file on first run; 'with' closes the handle,
    # the explicit close() was redundant.
    if not os.path.exists(self.cache_file):
        with open(self.cache_file, 'w') as f:
            json.dump({}, f)
    # The API returns the number of calls made today; we keep it to
    # optionally limit the number of calls. Assume 0 before the first call.
    self.api_calls = 0
def __init__(self, config):
    """Read the Wunderground settings and prepare the on-disk cache file."""
    Connector.__init__(self, config)
    self.api_key = str(self.config.get("api_key"))
    self.location = str(self.config.get("location"))
    self.from_date = str(self.config.get("from_date"))
    self.to_date = str(self.config.get("to_date"))
    self.calls_per_minute = int(self.config.get("calls_per_minute"))
    self.api_limit = int(self.config.get("api_limit", 500))
    self.cache_folder = str(
        self.config.get("cache_folder", "/tmp/dss-plugin-wunderground"))
    # One cache file per location.
    # NOTE(review): urlsafe_b64encode requires bytes on Python 3 -- confirm
    # this plugin still targets Python 2 before migrating.
    filename = "cache-wunderground-history-%s.json" % base64.urlsafe_b64encode(
        self.location)
    self.cache_file = os.path.join(self.cache_folder, filename)
    # print() with a single argument behaves the same on Python 2 and 3;
    # the original 'print' statement is a syntax error on Python 3.
    print("Wunderground plugin - Cache file: %s" % self.cache_file)
    # Ensure the cache directory exists.
    if not os.path.isdir(self.cache_folder):
        os.makedirs(self.cache_folder)
    # Create an empty cache file on first run; 'with' closes the handle,
    # the explicit close() was redundant.
    if not os.path.exists(self.cache_file):
        with open(self.cache_file, 'w') as f:
            json.dump({}, f)
    # Limit on the number of API calls. At each sample refresh, set to 0 again!
    self.api_calls = 0
def __init__(self, config, plugin_config):
    """Validate the Looker API settings and initialise the SDK client.

    Writes a 'looker.ini' configuration file for the SDK.

    Raises:
        ValueError: when the connection parameters or the Look ID are missing.
    """
    logging.basicConfig(level=logging.INFO, format='dss-plugin-looker-query %(levelname)s - %(message)s')
    self.logger = logging.getLogger()
    Connector.__init__(self, config, plugin_config)  # pass the parameters to the base class
    self.base_url = self.plugin_config["Looker API"]["baseurl"]
    self.client_id = self.plugin_config["Looker API"]["clientid"]
    self.client_secret = self.plugin_config["Looker API"]["clientsecret"]
    self.look_id = int(self.config["lookid"])
    # Removed the print() of plugin_config: it dumped the client secret in
    # clear text to the logs.
    if not (self.base_url and self.client_id and self.client_secret):
        # The secret is masked before logging.
        self.logger.error('Connection params: {}'.format(
            {'Client ID:': self.client_id,
             'Client secret:': '#' * len(self.client_secret),
             'Base URL:': self.base_url})
        )
        raise ValueError("Client ID, Client secret and Base URL must be filled")
    if not self.look_id:
        raise ValueError("Look ID was not set")
    line_list = ["[Looker]",
                 "api_version=3.1",
                 "base_url=" + self.base_url,
                 "client_id=" + self.client_id,
                 "client_secret=" + self.client_secret,
                 "verify_ssl=True"]
    # 'with' guarantees the handle is closed even if the write fails; the
    # original leaked the (builtin-shadowing) 'file' handle on error.
    with open("looker.ini", "w") as ini_file:
        ini_file.write('\n'.join(line_list) + '\n')
    self.looker_client = client.setup("looker.ini")
def __init__(self, config, plugin_config):
    """Validate the Warp10 connection and fetch-mode settings.

    Raises:
        ValueError: when the connection, the fetch mode, or the timestamps
            required by the selected fetch mode are missing.
    """
    Connector.__init__(self, config, plugin_config)
    connection = self.config.get('warp10_connection')
    if not connection:
        raise ValueError('No Warp10 connection defined')
    self.warp10_client = Warp10Client(connection)
    self.fetch_mode = self.config.get('fetch_mode')
    if not self.fetch_mode:
        raise ValueError('No fetch mode selected')
    self.start = self.config.get('start')
    self.stop = self.config.get('stop')
    self.now = self.config.get('now')
    self.timespan = self.config.get('timespan')
    self.fetch_mode_parameters = FetchModeParameters()
    if self.fetch_mode == 'interval':
        # Interval mode needs both bounds.
        if not (self.start and self.stop):
            raise ValueError(
                'Start or stop timestamp not defined for interval fetch mode'
            )
        self.fetch_mode_parameters.as_interval(self.start, self.stop)
    elif self.fetch_mode == 'timespan':
        self.fetch_mode_parameters.as_timespan(self.now, self.timespan)
    else:
        raise ValueError('Unknown fetch mode: ' + self.fetch_mode)
    self.selector = self.config.get('selector')
def __init__(self, config):
    """Build the ticket-listing path for the configured Freshdesk view."""
    Connector.__init__(self, config)
    FreshdeskConnector.__init__(self, config)
    self.view = config["view"]
    self.path = '/helpdesk/tickets/filter/' + str(
        self.view) + '?format=json&wf_order=created_at&page='
    # print() with a single argument behaves the same on Python 2 and 3;
    # the original 'print' statement is a syntax error on Python 3.
    print(self.path)
def __init__(self, config):
    """Remember the object query settings, then build the Salesforce client."""
    Connector.__init__(self, config)
    self.OBJECT = self.config.get("object", "")
    self.LIMIT = self.config.get("limit", "")
    self.RESULT_FORMAT = self.config.get("result_format")
    self.client = SalesforceClient(self.config)
def __init__(self, config, plugin_config):
    """Hand the parameters to the base class and read the census content."""
    Connector.__init__(self, config, plugin_config)
    self.P_CENSUS_CONTENT = self.config.get("param_census_content")
def __init__(self, config, plugin_config):
    """Read the API key and object selection, resolving custom objects."""
    Connector.__init__(self, config, plugin_config)
    self.api_key = self.config.get("api_key", {}).get("api_key", "")
    self.object = self.config.get("object")
    self.custom_object = self.config.get("custom_object")
    self.result_format = self.config.get("result_format", "readable")
    # "other" is a placeholder: swap in the user-supplied object name.
    use_custom = self.object == "other" and self.custom_object
    self.object = self.custom_object if use_custom else self.object
def __init__(self, config):
    """Parse the service-account JSON and remember the target sheet settings."""
    Connector.__init__(self, config)  # pass the parameters to the base class
    self.doc_id = self.config.get("doc_id")
    self.tab_id = self.config.get("tab_id")
    self.result_format = self.config.get("result_format")
    self.credentials = json.loads(self.config.get("credentials"))
    self.list_unique_slugs = []
def __init__(self, config):
    """Assemble the Pipedrive API settings and the output format."""
    Connector.__init__(self, config)
    self.CONFIG_API = dict(
        API_BASE_URL="https://api.pipedrive.com/v1/",
        API_KEY=self.config.get("api_key"),
        PAGINATION=200,  # page size used for every listing call
    )
    self.RESULT_FORMAT = self.config.get("result_format")
def __init__(self, config, plugin_config):
    """Read the form settings and build the API wrapper."""
    Connector.__init__(self, config, plugin_config)  # pass the parameters to the base class
    # Form data
    self.api_key = self.config.get("api_key")
    self.dataset_id = self.config.get("dataset_id")
    self.api = DrdvApi(self.api_key)
def __init__(self, config):
    """Assemble the Pipedrive API settings and the output format."""
    Connector.__init__(self, config)
    self.CONFIG_API = dict(
        API_BASE_URL='https://api.pipedrive.com/v1/',
        API_KEY=self.config.get("api_key"),
        PAGINATION=200,  # page size used for every listing call
    )
    self.RESULT_FORMAT = self.config.get("result_format")
def __init__(self, config, plugin_config):
    """Authenticate against Google Calendar and validate the date window."""
    Connector.__init__(self, config, plugin_config)
    token = get_token_from_config(config)
    self.client = GoogleCalendarClient(token)
    self.from_date = self.config.get("from_date")
    self.to_date = self.config.get("to_date")
    # Reject a window whose end precedes its start.
    assert_no_temporal_paradox(self.from_date, self.to_date)
    self.calendar_id = self.config.get("calendar_id", constants.DEFAULT_CALENDAR_ID)
    self.raw_results = self.config.get("raw_results", False)
def __init__(self, config):
    """Store the login, domain and date-range settings from the config."""
    Connector.__init__(self, config)  # pass the parameters to the base class
    # All five settings are plain copies of optional config values.
    for key in ("login", "password", "domain", "from_date", "to_date"):
        setattr(self, key, self.config.get(key))
def __init__(self, config, plugin_config):
    """Create the Zendesk client and extract the search parameters."""
    Connector.__init__(self, config, plugin_config)  # pass the parameters to the base class
    logger.info('Init Zendesk plugin')
    self.client = ZendeskClient(config)
    # Keep only the settings relevant to the search endpoint.
    self.search_parameters = self.filter_parameters(config)
    self.zendesk_api = self.config.get("zendesk_api")
def __init__(self, config, plugin_config):
    """Read the Airtable connection parameters.

    Raises:
        ValueError: when the base ID, table name or API key is missing.
    """
    Connector.__init__(self, config, plugin_config)  # pass the parameters to the base class
    self.base = self.config.get("base")
    self.table = self.config.get("table")
    self.key = self.config.get("key")
    if self.base is None or self.table is None or self.key is None:
        # Fixed: the original message was missing its closing parenthesis.
        raise ValueError("Missing parameters (Base ID, or Table name, or API key)")
def __init__(self, config):
    """Store the mirror URL and book identifier chosen by the user.

    The settings-tab values arrive as the json object 'config'.
    """
    Connector.__init__(self, config)  # pass the parameters to the base class
    self.mirror = self.config["mirror"]
    self.book_id = self.config["book_id"]
def __init__(self, config):
    """Split the flat config into customer ids, report fields and key groups."""
    Connector.__init__(self, config)
    self.client_customer_ids = self.parse_client_customer_ids(config['client_customer_ids'])
    self.report_fields = self.parse_report_fields(config.get('report_fields', ''))
    # Partition the remaining settings by purpose, driven by the class-level
    # key lists.
    self.cache_config = {k: config[k] for k in self.CACHE_KEYS}
    self.credentials = {k: config[k] for k in self.CREDENTIALS_KEYS}
    self.report_definition = {k: config[k] for k in self.REPORT_KEYS}
    self.report_definition['report_fields'] = self.report_fields
def __init__(self, config):
    """Keep the user-selected mirror and book id.

    The settings-tab values arrive as the json object 'config'.
    """
    Connector.__init__(self, config)  # pass the parameters to the base class
    self.mirror = self.config['mirror']
    self.book_id = self.config['book_id']
def __init__(self, config, plugin_config):
    """Read the Instagram account settings and open an HTTP session.

    List mode (IS_LIST) is enabled when an account list is configured.
    """
    Connector.__init__(self, config, plugin_config)
    # print() with a single argument behaves the same on Python 2 and 3;
    # the original 'print' statements are syntax errors on Python 3.
    print("==============================================================")
    print(self.config)
    print(self.plugin_config)
    self.ACCOUNT_ID = self.config.get("account_id", None)
    self.ACCESS_TOKEN = self.config.get("access_token", None)
    self.ACCOUNT_LIST = self.config.get("account_list", None)
    # One session reused across all API calls.
    self.session = requests.Session()
    self.API = 'https://api.instagram.com/v1'
    self.IS_LIST = self.ACCOUNT_LIST is not None
    print(self.IS_LIST)
def __init__(self, config, plugin_config):
    """Read the OData list settings and build the client.

    'config' carries the user's dataset settings; 'plugin_config' carries
    the static settings from the plugin's optional settings.json file.
    """
    Connector.__init__(self, config, plugin_config)
    self.odata_list_title = self.config.get("odata_list_title")
    # Rows fetched per request.
    self.bulk_size = config.get("bulk_size", 10000)
    self.client = ODataClient(config)
def __init__(self, config):
    """
    The configuration parameters set up by the user in the settings tab of
    the dataset are passed as a json object 'config' to the constructor
    """
    Connector.__init__(self, config)  # pass the parameters to the base class
    self.cache_folder = self.config.get("cache_folder")
    self.test_mode = self.config["test_mode"]
    self.all_years = self.config["all_years"]
    # print() with a single argument behaves the same on Python 2 and 3;
    # the original 'print' statement is a syntax error on Python 3.
    print('Running Patent Connector cache=%s test=%s all=%s' % (self.cache_folder, self.test_mode, self.all_years))
def __init__(self, config, plugin_config):
    """Read the SharePoint list settings and create the client."""
    Connector.__init__(self, config, plugin_config)
    self.sharepoint_list_title = self.config.get("sharepoint_list_title")
    self.auth_type = config.get('auth_type')
    logger.info('init:sharepoint_list_title={}, auth_type={}'.format(self.sharepoint_list_title, self.auth_type))
    # Column bookkeeping, filled while reading the list schema.
    self.column_ids, self.column_names = {}, {}
    self.expand_lookup = config.get("expand_lookup", False)
    self.column_to_expand = {}
    self.metadata_to_retrieve = config.get("metadata_to_retrieve", [])
    # Metadata columns are shown only when the user selected at least one.
    self.display_metadata = len(self.metadata_to_retrieve) > 0
    self.client = SharePointClient(config)
def __init__(self, config):
    """Load the Salesforce OAuth token and configure the API module.

    Raises:
        ValueError: when the token lacks 'instance_url' or 'access_token'.
    """
    Connector.__init__(self, config)
    token = salesforce.get_json(self.config.get("token"))
    try:
        # Indexing raises KeyError on a missing field. The original used
        # .get(), which never raises, so its validation could not fire and
        # None slipped through silently.
        salesforce.API_BASE_URL = token['instance_url']
        salesforce.ACCESS_TOKEN = token['access_token']
    except Exception:
        raise ValueError(
            "JSON token must contain access_token and instance_url")
    self.RESULT_FORMAT = self.config.get("result_format")
def __init__(self, config):
    """Call the import.io API once and keep the parsed JSON response.

    Raises:
        Exception: when the configured URL is not an import.io API URL.
    """
    Connector.__init__(self, config)
    if not self.config['api_url'].startswith('https://api.import.io/'):
        raise Exception('It looks like this URL is not an API URL. URLs to call the API (and get a json response) start with "api.import.io" .')
    # print() with a single argument behaves the same on Python 2 and 3;
    # the original 'print' statements are syntax errors on Python 3.
    print('[import.io connector] calling API...')
    response = requests.get(self.config['api_url'])
    print('[import.io connector] got response')
    try:
        self.json = response.json()
    except Exception as e:
        # Surface the raw payload to help debug a non-JSON response.
        print(e)
        print('response was:\n%s' % response.text)
        raise
def __init__(self, config, plugin_config):
    """Read the Looker connection parameters.

    Raises:
        ValueError: when the base URL, client ID or client secret is missing.
    """
    Connector.__init__(self, config, plugin_config)  # pass the parameters to the base class
    self.baseurl = self.config["baseurl"]
    self.clientid = self.config["clientid"]
    self.clientsecret = self.config["clientsecret"]
    self.lookid = int(self.config["lookid"])
    if self.baseurl is None or self.clientid is None or self.clientsecret is None:
        # Fixed: the original message was missing its closing parenthesis.
        raise ValueError(
            "Missing parameters (Base URL, or Client ID, or Client Secret)")
def __init__(self, config, plugin_config):
    """Build the REST client from the endpoint, credential and custom keys."""
    Connector.__init__(self, config, plugin_config)  # pass the parameters to the base class
    # Secrets are masked before logging.
    logger.info("config={}".format(logger.filter_secrets(config)))
    endpoint_parameters = get_endpoint_parameters(config)
    credential = config.get("credential", {})
    custom_key_values = get_dku_key_values(config.get("custom_key_values", {}))
    self.client = RestAPIClient(credential, endpoint_parameters, custom_key_values)
    # An empty extraction key is treated the same as "not set".
    extraction_key = endpoint_parameters.get("extraction_key", None)
    self.extraction_key = None if extraction_key == '' else extraction_key
    self.raw_output = endpoint_parameters.get("raw_output", None)
def __init__(self, config, plugin_config):
    """Read the Jira endpoint settings and build the authenticated client."""
    Connector.__init__(self, config, plugin_config)  # pass the parameters to the base class
    logging.info("JiraConnector init")
    self.access_type = self.config.get("access_type", "token_access")
    self.endpoint_name = self.config.get("endpoint_name", "")
    self.item_value = self.config.get("item_value", "")
    self.data = self.config.get("data")
    self.queue_id = self.config.get("queue_id")
    self.expand = self.config.get("expand", [])
    # Credentials live under the config key named by the selected access type.
    self.client = JiraClient(self.config.get(self.access_type))
def __init__(self, config):
    """
    The configuration parameters set up by the user in the settings tab of
    the dataset are passed as a json object 'config' to the constructor
    """
    Connector.__init__(self, config)  # pass the parameters to the base class
    self.cache_folder = self.config.get("cache_folder")
    self.test_mode = self.config["test_mode"]
    self.all_years = self.config["all_years"]
    # print() with a single argument behaves the same on Python 2 and 3;
    # the original 'print' statement is a syntax error on Python 3.
    print('Running Patent Connector cache=%s test=%s all=%s' % (
        self.cache_folder, self.test_mode, self.all_years))
def __init__(self, config):
    """Assemble the Pipedrive API settings and the output format.

    The settings-tab values arrive as the json object 'config'.
    """
    Connector.__init__(self, config)  # pass the parameters to the base class
    self.CONFIG_API = dict(
        API_BASE_URL='https://api.pipedrive.com/v1/',
        API_KEY=self.config.get("api_key"),
        PAGINATION=200,  # page size used for every listing call
    )
    self.RESULT_FORMAT = self.config.get("result_format")
def __init__(self, config):
    """Parse the service account and remember the target sheet settings.

    The settings-tab values arrive as the json object 'config'.
    """
    Connector.__init__(self, config)  # pass the parameters to the base class
    service_account = json.loads(self.config.get("credentials"))
    self.client_email = service_account["client_email"]
    self.private_key = service_account["private_key"]
    self.doc_id = self.config.get("doc_id")
    self.tab_id = self.config.get("tab_id")
    self.result_format = self.config.get("result_format")
    self.list_unique_slugs = []
def __init__(self, config, plugin_config):
    """Read the census extraction parameters from the user configuration."""
    Connector.__init__(self, config, plugin_config)  # pass the parameters to the base class
    # Bind the lookup once; every parameter below is a plain config read.
    get = self.config.get
    self.P_state_list_str = str(get("param_state_list"))
    self.P_STATES_TYPE_NAME = get("param_state_format")
    self.P_CENSUS_CONTENT = get("param_census_content")
    self.P_CENSUS_LEVEL = get("param_census_level")
    self.P_census_fields = str(get("param_fields"))
    self.P_USE_PREVIOUS_SOURCES = get("param_re_use_collected_census_sources")
    self.P_DELETE_US_CENSUS_SOURCES = get("param_delete_census_sources")
def __init__(self, config):
    """Make the only API call, which downloads the data.

    Two API families share this connector; the family is picked from the
    URL prefix.

    Raises:
        Exception: when the configured URL is not an import.io API URL.
    """
    Connector.__init__(self, config)
    if self.config['api_url'].startswith('https://api.import.io/'):
        self.api_version = 'api'
    elif self.config['api_url'].startswith('https://extraction.import.io/'):
        self.api_version = 'extraction'
    else:
        raise Exception(
            'It looks like this URL is not an API URL. URLs to call the API (and get a json response) start with "https://api.import.io" .')
    # print() with a single argument behaves the same on Python 2 and 3;
    # the original 'print' statements are syntax errors on Python 3.
    print('[import.io connector] calling API...')
    response = requests.get(self.config['api_url'])
    print('[import.io connector] got response')
    try:
        self.json = response.json()
    except Exception as e:
        # Surface the raw payload to help debug a non-JSON response.
        print(e)
        print('response was:\n%s' % response.text)
        raise
def __init__(self, config):
    """Pass the user configuration straight to the Connector base class."""
    Connector.__init__(self, config)
def __init__(self, config):
    """Authenticate against GitHub and fetch the configured repository."""
    Connector.__init__(self, config)
    client = github.Github(config["login"], config["password"])
    self.repos = client.get_repo(config["repos"])
def __init__(self, config):
    """Remember the service endpoint and its API key."""
    Connector.__init__(self, config)
    self.key = config["apiKey"]
    self.endpoint = config["endpoint"]
def __init__(self, config, plugin_config):
    """Forward both configuration objects to the Connector base class."""
    Connector.__init__(self, config, plugin_config)
def __init__(self, config):
    """Build the room-history URI from the API endpoint and room name."""
    Connector.__init__(self, config)
    endpoint = self.config["api_endpoint"]
    room = self.config["room_name"]
    self.base_uri = "%s/v2/room/%s/history" % (endpoint, room)
def __init__(self, config):
    """Point the connector at the paginated 'all tickets' Freshdesk view."""
    Connector.__init__(self, config)
    FreshdeskConnector.__init__(self, config)
    view = 'all_tickets'
    self.path = '/helpdesk/tickets/filter/' + view + '?format=json&wf_order=created_at&page='
def __init__(self, config):
    """Point the connector at the paginated Freshdesk contacts listing."""
    Connector.__init__(self, config)
    FreshdeskConnector.__init__(self, config)
    # The page number is appended by the caller.
    self.path = '/contacts.json?page='
def __init__(self, config):
    """Build the ticket-listing path for the configured Freshdesk view."""
    Connector.__init__(self, config)
    FreshdeskConnector.__init__(self, config)
    self.view = config["view"]
    self.path = '/helpdesk/tickets/filter/' + str(self.view) + '?format=json&wf_order=created_at&page='
    # print() with a single argument behaves the same on Python 2 and 3;
    # the original 'print' statement is a syntax error on Python 3.
    print(self.path)