async def get(self, id: Union[int, str], *,
              organization_id: Union[str, int], columns=None):
    """Fetch a single resource by id.

    Args:
        id: Resource identifier appended to the module URL.
        organization_id: Zoho organization the resource belongs to.
        columns: Accepted for interface parity; not used by this endpoint.

    Returns:
        The single resource payload extracted from the response body.

    Raises:
        HTTPError: re-raised from an upstream HTTPClientError, carrying the
            service's own code/message as the reason.
        ValueError: if the response does not hold exactly one resource.
    """
    url = '{base_url}/{id}?{query}'.format(
        base_url=self.module_url, id=id,
        query=urlencode({
            'organization_id': organization_id,
            **self.base_query}))
    try:
        logger.info('GET: {}'.format(url))
        response = await self.http_client.fetch(url, method='GET')
    except HTTPClientError as http_error:
        http_code = http_error.code
        response = json_decode(http_error.response.body.decode("utf-8"))
        message = str(response['code']) + ': ' + response['message']
        raise HTTPError(http_code, reason=message)
    else:
        response = json_decode(response.body.decode("utf-8"))
        results = [v for k, v in response.items()
                   if k not in ['code', 'message']]  # noqa
        # BUG FIX: the original built this ValueError but never raised it,
        # so a malformed response silently fell through to results[0].
        # Note the guard also trips on zero results, not just "more than one".
        if len(results) != 1:
            raise ValueError('More than one resource was returned.')
        return results[0]
async def get(self, id: Union[int, str], *,
              organization_id: Union[str, int], columns=None):
    """Fetch a single resource by id.

    Args:
        id: Resource identifier appended to the module URL.
        organization_id: Zoho organization the resource belongs to.
        columns: Accepted for interface parity; not used by this endpoint.

    Returns:
        The single resource payload extracted from the response body.

    Raises:
        HTTPError: re-raised from an upstream HTTPClientError, carrying the
            service's own code/message as the reason.
        ValueError: if the response does not hold exactly one resource.
    """
    url = '{base_url}/{id}?{query}'.format(
        base_url=self.module_url, id=id,
        query=urlencode({
            'organization_id': organization_id,
            **self.base_query}))
    try:
        logger.info('GET: {}'.format(url))
        response = await self.http_client.fetch(url, method='GET')
    except HTTPClientError as http_error:
        http_code = http_error.code
        response = json_decode(http_error.response.body.decode("utf-8"))
        message = str(response['code']) + ': ' + response['message']
        raise HTTPError(http_code, reason=message)
    else:
        response = json_decode(response.body.decode("utf-8"))
        results = [v for k, v in response.items()
                   if k not in ['code', 'message']]  # noqa
        # BUG FIX: the original built this ValueError but never raised it,
        # so a malformed response silently fell through to results[0].
        # Note the guard also trips on zero results, not just "more than one".
        if len(results) != 1:
            raise ValueError('More than one resource was returned.')
        return results[0]
async def get(self, id: Union[int, str], *, portal, department,
              columns=None):
    """Fetch one record by id from the portal/department endpoint.

    Optionally narrows the response to `columns` via selectfields.
    Exactly one item is expected back; unpacking enforces that.
    """
    params = {
        'id': id,
        'portal': portal,
        'department': department,
        **self.base_query,
    }
    if columns:
        params['selectfields'] = select_columns(self.name, columns)
    endpoint = '{}getrecordsbyid'.format(self.module_url(self.name))
    request_url = endpoint + '?' + urlencode(params)
    logger.info('GET: {}'.format(request_url))
    resp = await self.http_client.fetch(request_url, method='GET')
    payload = json_decode(resp.body.decode("utf-8"))
    [record] = unwrap_items(payload)
    return record
async def update(self, record: dict, *, trigger_workflows: bool=True):
    """Update an existing CRM record; returns the updated record's 'Id'.

    NOTE: pops the module id key out of ``record`` (mutates the caller's
    dict), matching the established behaviour of this client.
    """
    canonical = await self.get_canonical_map()
    name = canonical.canonical_name
    id_key = make_module_id_name(module_map=canonical)
    record_id = record.pop(id_key)
    xml_payload = wrap_items(record, module_name=name)
    endpoint = '{module_url}/updateRecords'.format(
        module_url=self.module_url(name))
    form = urlencode({
        'version': 2,
        'newFormat': 2,
        'wfTrigger': str(trigger_workflows).lower(),
        'id': record_id,
        'xmlData': xml_payload,
        **self.base_query})
    logger.info('POST: {}, BODY: {}'.format(endpoint, form))
    resp = await self.http_client.fetch(endpoint, method='POST', body=form)
    decoded = json_decode(resp.body.decode('utf-8'))
    [updated] = unwrap_items(decoded)
    return updated['Id']
async def filter(self, *, organization_id: Union[str, int],
                 term: Optional[str]=None, columns: Optional[list]=None,
                 offset: int=0, limit: Optional[int]=None):
    """Page through the module listing, optionally fuzzy-filtered by term.

    Args:
        organization_id: Zoho organization to query.
        term: optional search term; when given, every page is fetched and
            results are sorted by fuzzy-match score (best first).
        columns: optional list of keys to project each result down to.
        offset: number of leading records to skip (approximated by page).
        limit: maximum number of records to return; None means no limit.

    Returns:
        A list of record dicts (projected to `columns` when given).

    Raises:
        HTTPError: re-raised from an upstream HTTPClientError with the
            service's code/message as the reason.
    """
    if limit == 0:
        return []
    elif not term and limit and limit <= self.service.MAX_PAGE_SIZE:
        batch_size = limit
    else:
        batch_size = self.service.MAX_PAGE_SIZE
    paging = True
    page_index = max(ceil(offset / batch_size), 1)
    results = []
    # Loop until we reach the index we need, unless there is a search term.
    # If there is a search term we need all records.
    while paging and (term or not limit or len(results) < limit):
        url = '{base_url}?{query}'.format(
            base_url=self.module_url,
            query=urlencode({
                'organization_id': organization_id,
                'per_page': batch_size,
                'page': page_index,
                **self.base_query}))
        try:
            logger.info('GET: {}'.format(url))
            response = await self.http_client.fetch(url, method='GET')
        except HTTPClientError as http_error:
            http_code = http_error.code
            body = http_error.response.body
            response = json_decode(body.decode("utf-8"))
            message = str(response['code']) + ': ' + response['message']
            raise HTTPError(http_code, reason=message)
        else:
            response = json_decode(response.body.decode("utf-8"))
            results += response[self.name]
            page_index += 1
            paging = response['page_context']['has_more_page']

    def fuzzy_score(items):
        values = [str(v).lower() for v in items.values() if v]
        target = ' '.join(values)
        # BUG FIX: values are lowercased but the term was not, so the
        # intended case-insensitive match depended on the caller's casing.
        return fuzz.partial_ratio(term.lower(), target)

    if term:
        results = sorted(results, key=fuzzy_score, reverse=True)
    results = results[:limit]
    if columns:
        # BUG FIX: the original tested `k in columns` while iterating
        # columns (always true), raising KeyError for any requested column
        # missing from a record; membership must be tested in the record.
        return [{k: pl[k] for k in columns if k in pl} for pl in results]
    else:
        return results
async def insert(self, record: dict):
    """Create a new record in this module; returns the new record's 'id'."""
    endpoint = self.module_url(self.name)
    # Coerce every value into the wire format the service expects.
    converted = {key: to_zoho_value(value) for key, value in record.items()}
    payload = urlencode({**converted, **self.base_query})
    logger.info('POST: {}, BODY: {}'.format(endpoint, payload))
    resp = await self.http_client.fetch(endpoint, method='POST',
                                        body=payload)
    decoded = json_decode(resp.body.decode('utf-8'))
    [created] = unwrap_items(decoded)
    return created['id']
async def get(self, id: Union[int, str], *, columns=None):
    """Fetch one record by id, optionally projected down to `columns`."""
    request_url = '{module_url}{id}/?{query}'.format(
        module_url=self.module_url(self.name), id=id,
        query=urlencode(self.base_query))
    logger.info('GET: {}'.format(request_url))
    resp = await self.http_client.fetch(request_url, method='GET')
    payload = json_decode(resp.body.decode("utf-8"))
    [record] = unwrap_items(payload)
    # With no column filter every field passes; otherwise keep only the
    # requested ones. Always returns a fresh dict, as before.
    wanted = set(columns) if columns else None
    return {field: value for field, value in record.items()
            if wanted is None or field in wanted}
async def update(self, record: dict):
    """Update an existing record; returns the updated item.

    NOTE: pops 'id' out of ``record`` (mutates the caller's dict),
    matching the established behaviour of this client.
    """
    record_id = record.pop('id')
    endpoint = '{module_url}{id}/'.format(
        module_url=self.module_url(self.name), id=record_id)
    payload = urlencode({**record, **self.base_query})
    logger.info('POST: {}, BODY: {}'.format(endpoint, payload))
    resp = await self.http_client.fetch(endpoint, method='POST',
                                        body=payload)
    decoded = json_decode(resp.body.decode('utf-8'))
    [updated] = unwrap_items(decoded)
    return updated
async def filter(self, *, term: Optional[str]=None,
                 columns: Optional[list]=None, offset: int=0,
                 limit: Optional[int]=None):
    """Page through records with index/range paging; optional fuzzy sort.

    `columns` is accepted but unused by this implementation. A 204 from
    the service marks the end of the data: mid-paging it ends the loop,
    on the very first request it is surfaced as an HTTPError.
    """
    if limit == 0:
        return []
    elif not term and limit and limit <= self.service.MAX_PAGE_SIZE:
        # Small, termless request: a single page of exactly `limit` rows.
        batch_size = limit
    else:
        batch_size = self.service.MAX_PAGE_SIZE
    paging = True
    from_index = offset + 1  # Zoho indexes at one not zero
    to_index = offset + batch_size
    results = []
    # A search term forces fetching every page (scoring needs them all);
    # otherwise stop once the requested window has been covered.
    while paging and (term or limit is None or to_index <= limit):
        query = {
            'index': from_index,
            'range': batch_size,
            **self.base_query}
        url = '{module_url}?{query}'.format(
            module_url=self.module_url(self.name),
            query=urlencode(query))
        logger.info('GET: {}'.format(url))
        response = await self.http_client.fetch(url, method='GET')
        if response.code == 204 and from_index - 1 != offset:
            # if paging and hit end finish paging
            paging = False
        elif response.code == 204:
            # unless first request caused the 204
            raise HTTPError(204, reason='No items found')
        else:
            body = json_decode(response.body.decode('utf-8'))
            items = unwrap_items(body)
            results += items
            from_index = to_index + 1
            to_index += batch_size

    def fuzzy_score(resource):
        # NOTE(review): neither side is lowercased here, unlike the
        # organization filter which lowercases values — confirm whether
        # case-sensitive scoring is intended.
        values = [str(v) for v in resource.values() if v]
        target = ' '.join(values)
        return fuzz.partial_ratio(term, target)

    if term:
        results = sorted(results, key=fuzzy_score, reverse=True)
    return results[:limit]
async def delete(self, id: Union[int, str]):
    """Delete a record by id; always reports success.

    Errors from the HTTP client are deliberately swallowed so the return
    value mimics the Zoho CRM response and is always True.
    """
    # TODO: add get test to zoho CRM
    target = '{module_url}{id}/?{query}'.format(
        module_url=self.module_url(self.name), id=id,
        query=urlencode(self.base_query))
    logger.info('DELETE: {}'.format(target))
    try:
        await self.http_client.fetch(target, method='DELETE')
    except HTTPClientError:
        # mimic zoho crm response and always return True
        pass
    return True
async def delete_file(self, id: Union[int, str]):
    """Delete an attached file by id; returns the unwrapped status."""
    canonical = await self.get_canonical_map()
    base = self.module_url(canonical.canonical_name)
    params = urlencode({'id': id, **self.base_query})
    endpoint = '{module_url}/deleteFile?{query}'.format(
        module_url=base, query=params)
    logger.info('GET: {}'.format(endpoint))
    resp = await self.http_client.fetch(endpoint, method='GET')
    decoded = json_decode(resp.body.decode('utf-8'))
    return unwrap_items(decoded)
async def get_module_maps(self):
    """List the service's modules as ModuleMap alias records."""
    # TODO: only do if map alias is True
    endpoint = '{base_url}/Info/getModules?{query}'.format(
        base_url=self.base_url, query=urlencode(self.base_query))
    logger.info('GET: {}'.format(endpoint))
    resp = await self.http_client.fetch(endpoint)
    decoded = json_decode(resp.body.decode('utf-8'))
    rows = decoded['response']['result']['row']
    maps = []
    for row in rows:
        maps.append(ModuleMap(canonical_name=row['content'],
                              singular_alias=row['sl'],
                              plural_alias=row['pl']))
    return maps
async def get_module_maps(self):
    """Return one ModuleMap per module exposed by the service."""
    # TODO: only do if map alias is True
    request_url = '{base_url}/Info/getModules?{query}'.format(
        base_url=self.base_url, query=urlencode(self.base_query))
    logger.info('GET: {}'.format(request_url))
    resp = await self.http_client.fetch(request_url)
    payload = json_decode(resp.body.decode('utf-8'))
    # The aliases live under response -> result -> row.
    return [
        ModuleMap(canonical_name=entry['content'],
                  singular_alias=entry['sl'],
                  plural_alias=entry['pl'])
        for entry in payload['response']['result']['row']
    ]
async def delete_file(self, id: Union[int, str]):
    """Remove a file attachment by id; returns the unwrapped status."""
    mapping = await self.get_canonical_map()
    endpoint = '{module_url}/deleteFile?{query}'.format(
        module_url=self.module_url(mapping.canonical_name),
        query=urlencode({'id': id, **self.base_query}))
    logger.info('GET: {}'.format(endpoint))
    resp = await self.http_client.fetch(endpoint, method='GET')
    status = unwrap_items(json_decode(resp.body.decode('utf-8')))
    return status
async def upload_file(self, *, record_id: Union[int, str], url: str):
    """Attach the file at `url` to a record; returns the attachment 'Id'."""
    canonical = await self.get_canonical_map()
    base = self.module_url(canonical.canonical_name)
    endpoint = '{module_url}/uploadFile'.format(module_url=base)
    form = urlencode({
        'id': record_id,
        'attachmentUrl': url,
        **self.base_query})
    logger.info('POST: {}, BODY: {}'.format(endpoint, form))
    resp = await self.http_client.fetch(endpoint, method='POST', body=form)
    decoded = json_decode(resp.body.decode('utf-8'))
    [attachment] = unwrap_items(decoded)
    return attachment['Id']
async def get(self, id: Union[int, str], *, columns=None):
    """Fetch one CRM record by id, optionally restricted to `columns`."""
    canonical = await self.get_canonical_map()
    name = canonical.canonical_name
    params = {'id': id, 'version': 2, 'newFormat': 2, **self.base_query}
    if columns:
        params['selectColumns'] = select_columns(name, columns)
    endpoint = '{module_url}/getRecordById?{query}'.format(
        module_url=self.module_url(name), query=urlencode(params))
    logger.info('GET: {}'.format(endpoint))
    resp = await self.http_client.fetch(endpoint, method='GET')
    decoded = json_decode(resp.body.decode('utf-8'))
    # Exactly one record is expected; unpacking enforces that.
    [record] = unwrap_items(decoded)
    return record
async def get(self, id: Union[int, str], *, portal, department,
              columns=None):
    """Retrieve a single record through getrecordsbyid.

    When `columns` is given the response is narrowed via selectfields.
    """
    criteria = {
        'id': id,
        'portal': portal,
        'department': department,
        **self.base_query}
    if columns:
        criteria['selectfields'] = select_columns(self.name, columns)
    request_url = '{}getrecordsbyid'.format(
        self.module_url(self.name)) + '?' + urlencode(criteria)
    logger.info('GET: {}'.format(request_url))
    resp = await self.http_client.fetch(request_url, method='GET')
    decoded = json_decode(resp.body.decode("utf-8"))
    [record] = unwrap_items(decoded)
    return record
async def upload_file(self, *, record_id: Union[int, str], url: str):
    """Upload a file (by its URL) onto a record; returns the new 'Id'."""
    mapping = await self.get_canonical_map()
    target = '{module_url}/uploadFile'.format(
        module_url=self.module_url(mapping.canonical_name))
    payload = urlencode({
        'id': record_id,
        'attachmentUrl': url,
        **self.base_query})
    logger.info('POST: {}, BODY: {}'.format(target, payload))
    resp = await self.http_client.fetch(target, method='POST',
                                        body=payload)
    [uploaded] = unwrap_items(json_decode(resp.body.decode('utf-8')))
    return uploaded['Id']
async def insert(self, record: dict, *, trigger_workflows: bool = True):
    """Insert a CRM record via insertRecords; returns the new 'Id'.

    `trigger_workflows` is forwarded to the API as the lowercase string
    wfTrigger flag; duplicate checking stays enabled.
    """
    canonical = await self.get_canonical_map()
    name = canonical.canonical_name
    endpoint = '{module_url}/insertRecords'.format(
        module_url=self.module_url(name))
    xml_payload = wrap_items(record, module_name=name)
    form = urlencode({
        'version': 2,
        'xmlData': xml_payload,
        'newFormat': 2,
        'wfTrigger': str(trigger_workflows).lower(),
        'duplicateCheck': 1,
        **self.base_query})
    logger.info('POST: {}, BODY: {}'.format(endpoint, form))
    resp = await self.http_client.fetch(endpoint, method='POST', body=form)
    decoded = json_decode(resp.body.decode('utf-8'))
    [created] = unwrap_items(decoded)
    return created['Id']
async def get(self, id: Union[int, str], *, columns=None):
    """Look up one CRM record via getRecordById.

    A `columns` list is translated into the API's selectColumns clause.
    """
    mapping = await self.get_canonical_map()
    module = mapping.canonical_name
    request = {
        'id': id,
        'version': 2,
        'newFormat': 2,
        **self.base_query}
    if columns:
        request['selectColumns'] = select_columns(module, columns)
    target = '{module_url}/getRecordById?{query}'.format(
        module_url=self.module_url(module), query=urlencode(request))
    logger.info('GET: {}'.format(target))
    resp = await self.http_client.fetch(target, method='GET')
    [record] = unwrap_items(json_decode(resp.body.decode('utf-8')))
    return record
async def filter(self, *, term: Optional[str] = None,
                 columns: Optional[list] = None, offset: int = 0,
                 limit: Optional[int] = None):
    """Page through CRM records via getRecords; optional fuzzy sort by term.

    A 404 raised by unwrap_items marks the end of the data: mid-paging it
    ends the loop quietly, on the very first request it is re-raised.
    """
    module_map = await self.get_canonical_map()
    module_name = module_map.canonical_name
    module_url = self.module_url(module_name)
    if limit == 0:
        return []
    elif not term and limit and limit <= self.service.MAX_PAGE_SIZE:
        # Small, termless request: a single page of exactly `limit` rows.
        batch_size = limit
    else:
        batch_size = self.service.MAX_PAGE_SIZE
    paging = True
    from_index = offset + 1  # Zoho indexes at one not zero
    to_index = offset + batch_size
    results = []
    # Loop until we reach index we need, unless there is a search term.
    # If search term we need all records (scoring needs the full set).
    while paging and (term or limit is None or to_index <= limit):
        query = {
            'fromIndex': from_index,
            'toIndex': to_index,
            'newFormat': 2,
            'version': 2,
            **self.base_query
        }
        if columns:
            query['selectColumns'] = select_columns(module_name, columns)
        url = '{module_url}/getRecords?{query}'.format(
            module_url=module_url, query=urlencode(query))
        logger.info('GET: {}'.format(url))
        response = await self.http_client.fetch(url, method='GET')
        body = json_decode(response.body.decode('utf-8'))
        try:
            items = unwrap_items(body)
        except HTTPError as http_error:
            # if paging and hit end suppress error
            # unless first request caused the 404
            if http_error.status_code == 404 and from_index - 1 != offset:
                paging = False
            else:
                raise
        else:
            results += items
            from_index = to_index + 1
            to_index += batch_size

    def fuzzy_score(resource):
        # NOTE(review): case-sensitive scoring here, unlike the
        # organization filter which lowercases values — confirm intended.
        values = [str(v) for v in resource.values() if v]
        target = ' '.join(values)
        return fuzz.partial_ratio(term, target)

    if term:
        results = sorted(results, key=fuzzy_score, reverse=True)
    return results[:limit]
async def filter(self, *, organization_id: Union[str, int],
                 term: Optional[str] = None, columns: Optional[list] = None,
                 offset: int = 0, limit: Optional[int] = None):
    """Page through the module listing, optionally fuzzy-filtered by term.

    Args:
        organization_id: Zoho organization to query.
        term: optional search term; when given, every page is fetched and
            results are sorted by fuzzy-match score (best first).
        columns: optional list of keys to project each result down to.
        offset: number of leading records to skip (approximated by page).
        limit: maximum number of records to return; None means no limit.

    Returns:
        A list of record dicts (projected to `columns` when given).

    Raises:
        HTTPError: re-raised from an upstream HTTPClientError with the
            service's code/message as the reason.
    """
    if limit == 0:
        return []
    elif not term and limit and limit <= self.service.MAX_PAGE_SIZE:
        batch_size = limit
    else:
        batch_size = self.service.MAX_PAGE_SIZE
    paging = True
    page_index = max(ceil(offset / batch_size), 1)
    results = []
    # Loop until we reach the index we need, unless there is a search term.
    # If there is a search term we need all records.
    while paging and (term or not limit or len(results) < limit):
        url = '{base_url}?{query}'.format(
            base_url=self.module_url,
            query=urlencode({
                'organization_id': organization_id,
                'per_page': batch_size,
                'page': page_index,
                **self.base_query}))
        try:
            logger.info('GET: {}'.format(url))
            response = await self.http_client.fetch(url, method='GET')
        except HTTPClientError as http_error:
            http_code = http_error.code
            body = http_error.response.body
            response = json_decode(body.decode("utf-8"))
            message = str(response['code']) + ': ' + response['message']
            raise HTTPError(http_code, reason=message)
        else:
            response = json_decode(response.body.decode("utf-8"))
            results += response[self.name]
            page_index += 1
            paging = response['page_context']['has_more_page']

    def fuzzy_score(items):
        values = [str(v).lower() for v in items.values() if v]
        target = ' '.join(values)
        # BUG FIX: values are lowercased but the term was not, so the
        # intended case-insensitive match depended on the caller's casing.
        return fuzz.partial_ratio(term.lower(), target)

    if term:
        results = sorted(results, key=fuzzy_score, reverse=True)
    results = results[:limit]
    if columns:
        # BUG FIX: the original tested `k in columns` while iterating
        # columns (always true), raising KeyError for any requested column
        # missing from a record; membership must be tested in the record.
        return [{k: pl[k] for k in columns if k in pl} for pl in results]
    else:
        return results
async def filter(self, *, term: Optional[str]=None,
                 columns: Optional[list]=None, offset: int=0,
                 limit: Optional[int]=None):
    """Page through CRM records via getRecords; optional fuzzy sort by term.

    A 404 raised by unwrap_items marks the end of the data: mid-paging it
    ends the loop quietly, on the very first request it is re-raised.
    """
    module_map = await self.get_canonical_map()
    module_name = module_map.canonical_name
    module_url = self.module_url(module_name)
    if limit == 0:
        return []
    elif not term and limit and limit <= self.service.MAX_PAGE_SIZE:
        # Small, termless request: a single page of exactly `limit` rows.
        batch_size = limit
    else:
        batch_size = self.service.MAX_PAGE_SIZE
    paging = True
    from_index = offset + 1  # Zoho indexes at one not zero
    to_index = offset + batch_size
    results = []
    # Loop until we reach index we need, unless there is a search term.
    # If search term we need all records (scoring needs the full set).
    while paging and (term or limit is None or to_index <= limit):
        query = {
            'fromIndex': from_index,
            'toIndex': to_index,
            'newFormat': 2,
            'version': 2,
            **self.base_query}
        if columns:
            query['selectColumns'] = select_columns(module_name, columns)
        url = '{module_url}/getRecords?{query}'.format(
            module_url=module_url, query=urlencode(query))
        logger.info('GET: {}'.format(url))
        response = await self.http_client.fetch(url, method='GET')
        body = json_decode(response.body.decode('utf-8'))
        try:
            items = unwrap_items(body)
        except HTTPError as http_error:
            # if paging and hit end suppress error
            # unless first request caused the 404
            if http_error.status_code == 404 and from_index - 1 != offset:
                paging = False
            else:
                raise
        else:
            results += items
            from_index = to_index + 1
            to_index += batch_size

    def fuzzy_score(resource):
        # NOTE(review): case-sensitive scoring here, unlike the
        # organization filter which lowercases values — confirm intended.
        values = [str(v) for v in resource.values() if v]
        target = ' '.join(values)
        return fuzz.partial_ratio(term, target)

    if term:
        results = sorted(results, key=fuzzy_score, reverse=True)
    return results[:limit]