def sync_tickets(client, fetch_ticket_status=None, fetch_sub_entities=None):
    """Write schemas for the tickets stream and its sub-entity streams, then
    run one sync pass per requested ticket status.

    Args:
        client: HTTP client forwarded to sync_tickets_by_filter.
        fetch_ticket_status: iterable of status selectors; recognized values
            are 'all', 'deleted' and 'spam'. 'all' triggers every pass.
            None or empty means no pass runs.
        fetch_sub_entities: iterable of sub-entity stream names forwarded to
            sync_tickets_by_filter (e.g. 'conversations').
    """
    # Guard against the None defaults so the membership tests below cannot
    # raise TypeError ('x' in None) when the caller omits the arguments.
    fetch_ticket_status = fetch_ticket_status or ()
    fetch_sub_entities = fetch_sub_entities or ()

    bookmark_property = 'updated_at'
    # All four streams share the same bookmark field and 'id' key.
    for stream in ("tickets", "conversations", "satisfaction_ratings",
                   "time_entries"):
        singer.write_schema(stream, utils.load_schema(stream), ["id"],
                            bookmark_properties=[bookmark_property])

    if 'all' in fetch_ticket_status:
        sync_tickets_by_filter(client, bookmark_property,
                               fetch_sub_entities=fetch_sub_entities)
    # Deleted and spam tickets are only returned by their dedicated filters,
    # so 'all' also implies both filtered passes.
    if 'all' in fetch_ticket_status or 'deleted' in fetch_ticket_status:
        sync_tickets_by_filter(client, bookmark_property,
                               predefined_filter="deleted",
                               fetch_sub_entities=fetch_sub_entities)
    if 'all' in fetch_ticket_status or 'spam' in fetch_ticket_status:
        sync_tickets_by_filter(client, bookmark_property,
                               predefined_filter="spam",
                               fetch_sub_entities=fetch_sub_entities)
def sync_tickets():
    """Emit schemas for tickets and related streams, then run the
    unfiltered, 'deleted' and 'spam' ticket sync passes in order."""
    bookmark_property = 'updated_at'
    # Every stream is keyed on 'id' and bookmarked on updated_at.
    for stream in ("tickets", "conversations", "satisfaction_ratings",
                   "time_entries"):
        singer.write_schema(stream, utils.load_schema(stream), ["id"],
                            bookmark_properties=[bookmark_property])
    sync_tickets_by_filter(bookmark_property)
    sync_tickets_by_filter(bookmark_property, "deleted")
    sync_tickets_by_filter(bookmark_property, "spam")
def sync_ticket_activities():
    """Sync the ticket_activities export, one day per iteration.

    Requests the daily activities export starting from the saved bookmark,
    widens the stream schema with any activity keys seen in the payload,
    writes the records, and advances the bookmark by one day until the API
    reports 'file_not_found' (no export for that date yet).
    """
    logger.info("Getting ticket_activities URL")
    auth = (CONFIG['api_key'], "")
    endpoint = "ticket_activities"
    activities_date = get_start(endpoint)
    # Activity keys that already exist in the static schema (or are known
    # noise) and must not be re-declared as plain strings. Hoisted to a
    # frozenset so the per-key membership test is O(1).
    known_keys = frozenset([
        'note', 'automation', 'association', 'timesheet', 'send_email',
        'requester_id', 'source', 'priority', 'new_ticket', 'agent_id',
        'added_tags', 'removed_tags', 'added_watcher', 'removed_watcher',
        'Updated Amendment Tool in Internal Tools', 'thank_you_note',
        'spam', 'deleted',
    ])
    while True:
        params = {'created_at': activities_date}
        try:
            data = request(get_url(endpoint), params, auth).json()
        except HTTPError as e:
            resp_json = e.response.json()
            # 'file_not_found' means no export exists for this date yet —
            # we have caught up, so stop paging.
            if resp_json.get('message') == 'file_not_found':
                break
            raise  # bare raise preserves the original traceback
        export_url = data['export'][0]['url']
        data = request(export_url).json()

        # Collect any activity keys not covered by the static schema and
        # declare them as nullable strings.
        updated_schema = {"properties": {}}
        for row in data['activities_data']:
            for key in row['activity'].keys():
                if key not in known_keys:
                    updated_schema['properties'][key] = {
                        "type": ["null", "string"]
                    }
            # Normalize the API's 'dd-mm-YYYY HH:MM:SS +zzzz' timestamp to
            # the RFC 3339 form Singer expects.
            row['performed_at'] = datetime.strftime(
                datetime.strptime(row['performed_at'],
                                  '%d-%m-%Y %H:%M:%S %z'),
                '%Y-%m-%dT%H:%M:%SZ')

        bookmark_property = 'performed_at'
        schema = utils.load_schema('ticket_activities')
        schema['properties']['activity']['properties'].update(
            updated_schema['properties'])
        singer.write_schema('ticket_activities', schema, [],
                            bookmark_properties=[bookmark_property])

        # Advance the bookmark one day before writing records so a crash
        # mid-write does not skip this day on resume.
        activities_date = datetime.strftime(
            datetime.strptime(activities_date, TICKETS_DATETIME_FMT)
            + timedelta(days=1),
            TICKETS_DATETIME_FMT)
        for row in data['activities_data']:
            logger.info("Ticket {}: Syncing".format(row['ticket_id']))
            singer.write_record('ticket_activities', row,
                                time_extracted=singer.utils.now())
        utils.update_state(STATE, "ticket_activities", activities_date)
        singer.write_state(STATE)
def sync_time_filtered(entity):
    """Sync every record of *entity* updated at or after the saved bookmark.

    Aligned with the sibling sync_time_filtered implementation in this file:
    declares the bookmark property on the schema and stamps each record with
    its extraction time.

    Args:
        entity: stream/endpoint name (schema file and URL key).
    """
    bookmark_property = 'updated_at'
    singer.write_schema(entity, utils.load_schema(entity), ["id"],
                        bookmark_properties=[bookmark_property])
    start = get_start(entity)
    logger.info("Syncing {} from {}".format(entity, start))
    for row in gen_request(get_url(entity)):
        if row[bookmark_property] >= start:
            # custom_fields values are forced to strings for a stable schema.
            if 'custom_fields' in row:
                row['custom_fields'] = transform_dict(row['custom_fields'],
                                                      force_str=True)
            utils.update_state(STATE, entity, row[bookmark_property])
            singer.write_record(entity, row,
                                time_extracted=singer.utils.now())
    singer.write_state(STATE)
def sync_time_filtered(entity):
    """Replicate *entity* records whose bookmark is at or past the saved
    start, updating state as each record is written.

    Args:
        entity: stream/endpoint name (schema file and URL key).
    """
    bookmark_property = 'updated_at'
    schema = utils.load_schema(entity)
    singer.write_schema(entity, schema, ["id"],
                        bookmark_properties=[bookmark_property])
    start = get_start(entity)
    logger.info("Syncing {} from {}".format(entity, start))
    for record in gen_request(get_url(entity)):
        # Skip anything older than the bookmark.
        if record[bookmark_property] < start:
            continue
        if 'custom_fields' in record:
            record['custom_fields'] = transform_dict(record['custom_fields'],
                                                     force_str=True)
        utils.update_state(STATE, entity, record[bookmark_property])
        singer.write_record(entity, record,
                            time_extracted=singer.utils.now())
    singer.write_state(STATE)
def sync_tickets():
    """Sync tickets updated since the saved bookmark, and for each ticket
    also sync its conversations, satisfaction ratings and time entries.

    State is advanced per ticket and written after each ticket completes,
    so a resumed run restarts at the last fully-synced ticket.
    """
    bookmark_property = 'updated_at'
    singer.write_schema("tickets", utils.load_schema("tickets"), ["id"], bookmark_properties=[bookmark_property])
    singer.write_schema("conversations", utils.load_schema("conversations"), ["id"], bookmark_properties=[bookmark_property])
    singer.write_schema("satisfaction_ratings", utils.load_schema("satisfaction_ratings"), ["id"], bookmark_properties=[bookmark_property])
    singer.write_schema("time_entries", utils.load_schema("time_entries"), ["id"], bookmark_properties=[bookmark_property])
    start = get_start("tickets")
    # Ascending order by the bookmark field keeps the saved state monotonic.
    params = {
        'updated_since': start,
        'order_by': bookmark_property,
        'order_type': "asc",
    }
    for i, row in enumerate(gen_request(get_url("tickets"), params)):
        logger.info("Ticket {}: Syncing".format(row['id']))
        # Attachments are dropped; custom_fields values become strings so
        # the schema stays stable across accounts.
        row.pop('attachments', None)
        row['custom_fields'] = transform_dict(row['custom_fields'], force_str=True)

        # get all sub-entities and save them
        logger.info("Ticket {}: Syncing conversations".format(row['id']))
        try:
            for subrow in gen_request(
                    get_url("sub_ticket", id=row['id'], entity="conversations")):
                # attachments and body are bulky and not replicated.
                subrow.pop("attachments", None)
                subrow.pop("body", None)
                if subrow[bookmark_property] >= start:
                    singer.write_record("conversations", subrow,
                                        time_extracted=singer.utils.now())
        except HTTPError as e:
            # 403 here indicates a ticket id we are not allowed to read.
            if e.response.status_code == 403:
                logger.info(
                    'Invalid ticket ID requested from Freshdesk {0}'.format(
                        row['id']))
            else:
                raise
        try:
            logger.info("Ticket {}: Syncing satisfaction ratings".format(
                row['id']))
            for subrow in gen_request(
                    get_url("sub_ticket", id=row['id'],
                            entity="satisfaction_ratings")):
                # Flatten the ratings list into a dict keyed by question.
                subrow['ratings'] = transform_dict(subrow['ratings'],
                                                   key_key="question")
                if subrow[bookmark_property] >= start:
                    singer.write_record("satisfaction_ratings", subrow,
                                        time_extracted=singer.utils.now())
        except HTTPError as e:
            # 403 means the account lacks the Surveys feature — skip stream.
            if e.response.status_code == 403:
                logger.info(
                    "The Surveys feature is unavailable. Skipping the satisfaction_ratings stream."
                )
            else:
                raise
        try:
            logger.info("Ticket {}: Syncing time entries".format(row['id']))
            for subrow in gen_request(
                    get_url("sub_ticket", id=row['id'], entity="time_entries")):
                if subrow[bookmark_property] >= start:
                    singer.write_record("time_entries", subrow,
                                        time_extracted=singer.utils.now())
        except HTTPError as e:
            # 403 means the account lacks the Timesheets feature — skip stream.
            if e.response.status_code == 403:
                logger.info(
                    "The Timesheets feature is unavailable. Skipping the time_entries stream."
                )
            else:
                raise
        # Record the ticket itself last, then persist state so a crash
        # resumes at this ticket's bookmark.
        utils.update_state(STATE, "tickets", row[bookmark_property])
        singer.write_record("tickets", row, time_extracted=singer.utils.now())
        singer.write_state(STATE)
def sync_tickets_by_filter(client, bookmark_property, predefined_filter=None,
                           fetch_sub_entities=None):
    """Sync the tickets stream, optionally restricted to a predefined
    Freshdesk filter, along with any requested per-ticket sub-entities.

    Args:
        client: HTTP client handed to gen_request for API paging.
        bookmark_property: record field used for the incremental bookmark
            (callers pass 'updated_at').
        predefined_filter: optional Freshdesk filter name ('deleted' or
            'spam'); also namespaces the state key (e.g. 'tickets_deleted').
        fetch_sub_entities: iterable of sub-entity stream names to sync for
            each ticket ('conversations', 'satisfaction_ratings',
            'time_entries'). None or empty syncs tickets only.
    """
    # Guard against the None default so the membership tests below cannot
    # raise TypeError ('conversations' in None).
    fetch_sub_entities = fetch_sub_entities or ()

    endpoint = "tickets"
    # Each filtered pass keeps its own bookmark.
    state_entity = endpoint
    if predefined_filter:
        state_entity = state_entity + "_" + predefined_filter
    start = get_start(state_entity)
    params = {
        'updated_since': start,
        'order_by': bookmark_property,
        'order_type': "asc",
        'include': "company,requester,stats"
    }
    if predefined_filter:
        logger.info("Syncing tickets with filter {}".format(predefined_filter))
        params['filter'] = predefined_filter

    tickets_schema = utils.load_schema('tickets')
    conversations_schema = utils.load_schema('conversations')
    ratings_schema = utils.load_schema('satisfaction_ratings')
    time_entries_schema = utils.load_schema('time_entries')

    for i, row in enumerate(gen_request(client, get_url(endpoint), params)):
        logger.info("Ticket {}: Syncing".format(row['id']))
        # Attachments are dropped; custom_fields values become strings so
        # the schema stays stable across accounts.
        row.pop('attachments', None)
        row['custom_fields'] = transform_dict(row['custom_fields'],
                                              force_str=True)

        # get all sub-entities and save them
        if 'conversations' in fetch_sub_entities:
            try:
                logger.info("Ticket {}: Syncing conversations".format(
                    row['id']))
                for subrow in gen_request(
                        client,
                        get_url("sub_ticket", id=row['id'],
                                entity="conversations")):
                    # attachments and body are bulky and not replicated.
                    subrow.pop("attachments", None)
                    subrow.pop("body", None)
                    if subrow[bookmark_property] >= start:
                        subrow = utils.reorder_fields_by_schema(
                            subrow, conversations_schema)
                        singer.write_record(
                            "conversations", subrow,
                            time_extracted=singer.utils.now())
            except HTTPError as e:
                # 403 indicates a ticket id we are not allowed to read.
                if e.response.status_code == 403:
                    logger.info(
                        'Invalid ticket ID requested from Freshdesk {0}'.
                        format(row['id']))
                else:
                    raise

        if 'satisfaction_ratings' in fetch_sub_entities:
            try:
                logger.info("Ticket {}: Syncing satisfaction ratings".format(
                    row['id']))
                for subrow in gen_request(
                        client,
                        get_url("sub_ticket", id=row['id'],
                                entity="satisfaction_ratings")):
                    # Flatten the ratings list into a dict keyed by question.
                    subrow['ratings'] = transform_dict(subrow['ratings'],
                                                       key_key="question")
                    if subrow[bookmark_property] >= start:
                        subrow = utils.reorder_fields_by_schema(
                            subrow, ratings_schema)
                        singer.write_record(
                            "satisfaction_ratings", subrow,
                            time_extracted=singer.utils.now())
            except HTTPError as e:
                # 403 means the Surveys feature is disabled — skip stream.
                if e.response.status_code == 403:
                    logger.info(
                        "The Surveys feature is unavailable. Skipping the satisfaction_ratings stream."
                    )
                else:
                    raise

        if 'time_entries' in fetch_sub_entities:
            try:
                logger.info("Ticket {}: Syncing time entries".format(
                    row['id']))
                for subrow in gen_request(
                        client,
                        get_url("sub_ticket", id=row['id'],
                                entity="time_entries")):
                    if subrow[bookmark_property] >= start:
                        subrow = utils.reorder_fields_by_schema(
                            subrow, time_entries_schema)
                        singer.write_record(
                            "time_entries", subrow,
                            time_extracted=singer.utils.now())
            except HTTPError as e:
                # 403 means the Timesheets feature is disabled — skip stream.
                if e.response.status_code == 403:
                    logger.info(
                        "The Timesheets feature is unavailable. Skipping the time_entries stream."
                    )
                elif e.response.status_code == 404:
                    # 404 is being returned for deleted tickets and spam
                    logger.info(
                        "Could not retrieve time entries for ticket id {}. This may be caused by tickets "
                        "marked as spam or deleted.".format(row['id']))
                else:
                    raise

        # Record the ticket itself last, then persist state so a crash
        # resumes at this ticket's bookmark.
        utils.update_state(STATE, state_entity, row[bookmark_property])
        row = utils.reorder_fields_by_schema(row, tickets_schema)
        singer.write_record(endpoint, row, time_extracted=singer.utils.now())
        singer.write_state(STATE)