def export_total_talk_time_per_lead_for_each_org():
    """For each api key given, upload a CSV to dropbox of the total talk time
    per lead per user for an organization.
    """
    global leads
    global calls_per_lead
    global user_ids_to_names
    global api

    for api_key in os.environ.get('CLOSE_API_KEYS').split(','):
        ## Initiate Close API
        leads = []
        calls_per_lead = []
        user_ids_to_names = {}
        api = CloseIO_API(api_key.strip())

        try:
            org = api.get('me')['organizations'][0]
            org_name = org['name'].replace('/', ' ')
            org_id = org['id']
            org_memberships = api.get(
                'organization/' + org['id'],
                params={'_fields': 'memberships,inactive_memberships'})
            user_ids_to_names = {
                k['user_id']: k['user_full_name']
                for k in org_memberships['memberships']
                + org_memberships['inactive_memberships']
            }
        except APIError as e:
            print(f'Failed to pull org data because {str(e)} for {api_key}')
            continue

        try:
            name_keys = [
                f'{v} Total Talk Time' for v in user_ids_to_names.values()
            ]
            name_keys = sorted(name_keys)
            print(f'Getting calls for {org_name}')
            final_calls_per_lead = _get_call_duration_per_lead()
            final_calls_per_lead = sorted(final_calls_per_lead,
                                          key=itemgetter('Lead Name'))
        except Exception as e:
            print(f'Failed to pull calls for {org_name} because {str(e)}')
            continue

        ordered_keys = ['Lead ID', 'Lead Name', 'Total Talk Time'] + name_keys
        output = io.StringIO()
        writer = csv.DictWriter(output, ordered_keys)
        writer.writeheader()
        writer.writerows(final_calls_per_lead)
        csv_output = output.getvalue().encode('utf-8')
        file_name = f"{org_name}/{org_name} Total Talk Time {datetime.today().strftime('%Y-%m-%d')}.csv"
        upload_to_dropbox(file_name, csv_output)
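# The export above relies on an `upload_to_dropbox(file_name, content)` helper defined
# elsewhere in the script. A minimal sketch of such a helper using the official
# `dropbox` SDK is shown below; the DROPBOX_ACCESS_TOKEN environment variable and the
# "/Close Exports" base folder are illustrative assumptions, not part of the original.
import os
import dropbox

def upload_to_dropbox(file_name, content):
    """Upload raw CSV bytes to Dropbox, overwriting any existing file."""
    dbx = dropbox.Dropbox(os.environ['DROPBOX_ACCESS_TOKEN'])
    dbx.files_upload(
        content,
        '/Close Exports/{}'.format(file_name),
        mode=dropbox.files.WriteMode('overwrite'),
    )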
def run(api_key, development, confirmed, limit=100):
    api = CloseIO_API(api_key, development=development)

    # Loop through existing leads with multiple addresses
    LEADS_QUERY_WITH_MULTIPLE_ADDRESSES = "addresses > 1 sort:activities"

    has_more = True
    while has_more:
        resp = api.get('lead', params={
            'query': LEADS_QUERY_WITH_MULTIPLE_ADDRESSES,
            '_fields': 'id,addresses',
            '_limit': limit,
        })
        leads = resp['data']

        for lead in leads:
            if len(lead['addresses']) < 2:
                # This shouldn't happen based on the search query, but just to be safe...
                logging.warning("unexpected result: %s", lead)
                continue

            if confirmed:
                api.put('lead/' + lead['id'],
                        data={'addresses': lead['addresses'][:1]})
            logging.info("removed %d extra address(es) for %s\n%s" % (
                len(lead['addresses'][1:]), lead['id'], lead['addresses'][1:]))

        has_more = resp['has_more']
        time.sleep(2)  # give the search indexer some time to catch up with the changes
def get_list_of_users(api_key):
    closeio_api = Client(api_key)
    has_more = True
    offset = 0
    limit = 100
    list_of_users = []

    while has_more:
        response = closeio_api.get('user', params={
            '_skip': offset,
            '_limit': limit
        })
        users = response['data']
        for user in users:
            list_of_users.append(user)
        has_more = response['has_more']
        offset += limit

    return list_of_users
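# Hypothetical usage of the pagination helper above; the API key is a placeholder.
users = get_list_of_users('YOUR_API_KEY')
for user in users:
    print(user['id'], user['email'])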
def run(api_key, development, confirmed, limit=100):
    api = CloseIO_API(api_key, development=development)

    # Loop through existing leads with multiple addresses
    LEADS_QUERY_WITH_MULTIPLE_ADDRESSES = "addresses > 1 sort:activities"

    has_more = True
    while has_more:
        resp = api.get('lead', data={
            'query': LEADS_QUERY_WITH_MULTIPLE_ADDRESSES,
            '_fields': 'id,addresses',
            '_limit': limit,
        })
        leads = resp['data']

        for lead in leads:
            if len(lead['addresses']) < 2:
                # This shouldn't happen based on the search query, but just to be safe...
                logging.warning("unexpected result: %s", lead)
                continue

            if confirmed:
                api.put('lead/' + lead['id'],
                        data={'addresses': lead['addresses'][:1]})
            logging.info("removed %d extra address(es) for %s\n%s" % (
                len(lead['addresses'][1:]), lead['id'], lead['addresses'][1:]))

        has_more = resp['has_more']
        time.sleep(2)  # give the search indexer some time to catch up with the changes
def generate_user_activity_report(api_key, org_id, date_start, date_end, user_id):
    closeio_api = Client(api_key)
    user_activity_report = closeio_api.get(
        'report/activity/{}'.format(org_id),
        params={
            'date_start': date_start,
            'date_end': date_end,
            'user_id': user_id,
        })
    return user_activity_report
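# Hypothetical usage of the helper above; the organization id, user id, and date
# values are placeholders, and the dates are assumed to be ISO 8601 datetimes as
# accepted by the Close activity report endpoint.
report = generate_user_activity_report(
    api_key='YOUR_API_KEY',
    org_id='orga_xxxxxxxxxxxx',
    date_start='2021-01-01T00:00:00+00:00',
    date_end='2021-01-31T23:59:59+00:00',
    user_id='user_xxxxxxxxxxxx',
)
print(report)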
def get_list_of_leads_via_search_query(api_key, search_query):
    closeio_api = Client(api_key)
    has_more = True
    offset = 0
    limit = 100
    list_of_leads = []

    while has_more:
        response = closeio_api.get('lead', params={
            '_skip': offset,
            '_limit': limit,
            'query': search_query
        })
        leads = response['data']
        for lead in leads:
            list_of_leads.append(lead)
        has_more = response['has_more']
        offset += limit

    return list_of_leads


# # Retrieve lead data from Lead ID
# def get_lead_data(api_key, lead_id):
#     closeio_api = Client(api_key)
#     api_url = 'lead/{}'.format(lead_id)
#     lead_data = closeio_api.get(api_url)
#     return lead_data
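# Hypothetical usage of the search helper above; the query string is just an example
# of Close search syntax and the API key is a placeholder.
trial_leads = get_list_of_leads_via_search_query(
    'YOUR_API_KEY', 'lead_status:"Trial" sort:created'
)
print('{} leads found'.format(len(trial_leads)))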
parser = argparse.ArgumentParser(description='Delete the fields: ' + ', '.join(DELETE_FIELDS))
parser.add_argument('--api_key', '-k', required=True, help='API Key')
parser.add_argument('--development', action='store_true',
                    help='Use a development server rather than production.')
args = parser.parse_args()

api = CloseIO_API(args.api_key, development=args.development)

skip = 0
has_more = True
while has_more:
    resp = api.get('lead', data={'_skip': skip})
    leads = resp['data']
    for lead in leads:
        n_fields_deleted = 0
        custom = lead['custom'].copy()
        for field in DELETE_FIELDS:
            if custom.get(field):
                del custom[field]
                n_fields_deleted += 1
        if n_fields_deleted:
            print "LEAD: %s" % lead['id']
            print "\tBEFORE", lead['custom']
            print "\tAFTER", custom
from closeio_api.utils import CsvReader

parser = argparse.ArgumentParser(description='Remove email addresses from contacts in CSV file')
parser.add_argument('--api-key', '-k', required=True, help='API Key')
parser.add_argument('--confirmed', action='store_true', help='Really run this?')
parser.add_argument('file', help='Path to the csv file')
args = parser.parse_args()

reader = CsvReader(args.file)
headers = dict([(name, idx,) for idx, name in enumerate(reader.next())])  # skip the 1st line header

if any(field not in headers for field in ['contact_id', 'email_address']):
    print 'contact_id or email_address headers could not be found in your csv file.'
    sys.exit(-1)

api = CloseIO_API(args.api_key, async=False)

for row in reader:
    contact_id = row[headers['contact_id']]
    email_address = row[headers['email_address']]

    try:
        contact = api.get('contact/' + contact_id)
        if not contact['emails']:
            continue
        emails = filter(lambda email: email['email'] != email_address, contact['emails'])
        if args.confirmed:
            resp = api.put('contact/' + contact_id, {'emails': emails})
    except APIError:
        pass
    ch.setFormatter(formatter)
    logger.addHandler(ch)
    return logger


logger = setup_logger()

output = open(args.output, "w")
output.write('{"events": [')

has_more = True
cursor = None
first_iter = True
while has_more:
    resp = api.get('event', params={
        '_cursor': cursor,
        'request_id': args.request_id
    })
    cursor = resp['cursor_next']
    has_more = bool(cursor)
    for event in resp['data']:
        if not first_iter:
            output.write(",")
        json.dump(event, output, indent=4)
        first_iter = False

output.write("]}")
output.close()
                destination_lead['id'], destination_lead['display_name'], resp)


if __name__ == "__main__":
    has_more = True
    offset = 0
    total_leads_merged = 0
    first_iteration = True

    while has_more:
        resp = api.get(
            'lead',
            params={
                'query': 'sort:date_created',  # sort by date_created so that the oldest lead is always merged into
                '_skip': offset,
                '_fields': 'id,display_name,contacts,status_label,opportunities'
            })
        leads = resp['data']
        leads_merged_this_page = 0
        duplicates_this_page = set()

        if first_iteration:
            total_leads = resp['total_results']
            progress_widgets = [
                'Analyzing %d Leads: ' % total_leads,
                Counter(), ' ',
                Percentage(), ' ',
                Bar(), ' ',
log_format = "[%(asctime)s] %(levelname)s %(message)s" if not args.confirmed: log_format = "DRY RUN: " + log_format logging.basicConfig(level=logging.INFO, format=log_format) logging.debug("parameters: %s" % vars(args)) sniffer = csv.Sniffer() dialect = sniffer.sniff(args.csvfile.read(1000000)) args.csvfile.seek(0) c = csv.DictReader(args.csvfile, dialect=dialect) assert any(x in ("company", "lead_id") for x in c.fieldnames), 'ERROR: column "company" or "lead_id" is not found' api = CloseIO_API(args.api_key, development=args.development) resp = api.get("custom_fields/lead") available_custom_fieldnames = [x["name"] for x in resp["data"]] new_custom_fieldnames = [ x for x in [y.split(".", 1)[1] for y in c.fieldnames if y.startswith("custom.")] if x not in available_custom_fieldnames ] if new_custom_fieldnames: if args.create_custom_fields: for field in new_custom_fieldnames: if args.confirmed: api.post("custom_fields/lead", data={"name": field, "type": "text"}) available_custom_fieldnames.append(field) logging.info('added new custom field "%s"' % field) else:
def run(api_key, confirmed, development=False, use_existing_contact=False,
        new_contact_name='', phones_custom_field='all phones',
        emails_custom_field='all emails'):
    """
    After an import from a different CRM, for all leads, move emails and phones
    that were put in a lead custom field to the lead's first contact (if the
    --use_existing_contact flag was used) or create a new contact.
    """
    print 'confirmed:', `confirmed`
    print 'phones_custom_field:', `phones_custom_field`
    print 'emails_custom_field:', `emails_custom_field`
    print 'use_existing_contact:', `use_existing_contact`

    api = CloseIO_API(api_key, development=development)
    has_more = True
    offset = 0

    while has_more:
        # Get a page of leads
        resp = api.get('lead', data={
            'query': '"custom.Source CRM":* not "custom.Migration completed":* sort:created',
            '_skip': offset,
            '_fields': 'id,display_name,name,contacts,custom',
        })
        leads = resp['data']

        for lead in leads:
            contacts = lead['contacts']
            custom = lead['custom']

            company_emails = custom.get(emails_custom_field, '')
            company_phones = custom.get(phones_custom_field, '')
            if not company_phones and not company_emails:
                continue

            if company_emails:
                if company_emails.startswith('["'):
                    company_emails = company_emails[2:-2].split('", "')
                else:
                    company_emails = [company_emails]

            if company_phones:
                if company_phones.startswith('["'):
                    company_phones = company_phones[2:-2].split('", "')
                else:
                    company_phones = [company_phones]

            if contacts and use_existing_contact:
                contact = contacts[0]
            else:
                contact = {
                    'lead_id': lead['id'],
                    'phones': [],
                    'emails': []
                }
                if new_contact_name:
                    contact['name'] = new_contact_name

            for pn in company_phones:
                contact['phones'].append({'type': 'office', 'phone': pn})
            for e in company_emails:
                contact['emails'].append({'type': 'office', 'email': e})

            print 'Lead:', lead['id'], lead['name'].encode('utf8')
            print 'Emails:', `custom.get(emails_custom_field)`, ' => ', `company_emails`
            print 'Phones:', `custom.get(phones_custom_field)`, ' => ', `company_phones`

            try:
                if contact.get('id'):
                    print 'Updating an existing contact', contact['id']
                    if confirmed:
                        api.put('contact/%s' % contact['id'], data={
                            'phones': contact['phones'],
                            'emails': contact['emails'],
                        })
                else:
                    print 'Creating a new contact'
                    if confirmed:
                        api.post('contact', data=contact)
                    print 'Payload:', contact

                if confirmed:
                    api.put('lead/%s' % lead['id'], data={
                        'custom.Migration completed': 'Yes'
                    })
            except APIError as e:
                print e
                print 'Payload:', contact
                if confirmed:
                    api.put('lead/%s' % lead['id'], data={
                        'custom.Migration completed': 'skipped'
                    })
            print ''

        if not confirmed:
            # If we don't actually update the "Migration completed" custom field,
            # we need to paginate
            offset += len(leads)
        has_more = resp['has_more']

    print 'Done'
args.new_code = args.new_code.upper()

assert args.old_code in ISO_COUNTRIES.keys(), '%s country code is not valid' % args.old_code
assert args.new_code in ISO_COUNTRIES.keys(), '%s country code is not valid' % args.new_code
assert args.old_code != args.new_code, 'equal country codes'

logging.info('old country: %s (%s) -> new country: %s (%s) ' %
             (args.old_code, ISO_COUNTRIES[args.old_code],
              args.new_code, ISO_COUNTRIES[args.new_code]))

api = CloseIO_API(args.api_key, development=args.development)

has_more = True
offset = 0
while has_more:
    resp = api.get('lead', data={
        'query': LEADS_QUERY,
        '_skip': offset,
        '_fields': 'id,addresses'
    })
    leads = resp['data']

    for lead in leads:
        need_update = False
        for address in lead['addresses']:
            if address['country'] == args.old_code:
                address['country'] = args.new_code
                need_update = True

        if need_update:
            if args.confirmed:
                api.put('lead/' + lead['id'], data={'addresses': lead['addresses']})
            logging.info('updated %s' % lead['id'])
api = CloseIO_API(args.api_key, development=args.development)

progress_widgets = ['Importing %d rows: ' % import_count, Percentage(), ' ', Bar(), ' ',
                    ETA(), ' ', FileTransferSpeed()]
pbar = ProgressBar(widgets=progress_widgets, maxval=import_count).start()

dupes_cnt = 0

for key, val in unique_leads.items():
    retries = 5

    # check if it's a duplicate
    dupe = False
    if args.skip_duplicates and val.get('name'):
        # get the org id necessary for search
        org_id = api.get('api_key')['data'][0]['organization_id']

        # get all the search results for given lead name
        search_results = []
        filters = {
            'organization_id': org_id,
            'query': 'name:"%s"' % key,
        }
        has_more = True
        skip = 0
        while has_more:
            filters['_skip'] = skip
            resp = api.get('lead', params=filters)
            results = resp['data']
            search_results.extend(results)
            has_more = resp['has_more']
    action='store_true',
    help='Use this field to print lead_ids deleted in an array at the end of the script',
)
args = parser.parse_args()

api = CloseIO_API(args.api_key)
has_more = True
cursor = ''
events = []
leads = []
reverted_imports = {}

me = api.get('me')
org_id = me['organizations'][0]['id']
org = api.get(
    f'organization/{org_id}',
    params={'_fields': 'name,memberships,inactive_memberships'},
)
org_memberships = org['memberships'] + org['inactive_memberships']
org_name = org['name']
memberships = me['memberships']

assert (
    len(memberships) and memberships[0]['role_id'] == 'admin'
), 'ERROR: You must be an admin in your Close organization to run this script'

users = {}
parser.error("at least one option required") log_format = "[%(asctime)s] %(levelname)s %(message)s" if not args.confirmed: log_format = 'DRY RUN: '+log_format logging.basicConfig(level=logging.INFO, format=log_format) logging.debug('parameters: %s' % vars(args)) api = CloseIO_API(args.api_key, development=args.development) emails_to_ids = {} if any([args.from_user_email, args.to_user_email]): has_more = True offset = 0 while has_more: resp = api.get('user', data={'_skip': offset}) for user in resp['data']: emails_to_ids[user['email']] = user['id'] offset += len(resp['data']) has_more = resp['has_more'] logging.debug(emails_to_ids) if args.from_user_email: from_user_id = emails_to_ids[args.from_user_email] else: # for exception, if user_id is not present in the database resp = api.get('user/'+args.from_user_id, data={ '_fields': 'id,email' })
query = "call((recording_duration > 0 or voicemail_duration > 0)" if args.start_date: params['date_created__gte'] = args.start_date query = query + ' date >= "%s"' % args.start_date if args.end_date: params['date_created__lte'] = args.end_date query = query + ' date <= "%s"' % args.end_date query = query + ")" while has_more: resp = api.get('lead', params={ '_skip': offset, 'query': query, '_fields': 'id,display_name' }) for lead in resp['data']: leads[lead['id']] = lead['display_name'] offset += len(resp['data']) has_more = resp['has_more'] has_more = True offset = 0 params[ '_fields'] = 'recording_url,voicemail_url,date_created,lead_id,duration,voicemail_duration' while has_more: params['_skip'] = offset resp_calls = api.get('activity/call', params=params)
parser.add_argument('--api_key', type=str, required=True, help='API key')
parser.add_argument('--status', type=str, required=True, help='Label of the new status')
parser.add_argument('--dev', action='store_true', help='Use the dev server', default=False)
args = parser.parse_args()

# Should tell you how many leads are going to be affected
api = CloseIO_API(args.api_key, development=args.dev)

# Get the status_id
org_id = api.get('api_key')['data'][0]['organization_id']
statuses = api.get('organization/{0}'.format(org_id))['opportunity_statuses']
new_status_id = [
    st['id'] for st in statuses if st['label'].lower() == args.status.lower()
]

if not new_status_id:
    print 'Status not found: {0}'.format(args.status)
    sys.exit(1)

new_status_id = new_status_id[0]

print 'Gathering opportunities for {0}'.format(args.query)

has_more = True
offset = 0
limit = 50
from closeio_api import Client as CloseIO_API

DELETE_FIELDS = ['Field1', 'Field2', 'Field3']

parser = argparse.ArgumentParser(description='Delete the fields: ' + ', '.join(DELETE_FIELDS))
parser.add_argument('--api_key', '-k', required=True, help='API Key')
parser.add_argument('--development', action='store_true',
                    help='Use a development server rather than production.')
args = parser.parse_args()

api = CloseIO_API(args.api_key, development=args.development)

skip = 0
has_more = True
while has_more:
    resp = api.get('lead', data={'_skip': skip})
    leads = resp['data']
    for lead in leads:
        n_fields_deleted = 0
        custom = lead['custom'].copy()
        for field in DELETE_FIELDS:
            if custom.get(field):
                del custom[field]
                n_fields_deleted += 1
        if n_fields_deleted:
            print "LEAD: %s" % lead['id']
            print "\tBEFORE", lead['custom']
            print "\tAFTER", custom
parser.error("at least one option required") log_format = "[%(asctime)s] %(levelname)s %(message)s" if not args.confirmed: log_format = 'DRY RUN: ' + log_format logging.basicConfig(level=logging.INFO, format=log_format) logging.debug(f'parameters: {vars(args)}') api = CloseIO_API(args.api_key) emails_to_ids = {} if any([args.from_user_email, args.to_user_email]): has_more = True offset = 0 while has_more: resp = api.get('user', params={'_skip': offset}) for user in resp['data']: emails_to_ids[user['email']] = user['id'] offset += len(resp['data']) has_more = resp['has_more'] logging.debug(emails_to_ids) if args.from_user_email: from_user_id = emails_to_ids[args.from_user_email] else: # for exception, if user_id is not present in the database resp = api.get('user/' + args.from_user_id, params={'_fields': 'id,email'}) from_user_id = resp['id'] emails_to_ids[resp['email']] = resp['id']
args.csvfile.seek(0)
c = csv.DictReader(args.csvfile, dialect=dialect,
                   fieldnames=['tag', 'custom_field_name', 'custom_field_value'])
c.next()
for r in c:
    if r:
        assert len(r) == 3, 'Invalid csv format at line %d' % (c.line_num,)
        tag_templates[r['tag'].lower()] = (r['custom_field_name'], r['custom_field_value'])

api = CloseIO_API(args.api_key, development=args.development)

has_more = True
offset = 0
while has_more:
    resp = api.get('lead', data={
        'query': 'custom.Tags:* sort:created',
        '_skip': offset,
        '_fields': 'id,custom'
    })
    leads = resp['data']

    for l in leads:
        if 'Tags' in l['custom'].keys():
            tags = [t.strip() for t in l['custom']['Tags'].split(',')]
            new_fields = {}
            for t in tags:
                t_lower = t.lower()
                if t_lower in tag_templates.keys():
                    new_fields['custom.' + tag_templates[t_lower][0]] = tag_templates[t_lower][1]
            print l['id'], 'Tags:', l['custom']['Tags']
parser.add_argument(
    '--lead-id',
    '-l',
    help='Use this field if you want to narrow your search to a specific lead_id',
)
parser.add_argument(
    '--user-id',
    '-u',
    help='Use this field if you want to narrow your search to changes done by a specific user',
)
args = parser.parse_args()

api = CloseIO_API(args.api_key)
org_id = api.get('me')['organizations'][0]['id']
org = api.get(
    'organization/' + org_id,
    params={
        '_fields': 'id,name,memberships,inactive_memberships,lead_custom_fields'
    },
)
org_name = org['name'].replace('/', "")
org_memberships = org['memberships'] + org['inactive_memberships']

try:
    custom_field_name = [
        i for i in org['lead_custom_fields'] if i['id'] == args.custom_field
    ][0]['name']
except IndexError as e:
    print(
    has_more = True
    offset = 0
    while has_more:
        resp = api.get(
            'activity/' + endpoint,
            params={
                '_skip': offset,
                'date_created__gte': day['start_date'],
                'date_created__lte': day['end_date'],
            },
        )
        for activity in resp['data']:
            activities.append(activity)
        offset += len(resp['data'])
        has_more = resp['has_more']


pool = Pool(5)
pool.map(getActivities, days)

# Sort all activities by date_created to be in order because they were pulled in parallel
activities = sorted(activities, key=itemgetter('date_created'), reverse=True)

org_name = api.get('me')['organizations'][0]['name'].replace('/', '')
with open(
    '%s - %s activity export between %s and %s.json'
    % (org_name, args.activity_type, args.date_start, args.date_end),
    'w',
) as outfile:
    json.dump(activities, outfile, indent=4)
    '-e',
    required=True,
    help='The yyyy-mm-dd you want to end looking for activities')
parser.add_argument('--activity-type',
                    '-t',
                    choices=[
                        'call', 'created', 'email', 'lead_status_change',
                        'note', 'opportunity_status_change', 'sms',
                        'task_completed'
                    ],
                    required=True,
                    help='The type of activity you\'d like to export to JSON')
args = parser.parse_args()

api = CloseIO_API(args.api_key)
org_id = api.get('api_key/' + args.api_key,
                 params={'_fields': 'organization_id'})['organization_id']
org_name = api.get('organization/' + org_id,
                   params={'_fields': 'name'})['name'].replace('/', '')
days = []
activities = []
endpoint = args.activity_type

if endpoint == 'opportunity_status_change':
    endpoint = 'status_change/opportunity'
elif endpoint == 'lead_status_change':
    endpoint = 'status_change/lead'

starting_date = datetime.strptime(args.date_start, '%Y-%m-%d')
ending_date = starting_date + relativedelta(days=+1) - relativedelta(seconds=+1)
ending_date_final = datetime.strptime(args.date_end, '%Y-%m-%d')
api = CloseIO_API(args.api_key, development=args.development)

progress_widgets = ['Importing %d rows: ' % import_count, Percentage(), ' ', Bar(), ' ',
                    ETA(), ' ', FileTransferSpeed()]
pbar = ProgressBar(widgets=progress_widgets, maxval=import_count).start()

dupes_cnt = 0

for key, val in unique_leads.items():
    retries = 5

    # check if it's a duplicate
    dupe = False
    if args.skip_duplicates and val.get('name'):
        # get the org id necessary for search
        org_id = api.get('api_key')['data'][0]['organization_id']

        # get all the search results for given lead name
        search_results = []
        filters = {
            'organization_id': org_id,
            'query': 'name:"%s"' % key,
        }
        skip = 0
        limit = 100
        while True:
            filters['_skip'] = skip
            # Request a fixed page size; growing the limit by the offset would break
            # the "fewer results than limit" termination check below.
            filters['_limit'] = limit
            results = api.get('lead', data=filters)['data']
            search_results.extend(results)
            if len(results) < limit:
parser = argparse.ArgumentParser(
    description='Find duplicate contacts on a lead in your Close org via contact_name, email address, or phone number'
)
parser.add_argument('--api-key', '-k', required=True, help='API Key')
parser.add_argument('--field',
                    '-f',
                    default='all',
                    choices=['contact_name', 'email', 'phone', 'all'],
                    required=False,
                    help="Specify a field to compare uniqueness")
args = parser.parse_args()

## Initialize Close API Wrapper
api = CloseIO_API(args.api_key)
org_id = api.get('api_key/' + args.api_key)['organization_id']
org_name = api.get('organization/' + org_id,
                   params={'_fields': 'name'})['name'].replace('/', '')

## Calculate number of slices necessary to get all leads
total_leads = api.get('lead', params={
    '_limit': 0,
    'query': 'sort:created contacts > 1'
})['total_results']
total_slices = int(math.ceil(float(total_leads) / 1000))
slices = range(1, total_slices + 1)
leads = []

## Write data to a CSV
" ", FileTransferSpeed(), ] pbar = ProgressBar(widgets=progress_widgets, maxval=import_count).start() dupes_cnt = 0 for key, val in unique_leads.items(): retries = 5 # check if it's a duplicate dupe = False if args.skip_duplicates and val.get("name"): # get the org id necessary for search org_id = api.get("api_key")["data"][0]["organization_id"] # get all the search results for given lead name search_results = [] filters = {"organization_id": org_id, "query": 'name:"%s"' % key} has_more = True skip = 0 while has_more: filters["_skip"] = skip resp = api.get("lead", data=filters) results = resp["data"] search_results.extend(results) has_more = resp["has_more"] skip += len(results) for result in search_results:
parser.error("at least one option required") log_format = "[%(asctime)s] %(levelname)s %(message)s" if not args.confirmed: log_format = 'DRY RUN: '+log_format logging.basicConfig(level=logging.INFO, format=log_format) logging.debug('parameters: %s' % vars(args)) api = CloseIO_API(args.api_key, development=args.development) emails_to_ids = {} if any([args.from_user_email, args.to_user_email]): has_more = True offset = 0 while has_more: resp = api.get('user') for user in resp['data']: emails_to_ids[user['email']] = user['id'] offset += len(resp['data']) has_more = resp['has_more'] logging.debug(emails_to_ids) if args.from_user_email: from_user_id = emails_to_ids[args.from_user_email] else: # for exception, if user_id is not present in the database resp = api.get('user/'+args.from_user_id, data={ '_fields': 'id,email' })
from closeio_api import Client as CloseIO_API, APIError
import csv
import gevent
import gevent.monkey
from gevent.pool import Pool

reload(sys)
sys.setdefaultencoding('utf-8')
gevent.monkey.patch_all()

parser = argparse.ArgumentParser(description='Get a list of all lead merge events for the last 30 days from your Close organization')
parser.add_argument('--api-key', '-k', required=True, help='API Key')
args = parser.parse_args()

## Initialize the Close API and get all users in the org
api = CloseIO_API(args.api_key)
org_id = api.get('api_key/' + args.api_key)['organization_id']
org = api.get('organization/' + org_id, params={
    '_fields': 'inactive_memberships,memberships,name'
})
org_name = org['name'].replace('/', '')
memberships = org['memberships'] + org['inactive_memberships']
users = {membership['user_id']: membership['user_full_name'] for membership in memberships}

## Method to get data about the deleted source lead added to the event
def getSourceLeadData(event):
    print "%s of %s: %s" % (events.index(event) + 1, len(events), event['Merge Event ID'])
    source_delete_event = api.get('event', params={
        'object_type': 'lead',
        'action': 'deleted',
        'lead_id': event['Source Lead ID']
    })
    if len(source_delete_event['data']) > 0:
        delete_event = source_delete_event['data'][0]
        if delete_event.get('previous_data'):
            event['Source Lead Status'] = delete_event['previous_data'].get('status_label')
            event['Source Lead Name'] = delete_event['previous_data'].get('display_name')
print "Please specify only one action type per field_name." sys.exit(1) choices_data[r["field_name"]].append(r["choice"]) else: if r["action"].lower().strip() not in ("create", "update", "replace"): print "Error: " + r["action"] + " not a valid action." print "Use create, update, or replace only." sys.exit(1) action_data[r["field_name"]] = r["action"].lower().strip() choices_data[r["field_name"]] = [r["choice"]] # Pulls our existing custom field information. resp = api.get("custom_fields/lead") fields = resp["data"] for field_name, action in action_data.items(): field_id = None # Checks if our field_name already exists in our org. # If updating the field_id is saved. # If creating then a existing field throws an error. for field in fields: if field["name"].lower().strip() == field_name.lower().strip(): field_id = field["id"] if action == "update": choices_data[field_name].extend(field["choices"])
logger.info("Merged source:%s (%s) and destination:%s (%s) response_body:%s", source_lead['id'], source_lead['display_name'], destination_lead['id'], destination_lead['display_name'], resp) if __name__ == "__main__": has_more = True offset = 0 total_leads_merged = 0 first_iteration = True while has_more: resp = api.get('lead', params={ 'query': 'sort:date_created', # sort by date_created so that the oldest lead is always merged into '_skip': offset, '_fields': 'id,display_name,contacts,status_label,opportunities' }) leads = resp['data'] leads_merged_this_page = 0 duplicates_this_page = set() if first_iteration: total_leads = resp['total_results'] progress_widgets = ['Analyzing %d Leads: ' % total_leads, Counter(), ' ', Percentage(), ' ', Bar(), ' ', AdaptiveETA(), ' ', FileTransferSpeed()] pbar = ProgressBar(widgets=progress_widgets, maxval=total_leads).start() pbar.update(offset) first_iteration = False for idx, lead in enumerate(leads): logger.debug("-------------------------------------------------")
combined["name"] = {} statusLabelOrder = {"Kalt":0,"Kontakt":1,"Termin":2,"Kunde":3,"Tot":4,"Investor":5} """ get all leads from close io and match them by phone, email and name """ num = 0 totalResults = 0 offset = 0 print "loading..." while has_more: leads_merged_this_page = 0 resp = api.get('lead', data={ 'query': 'sort:display_name', '_skip': offset, '_fields': 'id,display_name,name,status_label,contacts,opportunities,email_addresses,addresses,phone_numbers,custom' }) leads = resp["data"] if "total_results" in resp: print str(int(100.0*offset/resp['total_results']))+"%" for lead in leads: leadId = lead['id'] num = num +1 combinedLeads[leadId] = lead checkType = "name" if lead[checkType]: item = strCleanup(lead[checkType]) if item not in combined[checkType]: combined[checkType][item] = [] if leadId not in combined[checkType][item]:
from requests.exceptions import ConnectionError
from closeio_api import APIError, Client as CloseIO_API
from closeio_api.utils import CsvReader

parser = argparse.ArgumentParser(description='Remove tasks associated with inactive users')
parser.add_argument('--api-key', '-k', required=True, help='API Key')
parser.add_argument('--confirmed', action='store_true',
                    help='Confirm making changes. Otherwise this script is not going to modify any data.')
parser.add_argument('--verbose', '-v', action='store_true', help='Increase logging verbosity.')
args = parser.parse_args()

api = CloseIO_API(args.api_key, async=False)

# Get IDs of all inactive users in a given org
org_id = api.get('api_key/' + args.api_key)['organization_id']
org = api.get('organization/' + org_id)
inactive_users = [m['user_id'] for m in org['inactive_memberships']]

# Get IDs of all the tasks assigned to these inactive users
task_ids = []
total_cnt = len(inactive_users)
for idx, user_id in enumerate(inactive_users):
    if args.verbose:
        print 'Gathering tasks for %s (%d/%d)' % (user_id, idx + 1, total_cnt)

    has_more = True
    skip = 0
    limit = 100
    while has_more:
        resp = api.get('task', params={
if not args.confirmed:
    log_format = 'DRY RUN: ' + log_format
logging.basicConfig(level=logging.INFO, format=log_format)
logging.debug('parameters: %s' % vars(args))

sniffer = csv.Sniffer()
dialect = sniffer.sniff(args.csvfile.read(2048))
args.csvfile.seek(0)
c = csv.DictReader(args.csvfile, dialect=dialect)

assert any(x in ('company', 'lead_id') for x in c.fieldnames), \
    'ERROR: column "company" or "lead_id" is not found'

api = CloseIO_API(args.api_key, development=args.development)

resp = api.get('custom_fields/lead')
available_custom_fieldnames = [x['name'] for x in resp['data']]
new_custom_fieldnames = [x for x in
                         [y.split('.', 1)[1] for y in c.fieldnames if y.startswith('custom.')]
                         if x not in available_custom_fieldnames]

if new_custom_fieldnames:
    if args.create_custom_fields:
        for field in new_custom_fieldnames:
            if args.confirmed:
                api.post('custom_fields/lead', data={'name': field, 'type': 'text'})
            available_custom_fieldnames.append(field)
            logging.info('added new custom field "%s"' % field)
    else:
        logging.error('unknown custom fieldnames: %s' % new_custom_fieldnames)
        sys.exit(1)
        'all',
        'custom',
    ],
    help="Specify a field to compare uniqueness",
)
parser.add_argument(
    '--custom-field-name',
    '-c',
    help="Specify the custom field name if you're deduplicating by `custom` field",
)
args = parser.parse_args()

# Initialize Close API Wrapper
api = CloseIO_API(args.api_key)
organization = api.get('me')['organizations'][0]
org_id = organization['id']
org_name = organization['name']

# Calculate number of slices necessary to get all leads
total_leads = api.get('lead', params={
    '_limit': 0,
    'query': 'sort:created'
})['total_results']
total_slices = int(math.ceil(float(total_leads) / 1000))
slices = range(1, total_slices + 1)
leads = []


# Write data to a CSV
def write_to_csv_file(type_name, items, ordered_keys):
if not args.confirmed:
    log_format = 'DRY RUN: ' + log_format
logging.basicConfig(level=logging.INFO, format=log_format)
logging.debug('parameters: %s' % vars(args))

sniffer = csv.Sniffer()
dialect = sniffer.sniff(args.csvfile.read(1000000))
args.csvfile.seek(0)
c = csv.DictReader(args.csvfile, dialect=dialect)

assert any(x in ('company', 'lead_id') for x in c.fieldnames), \
    'ERROR: column "company" or "lead_id" is not found'

api = CloseIO_API(args.api_key, development=args.development)

resp = api.get('custom_fields/lead')
available_custom_fieldnames = [x['name'] for x in resp['data']]
new_custom_fieldnames = [
    x for x in
    [y.split('.', 1)[1] for y in c.fieldnames if y.startswith('custom.')]
    if x not in available_custom_fieldnames
]

if new_custom_fieldnames:
    if args.create_custom_fields:
        for field in new_custom_fieldnames:
            if args.confirmed:
                api.post('custom_fields/lead', data={
                    'name': field,
                    'type': 'text'
from closeio_api import Client as CloseIO_API

parser = argparse.ArgumentParser(description="Change all the opportunities for a given leads' search query to a given status.")
parser.add_argument('--query', type=str, required=True, help='Search query.')
parser.add_argument('--api_key', type=str, required=True, help='API key')
parser.add_argument('--status', type=str, required=True, help='Label of the new status')
parser.add_argument('--dev', action='store_true', help='Use the dev server', default=False)
args = parser.parse_args()

# Should tell you how many leads are going to be affected
api = CloseIO_API(args.api_key, development=args.dev)

# Get the status_id
org_id = api.get('api_key')['data'][0]['organization_id']
statuses = api.get('organization/{0}'.format(org_id))['opportunity_statuses']
new_status_id = [st['id'] for st in statuses if st['label'].lower() == args.status.lower()]

if not new_status_id:
    print 'Status not found: {0}'.format(args.status)
    sys.exit(1)

new_status_id = new_status_id[0]

print 'Gathering opportunities for {0}'.format(args.query)

has_more = True
offset = 0
limit = 50
opp_ids = []
    'List of lead IDs in a form of a textual file with single column of lead IDs',
)
args = parser.parse_args()

api = CloseIO_API(args.api_key)

# Array of Lead IDs. Add the IDs you want to restore here.
if args.leads:
    lead_ids = args.leads.split(",")
elif args.leads_file:
    with open(args.leads_file) as f:
        lines = f.readlines()
        lead_ids = [el.strip() for el in lines]  # Strip new lines
        lead_ids = list(filter(None, lead_ids))  # Strip empty lines

# Create a list of active users for the sake of posting opps.
org_id = api.get('me')['organizations'][0]['id']
memberships = api.get('organization/' + org_id,
                      params={'_fields': 'memberships'})['memberships']
active_users = [i['user_id'] for i in memberships]

# Array to keep track of number of leads restored. Because we use gevent, we can't have a standard counter variable.
total_leads_restored = []

# This is a list of object types you want to restore on the lead. We can also add activity.email, but in this script
# it's assumed that email sync will take care of all of the emails that were deleted, assuming the same email accounts
# are connected to Close.
object_types = [
    'contact',
    'opportunity',
    'task.lead',
    'activity.call',
HEADERS = ['lead_name', 'status_type', 'status_label', 'confidence', 'user_name',
           'value', 'value_period', 'note', 'date_created', 'date_updated', 'date_won']

parser = argparse.ArgumentParser(description='Export Opportunities to CSV')
parser.add_argument('--api_key', '-k', required=True, help='API Key')
parser.add_argument('--output', '-o', required=True, help='Output filename')
parser.add_argument('--development', action='store_true',
                    help='Use a development server rather than production.')
args = parser.parse_args()

with open(args.output, 'wb') as f:
    writer = CsvWriter(f)
    api = CloseIO_API(args.api_key, development=args.development)

    writer.writerow(HEADERS)

    skip = 0
    has_more = True
    while has_more:
        resp = api.get('opportunity', data={'_skip': skip})
        opportunities = resp['data']
        for opportunity in opportunities:
            row = []
            for header in HEADERS:
                row.append(opportunity.get(header) or '')
            writer.writerow(row)
        skip += len(opportunities)
        has_more = resp['has_more']
assert args.new_code in ISO_COUNTRIES.keys(), '%s country code is not valid' % args.new_code
assert args.old_code != args.new_code, 'equal country codes'

logging.info('old country: %s (%s) -> new country: %s (%s) ' %
             (args.old_code, ISO_COUNTRIES[args.old_code],
              args.new_code, ISO_COUNTRIES[args.new_code]))

api = CloseIO_API(args.api_key, development=args.development)

has_more = True
offset = 0
while has_more:
    resp = api.get('lead', params={
        'query': LEADS_QUERY,
        '_skip': offset,
        '_fields': 'id,addresses'
    })
    leads = resp['data']

    for lead in leads:
        need_update = False
        for address in lead['addresses']:
            if address['country'] == args.old_code:
                address['country'] = args.new_code
                need_update = True

        if need_update:
            if args.confirmed:
                api.put('lead/' + lead['id'],
                        data={'addresses': lead['addresses']})
parser.error("at least one option required") log_format = "[%(asctime)s] %(levelname)s %(message)s" if not args.confirmed: log_format = 'DRY RUN: ' + log_format logging.basicConfig(level=logging.INFO, format=log_format) logging.debug('parameters: %s' % vars(args)) api = CloseIO_API(args.api_key, development=args.development) emails_to_ids = {} if any([args.from_user_email, args.to_user_email]): has_more = True offset = 0 while has_more: resp = api.get('user') for user in resp['data']: emails_to_ids[user['email']] = user['id'] offset += max(0, len(resp['data']) - 1) has_more = resp['has_more'] logging.debug(emails_to_ids) if args.from_user_email: from_user_id = emails_to_ids[args.from_user_email] else: # for exception, if user_id is not present in the database resp = api.get('user/' + args.from_user_id, data={'_fields': 'id,email'}) from_user_id = resp['id'] emails_to_ids[resp['email']] = resp['id']
#!/usr/bin/env python
import json
import codecs

from closeio_api import Client as CloseIO_API

"""
Dumps all leads within a given organization to a file in JSON format
"""

api_key = raw_input("API key: ")
file_name = raw_input("Output Filename: ")

api = CloseIO_API(api_key)
f = codecs.open(file_name, "w", "utf-8")

leads = []
has_more = True
offset = 0
while has_more:
    resp = api.get('lead', data={'_skip': offset})
    data = resp['data']
    for lead in data:
        leads.append(lead)
    offset += len(data)
    has_more = resp['has_more']

json.dump(leads, f)
f.close()
if args.end_date:
    params['date_created__lte'] = args.end_date
    query = query + ' date <= "%s"' % args.end_date

query += ")"

if args.user_id:
    params['user_id'] = args.user_id

if not args.unattached_only:
    print("Getting Lead Display Names...")
    while has_more:
        resp = api.get(
            'lead',
            params={
                '_skip': offset,
                'query': query,
                '_fields': 'id,display_name',
            },
        )
        for lead in resp['data']:
            display_names[lead['id']] = lead['display_name']
        print(offset)
        offset += len(resp['data'])
        has_more = resp['has_more']

has_more = True
offset = 0
params['_fields'] = 'id,user_id,duration,direction,date_created,remote_phone,local_phone,voicemail_url,recording_url,source,lead_id,updated_by_name'

if args.call_costs:
    required=True,
    help='Sender name you want to use to send sequence',
)
args = parser.parse_args()

api = CloseIO_API(args.api_key)
from_subs = []

print("Getting sequences")
sequences = []
has_more = True
offset = 0
while has_more:
    resp = api.get(
        'sequence',
        params={'_skip': offset},
    )
    sequences.extend(resp['data'])
    offset += len(resp['data'])
    has_more = resp['has_more']

for sequence in sequences:
    print(f"Getting sequence subscriptions for `{sequence['name']}`")
    has_more = True
    offset = 0
    while has_more:
        sub_results = api.get(
            'sequence_subscription',
            params={
                '_skip': offset,
                'sequence_id': sequence['id']
parser.error("at least one option required") log_format = "[%(asctime)s] %(levelname)s %(message)s" if not args.confirmed: log_format = 'DRY RUN: '+log_format logging.basicConfig(level=logging.INFO, format=log_format) logging.debug('parameters: %s' % vars(args)) api = CloseIO_API(args.api_key, development=args.development) emails_to_ids = {} if any([args.from_user_email, args.to_user_email]): has_more = True offset = 0 while has_more: resp = api.get('user', params={'_skip': offset}) for user in resp['data']: emails_to_ids[user['email']] = user['id'] offset += len(resp['data']) has_more = resp['has_more'] logging.debug(emails_to_ids) if args.from_user_email: from_user_id = emails_to_ids[args.from_user_email] else: # for exception, if user_id is not present in the database resp = api.get('user/'+args.from_user_id, params={ '_fields': 'id,email' })
desired_status = 'open'  # capitalization doesn't matter

api = CloseIO_API(args.api_key, development=args.development)

has_more = True
offset = 0
last_lead = None
total_merged = 0

while has_more:
    leads_merged_this_page = 0

    # Get a page of leads
    resp = api.get('lead', data={
        'query': 'sort:display_name',
        '_skip': offset,
        '_fields': 'id,display_name,name,status_label,opportunities,custom'
    })
    leads = resp['data']

    for lead in leads:
        if last_lead and lead['id'] == last_lead['id']:
            continue  # same lead, skip

        # Determine whether "lead" should be considered a duplicate of the previous lead ("last_lead")
        is_duplicate = (last_lead and lead['name'].strip() and
                        last_lead['name'].strip().lower() == lead['name'].strip().lower())

        if is_duplicate:
            # Should we use 'lead' or 'last_lead' as the 'destination' (preferred) lead?
    ETA(), ' ',
    FileTransferSpeed()
]
pbar = ProgressBar(widgets=progress_widgets, maxval=import_count).start()

dupes_cnt = 0

for key, val in unique_leads.items():
    retries = 5

    # check if it's a duplicate
    dupe = False
    if args.skip_duplicates and val.get('name'):
        # get the org id necessary for search
        org_id = api.get('api_key')['data'][0]['organization_id']

        # get all the search results for given lead name
        search_results = []
        filters = {
            'organization_id': org_id,
            'query': 'name:"%s"' % key,
        }
        has_more = True
        skip = 0
        while has_more:
            filters['_skip'] = skip
            resp = api.get('lead', params=filters)
            results = resp['data']
            search_results.extend(results)
            has_more = resp['has_more']