def run(api_key, development, confirmed, limit=100):
    """Strip extra addresses from every lead that carries more than one.

    Only the first address is kept on each matching lead. When
    ``confirmed`` is falsy this is a dry run: nothing is written,
    only logged.
    """
    api = CloseIO_API(api_key, development=development)
    # Search for leads that currently have more than one address.
    multi_address_query = "addresses > 1 sort:activities"
    has_more = True
    while has_more:
        resp = api.get('lead', params={
            'query': multi_address_query,
            '_fields': 'id,addresses',
            '_limit': limit,
        })
        for lead in resp['data']:
            addresses = lead['addresses']
            if len(addresses) < 2:
                # Shouldn't happen given the search query, but guard anyway.
                logging.warning("unexpected result: %s", lead)
                continue
            if confirmed:
                api.put('lead/' + lead['id'], data={'addresses': addresses[:1]})
            logging.info("removed %d extra address(es) for %s\n%s"
                         % (len(addresses[1:]), lead['id'], addresses[1:]))
        has_more = resp['has_more']
        time.sleep(2)  # give the search indexer some time to catch up with the changes
def get_list_of_users(api_key):
    """Return every user in the organization, fetched 100 at a time."""
    closeio_api = Client(api_key)
    users = []
    skip = 0
    page_size = 100
    while True:
        response = closeio_api.get('user', params={
            '_skip': skip,
            '_limit': page_size,
        })
        users.extend(response['data'])
        skip += page_size
        if not response['has_more']:
            break
    return users
def run(api_key, development, confirmed, limit=100):
    """Strip extra addresses (all but the first) from every lead that has
    more than one.

    With ``confirmed`` falsy this only logs what would be removed.
    """
    api = CloseIO_API(api_key, development=development)
    # loop through existing leads with multiple addresses
    LEADS_QUERY_WITH_MULTIPLE_ADDRESSES = "addresses > 1 sort:activities"
    has_more = True
    while has_more:
        # FIX: search filters for a GET request must go in the query string
        # (params=), not a request body (data=) — a GET body is ignored, so
        # the query/_fields/_limit were never applied. The sibling version of
        # this function elsewhere in the project already uses params=.
        resp = api.get('lead', params={
            'query': LEADS_QUERY_WITH_MULTIPLE_ADDRESSES,
            '_fields': 'id,addresses',
            '_limit': limit,
        })
        leads = resp['data']
        for lead in leads:
            if len(lead['addresses']) < 2:
                # this shouldn't happen based on the search query, but just to be safe...
                logging.warning("unexpected result: %s", lead)
                continue
            if confirmed:
                api.put('lead/' + lead['id'],
                        data={'addresses': lead['addresses'][:1]})
            logging.info("removed %d extra address(es) for %s\n%s" % (len(
                lead['addresses'][1:]), lead['id'], lead['addresses'][1:]))
        has_more = resp['has_more']
        time.sleep(2)  # give the search indexer some time to catch up with the changes
def export_total_talk_time_per_lead_for_each_org():
    """For each api key given, upload a CSV to dropbox of the total talk time
    per lead per user for an organization.
    """
    # NOTE(review): helpers like _get_call_duration_per_lead read these
    # module-level names, hence the globals — presumably shared state with
    # the rest of this script; verify before refactoring.
    global leads
    global calls_per_lead
    global user_ids_to_names
    global api
    # One API key per organization, comma-separated in the environment.
    for api_key in os.environ.get('CLOSE_API_KEYS').split(','):
        ## Initiate Close API
        leads = []
        calls_per_lead = []
        user_ids_to_names = {}
        api = CloseIO_API(api_key.strip())
        try:
            org = api.get('me')['organizations'][0]
            org_name = org['name'].replace('/', ' ')  # '/' would break paths
            org_id = org['id']
            org_memberships = api.get(
                'organization/' + org['id'],
                params={'_fields': 'memberships,inactive_memberships'})
            # Map user_id -> display name across active AND inactive members.
            user_ids_to_names = {
                k['user_id']: k['user_full_name']
                for k in org_memberships['memberships']
                + org_memberships['inactive_memberships']
            }
        except APIError as e:
            print(f'Failed to pull org data because {str(e)} for {api_key}')
            continue
        try:
            # One CSV column per user.
            name_keys = [
                f'{v} Total Talk Time' for v in user_ids_to_names.values()
            ]
            name_keys = sorted(name_keys)
            print(f'Getting calls for {org_name}')
            final_calls_per_lead = _get_call_duration_per_lead()
            final_calls_per_lead = sorted(final_calls_per_lead,
                                          key=itemgetter('Lead Name'))
        except Exception as e:
            print(f'Failed to pull calls for {org_name} because {str(e)}')
            continue
        ordered_keys = ['Lead ID', 'Lead Name', 'Total Talk Time'] + name_keys
        # Build the CSV in memory, then upload the encoded bytes.
        output = io.StringIO()
        writer = csv.DictWriter(output, ordered_keys)
        writer.writeheader()
        writer.writerows(final_calls_per_lead)
        csv_output = output.getvalue().encode('utf-8')
        file_name = f"{org_name}/{org_name} Total Talk Time {datetime.today().strftime('%Y-%m-%d')}.csv"
        upload_to_dropbox(file_name, csv_output)
def generate_user_activity_report(api_key, org_id, date_start, date_end,
                                  user_id):
    """Fetch one user's activity report for the given date range."""
    closeio_api = Client(api_key)
    report_params = {
        'date_start': date_start,
        'date_end': date_end,
        'user_id': user_id,
    }
    return closeio_api.get('report/activity/{}'.format(org_id),
                           params=report_params)
def get_list_of_leads_via_search_query(api_key, search_query):
    """Return all leads matching ``search_query``, paging 100 at a time."""
    closeio_api = Client(api_key)
    matching_leads = []
    skip = 0
    page_size = 100
    while True:
        response = closeio_api.get('lead', params={
            '_skip': skip,
            '_limit': page_size,
            'query': search_query,
        })
        matching_leads.extend(response['data'])
        skip += page_size
        if not response['has_more']:
            break
    return matching_leads

# # Retrieve lead data from Lead ID
# def get_lead_data(api_key, lead_id):
#     closeio_api = Client(api_key)
#     api_url = 'lead/{}'.format(lead_id)
#     lead_data = closeio_api.get(api_url)
#     return lead_data
)
parser.add_argument(
    '--sender-account-id',
    '-s',
    required=True,
    help='Email account id you want to use to send sequence',
)
parser.add_argument(
    '--sender-name',
    '-n',
    required=True,
    help='Sender name you want to use to send sequence',
)
args = parser.parse_args()

api = CloseIO_API(args.api_key)
from_subs = []

# Fetch every sequence in the org, paging with _skip until the API reports
# no more results.
print("Getting sequences")
sequences = []
has_more = True
offset = 0
while has_more:
    resp = api.get(
        'sequence',
        params={'_skip': offset},
    )
    sequences.extend(resp['data'])
    offset += len(resp['data'])
    has_more = resp['has_more']
# print k, v # sys.exit(0) args.old_code = args.old_code.upper() args.new_code = args.new_code.upper() assert args.old_code in ISO_COUNTRIES.keys( ), '%s country code is not valid' % args.old_code assert args.new_code in ISO_COUNTRIES.keys( ), '%s country code is not valid' % args.new_code assert args.old_code != args.new_code, 'equal country codes' logging.info('old country: %s (%s) -> new country: %s (%s) ' % (args.old_code, ISO_COUNTRIES[args.old_code], args.new_code, ISO_COUNTRIES[args.new_code])) api = CloseIO_API(args.api_key, development=args.development) has_more = True offset = 0 while has_more: resp = api.get('lead', params={ 'query': LEADS_QUERY, '_skip': offset, '_fields': 'id,addresses' }) leads = resp['data'] for lead in leads: need_update = False
#!/usr/bin/env python
import json
import codecs

from closeio_api import Client as CloseIO_API

"""
Dumps all leads within a given organization to a file in JSON format
"""

# NOTE: raw_input indicates this script targets Python 2.
api_key = raw_input("API key: ")
file_name = raw_input("Output Filename: ")

api = CloseIO_API(api_key)
f = codecs.open(file_name, "w", "utf-8")

leads = []
has_more = True
offset = 0
while has_more:
    # FIX: pagination arguments belong in the query string (params=), not in
    # a GET request body (data=) — the API ignores a GET body, so _skip never
    # advanced and the same first page was fetched forever.
    resp = api.get('lead', params={'_skip': offset})
    data = resp['data']
    for lead in data:
        leads.append(lead)
    offset += len(data)
    has_more = resp['has_more']

json.dump(leads, f)
f.close()
the values contained in your csv. """, ) parser.add_argument("csvfile", type=argparse.FileType("rU"), help="Path to csv file.") parser.add_argument("single_field_name", nargs="?", help="Optional: Field name to use for a single field update.") parser.add_argument("single_field_action", nargs="?", help="Optional: Action to use for a single field update.") parser.add_argument("--api-key", "-k", required=True, help="API Key.") parser.add_argument( "--confirmed", "-c", action="store_true", help="Without this flag, the script will do a dry run without actually updating any data.", ) args = parser.parse_args() api = CloseIO_API(args.api_key) action_data = {} choices_data = {} # Checks if our single field update values are set. # Formats the information from the variables and the csv file into the same dictionary format as the multi-column section. if args.single_field_name and args.single_field_action: if args.single_field_action.lower().strip() not in ("create", "update", "replace"): print "Error: " + r["action"] + " not a valid action." print "Use create, update, or replace only." sys.exit(1) action_data[args.single_field_name] = args.single_field_action.lower().strip() choices_data[args.single_field_name] = []
#!/usr/bin/env python import argparse from closeio_api import Client as CloseIO_API DELETE_FIELDS = ['Field1', 'Field2', 'Field3'] parser = argparse.ArgumentParser(description='Delete the fields: ' + ', '.join(DELETE_FIELDS)) parser.add_argument('--api_key', '-k', required=True, help='API Key') parser.add_argument('--development', action='store_true', help='Use a development server rather than production.') args = parser.parse_args() api = CloseIO_API(args.api_key, development=args.development) skip = 0 has_more = True while has_more: resp = api.get('lead', data={'_skip': skip}) leads = resp['data'] for lead in leads: n_fields_deleted = 0 custom = lead['custom'].copy() for field in DELETE_FIELDS: if custom.get(field): del custom[field] n_fields_deleted += 1 if n_fields_deleted: print "LEAD: %s" % lead['id']
parser = argparse.ArgumentParser(
    description=
    'Find duplicate contacts on a lead in your Close org via contact_name, email address, or phone number'
)
parser.add_argument('--api-key', '-k', required=True, help='API Key')
parser.add_argument('--field',
                    '-f',
                    default='all',
                    choices=['contact_name', 'email', 'phone', 'all'],
                    required=False,
                    help="Specify a field to compare uniqueness")
args = parser.parse_args()

## Initialize Close API Wrapper
api = CloseIO_API(args.api_key)
org_id = api.get('api_key/' + args.api_key)['organization_id']
# Strip '/' so the org name is safe to use in file paths later.
org_name = api.get('organization/' + org_id,
                   params={'_fields': 'name'})['name'].replace('/', '')

## Calculate number of slices necessary to get all leads
# _limit=0 returns no rows, just total_results; slices of 1000 leads each.
total_leads = api.get('lead',
                      params={
                          '_limit': 0,
                          'query': 'sort:created contacts > 1'
                      })['total_results']
total_slices = int(math.ceil(float(total_leads) / 1000))
slices = range(1, total_slices + 1)
leads = []
group.add_argument('--opportunities', '-O', action='store_true', help='reassign only active opportunities') group.add_argument('--all-opportunities', action='store_true', help='reassign all opportunities') args = parser.parse_args() if not any([args.tasks, args.opportunities, args.all_tasks, args.all_opportunities]): parser.error("at least one option required") log_format = "[%(asctime)s] %(levelname)s %(message)s" if not args.confirmed: log_format = 'DRY RUN: '+log_format logging.basicConfig(level=logging.INFO, format=log_format) logging.debug('parameters: %s' % vars(args)) api = CloseIO_API(args.api_key, development=args.development) emails_to_ids = {} if any([args.from_user_email, args.to_user_email]): has_more = True offset = 0 while has_more: resp = api.get('user') for user in resp['data']: emails_to_ids[user['email']] = user['id'] offset += len(resp['data']) has_more = resp['has_more'] logging.debug(emails_to_ids) if args.from_user_email:
args = parser.parse_args()

# Dict of lowercased tags => tuples of (custom_field_name, custom_field_value)
tag_templates = {}

sniffer = csv.Sniffer()
dialect = sniffer.sniff(args.csvfile.read(1024))
args.csvfile.seek(0)
c = csv.DictReader(args.csvfile, dialect=dialect,
                   fieldnames=['tag', 'custom_field_name', 'custom_field_value'])
c.next()  # skip the header row (Python 2 iterator protocol)
for r in c:
    if r:
        assert len(r) == 3, 'Invalid csv format at line %d' % (c.line_num,)
        tag_templates[r['tag'].lower()] = (r['custom_field_name'],
                                           r['custom_field_value'])

api = CloseIO_API(args.api_key, development=args.development)

has_more = True
offset = 0
while has_more:
    # FIX: query/_skip/_fields must be query-string parameters (params=);
    # a GET request body (data=) is ignored by the API, so the filter and
    # pagination never took effect.
    resp = api.get('lead', params={
        'query': 'custom.Tags:* sort:created',
        '_skip': offset,
        '_fields': 'id,custom'
    })
    leads = resp['data']
    for l in leads:
        if 'Tags' in l['custom'].keys():
            tags = [t.strip() for t in l['custom']['Tags'].split(',')]
choices=[
    'call',
    'created',
    'email',
    'lead_status_change',
    'note',
    'opportunity_status_change',
    'sms',
    'task_completed',
],
    required=True,
    help='The type of activity you\'d like to export to JSON',
)
args = parser.parse_args()
api = CloseIO_API(args.api_key)
days = []
activities = []
endpoint = args.activity_type
# Status-change activities live under a different URL namespace than the
# other activity types.
if endpoint == 'opportunity_status_change':
    endpoint = 'status_change/opportunity'
elif endpoint == 'lead_status_change':
    endpoint = 'status_change/lead'

starting_date = datetime.strptime(args.date_start, '%Y-%m-%d')
# End of the first day: start + 1 day - 1 second (inclusive day window).
ending_date = (starting_date + relativedelta(days=+1) -
               relativedelta(seconds=+1))
ending_date_final = datetime.strptime(args.date_end, '%Y-%m-%d')
#if args.list_countries: # for k,v in ISO_COUNTRIES.iteritems(): # print k, v # sys.exit(0) args.old_code = args.old_code.upper() args.new_code = args.new_code.upper() assert args.old_code in ISO_COUNTRIES.keys(), '%s country code is not valid' % args.old_code assert args.new_code in ISO_COUNTRIES.keys(), '%s country code is not valid' % args.new_code assert args.old_code != args.new_code, 'equal country codes' logging.info('old country: %s (%s) -> new country: %s (%s) ' % (args.old_code, ISO_COUNTRIES[args.old_code], args.new_code, ISO_COUNTRIES[args.new_code])) api = CloseIO_API(args.api_key, development=args.development) has_more = True offset = 0 while has_more: resp = api.get('lead', data={ 'query': LEADS_QUERY, '_skip': offset, '_fields': 'id,addresses' }) leads = resp['data'] for lead in leads: need_update = False for address in lead['addresses']:
        unique_leads[grouper] = lead
    elif lead['contacts'] not in unique_leads[grouper]['contacts']:
        # Same company already seen: merge this row's contacts into it.
        unique_leads[grouper]['contacts'].extend(lead['contacts'])

print('Found %d leads (grouped by company) from %d contacts.' %
      (len(unique_leads), import_count))
print('\nHere is a sample lead (last row):')
print(json.dumps(unique_leads[grouper], indent=4))
# Interactive confirmation before any writes happen.
print('\nAre you sure you want to continue? (y/n) ')
if raw_input('') != 'y':
    sys.exit()

##############################################################################

api = CloseIO_API(args.api_key, development=args.development)

progress_widgets = ['Importing %d rows: ' % import_count, Percentage(), ' ',
                    Bar(), ' ', ETA(), ' ', FileTransferSpeed()]
pbar = ProgressBar(widgets=progress_widgets, maxval=import_count).start()

dupes_cnt = 0

for key, val in unique_leads.items():
    retries = 5

    # check if it's a duplicate
    dupe = False
    if args.skip_duplicates and val.get('name'):
        # get the org id necessary for search
        org_id = api.get('api_key')['data'][0]['organization_id']
group.add_argument('--opportunities', '-O', action='store_true', help='reassign only active opportunities') group.add_argument('--all-opportunities', action='store_true', help='reassign all opportunities') args = parser.parse_args() if not any([args.tasks, args.opportunities, args.all_tasks, args.all_opportunities]): parser.error("at least one option required") log_format = "[%(asctime)s] %(levelname)s %(message)s" if not args.confirmed: log_format = 'DRY RUN: '+log_format logging.basicConfig(level=logging.INFO, format=log_format) logging.debug('parameters: %s' % vars(args)) api = CloseIO_API(args.api_key, development=args.development) emails_to_ids = {} if any([args.from_user_email, args.to_user_email]): has_more = True offset = 0 while has_more: resp = api.get('user', data={'_skip': offset}) for user in resp['data']: emails_to_ids[user['email']] = user['id'] offset += len(resp['data']) has_more = resp['has_more'] logging.debug(emails_to_ids) if args.from_user_email:
description=
'Create a CSV of all deleted leads in the past 30 days and see how they were deleted'
)
parser.add_argument('--api-key', '-k', required=True, help='API Key')
parser.add_argument(
    '--print-lead-ids',
    '-p',
    action='store_true',
    help=
    'Use this field to print lead_ids deleted in an array at the end of the script',
)
args = parser.parse_args()
api = CloseIO_API(args.api_key)

# State for the event-log scan below.
has_more = True
cursor = ''
events = []
leads = []
reverted_imports = {}

me = api.get('me')
org_id = me['organizations'][0]['id']
# Fetch both active and inactive memberships so deletions by former members
# can still be attributed.
org = api.get(
    f'organization/{org_id}',
    params={'_fields': 'name,memberships,inactive_memberships'},
)
org_memberships = org['memberships'] + org['inactive_memberships']
org_name = org['name']
)
parser.add_argument(
    '--end-date',
    '-e',
    help=
    'The end of the date range you want to download recordings for in yyyy-mm-dd format.'
)
parser.add_argument(
    '--file-path',
    '-f',
    required=True,
    help='The file path to the folder where the recordings will be stored.')
args = parser.parse_args()

api = CloseIO_API(args.api_key, development=args.development)
# Basic-auth header for the direct recording downloads.
# NOTE(review): base64.b64encode requires bytes on Python 3 and str() of the
# result would include a b'' prefix — presumably this script targets
# Python 2. TODO confirm.
api_encoded = "Basic " + str(base64.b64encode(args.api_key))

has_more = True
offset = 0
leads = {}
params = {}
# Leads with at least one call recording or voicemail; the closing paren of
# the call() clause is appended after the optional date filters.
query = "call((recording_duration > 0 or voicemail_duration > 0)"

if args.start_date:
    params['date_created__gte'] = args.start_date
    query = query + ' date >= "%s"' % args.start_date

if args.end_date:
    params['date_created__lte'] = args.end_date
)
parser.add_argument(
    '--lead-id',
    '-l',
    help=
    'Use this field if you want to narrow your search to a specific lead_id',
)
parser.add_argument(
    '--user-id',
    '-u',
    help=
    'Use this field if you want to narrow your search to changes done by a specific user',
)
args = parser.parse_args()
api = CloseIO_API(args.api_key)

org_id = api.get('me')['organizations'][0]['id']
# Need inactive memberships too, so changes by former members resolve.
org = api.get(
    'organization/' + org_id,
    params={
        '_fields':
        'id,name,memberships,inactive_memberships,lead_custom_fields'
    },
)
org_name = org['name'].replace('/', "")  # '/' would break file paths
org_memberships = org['memberships'] + org['inactive_memberships']

# Resolve the custom field id passed on the CLI to its display name.
try:
    custom_field_name = [
        i for i in org['lead_custom_fields'] if i['id'] == args.custom_field
    ][0]['name']
except IndexError as e:
import json
import sys

from requests.exceptions import ConnectionError

from closeio_api import APIError, Client as CloseIO_API
from closeio_api.utils import CsvReader

parser = argparse.ArgumentParser(description='Remove tasks associated with inactive users')
parser.add_argument('--api-key', '-k', required=True, help='API Key')
parser.add_argument('--confirmed', action='store_true',
                    help='Confirm making changes. Otherwise this script is not going to modify any data.')
parser.add_argument('--verbose', '-v', action='store_true',
                    help='Increase logging verbosity.')
args = parser.parse_args()

# NOTE(review): `async` is a reserved word from Python 3.7 on — this script
# (print statements below) targets Python 2.
api = CloseIO_API(args.api_key, async=False)

# Get IDs of all inactive users in a given org
org_id = api.get('api_key/' + args.api_key)['organization_id']
org = api.get('organization/' + org_id)
inactive_users = [m['user_id'] for m in org['inactive_memberships']]

# Get IDs of all the tasks assigned to these inactive users
task_ids = []
total_cnt = len(inactive_users)
for idx, user_id in enumerate(inactive_users):
    if args.verbose:
        print 'Gathering tasks for %s (%d/%d)' % (user_id, idx + 1, total_cnt)
    has_more = True
    skip = 0
args = parser.parse_args()

log_format = "[%(asctime)s] %(levelname)s %(message)s"
if not args.confirmed:
    # Prefix every log line so dry runs are unmistakable.
    log_format = "DRY RUN: " + log_format
logging.basicConfig(level=logging.INFO, format=log_format)
logging.debug("parameters: %s" % vars(args))

# Sniff the CSV dialect from the first chunk, then rewind for the reader.
sniffer = csv.Sniffer()
dialect = sniffer.sniff(args.csvfile.read(1000000))
args.csvfile.seek(0)
c = csv.DictReader(args.csvfile, dialect=dialect)
assert any(x in ("company", "lead_id") for x in c.fieldnames), 'ERROR: column "company" or "lead_id" is not found'

api = CloseIO_API(args.api_key, development=args.development)

# Custom fields referenced by the CSV (columns named "custom.<name>") that
# don't exist in the org yet.
resp = api.get("custom_fields/lead")
available_custom_fieldnames = [x["name"] for x in resp["data"]]
new_custom_fieldnames = [
    x
    for x in [y.split(".", 1)[1] for y in c.fieldnames if y.startswith("custom.")]
    if x not in available_custom_fieldnames
]

if new_custom_fieldnames:
    if args.create_custom_fields:
        for field in new_custom_fieldnames:
            if args.confirmed:
                api.post("custom_fields/lead", data={"name": field, "type": "text"})
            available_custom_fieldnames.append(field)
log_format = "[%(asctime)s] %(levelname)s %(message)s"
if not args.confirmed:
    # Prefix every log line so dry runs are unmistakable.
    log_format = 'DRY RUN: '+log_format
logging.basicConfig(level=logging.INFO, format=log_format)
logging.debug('parameters: %s' % vars(args))

# Sniff the CSV dialect from the first chunk, then rewind for the reader.
sniffer = csv.Sniffer()
dialect = sniffer.sniff(args.csvfile.read(2048))
args.csvfile.seek(0)
c = csv.DictReader(args.csvfile, dialect=dialect)
assert any(x in ('company', 'lead_id') for x in c.fieldnames), \
    'ERROR: column "company" or "lead_id" is not found'

api = CloseIO_API(args.api_key, development=args.development)

# Custom fields referenced by the CSV (columns named "custom.<name>") that
# don't exist in the org yet.
resp = api.get('custom_fields/lead')
available_custom_fieldnames = [x['name'] for x in resp['data']]
new_custom_fieldnames = [x for x in [y.split('.', 1)[1]
                                     for y in c.fieldnames if y.startswith('custom.')]
                         if x not in available_custom_fieldnames]

if new_custom_fieldnames:
    if args.create_custom_fields:
        for field in new_custom_fieldnames:
            if args.confirmed:
                api.post('custom_fields/lead', data={'name': field, 'type': 'text'})
            available_custom_fieldnames.append(field)
            logging.info('added new custom field "%s"' % field)
    else:
        logging.error('unknown custom fieldnames: %s' % new_custom_fieldnames)
parser.add_argument('--api-key', '-k', required=True, help='API Key')
parser.add_argument('--request-id', '-r', required=True,
                    help='request_id from event log.')
parser.add_argument('--output', '-o', required=True,
                    help='json output file of events')
parser.add_argument('--verbose', '-v', action='store_true',
                    help='Increase logging verbosity.')
args = parser.parse_args()

api = CloseIO_API(args.api_key)


def setup_logger():
    """Build a stdout logger; DEBUG level when --verbose was passed."""
    logger = logging.getLogger('closeio.api.events_by_request_id')
    logger.setLevel(logging.INFO)
    if args.verbose:
        logger.setLevel(logging.DEBUG)
    ch = logging.StreamHandler(sys.stdout)
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    ch.setFormatter(formatter)
    logger.addHandler(ch)
    return logger
log_format = "[%(asctime)s] %(levelname)s %(message)s"
if not args.confirmed:
    # Prefix every log line so dry runs are unmistakable.
    log_format = 'DRY RUN: ' + log_format
logging.basicConfig(level=logging.INFO, format=log_format)
logging.debug('parameters: %s' % vars(args))

# Sniff the CSV dialect from the first chunk, then rewind for the reader.
sniffer = csv.Sniffer()
dialect = sniffer.sniff(args.csvfile.read(1000000))
args.csvfile.seek(0)
c = csv.DictReader(args.csvfile, dialect=dialect)
assert any(x in ('company', 'lead_id') for x in c.fieldnames), \
    'ERROR: column "company" or "lead_id" is not found'

api = CloseIO_API(args.api_key, development=args.development)

# Custom fields referenced by the CSV (columns named "custom.<name>") that
# don't exist in the org yet.
resp = api.get('custom_fields/lead')
available_custom_fieldnames = [x['name'] for x in resp['data']]
new_custom_fieldnames = [
    x for x in
    [y.split('.', 1)[1] for y in c.fieldnames if y.startswith('custom.')]
    if x not in available_custom_fieldnames
]

if new_custom_fieldnames:
    if args.create_custom_fields:
        for field in new_custom_fieldnames:
            if args.confirmed:
                api.post('custom_fields/lead', data={
'-v',
    action='store_true',
    help='Increase logging verbosity.')
parser.add_argument(
    '--development',
    action='store_true',
    help='Use a development (testing) server rather than production.')
parser.add_argument(
    '--confirmed',
    action='store_true',
    help=
    'Without this flag, no action will be taken (dry run). Use this to perform the merge.'
)
args = parser.parse_args()

api = CloseIO_API(args.api_key, development=args.development)


def setup_logger():
    """Build a stdout logger; DEBUG level when --verbose was passed."""
    logger = logging.getLogger('closeio.api.merge_leads')
    logger.setLevel(logging.INFO)
    if args.verbose:
        logger.setLevel(logging.DEBUG)
    ch = logging.StreamHandler(sys.stdout)
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    ch.setFormatter(formatter)
    logger.addHandler(ch)
    return logger
parser = argparse.ArgumentParser(
    description=
    'Restore an array of deleted leads by ID. This CANNOT restore status changes or call recordings.'
)
parser.add_argument('--api-key', '-k', required=True, help='API Key')
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument('--leads',
                   help='List of lead IDs in a form of a comma separated list')
group.add_argument(
    '--leads-file',
    help=
    'List of lead IDs in a form of a textual file with single column of lead IDs',
)
args = parser.parse_args()
api = CloseIO_API(args.api_key)

# Array of Lead IDs. Add the IDs you want to restore here.
if args.leads:
    lead_ids = args.leads.split(",")
elif args.leads_file:
    with open(args.leads_file) as f:
        lines = f.readlines()
        lead_ids = [el.strip() for el in lines]  # Strip new lines
        lead_ids = list(filter(None, lead_ids))  # Strip empty lines

# Create a list of active users for the sake of posting opps.
org_id = api.get('me')['organizations'][0]['id']
memberships = api.get('organization/' + org_id,
                      params={'_fields': 'memberships'})['memberships']
active_users = [i['user_id'] for i in memberships]
    # ANSI escape codes for coloured terminal output (class body continues
    # from above this chunk).
    FAIL = '\033[91m'
    ENDC = '\033[0m'
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'


def green(str):
    # NOTE(review): despite its name this prints with OKBLUE — confirm intent.
    print bcolors.OKBLUE + str + bcolors.ENDC


def strCleanup(str):
    # Normalize a value for comparison: trim, lowercase, drop '+' and '/'.
    return str.strip().lower().replace("+","").replace("/","")


desired_status = 'open'  # capitalization doesn't matter

api = CloseIO_API(args.api_key, development=args.development)

offset = 0
has_more = True
combinedLeads = {}
# Contact fields inspected for duplicate values.
typesToCheck = ["phones","emails"]
combined = {}
combined["emails"] = {}
combined["phones"] = {}
combined["addresses"] = {}
combined["name"] = {}
# Lower number = status wins when merging (German status labels).
statusLabelOrder = {"Kalt":0,"Kontakt":1,"Termin":2,"Kunde":3,"Tot":4,"Investor":5}

"""
from closeio_api.utils import CsvReader

parser = argparse.ArgumentParser(description='Remove email addresses from contacts in CSV file')
parser.add_argument('--api-key', '-k', required=True, help='API Key')
parser.add_argument('--confirmed', action='store_true', help='Really run this?')
parser.add_argument('file', help='Path to the csv file')
args = parser.parse_args()

reader = CsvReader(args.file)
# Map header name -> column index from the first row.
headers = dict([(name, idx,) for idx, name in enumerate(reader.next())])  # skip the 1st line header

if any(field not in headers for field in ['contact_id', 'email_address']):
    print 'contact_id or email_address headers could not be found in your csv file.'
    sys.exit(-1)

api = CloseIO_API(args.api_key, async=False)

for row in reader:
    contact_id = row[headers['contact_id']]
    email_address = row[headers['email_address']]
    try:
        contact = api.get('contact/' + contact_id)
        if not contact['emails']:
            continue
        # Keep every email except the one listed in the CSV.
        emails = filter(lambda email: email['email'] != email_address,
                        contact['emails'])
        if args.confirmed:
            resp = api.put('contact/' + contact_id, {'emails': emails})
    except APIError:
        # Best-effort: skip contacts the API rejects (e.g. already deleted).
        pass
    if grouper not in unique_leads:
        unique_leads[grouper] = lead
    elif lead['contacts'] not in unique_leads[grouper]['contacts']:
        # Same company already seen: merge this row's contacts into it.
        unique_leads[grouper]['contacts'].extend(lead['contacts'])

print 'Found %d leads (grouped by company) from %d contacts.' % (len(unique_leads), import_count)
print '\nHere is a sample lead (last row):'
print json.dumps(unique_leads[grouper], indent=4)

# Interactive confirmation before any writes happen.
if raw_input('\nAre you sure you want to continue? (y/n) ') != 'y':
    sys.exit()

##############################################################################

api = CloseIO_API(args.api_key, development=args.development)

progress_widgets = ['Importing %d rows: ' % import_count, Percentage(), ' ',
                    Bar(), ' ', ETA(), ' ', FileTransferSpeed()]
pbar = ProgressBar(widgets=progress_widgets, maxval=import_count).start()

dupes_cnt = 0

for key, val in unique_leads.items():
    retries = 5

    # check if it's a duplicate
    dupe = False
    if args.skip_duplicates and val.get('name'):
        # get the org id necessary for search
        org_id = api.get('api_key')['data'][0]['organization_id']
'url',
        'all',
        'custom',
    ],
    help="Specify a field to compare uniqueness",
)
parser.add_argument(
    '--custom-field-name',
    '-c',
    help=
    "Specify the custom field name if you're deduplicating by `custom` field",
)
args = parser.parse_args()

# Initialize Close API Wrapper
api = CloseIO_API(args.api_key)
organization = api.get('me')['organizations'][0]
org_id = organization['id']
org_name = organization['name']

# Calculate number of slices necessary to get all leads
# _limit=0 returns no rows, just total_results; slices of 1000 leads each.
total_leads = api.get('lead',
                      params={
                          '_limit': 0,
                          'query': 'sort:created'
                      })['total_results']
total_slices = int(math.ceil(float(total_leads) / 1000))
slices = range(1, total_slices + 1)
leads = []

# Write data to a CSV
        unique_leads[grouper] = lead
    elif lead["contacts"] not in unique_leads[grouper]["contacts"]:
        # Same company already seen: merge this row's contacts into it.
        unique_leads[grouper]["contacts"].extend(lead["contacts"])

print("Found %d leads (grouped by company) from %d contacts." %
      (len(unique_leads), import_count))
print("\nHere is a sample lead (last row):")
print(json.dumps(unique_leads[grouper], indent=4))
# Interactive confirmation before any writes happen.
print("\nAre you sure you want to continue? (y/n) ")
if raw_input("") != "y":
    sys.exit()

##############################################################################

api = CloseIO_API(args.api_key, development=args.development)

progress_widgets = [
    "Importing %d rows: " % import_count,
    Percentage(),
    " ",
    Bar(),
    " ",
    ETA(),
    " ",
    FileTransferSpeed(),
]
pbar = ProgressBar(widgets=progress_widgets, maxval=import_count).start()

dupes_cnt = 0
import argparse
import sys

from closeio_api import Client as CloseIO_API

parser = argparse.ArgumentParser(description="Change all the opportunities for a given leads' search query to a given status.")
parser.add_argument('--query', type=str, required=True, help='Search query.')
parser.add_argument('--api_key', type=str, required=True, help='API key')
parser.add_argument('--status', type=str, required=True,
                    help='Label of the new status')
parser.add_argument('--dev', action='store_true', help='Use the dev server',
                    default=False)
args = parser.parse_args()

# Should tell you how many leads are going to be affected
api = CloseIO_API(args.api_key, development=args.dev)

# Get the status_id
# Resolve the human-readable status label to its opportunity status id.
org_id = api.get('api_key')['data'][0]['organization_id']
statuses = api.get('organization/{0}'.format(org_id))['opportunity_statuses']
new_status_id = [st['id'] for st in statuses
                 if st['label'].lower() == args.status.lower()]
if not new_status_id:
    print 'Status not found: {0}'.format(args.status)
    sys.exit(1)
new_status_id = new_status_id[0]

print 'Gathering opportunities for {0}'.format(args.query)
has_more = True
offset = 0
limit = 50
import argparse

from closeio_api import Client as CloseIO_API, APIError
import csv
import gevent
import gevent.monkey
from gevent.pool import Pool

# Python 2 only: force a UTF-8 default encoding for the csv output.
reload(sys)
sys.setdefaultencoding('utf-8')
gevent.monkey.patch_all()

parser = argparse.ArgumentParser(description='Get a list of all lead merge events for the last 30 days from your Close organization')
parser.add_argument('--api-key', '-k', required=True, help='API Key')
args = parser.parse_args()

## Initialize the Close API and get all users in the org
api = CloseIO_API(args.api_key)
org_id = api.get('api_key/' + args.api_key)['organization_id']
org = api.get('organization/' + org_id, params={
    '_fields': 'inactive_memberships,memberships,name'
})
org_name = org['name'].replace('/', '')  # '/' would break file paths
memberships = org['memberships'] + org['inactive_memberships']
# Map user_id -> full name across active AND inactive members.
users = {membership['user_id']: membership['user_full_name']
         for membership in memberships}

## Method to get data about the deleted source lead added to the event
def getSourceLeadData(event):
    print "%s of %s: %s" % (events.index(event) + 1, len(events),
                            event['Merge Event ID'])
    # The source lead of a merge is deleted; look up its delete event to
    # recover its pre-merge state.
    source_delete_event = api.get('event', params={
        'object_type': 'lead',
        'action': 'deleted',
        'lead_id': event['Source Lead ID']
    })
    if len(source_delete_event['data']) > 0:
        delete_event = source_delete_event['data'][0]
        if delete_event.get('previous_data'):
            event['Source Lead Status'] = delete_event['previous_data'].get('status_label')
#!/usr/bin/env python
# Export all opportunities in the org to a CSV file.
import argparse

from flask_common.utils import CsvWriter
from closeio_api import Client as CloseIO_API

HEADERS = ['lead_name', 'status_type', 'status_label', 'confidence',
           'user_name', 'value', 'value_period', 'note', 'date_created',
           'date_updated', 'date_won']

parser = argparse.ArgumentParser(description='Export Opportunities to CSV')
parser.add_argument('--api_key', '-k', required=True, help='API Key')
parser.add_argument('--output', '-o', required=True, help='Output filename')
parser.add_argument('--development', action='store_true',
                    help='Use a development server rather than production.')
args = parser.parse_args()

with open(args.output, 'wb') as f:
    writer = CsvWriter(f)
    api = CloseIO_API(args.api_key, development=args.development)
    writer.writerow(HEADERS)
    skip = 0
    has_more = True
    while has_more:
        # FIX: _skip must be a query-string parameter (params=); a GET
        # request body (data=) is ignored by the API, so pagination never
        # advanced past the first page.
        resp = api.get('opportunity', params={'_skip': skip})
        opportunities = resp['data']
        for opportunity in opportunities:
            # Missing/None values become empty cells.
            row = []
            for header in HEADERS:
                row.append(opportunity.get(header) or '')
            writer.writerow(row)
import sys
import logging  # fix: used by setup_logger() below but was never imported
import argparse

from closeio_api import Client as CloseIO_API
from progressbar import ProgressBar
from progressbar.widgets import Counter, Percentage, Bar, AdaptiveETA, FileTransferSpeed

parser = argparse.ArgumentParser(
    description='Detect duplicates & merge leads (see source code for details)')
parser.add_argument('--api-key', '-k', required=True, help='API Key')
parser.add_argument('--field', '-f', required=False, default='company',
                    choices=['company', 'email', 'phone'],
                    help='Field to compare uniqueness.')
parser.add_argument('--verbose', '-v', action='store_true',
                    help='Increase logging verbosity.')
parser.add_argument('--development', action='store_true',
                    help='Use a development (testing) server rather than production.')
parser.add_argument('--confirmed', action='store_true',
                    help='Without this flag, no action will be taken (dry run). Use this to perform the merge.')
args = parser.parse_args()

api = CloseIO_API(args.api_key, development=args.development)


def setup_logger():
    """Create and return a stdout logger: DEBUG with --verbose, else INFO."""
    logger = logging.getLogger('closeio.api.merge_leads')
    logger.setLevel(logging.INFO)
    if args.verbose:
        logger.setLevel(logging.DEBUG)
    ch = logging.StreamHandler(sys.stdout)
    formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    ch.setFormatter(formatter)
    logger.addHandler(ch)
    return logger


logger = setup_logger()
action='store_true', help='reassign all opportunities') args = parser.parse_args() if not any( [args.tasks, args.opportunities, args.all_tasks, args.all_opportunities]): parser.error("at least one option required") log_format = "[%(asctime)s] %(levelname)s %(message)s" if not args.confirmed: log_format = 'DRY RUN: ' + log_format logging.basicConfig(level=logging.INFO, format=log_format) logging.debug('parameters: %s' % vars(args)) api = CloseIO_API(args.api_key, development=args.development) emails_to_ids = {} if any([args.from_user_email, args.to_user_email]): has_more = True offset = 0 while has_more: resp = api.get('user') for user in resp['data']: emails_to_ids[user['email']] = user['id'] offset += max(0, len(resp['data']) - 1) has_more = resp['has_more'] logging.debug(emails_to_ids) if args.from_user_email:
#!/usr/bin/env python import argparse from closeio_api import Client as CloseIO_API DELETE_FIELDS = ['Field1', 'Field2', 'Field3'] parser = argparse.ArgumentParser(description='Delete the fields: ' + ', '.join(DELETE_FIELDS)) parser.add_argument('--api_key', '-k', required=True, help='API Key') parser.add_argument('--development', action='store_true', help='Use a development server rather than production.') args = parser.parse_args() api = CloseIO_API(args.api_key, development=args.development) skip = 0 has_more = True while has_more: resp = api.get('lead', data={'_skip': skip}) leads = resp['data'] for lead in leads: n_fields_deleted = 0 custom = lead['custom'].copy() for field in DELETE_FIELDS: if custom.get(field): del custom[field] n_fields_deleted += 1
parser.add_argument(
    '--user-id',
    '-u',
    help='Use this field if you only want to find calls for a specific user',
)
parser.add_argument(
    '--call-costs',
    '-c',
    action='store_true',
    help=
    'Use this field if you want to include a call cost column in your export CSV',
)
args = parser.parse_args()

api = CloseIO_API(args.api_key)
params = {}
has_more = True
offset = 0
calls = []
# Presumably caches user_id -> display name for the export — confirm against
# the rest of the script.
display_names = {}

# Build a Close search query limited to the requested date window; the
# query construction continues past this excerpt.
if not args.start_date and not args.end_date:
    query = 'has:calls'
else:
    query = 'call('
    if args.start_date:
        params['date_created__gte'] = args.start_date
        query = query + ' date >= "%s"' % args.start_date
args = parser.parse_args()

full_tasks = []
full_opps = []

if not any([args.tasks, args.opportunities, args.all_tasks,
            args.all_opportunities]):
    parser.error("at least one option required")

log_format = "[%(asctime)s] %(levelname)s %(message)s"
if not args.confirmed:
    # Prefix every log line so a dry run is unmistakable.
    log_format = 'DRY RUN: ' + log_format
logging.basicConfig(level=logging.INFO, format=log_format)
logging.debug(f'parameters: {vars(args)}')

api = CloseIO_API(args.api_key)
emails_to_ids = {}

if any([args.from_user_email, args.to_user_email]):
    # Page through all users to build an email -> user id map.
    has_more = True
    offset = 0
    while has_more:
        resp = api.get('user', params={'_skip': offset})
        for user in resp['data']:
            emails_to_ids[user['email']] = user['id']
        offset += len(resp['data'])
        has_more = resp['has_more']
    logging.debug(emails_to_ids)

if args.from_user_email:
elif lead['contacts'] not in unique_leads[grouper]['contacts']:
    # Same company seen again: merge its contacts into the existing entry.
    unique_leads[grouper]['contacts'].extend(lead['contacts'])

print('Found %d leads (grouped by company) from %d contacts.' % (len(unique_leads), import_count))
print('\nHere is a sample lead (last row):')
print(json.dumps(unique_leads[grouper], indent=4))

# Interactive confirmation before any data is written (Python 2: raw_input).
print('\nAre you sure you want to continue? (y/n) ')
if raw_input('') != 'y':
    sys.exit()

##############################################################################

api = CloseIO_API(args.api_key, development=args.development)

progress_widgets = [
    'Importing %d rows: ' % import_count,
    Percentage(), ' ', Bar(), ' ', ETA(), ' ', FileTransferSpeed()
]
pbar = ProgressBar(widgets=progress_widgets, maxval=import_count).start()

dupes_cnt = 0

# Import loop; per-lead retry handling continues past this excerpt.
for key, val in unique_leads.items():
    retries = 5
def api_client():
    """Build the Close API client used by the test fixtures.

    The key is a dummy value; tests are expected to stub out real requests.
    """
    fake_key = 'fake-api-key'
    return Client(fake_key)
'--date-end',
    '-e',
    required=True,
    help='The yyyy-mm-dd you want to end looking for activities')
parser.add_argument('--activity-type',
                    '-t',
                    choices=[
                        'call', 'created', 'email', 'lead_status_change',
                        'note', 'opportunity_status_change', 'sms',
                        'task_completed'
                    ],
                    required=True,
                    help='The type of activity you\'d like to export to JSON')
args = parser.parse_args()

api = CloseIO_API(args.api_key)
org_id = api.get('api_key/' + args.api_key,
                 params={'_fields': 'organization_id'})['organization_id']
# '/' is stripped from the org name — presumably used in a filename; confirm
# against the rest of the script.
org_name = api.get('organization/' + org_id,
                   params={'_fields': 'name'})['name'].replace('/', '')
days = []
activities = []

# Map the CLI activity type onto the corresponding API endpoint path.
endpoint = args.activity_type
if endpoint == 'opportunity_status_change':
    endpoint = 'status_change/opportunity'
elif endpoint == 'lead_status_change':
    endpoint = 'status_change/lead'

# Date-window setup; the expression continues past this excerpt.
starting_date = datetime.strptime(args.date_start, '%Y-%m-%d')
ending_date = starting_date + relativedelta(days=+1) - relativedelta(
args = parser.parse_args()
"""
Detect duplicate leads and merge them.

Duplicate criteria:
 - Case insensitive exact match by Company Name

Priority (how to choose 'Destination lead'):
 - Prefers leads with Opportunities over ones without.
 - If both or neither have opportunities, prefer leads with desired_status
   specified below.
"""

desired_status = 'open'  # capitalization doesn't matter

api = CloseIO_API(args.api_key, development=args.development)

has_more = True
offset = 0
last_lead = None
total_merged = 0

# Page through leads sorted by display name so duplicates appear adjacent;
# the per-page merge logic continues past this excerpt.
while has_more:
    leads_merged_this_page = 0

    # Get a page of leads
    resp = api.get('lead', data={
        'query': 'sort:display_name',
        '_skip': offset,
        '_fields': 'id,display_name,name,status_label,opportunities,custom'
    })
    leads = resp['data']
def run(api_key, confirmed, development=False, use_existing_contact=False,
        new_contact_name='', phones_custom_field='all phones',
        emails_custom_field='all emails'):
    """
    After an import from a different CRM, for all leads, move emails and
    phones that were put in in a lead custom field to the lead's first
    contact (if --use_existing_contact flag was used) or create a new
    contact.
    """
    # Python 2 script: `print` statements and backtick repr throughout.
    print 'confirmed:', `confirmed`
    print 'phones_custom_field:', `phones_custom_field`
    print 'emails_custom_field:', `emails_custom_field`
    print 'use_existing_contact:', `use_existing_contact`

    api = CloseIO_API(api_key, development=development)

    has_more = True
    offset = 0
    while has_more:
        # Get a page of leads that came from the old CRM and have not been
        # migrated yet.
        resp = api.get('lead', data={
            'query': '"custom.Source CRM":* not "custom.Migration completed":* sort:created',
            '_skip': offset,
            '_fields': 'id,display_name,name,contacts,custom',
        })
        leads = resp['data']

        for lead in leads:
            contacts = lead['contacts']
            custom = lead['custom']
            company_emails = custom.get(emails_custom_field, '')
            company_phones = custom.get(phones_custom_field, '')
            if not company_phones and not company_emails:
                continue

            # The custom fields hold either a plain value or a JSON-ish list
            # rendered as '["a", "b"]'; normalize both to a Python list.
            if company_emails:
                if company_emails.startswith('["'):
                    company_emails = company_emails[2:-2].split('", "')
                else:
                    company_emails = [company_emails]
            if company_phones:
                if company_phones.startswith('["'):
                    company_phones = company_phones[2:-2].split('", "')
                else:
                    company_phones = [company_phones]

            # Either extend the lead's first contact or build a brand new one.
            if contacts and use_existing_contact:
                contact = contacts[0]
            else:
                contact = {
                    'lead_id': lead['id'],
                    'phones': [],
                    'emails': []
                }
                if new_contact_name:
                    contact['name'] = new_contact_name
            for pn in company_phones:
                contact['phones'].append({'type': 'office', 'phone': pn})
            for e in company_emails:
                contact['emails'].append({'type': 'office', 'email': e})

            print 'Lead:', lead['id'], lead['name'].encode('utf8')
            print 'Emails:', `custom.get(emails_custom_field)`, ' => ', `company_emails`
            print 'Phones:', `custom.get(phones_custom_field)`, ' => ', `company_phones`

            try:
                # An existing contact has an 'id'; a freshly built dict does not.
                if contact.get('id'):
                    print 'Updating an existing contact', contact['id']
                    if confirmed:
                        api.put('contact/%s' % contact['id'], data={
                            'phones': contact['phones'],
                            'emails': contact['emails'],
                        })
                else:
                    print 'Creating a new contact'
                    if confirmed:
                        api.post('contact', data=contact)
                        print 'Payload:', contact
                if confirmed:
                    api.put('lead/%s' % lead['id'], data={
                        'custom.Migration completed': 'Yes'
                    })
            except APIError as e:
                # Best-effort: record the failure on the lead and move on.
                print e
                print 'Payload:', contact
                if confirmed:
                    api.put('lead/%s' % lead['id'], data={
                        'custom.Migration completed': 'skipped'
                    })
            print ''

        if not confirmed:
            # If we don't actually update the "Migration completed" custom field,
            # we need to paginate
            offset += len(leads)
        has_more = resp['has_more']

    print 'Done'