def mark_posted():
    """Mark session videos as posted once they are public on YouTube.

    For each session-hosting row, checks up to three attached video ids
    (``videoid_0``..``videoid_2``); any video whose YouTube privacyStatus
    is ``public`` gets the matching ``posted_<n>`` flag set on the row.

    Raises:
        RuntimeError: if the YouTube API call fails or does not return
            exactly one video for an id.
    """
    at_orange = Airtable(os.getenv('AT_APP_KEY_ORANGE'), 'Session hosting',
                         os.getenv('AT_API_KEY'))
    for row in at_orange.get_all():
        for num in range(3):
            col = f'videoid_{num}'
            if col not in row['fields']:
                continue
            if checked(f'posted_{num}', row['fields']):
                continue  # already flagged as posted
            url = ("https://youtube.googleapis.com/youtube/v3/videos"
                   "?part=snippet,status,recordingDetails"
                   f"&id={row['fields'][col]}")
            # NOTE(review): relies on a module-level `access_token` — confirm
            # it is refreshed before this runs.
            response = requests.get(url, headers=get_auth_header(access_token))
            # Was `assert`: asserts vanish under `python -O`, so raise instead.
            if response.status_code != 200:
                raise RuntimeError(
                    f"YouTube API returned {response.status_code}")
            body = response.json()
            if len(body['items']) != 1:
                raise RuntimeError(
                    f"expected exactly one video for id {row['fields'][col]}")
            item = body['items'][0]
            if item['status']['privacyStatus'] == 'public':
                at_orange.update(row['id'], {f'posted_{num}': True})
def hello(short_url=None):
    """Render the landing page, or resolve a short link to its protocol page.

    With no ``short_url`` the success template is rendered; otherwise the
    short code is looked up in Airtable, its visit counter is bumped, and
    the stored query string is re-expanded into line items for the index
    template.

    NOTE(review): both templates receive ``**locals()``, so every local
    name in this function is part of the template contract — do not rename
    locals without checking the templates.
    """
    domain_url = os.getenv('DOMAIN')
    if not short_url:
        # data = create_line_items(request)
        # total_lines = len(data['line_items'])
        return render_template('success.html', **locals())
    else:
        try:
            air_base = os.getenv('AIR_TABLE_BASE')
            air_api_key = os.getenv('AIR_TABLE_API')
            air_table_name = os.getenv('AIR_PROTOCOLO_TABLE_NAME')
            at = Airtable(air_base, air_table_name, api_key=air_api_key)
            lookup_record = at.search('short_url', short_url)
            text_qs = lookup_record[0]['fields']['query_string']
            # Increment the visit counter; a negative stored count resets to 0.
            visits = int(lookup_record[0]['fields']['visits']) + 1 if lookup_record[0]['fields']['visits'] >= 0 else 0
            dict_qs = dict(parse_qsl(text_qs))
            data = create_line_items(dict_qs)
            total_lines = len(data['line_items'])
            view_counter = {'visits': visits}
            at.update(lookup_record[0]['id'], view_counter)
            # Show at most three protocol images, if any are attached.
            if 'imagen_protocolo' in lookup_record[0]['fields']:
                images = [image['url'] for image in lookup_record[0]['fields']['imagen_protocolo']]
                images = images[:3]
            else:
                images = []
        except Exception as e:
            return jsonify(error=str(e)), 403
        return render_template('index.html', **locals())
def create_link():
    """Create a protocol record from the request and return its short link.

    The request's args (or form, when args are empty) become the record's
    fields plus a serialized ``query_string``; Airtable-managed metadata
    keys are stripped before insert. The record id (minus its ``rec``
    prefix) becomes the short-url code.

    Returns:
        JSON ``{'link_url': ...}`` on success, or ``(error JSON, 403)``.
    """
    domain_url = os.getenv('DOMAIN')
    air_base = os.getenv('AIR_TABLE_BASE')
    air_api_key = os.getenv('AIR_TABLE_API')
    air_table_name = os.getenv('AIR_PROTOCOLO_TABLE_NAME')
    try:
        at = Airtable(air_base, air_table_name, api_key=air_api_key)
        # Prefer query-string args; fall back to the posted form.
        source = request.args if request.args else request.form
        new_record_content = dict(source)
        new_record_content['query_string'] = urlencode(source)
        # Strip metadata fields that Airtable manages itself.
        for meta_key in ('_createdTime', 'date_created', 'date_modified', 'id'):
            new_record_content.pop(meta_key, None)
        new_record = at.insert(new_record_content)
        short_url = {'short_url': new_record['id'].split('rec')[1],
                     'airtableID': new_record['id'],
                     'visits': 0}
        at.update(new_record['id'], short_url)
        return jsonify({'link_url': domain_url + '/' + short_url['short_url']})
    except Exception as e:
        return jsonify(error=str(e)), 403
def add_visit(page_id):
    """Increment today's visit counter for the page's Airtable table.

    Creates today's row with ``Visits = 1`` when absent. Any failure is
    captured on the response object rather than raised.
    """
    response = Response(True)
    try:
        airtable = Airtable(AIRTABLE_BASE_ID, PAGE_TABLES[int(page_id)])
        today = datetime.date.today().strftime("%d-%m-%Y")
        existing_day = airtable.match('Date', today)
        if existing_day:
            airtable.update(existing_day['id'],
                            {'Visits': existing_day['fields']['Visits'] + 1},
                            typecast=True)
        else:
            airtable.insert({'Date': today, 'Visits': 1})
    except Exception as e:
        response.status = False
        response.message = str(e)
    return response.compose()
def update_airtable_issue(token, issue_number, issue_url, rec_id):
    """Record a GitHub issue's number and URL on an Airtable record.

    Args:
        token: Airtable API key.
        issue_number: GitHub issue number to store.
        issue_url: GitHub issue URL to store.
        rec_id: id of the 'Source Data' record to update.
    """
    source = Airtable(AIRTABLE_BASE, 'Source Data', api_key=token)
    # Turn the Key and new Value into a Dictionary
    update = {
        'GitHub Issue Number': issue_number,
        'GitHub Issue URL': issue_url,
    }
    # Update AirTable with the new details using the Table name and record ID.
    # (Was the unbound-class form `Airtable.update(source, ...)`; the normal
    # instance call is equivalent and idiomatic.)
    source.update(rec_id, update)
def create_checkout_session():
    """Create a Stripe Checkout session from posted JSON or request data.

    JSON requests additionally record an unpaid order in Airtable and
    return the session id; non-JSON requests get back a shareable
    checkout link instead.
    """
    if request.data:
        data = json.loads(request.data)
    else:
        data = create_line_items(request)
    line_items = data.pop('line_items', None)
    # Stripe expects amounts in the smallest currency unit (cents).
    for item in line_items:
        item['amount'] = int(item['amount']*100)
    domain_url = os.getenv('DOMAIN')
    try:
        # Create new Checkout Session for the order
        # Other optional params include:
        # [billing_address_collection] - to display billing address details on the page
        # [customer] - if you have an existing Stripe Customer ID
        # [payment_intent_data] - lets capture the payment later
        # [customer_email] - lets you prefill the email input in the form
        # For full details see https:#stripe.com/docs/api/checkout/sessions/create
        # ?session_id={CHECKOUT_SESSION_ID} means the redirect will have the session ID set as a query param
        checkout_session = stripe.checkout.Session.create(
            success_url=domain_url + "/success?session_id={CHECKOUT_SESSION_ID}",
            cancel_url=domain_url + "/canceled.html",
            line_items=line_items,
            metadata=data,
            payment_method_types=["card"],
        )
        if request.data:
            air_base = os.getenv('AIR_TABLE_BASE')
            air_api_key = os.getenv('AIR_TABLE_API')
            air_table_name = os.getenv('AIR_PEDIDOS_TABLE_NAME')
            at = Airtable(air_base, air_table_name, api_key=air_api_key)
            new_record_content = dict(protocolo=["rec" + data['protocolo_id']],
                                      shipping_name=data['shipping_name'],
                                      shipping_email=data['shipping_email'],
                                      shipping_phone=data['shipping_phone'],
                                      shipping_address=data['shipping_address'],
                                      shipping_city=data['shipping_city'],
                                      shipping_provincia=data['shipping_provincia'],
                                      shipping_postalcode=data['shipping_postalcode'],
                                      status='unpaid',
                                      stripe_session_id=checkout_session['id'],
                                      )
            new_record = at.insert(new_record_content)
            # Store the record's own id so later lookups can match 'airtableID'.
            at.update(new_record['id'], {'airtableID': new_record['id'] })
            return jsonify({'sessionId': checkout_session['id']})
        else:
            return jsonify({'linkinfo': domain_url + '/checkout-session?sessionId=' + checkout_session['id']})
    except Exception as e:
        return jsonify(error=str(e)), 403
def write_to_airtable(district_payload):
    """Push the latest R_t value for a district to the Districts table.

    ``district_payload`` supplies the record id, the value, and the
    district's display name used in the confirmation message.
    """
    districts = Airtable('appNoMhZ2h3BqsBvd', 'Districts')
    districts.update(district_payload['district_id'],
                     {"R_t": district_payload['latest_rt']})
    return "Wrote R_t for {}".format(district_payload['district_api'])
def make_record_updates(
    comparison_map: Dict[str, Dict[str, ATRecord]],
    assume_newer: bool = False,
    delete_unmatched_except: Optional[Tuple] = None,
):
    """Update Airtable from newer source records.

    ``comparison_map`` buckets records as ``an_only`` (source-only: bulk
    inserted), ``an_newer`` (present on both sides, source newer: field
    updates applied), and ``at_only`` (Airtable-only: optionally deleted).
    ``delete_unmatched_except`` is a (field_name, field_value) pair —
    Airtable-only records whose field matches the value are spared.
    """
    record_type = MC.get()
    at_key, at_base, at_table, at_typecast = MC.at_connect_info()
    at = Airtable(at_base, at_table, api_key=at_key)
    an_only = comparison_map["an_only"]
    did_update = False
    if an_only:
        # Source-only records: bulk-insert them all at once.
        did_update = True
        prinl(f"Doing updates for {record_type} records...")
        prinl(f"Uploading {len(an_only)} new record(s)...")
        records = [r.all_fields() for r in an_only.values()]
        at.batch_insert(records, typecast=at_typecast)
    an_newer: Dict[str, ATRecord] = comparison_map["an_newer"]
    if an_newer:
        # Build a record_id -> field-updates map first, then apply one by one.
        update_map: Dict[str, Dict[str, Any]] = {}
        for key, record in an_newer.items():
            updates = record.find_at_field_updates(assume_newer=assume_newer)
            if updates:
                update_map[record.at_match.record_id] = updates
        if update_map:
            if not did_update:
                prinl(f"Doing updates for {record_type} records...")
                did_update = True
            prinl(f"Updating {len(update_map)} existing record(s)...")
            for i, (record_id, updates) in enumerate(update_map.items()):
                at.update(record_id, updates, typecast=at_typecast)
                # Progress ping every 25 records.
                if (i + 1) % 25 == 0:
                    prinlv(f"Processed {i+1}/{len(update_map)}...")
    if not did_update:
        prinlv(f"No updates required for {record_type} records.")
    at_only = comparison_map["at_only"]
    if at_only and delete_unmatched_except:
        field_name = delete_unmatched_except[0]
        field_val = delete_unmatched_except[1]
        # Delete unmatched Airtable records unless the exception field matches.
        record_ids = [
            record.record_id
            for record in at_only.values()
            if record.custom_fields.get(field_name) != field_val
        ]
        if record_ids:
            prinl(
                f"Deleting {len(record_ids)} unmatched Airtable record(s)...")
            at.batch_delete(record_ids)
def upload_packaging_list_to_airtable(app_id, secret_key, record_id, list_url):
    """Append a generated packing-list URL to a ShipmentGroup record.

    Re-raises any failure as ``ValueError`` after printing it.
    """
    try:
        print('##### Uploading packaging list to Airtable started #####')
        shipment_groups = Airtable(app_id, 'ShipmentGroup', secret_key)
        record = shipment_groups.get(record_id)
        # Start from the existing attachment list, if any.
        attachments = record['fields'].get('PackingLists Generated', [])
        attachments.append({'url': list_url})
        shipment_groups.update(record['id'], {
            'PackingLists Generated': attachments
        })
        print('##### Uploading packaging list to Airtable finished #####')
    except Exception as e:
        print('Error uploading packaging list to Airtable: ' + str(e))
        raise ValueError('Error uploading packaging list to Airtable: ' + str(e))
def updateAirTable(key1, val1, key2, val2, tblNme, pDir, recId):
    """Update one field of an Airtable record from within the project dir.

    Args:
        key1, val1: unused here (kept for caller compatibility).
        key2, val2: field name and new value to write.
        tblNme: Airtable table name.
        pDir: project directory containing the login details.
        recId: record id to update.

    Returns:
        ``recId`` on success, or an ``'Update Failed: ...'`` string.
    """
    # Capture the current dir before chdir so it can always be restored.
    currDir = os.getcwd()
    try:
        os.chdir(pDir)
        # Get the AirTable log in details
        key = getLoginDetails()
        # NOTE(review): `baseKey` comes from module scope — confirm it is set.
        arTbl = Airtable(baseKey, tblNme, api_key=key)
        # Turn the Key and new Value into a Dictionary
        dicDat = {key2: val2}
        # Instance call instead of the unbound-class form Airtable.update(...).
        arTbl.update(recId, dicDat)
        return recId
    except Exception as e:
        return 'Update Failed: ' + str(e)
    finally:
        # Restore the working directory on every path (was duplicated in
        # both branches before).
        os.chdir(currDir)
def main():
    """
    The main method. Initializes airtable object. For each record in airtable
    it fetches corresponding cvpartner data and updates the airtable record
    :return: None
    """
    airtable = Airtable(airtable_base_id, airtable_table_name,
                        api_key=airtable_api_key)
    records = airtable.get_all(
        fields=["email", bio_field_name, image_field_name])
    for record in records:
        # checks if a email is present in the record
        if 'email' not in record['fields']:
            continue
        email = record['fields']['email']
        # ignores non @knowit.no emails
        if not email.endswith("@knowit.no"):
            continue
        # does not update if bio and image_url already exists
        if bio_field_name in record['fields'] and image_field_name in record[
                'fields']:
            print("both filled out")
            continue
        cv_bio, image_url = get_cvpartner_data(email)
        # Fill in only the fields that are still missing.
        if cv_bio:
            if bio_field_name not in record['fields']:
                print("updating cv")
                fields = {bio_field_name: cv_bio}
                airtable.update(record['id'], fields)
        if image_url:
            if image_field_name not in record['fields']:
                print("updating image url")
                # The email's local part doubles as the attachment filename.
                filename = email.split('@', 1)[0]
                image_object = make_attachment_obj(image_url, filename)
                fields = {image_field_name: image_object}
                airtable.update(record['id'], fields)
def comparte_gana_sms():
    """Clone an existing protocol record for a referral and SMS the new link.

    Reads the referrer's form data, copies the referenced protocol record
    (minus system/attachment fields), swaps in the new customer's email,
    creates a fresh short link and texts it to the given phone number.
    """
    domain_url = os.getenv('DOMAIN')
    air_base = os.getenv('AIR_TABLE_BASE')
    air_api_key = os.getenv('AIR_TABLE_API')
    air_table_name = os.getenv('AIR_PROTOCOLO_TABLE_NAME')
    try:
        #at = Airtable(air_base, air_table_name, api_key=air_api_key)
        email = request.form.get('Email')
        iban = request.form.get('IBAN')
        name = request.form.get('Name')
        codigo_pais_movil = request.form.get('Código País')
        movil = request.form.get('Número de Teléfono Móvil')
        # Presumably strips a wrapping like "['recXXXX']" from the posted
        # value — confirm against the form payload.
        protocolo = request.form.get('Protocolo')[2:-2]
        at = Airtable(air_base, air_table_name, api_key=air_api_key)
        lookup_record = at.search('airtableID', protocolo)
        # Drop fields that must not be copied into the cloned record.
        if 'imagen_protocolo' in lookup_record[0]['fields']:
            del lookup_record[0]['fields']['imagen_protocolo']
        del lookup_record[0]['fields']['date_created']
        del lookup_record[0]['fields']['date_modified']
        if 'pedidos_pagados_clientes' in lookup_record[0]['fields']:
            del lookup_record[0]['fields']['pedidos_pagados_clientes']
        del lookup_record[0]['fields']['airtableID']
        del lookup_record[0]['fields']['short_url']
        # Rewrite the stored query string so it logs in as the new email.
        base_encoded_email = urlencode({"email_login": lookup_record[0]['fields']['email_login']})
        lookup_record[0]['fields']['email_login'] = email
        lookup_record[0]['fields']['query_string'] = lookup_record[0]['fields']['query_string'].replace(base_encoded_email, urlencode({"email_login": email}))
        new_record = at.insert(lookup_record[0]['fields'])
        short_url = {'short_url': new_record['id'].split('rec')[1], 'airtableID': new_record['id'], 'visits': 0}
        at.update(new_record['id'], short_url)
        mensaje = "Este es tu link para compartir el protocolo {} de Prescriptum: {}/{}/".format(lookup_record[0]['fields']['nombre_protocolo'], domain_url, short_url['short_url'])
        send_sms(codigo_pais_movil+movil, mensaje, "Prescriptum")
        return request.args
    except Exception as e:
        return jsonify(error=str(e)), 403
def sync(airtable: Airtable, local_books, logger):
    """One-way sync between the Airtable 'Active' view and local books.

    - returned: in Airtable but not local -> stamp ``ReturnedAt``
    - new: local but not in Airtable -> insert with ``BorrowedAt``
    - still: in both -> refresh the Airtable copy from the local record
    """
    logger.debug('Fetching Airtable books...')
    airtable_books = airtable.get_all(view='Active')
    logger.debug('Airtable books fetched')
    # Reuse the already-fetched list instead of a second get_all() network
    # call (the original fetched the view twice).
    airtable_book_ids = [b['fields']['ID'] for b in airtable_books]
    local_book_ids = [b.id for b in local_books]

    # --- RETURN ---
    # Books that are in airtable but not in local
    returned_books = find_airtable(diff(airtable_book_ids, local_book_ids),
                                   airtable_books)
    for returned_book in returned_books:
        airtable.update(returned_book['id'], {'ReturnedAt': utc_now()})
    if returned_books:
        logger.info(f'User has {len(returned_books)} returned books')

    # --- NEW BOOKS ---
    # Books that are in local but not in airtable
    new_books = find_local(diff(local_book_ids, airtable_book_ids),
                           local_books)
    for new_book in new_books:
        airtable_book = airtable_book_from_local_book(new_book)
        airtable_book['BorrowedAt'] = utc_now()
        airtable.insert(airtable_book)
    if new_books:
        logger.info(f'User has {len(new_books)} new books')

    # --- STILL BOOKS ---
    # Books that are in local and in airtable
    still_book_ids = intersect(local_book_ids, airtable_book_ids)
    still_airtable_books = find_airtable(still_book_ids, airtable_books)
    still_local_books = find_local(still_book_ids, local_books)
    for still_airtable_book in still_airtable_books:
        corresponding_still_local_book = find_local(
            [still_airtable_book['fields']['ID']], still_local_books)[0]
        airtable.update(
            still_airtable_book['id'],
            airtable_book_from_local_book(corresponding_still_local_book))
    if still_book_ids:
        logger.info(f'User has {len(still_book_ids)} still books')
def main():
    """Sync this machine's config into the Computers table.

    Looks up the machine by serial number; inserts a new row when absent,
    otherwise updates every matching row with the current config values.
    """
    serial = getSerial()
    # NOTE(review): base id and API key are hard-coded in source control —
    # rotate this key and load it from the environment instead.
    airtable = Airtable('app9og4P1Z4iet5fT', 'Computers', 'keybFjYyk9LuWpxNw')
    computers = []
    records = airtable.search('SN', serial)
    print(records)
    cfgDict = cfgToDict()
    # print(cfgDict)
    if len(records) == 0:
        # Unknown machine: create a fresh record from the current config.
        c = Computer(serial)
        airtable.insert(createFields(c, cfgDict))
    for record in records:
        curr = Computer(serial, record)
        computers.append(curr)
    for c in computers:
        # assumes Computer keeps its Airtable record as `comp` — TODO confirm
        airtable.update(c.comp.get('id'), createFields(c, cfgDict))
def updateAirtable():
    """Fill in the three nearest active centres for every person who wants
    a course, based on the first postcode token of their entry."""
    airtable = Airtable(
        os.environ["AIRTABLE_BASE_KEY"],
        "People",
        api_key=os.environ["AIRTABLE_API_KEY"],
    )
    people = getPeopleWantingCourse()
    centres = getCentres()
    for person in people:
        postcode = person.split()[0]
        nearest = postcodenearest(postcode, centres)
        updates = {}
        # nearest[k] is a (distance, centre-index) pair, closest first.
        for rank, label in enumerate(("first", "second", "third")):
            dist, centre_idx = nearest[rank]
            updates[f"{label}NearestActiveCentreDist"] = dist
            updates[f"{label}NearestActiveCentre"] = [centres[centre_idx]]
        record = airtable.match("Record Id", people[person])
        airtable.update(record["id"], updates)
        print("Updated row" + people[person])
def post_video_ids(videos):
    """Attach matched YouTube video ids to each session-hosting row.

    For every submission slot that has no ``videoid_<n>`` yet, tries to
    find a video titled after the row's webinar number and slot index.
    """
    table = Airtable(os.getenv('AT_APP_KEY_ORANGE'), 'Session hosting', os.getenv('AT_API_KEY'))
    for row in table.get_all():
        fields = row['fields']
        pending = {}
        submissions = fields['submission_ids'].split(', ')
        # Webinar number: ten digits following '/j/9' in the zoom link.
        webinar = '9' + fields['zoom_link'].split('/j/9')[-1][:10]
        for idx in range(len(submissions)):
            key = f'videoid_{idx}'
            if key in fields:
                print(f"There is already an id for this one :")
                print(fields[key])
            else:
                found = find_video_by_title(f'{webinar} {idx} out', videos)
                if found is not None:
                    pending[key] = found
        if pending:
            print('Updating')
            print(pending)
            table.update(row['id'], pending)
def save_regex_to_airtable(row_id, new_regex, exclude_regex,
                           table_name=settings.AIRTABLE_TAGS_TABLE_NAME):
    """Persist a tag's regular expressions back to Airtable.

    Returns:
        True when the row was updated; False when the regex is empty or
        still the default; None (bare return, kept for backward
        compatibility with truthiness checks) when saving is disabled.
    """
    if not settings.AIRTABLE_SAVE:
        return
    if not new_regex or new_regex == settings.DEFAULT_REGEX:
        return False
    airtable = Airtable(
        settings.AIRTABLE_BASE_ID,
        table_name,
        settings.AIRTABLE_API_KEY,
    )
    # The update result was previously bound to an unused local; drop it.
    airtable.update(
        row_id, {
            "Regular expression": new_regex,
            "Exclude regular expression": exclude_regex,
        })
    return True
def update_in_airtable(rec_id, table_name, data):
    """
    wrapper function to update a record in airtable
    :param rec_id: record to update
    :param table_name: table to update in
    :param data: data to add/adjust
    :return: on success returns updated entry, on error returns error message
    """
    logger.info('## Attempting to update rec_id {} in table {} with data {}'.format(rec_id, table_name, data))
    table = Airtable(intake_form_constants.AIRTABLE_BASE_KEY, table_name, api_key=os.environ['AIRTABLE_KEY'])
    updated_rec = table.update(rec_id, data)
    # A well-formed Airtable response always carries an 'id'.
    if 'id' in updated_rec:
        logger.info('## Successfully updated record {} in table {}'.format(rec_id, table_name))
        return updated_rec
    logger.warning(intake_form_constants.UNABLE_TO_UPDATE_RECORD.format(rec_id, updated_rec))
    return {
        'statusCode': 400,
        'body': json.dumps({
            'message': intake_form_constants.UNABLE_TO_UPDATE_RECORD.format(rec_id, updated_rec),
            'received_date': data
        })
    }
def add_part(part):
    """
    Add a part to the Inventory table in airtable
    """
    # Create part_url
    part_url = f"https://{Site.objects.get_current().domain}/bom/part/{part.id}"

    # Collect unique manufacturer-part record ids.
    mfg_parts = []
    for mfg_part in part.manufacturer_parts():
        rec_id = add_mfg_part(mfg_part)
        if rec_id not in mfg_parts:
            mfg_parts.append(rec_id)

    # Collect unique supplier record ids.
    suppliers = []
    for seller_part in part.seller_parts():
        rec_id = get_supplier_id(seller_part.seller.name)
        if rec_id not in suppliers:
            suppliers.append(rec_id)

    part_info = {
        "Part Number": part.full_part_number(),
        "Description": part.description(),
        "Manufacturer Part": mfg_parts,
        "Supplier": suppliers,
        "Django-BOM URL": part_url
    }

    inventory = Airtable(OPERATIONS_ID, "Inventory")
    # Upsert: update the existing row when the part number is known.
    existing = inventory.search("Part Number", part.full_part_number())
    if existing:
        result = inventory.update(existing[0]['id'], part_info)
    else:
        result = inventory.insert(part_info)
    return result['id']
def update_airtable(df_links, airtable_base_key, airtable_table_name):
    """Upsert each link row from ``df_links`` into Airtable.

    New links are inserted with initial counters; existing links get their
    running score/seen statistics recomputed and updated in place.

    :return: tuple ``(inserted_count, updated_count)``
    """
    logging.info('... send to airtable ...')
    # connect to airtable
    airtable = Airtable(airtable_base_key, airtable_table_name,
                        api_key=os.environ['AIRTABLE_KEY'])
    inserted_count = 0
    updated_count = 0
    for i, row in df_links.iterrows():
        airtable_records = airtable.search('link', row['link'])
        if len(airtable_records) == 0:
            logging.info('... new record ({})...'.format(row['link']))
            # NOTE(review): *_added are seeded from row['last_seen'] while
            # *_seen come from row['created_utc'] — confirm that is intended.
            record_fields = {
                'link': row['link'],
                'domain': row['domain'],
                'first_seen': row['created_utc'],
                'last_seen': row['created_utc'],
                'first_added': row['last_seen'],
                'last_added': row['last_seen'],
                'times_seen': 1,
                'score_avg': row['score'],
                'score_min': row['score'],
                'score_max': row['score'],
                'score_sum': row['score'],
                'title_list': row['title'],
                'last_title': row['title'],
                'permalink_list': row['permalink'],
                'last_permalink': row['permalink'],
            }
            airtable.insert(record_fields)
            logging.info('... inserted ...')
            inserted_count += 1
        elif len(airtable_records) > 0:
            for record in airtable_records:
                logging.info('... update record ({})...'.format(row['link']))
                record_id = record['id']
                old = airtable.get(record_id)['fields']
                # Recompute running statistics from the stored aggregates.
                new_score_avg = (old['score_sum'] + row['score']) / (old['times_seen'] + 1)
                new_score_min = old['score_min'] if old['score_min'] <= row[
                    'score'] else row['score']
                new_score_max = old['score_max'] if old['score_max'] >= row[
                    'score'] else row['score']
                new_score_sum = old['score_sum'] + row['score']
                new_times_seen = old['times_seen'] + 1
                # Append title/permalink only when not already recorded.
                if row['title'] not in old['title_list']:
                    new_title_list = '{}|{}'.format(old['title_list'], row['title'])
                else:
                    new_title_list = old['title_list']
                if row['permalink'] not in old['permalink_list']:
                    new_permalink_list = '{}|{}'.format(
                        old['permalink_list'], row['permalink'])
                else:
                    new_permalink_list = old['permalink_list']
                record_fields = {
                    'link': old['link'],
                    'domain': old.get('domain', 'N/A'),
                    'first_seen': old['first_seen'],
                    'last_seen': row['created_utc'],
                    'first_added': old['first_added'],
                    'last_added': row['last_seen'],
                    'times_seen': new_times_seen,
                    'score_avg': new_score_avg,
                    'score_min': new_score_min,
                    'score_max': new_score_max,
                    'score_sum': new_score_sum,
                    'title_list': new_title_list,
                    'last_title': row['title'],
                    'permalink_list': new_permalink_list,
                    'last_permalink': row['permalink'],
                }
                airtable.update(record_id, record_fields)
                logging.info('... updated ...')
                updated_count += 1
    logging.info('... inserted_count = {} ...'.format(inserted_count))
    logging.info('... updated_count = {} ...'.format(updated_count))
    return inserted_count, updated_count
def run():
    """Export coach demographics plus one row per event attended, computing
    each coach's 'On Fire Status' along the way and writing it back to the
    Coaches table before appending everything to Google Sheets.
    """
    base_key = 'app0bcm2GEEGdlC7K'
    coaches_table_name = 'Coaches'
    events_table_name = 'Events'
    # NOTE(review): the Airtable API key is hard-coded in source control —
    # rotate it and load from the environment.
    coaches_table = Airtable(base_key, coaches_table_name, api_key='keyiLfzA6XNxVuMw1')
    events_table = Airtable(base_key, events_table_name, api_key='keyiLfzA6XNxVuMw1')
    all_events = events_table.get_all()
    all_coaches = coaches_table.get_all()
    final_data = []
    print('Getting Headers')
    #Set headers dynamically so as columns are added the demographics update
    headers = sorted({key for coach in all_coaches for key in coach['fields'].keys()}) \
        + ['Program','Event Type','Event Date', 'Duration','Event Role']
    #Get rid of event columns in coach table
    for column in [
            'Chair', 'Head TF', 'TF', 'Mentor', 'Presenter', 'Facilitator',
            'Participant'
    ]:
        try:
            headers.remove(column)
        except:  # NOTE(review): bare except — only KeyError/ValueError expected
            pass
    #Dynamically find the most recent survey column to compute on fire status later
    max_survey_column = sorted(
        [h if h.find("DWN Survey") != -1 else '' for h in headers])[-1]
    on_fire_index = headers.index('On Fire Status')
    final_data.append(headers)
    print('Starting to run through coaches')
    for coach_dict in all_coaches:
        #Create a container for coach rows while we calculate on fire status
        coach_new_rows = []
        coach = coach_dict['fields']
        #On fire status calculation
        #Calculate dynamically as you loop through
        on_fire_status = 0
        major_event_count = 0
        took_DWN_survey = False
        if max_survey_column in coach and coach[max_survey_column] == "Yes":
            took_DWN_survey = True
        #Set up demographics row (everything except the 5 event columns)
        demographics = [coach[field] if field in coach else ''
                        for field in headers[:len(headers)-5]]
        #Gather all events in a dictionary to loop through
        coach_events = {}
        print('Current coach: {0} {1}'.format(coach['First'].encode(),
                                              coach['Last'].encode()))
        #Use this to see if a coach hasn't been to any events. If that is the case
        #we need to add a row for them to all data anyway so that we can access their
        #info
        distinct_event_count = 0
        if 'Chair' in coach:
            coach_events['Chair'] = coach['Chair']
            distinct_event_count += 1
        if 'Head TF' in coach:
            coach_events['head_tf'] = coach['Head TF']
            distinct_event_count += 1
        if 'TF' in coach:
            coach_events['TF'] = coach['TF']
            distinct_event_count += 1
        if 'Mentor' in coach:
            coach_events['Mentor'] = coach['Mentor']
            distinct_event_count += 1
        if 'Presenter' in coach:
            coach_events['Presenter'] = coach['Presenter']
            distinct_event_count += 1
        if 'Facilitator' in coach:
            coach_events['Facilitator'] = coach['Facilitator']
            distinct_event_count += 1
        if 'Participant' in coach:
            coach_events['Participant'] = coach['Participant']
            distinct_event_count += 1
        #Coach hasn't been to any events. Add one row to final_data for them
        #anyway so they are included in the data
        if distinct_event_count == 0:
            #Calculate on fire status, update airtable
            on_fire_status = 1 if took_DWN_survey else 0
            fields = {'On Fire Status': str(on_fire_status)}
            coaches_table.update(coach_dict['id'], fields)
            #and update demographics row
            new_row = demographics + ['', '', '', '', '']
            new_row[on_fire_index] = on_fire_status
            final_data.append(new_row)
            continue
        #Loop through events, get info about event from events table
        today = date.today()
        for event_role in coach_events:
            event_list = coach_events[event_role]
            for event_id in event_list:
                event_record = events_table.get(event_id)['fields']
                event_type = event_record['Event Type']
                event_date = event_record['Date']
                event_program = event_record['Program']
                #Check if the event and role count as a major event
                #Use the event date to compute whether it was in the last three years
                if event_program not in ["DWN", "DWCC"
                                         ] and event_role != "Participant":
                    date_split = event_date.split('-')
                    event_date_object = date(int(date_split[0]),
                                             int(date_split[1]),
                                             int(date_split[2].split('T')[0]))
                    date_diff = today - event_date_object
                    if date_diff.days / 365 <= 3.0:
                        major_event_count += 1
                duration = compute_duration(event_program, event_type,
                                            event_role)
                #Only add independent columns to coach new rows until we have calculated On Fire
                new_row = [event_program, event_type, event_date,
                           duration, event_role]
                coach_new_rows.append(new_row)
        #Calculate on fire status, update airtable, update demos
        on_fire_status = compute_on_fire(took_DWN_survey, major_event_count)
        fields = {'On Fire Status': str(on_fire_status)}
        coaches_table.update(coach_dict['id'], fields)
        demographics[on_fire_index] = on_fire_status
        #When we're done with all one coach, add demographics and add all their rows
        final_data += [demographics + row for row in coach_new_rows]
        #Get rid of after testing
        #break
    appendToSheets(final_data)
    #Local solution
    '''with open('final_data.csv', 'w', newline='') as file:
print(f"Record {fname} is up to date!") except KeyError: pass if not student_email: continue for i in processed_results: if i[2]: i[2] = i[2].lower() if student_email.lower() == i[2]: try: # SE airtable print('Updating {}'.format(record['fields']['Name'])) except KeyError: # UX airtable print('Updating {}'.format(record['fields']['Name'])) a.update(record['id'], {'Slack ID': i[3]}) i.append('PROCESSED') unprocessed = [u for u in processed_results if len(u) == 4] processed = [u for u in processed_results if len(u) == 5] print("Unprocessed Slack IDs: ", len(unprocessed)) print("Number of students in Airtable: ", len(students)) print("Updated Slack IDs: ", len(processed)) students = a.get_all() no_slack_id = [u for u in students if u['fields'].get('Slack ID') == None] if len(no_slack_id) == 0: print("Everyone present and accounted for! All student records in Airtable" " have a Slack ID.") else: print("Found {} students in Airtable with no Slack ID.".format(
class Table:
    """
    Represents an Airbase table.

    Rows are cached in ``self.data`` at construction and kept in sync by
    ``insert``; ``link`` resolves relation ids into row objects using the
    base's schema relations.
    """

    def __init__(self, base, table_name, relations):
        self.base = base
        self.table_name = table_name
        self.relations = relations
        self.airtable = Airtable(base.id, table_name, base.api_key)
        self.load()

    def load(self):
        """(Re)fetch every row from Airtable into the local cache."""
        self.data = self.airtable.get_all()

    def insert(self, row):
        """Insert a row remotely, cache it locally and re-link relations."""
        result = self.airtable.insert(row)
        self.data.append(result)
        self.link()
        return result

    def update(self, id, row):
        """Update a remote record; the local cache is not refreshed."""
        return self.airtable.update(id, row)

    def get(self, id):
        """Return the cached row with this record id, or None."""
        for row in self.data:
            if row['id'] == id:
                return row
        return None

    def find(self, fields, first=False):
        """Return cached rows whose fields match ``fields``.

        A key missing from a row only matches when the wanted value is
        None. With ``first=True``, returns the first match or None.
        """
        results = []
        for row in self.data:
            match = True
            for k, v in fields.items():
                if k not in row['fields']:
                    if v is not None:
                        match = False
                elif row['fields'][k] != v:
                    match = False
            if match:
                results.append(row)
        if first:
            if len(results) == 0:
                return None
            else:
                return results[0]
        return results

    def get_or_insert(self, fields, extra=None):
        """
        Get or insert and get the first record that matches the fields.
        When inserting you can add additional things using the extras
        value which should be a dictionary of names and values to set in
        addition to the supplied fields.
        """
        r = self.find(fields, first=True)
        if not r:
            # Copy before merging so the caller's dict is not mutated
            # (previously `f = fields` aliased and modified it in place).
            f = dict(fields)
            if extra:
                f.update(extra)
            r = self.insert(f)
        return r

    def link(self):
        """
        Use the table's schema relations to turn IDs into objects.
        """
        for row in self.data:
            for prop, other_table_name in self.relations.items():
                other_table = self.base.tables[other_table_name]
                if prop in row['fields']:
                    value = row['fields'][prop]
                    if type(value) == list:
                        new_value = []
                        for v in value:
                            new_value.append(other_table.get(v))
                        row['fields'][prop] = new_value
                    else:
                        row['fields'][prop] = other_table.get(value)

    def wipe(self):
        """
        Remove all rows from the table.
        """
        self.load()
        ids = [row['id'] for row in self.data]
        self.airtable.batch_delete(ids)
from airtable import Airtable

# Smoke test exercising the wrapper's CRUD surface against a throwaway
# base/table. Requires AIRTABLE_API_KEY in the environment.
test_base = "appaPqizdsNHDvlEm"
test_table = "table"

airtable = Airtable(test_base, test_table, api_key=os.environ["AIRTABLE_API_KEY"])

# Insert
rec = airtable.insert({"text": "A", "number": 1, "boolean": True})

# Get
assert airtable.get(rec["id"])

# Update
rv = airtable.update(rec["id"], {"text": "B"})
assert rv["fields"]["text"] == "B"

# Replace
rv = airtable.replace(rec["id"], {"text": "C"})
assert rv["fields"]["text"] == "C"

# Get all
assert airtable.get_all()

# Delete
assert airtable.delete(rec["id"])

# Batch Insert
records = airtable.batch_insert([{
    "text": "A",
def update_user_on_server(record):
    """Persist a user's fields back to the Users table and return the
    updated record as Airtable reports it."""
    users = Airtable(BASE_ID, USERS_TABLE_NAME, api_key=API_KEY)
    updated = users.update(record['id'], record['fields'])
    logging.info('update_user_on_server')
    return updated
class Airtable(object):
    """
    `Args:`
        base_key: str
            The key of the Airtable base that you will interact with.
        table_name: str
            The name of the table in the base. The table name is the
            equivalent of the sheet name in Excel or GoogleDocs.
        api_key: str
            The Airtable provided api key. Not required if
            ``AIRTABLE_API_KEY`` env variable set.
    """

    def __init__(self, base_key, table_name, api_key=None):
        self.api_key = check_env.check('AIRTABLE_API_KEY', api_key)
        self.at = AT(base_key, table_name, self.api_key)

    def get_record(self, record_id):
        """
        Returns a single record.

        `Args:`
            record_id: str
                The Airtable record id
        `Returns:`
            A dictionary of the record
        """
        return self.at.get(record_id)

    def get_records(self, fields=None, max_records=None, view=None,
                    formula=None, sort=None):
        """
        `Args:`
            fields: str or lst
                Only return specified column or list of columns. The column
                name is case sensitive.
            max_records: int
                The maximum total number of records that will be returned.
            view: str
                If set, only the records in that view will be returned. The
                records will be sorted according to the order of the view.
            formula: str
                The formula will be evaluated for each record, and if the
                result is not 0, false, "", NaN, [], or #Error! the record
                will be included in the response. If combined with view,
                only records in that view which satisfy the formula will be
                returned. For example, to only include records where
                ``COLUMN_A`` isn't empty, pass in: ``"NOT({COLUMN_A}='')"``
                For more information see
                `Airtable Docs on formulas. <https://airtable.com/api>`_

                Usage - Text Column is not empty:
                ``airtable.get_all(formula="NOT({COLUMN_A}='')")``

                Usage - Text Column contains:
                ``airtable.get_all(formula="FIND('SomeSubText', {COLUMN_STR})=1")``
            sort: str or lst
                Specifies how the records will be ordered. If you set the
                view parameter, the returned records in that view will be
                sorted by these fields. If sorting by multiple columns,
                column names can be passed as a list. Sorting Direction is
                ascending by default, but can be reversed by prefixing the
                column name with a minus sign -. Example usage:
                ``airtable.get(sort=['ColumnA', '-ColumnB'])``
        `Returns:`
            Parsons Table
                See :ref:`parsons-table` for output options.
        """
        # Raises an error if sort is None type. Thus, only adding if populated.
        # FIX: `view` was accepted and documented but never passed through to
        # get_all, so view filtering was silently ignored.
        kwargs = {'fields': fields, 'max_records': max_records,
                  'view': view, 'formula': formula}
        if sort:
            kwargs['sort'] = sort

        tbl = Table(self.at.get_all(**kwargs))

        return tbl.unpack_dict(column='fields', prepend=False)

    def insert_record(self, row):
        """
        Insert a single record into an Airtable.

        `Args:`
            row: dict
                Fields to insert. Must be dictionary with Column names as Key.
        `Returns:`
            Dictionary of inserted row
        """
        resp = self.at.insert(row)
        logger.info('Record inserted')
        return resp

    def insert_records(self, table):
        """
        Insert multiple records into an Airtable. The columns in your
        Parsons table must exist in the Airtable. The method will attempt
        to map based on column name, so the order of the columns is
        irrelevant.

        `Args:`
            table: A Parsons Table
                Insert a Parsons table
        `Returns:`
            List of dictionaries of inserted rows
        """
        resp = self.at.batch_insert(table)
        logger.info(f'{table.num_rows} records inserted.')
        return resp

    def update_record(self, record_id, fields, typecast=False):
        """
        Updates a record by its record id. Only Fields passed are updated,
        the rest are left as is.

        `Args:`
            record_id: str
                The Airtable record id
            fields: dict
                Fields to insert. Must be dictionary with Column names as Key.
            typecast: boolean
                Automatic data conversion from string values.
        `Returns:`
            ``None``
        """
        resp = self.at.update(record_id, fields, typecast=typecast)
        logger.info(f'{record_id} updated')
        return resp
def update_request(record): table = Airtable(BASE_ID, REQUESTS_TABLE_NAME, api_key=API_KEY) request_elem = table.update(record['id'], record['fields']) logging.info('update_request') return request_elem
label = label.rotate(90, expand=1) # Save image to temp directory label_path = c.TEMP_DIR + '/label-%s.png' % (int(time.time())) label.save(label_path) # Send print job to Brother printer # The if-else awaits and catches errors in the asynchronous system call if os.system( 'brother_ql --backend pyusb --printer %s --model %s print -l %s %s' % (c.PRINTER_ADDR, c.PRINTER_MODEL, c.PRINTER_LABEL_ROLL_SIZE, label_path)) == 0: continue else: sys.exit('ERROR: Could not print label.') print('Labels sent to Brother printer') # Update records in Airtable as printed for id in record_ids: airtable.update(id, {c.AIRTABLE_PRINTED_COL: True}) print('Airtable: Printed rows updated to "' + c.AIRTABLE_PRINTED_COL + '=true"') # Delete temporary directory os.system('rm -Rf ' + c.TEMP_DIR) print('Temporary directory deleted.') print('Success! Print batch successfully completed')
'domain': old.get('domain', 'N/A'), 'first_seen': old['first_seen'], 'last_seen': row['created_utc'], 'first_added': old['first_added'], 'last_added': row['last_seen'], 'times_seen': new_times_seen, 'score_avg': new_score_avg, 'score_min': new_score_min, 'score_max': new_score_max, 'score_sum': new_score_sum, 'title_list': new_title_list, 'last_title': row['title'], 'permalink_list': new_permalink_list, 'last_permalink': row['permalink'], } airtable.update(record_id, record_fields) print('... updated ...') updated_count += 1 print('... inserted_count = {} ...'.format(inserted_count)) print('... updated_count = {} ...'.format(updated_count)) #%% row.get('domain', 'UNKNOWN') #%%
class AirtableClient:
    """Wrapper around one Airtable table that deserializes raw rows into the
    table spec's model class and drives status-change polling.

    When ``read_only`` is true, :meth:`update` logs and skips the write, so
    the whole client can be exercised as a dry run.
    """

    def __init__(self, conf: AirtableConfig, airtable_name, table_spec, read_only):
        # read_only turns update() into a logged no-op (see update below).
        self.read_only = read_only
        self.client = Airtable(
            conf.base_id,
            airtable_name,
            conf.api_key,
        )
        self.table_spec = table_spec

    def get_all(self, formula=None):
        """Lazily yield every record (optionally filtered by an Airtable
        formula), converted through ``table_spec.model_cls.from_airtable``.

        Returns a generator — nothing is fetched until iterated.
        """
        return (self.table_spec.model_cls.from_airtable(raw)
                for page in self.client.get_iter(formula=formula)
                for raw in page)

    def get_all_with_new_status(self):
        """Yield records whose Status differs from the last status this
        poller persisted; records with a blank Status are excluded."""
        # TODO : sort by creation time asc
        # NOTE here is a formula for querying on a blank status
        # TODO : get rid of this if we don't need it
        # "IF("
        # "{{Status}} = BLANK(),"
        # # If blank...
        # "{{_meta_last_seen_status}} != \"{blank_sentinel}\","
        # # If not blank...
        # "{{Status}} != {{_meta_last_seen_status}}"
        # ")"
        return self.get_all(formula=("AND({Status} != BLANK(), "
                                     "{Status} != {_meta_last_seen_status})"))

    def update(self, model):
        """Write the model's fields back to Airtable; no-op in read-only mode."""
        if self.read_only:
            logger.info(f"Not updating {model.id} in read-only mode")
            return
        self.client.update(
            model.id,
            model.to_airtable()["fields"],
        )

    # TODO : handle missing statuses (e.g. airtable field was updated)
    def poll_table(self, conf, max_num_retries=DEFAULT_POLL_TABLE_MAX_NUM_RETRIES):
        """Run the registered callback for every record with a new status.

        Each callback is retried up to ``max_num_retries`` times. Returns
        True iff every applicable callback eventually succeeded.
        """
        logger.info("Polling table: {}".format(self.table_spec.name))
        success = True
        # Instantiate one callback per handled status up front; each cb in
        # status_to_cb is a factory taking the shared conf.
        callbacks = {
            status: cb(conf)
            for status, cb in self.table_spec.status_to_cb.items()
        }
        for record in self.get_all_with_new_status():
            # Blank statuses are filtered server-side by the formula above.
            assert record.status is not None
            logger.info(
                f"Processing '{self.table_spec.name}' record: {record}")
            try:
                original_id = record.id
                original_status = record.status
                cb = callbacks.get(record.status)
                if cb is None:
                    # No handler for this status. The `finally` below still
                    # runs on `continue`, so _meta_last_seen_status is synced
                    # and this record is not re-polled next cycle.
                    logger.info("No callback for record with status "
                                f"'{record.status}': {record.id}")
                    continue
                for num_retries in range(max_num_retries):
                    try:
                        cb(record)  # noqa: F841
                        break
                    except Exception:
                        logger.exception(
                            f"Callback '{cb.__qualname__}' for record failed "
                            f"(num retries {num_retries}): {record.id}")
                else:
                    # for/else: every retry raised — record the failure but
                    # keep processing the remaining records.
                    logger.error(
                        f"Callback '{cb.__qualname__}' for record did not "
                        f"succeed: {record.id}")
                    success = False
                # Callbacks may mutate fields but must never change identity.
                if original_id != record.id:
                    raise ValueError(
                        f"Callback '{cb.__qualname__}' modified the ID of the "
                        f"record: original={original_id}, new={record.id}")
            finally:
                # Runs on success, `continue`, and raise alike: persist the
                # observed status so the record isn't reprocessed endlessly.
                record.meta_last_seen_status = original_status
                # Update the record in airtable to reflect local modifications
                self.update(record)
        return success