Ejemplo n.º 1
0
class Api:
    """Thin wrapper over two Airtable tables: a per-principal state table
    and a timesheet table keyed by day."""

    def __init__(self, *, config=None):
        """Build the Airtable clients.

        Expects a mapping with keys: 'airtable_api_key',
        'airtable_base_key', 'table_name_state', 'table_name_timesheet',
        'principal' and 'lunch_break'.
        """
        api_key = config['airtable_api_key']
        base_key = config['airtable_base_key']
        table_name_state = config['table_name_state']
        table_name_timesheet = config['table_name_timesheet']
        # Fix: attribute was misspelled 'pricipal' throughout the class.
        self.principal = config['principal']
        self.lunch_break = config['lunch_break']
        self.state = Airtable(base_key, table_name_state, api_key)
        self.timesheet = Airtable(base_key, table_name_timesheet, api_key)

    def get_state(self):
        """Return the current state name stored for this principal."""
        record = self.state.match('state_principal', self.principal)
        return record['fields']['state_name']

    def set_state(self, *, state_name=None):
        """Overwrite the state name stored for this principal."""
        record = {'state_name': state_name}
        self.state.update_by_field('state_principal', self.principal, record)

    def clock_in(self, *, day=None, time=None):
        """Insert a new timesheet row for *day* with the clock-in time and
        the configured lunch break."""
        record = {
            'record_day': day,
            'clock_in_time': time,
            'lunch_break': self.lunch_break
        }
        self.timesheet.insert(record)

    def clock_out(self, *, day=None, time=None):
        """Record the clock-out time on the existing row for *day*."""
        record = {'clock_out_time': time}
        self.timesheet.update_by_field('record_day', day, record)
Ejemplo n.º 2
0
def insert_level(agent_userid, level):
    """Set a member's classification level if not already set.

    Returns True when the level was written; False when the member does
    not exist or already has a 'Classification level' (existing values
    are never overwritten).
    """
    airtable = Airtable(BASE_KEY, 'member', api_key=API_KEY)
    record = airtable.match('User id', agent_userid)

    # match() returns an empty dict when nothing matches; the old code
    # crashed with a KeyError on record['fields'] for unknown members.
    if not record:
        return False

    if 'Classification level' in record.get('fields', {}):
        return False

    airtable.update_by_field('User id', agent_userid,
                             {'Classification level': level})
    return True
Ejemplo n.º 3
0
def exitevent(recordid, event):
    """Remove *recordid* from an event's attendee list.

    Returns True on success; False when the event is unknown, has no
    attendees, or the record is not among them.
    """
    airtable = Airtable(BASE_KEY, 'events', api_key=API_KEY)

    eventrecord = airtable.match('Event', event)

    # match() returns an empty dict for an unknown event; the old code
    # crashed with a KeyError on eventrecord['fields'] in that case.
    attendees = eventrecord.get('fields', {}).get('Attendee')
    if not attendees or recordid not in attendees:
        return False

    attendees.remove(recordid)
    airtable.update_by_field('Event', event, {'Attendee': attendees})
    return True
Ejemplo n.º 4
0
def joinevent(recordid, event):
    """Add *recordid* to an event's attendee list.

    Returns False if the record is already attending; otherwise the
    attendee count after the update.
    """
    airtable = Airtable(BASE_KEY, 'events', api_key=API_KEY)

    eventrecord = airtable.match('Event', event)

    # match() returns an empty dict for an unknown event; start from a
    # fresh list in that case (the old code crashed with a KeyError).
    attendees = eventrecord.get('fields', {}).get('Attendee', [])
    if recordid in attendees:
        return False
    attendees.append(recordid)

    airtable.update_by_field('Event', event, {'Attendee': attendees})
    return len(attendees)
Ejemplo n.º 5
0
def update_sample_status(request):
    """Main entry point for the Cloud Function.

    Accepts a PUT with JSON {project, sample, status}, looks up the
    project's Airtable credentials in Secret Manager, and sets the
    sample row's Status field. Returns 204 on success, or an HTTP error
    via abort() otherwise.
    """
    if request.method != 'PUT':
        return abort(405)

    # Validate the JSON payload.
    payload = request.get_json()
    project = payload.get('project')
    sample = payload.get('sample')
    status = payload.get('status')
    if not (project and sample and status):
        return abort(400)

    logging.info(f'Processing request: {payload}')

    # Load the per-project configuration from Secret Manager.
    secret_name = (
        f"projects/{os.getenv('GCP_PROJECT')}/secrets"
        '/update-sample-status-config/versions/latest')
    secret = secret_manager.access_secret_version(request={
        'name': secret_name
    })
    config = json.loads(secret.payload.data.decode('UTF-8'))

    project_config = config.get(project)
    if not project_config:
        return abort(404)

    # Pull the Airtable credentials for this project.
    base_key = project_config.get('baseKey')
    table_name = project_config.get('tableName')
    api_key = project_config.get('apiKey')
    if not (base_key and table_name and api_key):
        return abort(500)

    # Update the entry.
    airtable = Airtable(base_key, table_name, api_key)
    try:
        response = airtable.update_by_field('Sample ID', sample,
                                            {'Status': status})
    except HTTPError as err:  # Invalid status enum.
        logging.error(err)
        return abort(400)

    if not response:  # Sample not found.
        return abort(404)

    return ('', 204)
Ejemplo n.º 6
0
def write_to_airtable():
    """Sync CBS series data into Airtable: rows whose TimePeriod already
    exists are updated, new periods are inserted."""
    for item in time_list:
        airtable = Airtable(base_key, item["table_name"], api_key)
        existing_rows = airtable.get_all()
        time = item["period_type"]
        url = (f"{base_url}id={path_id}&last={number_of_period}"
               f"&format={output_type}&download={download}"
               f"&data={original_data}&time={time}")
        res = requests.request("GET", url).json()

        for record in res['DataSet']['Series'][0]['obs']:
            time_period = record["TimePeriod"]
            # Rows already stored for this period.
            matches = [
                row for row in existing_rows
                if row['fields']["TimePeriod"] == time_period
            ]
            print(matches)
            new_record = {"TimePeriod": time_period,
                          "employee": record["Value"]}
            if matches:
                airtable.update_by_field("TimePeriod", time_period,
                                         new_record)
            else:
                airtable.insert(new_record)
def success():
    """Stripe checkout success callback.

    Records the payment details on the order ('pedidos') row matching the
    Stripe session, then emails the protocol owner about the sale and
    renders the success page.
    """
    air_base = os.getenv('AIR_TABLE_BASE')
    air_api_key = os.getenv('AIR_TABLE_API')
    air_table_name = os.getenv('AIR_PEDIDOS_TABLE_NAME')
    at = Airtable(air_base, air_table_name, api_key=air_api_key)

    at_prot = Airtable(air_base, os.getenv('AIR_PROTOCOLO_TABLE_NAME'), api_key=air_api_key)

    session_id = request.args.get('session_id', '')
    session = stripe.checkout.Session.retrieve(session_id)
    payment = stripe.PaymentIntent.retrieve(session['payment_intent'])

    # Fix: one Airtable call instead of three separate updates of the
    # same row. Stripe amounts are in minor units, hence / 100.
    at.update_by_field('stripe_session_id', session_id, {
        'stripe_payment_id': payment['id'],
        'status': payment['status'],
        'paid_amount': payment['amount'] / 100,
    })

    pedido = at.search('stripe_session_id', session_id)[0]['fields']
    protocolo = at_prot.get(pedido['protocolo'][0])['fields']

    email = {'nombre_consulta': protocolo['nombre_consulta'],
             'nombre_protocolo': protocolo['nombre_protocolo'],
             'shipping_name': pedido['shipping_name'],
             'shipping_email': pedido['shipping_email'],
             'shipping_phone': pedido['shipping_phone'],
             'preciofinal': protocolo['preciofinal'],
             'comisionesfinal': protocolo['comisionesfinal'],
             # Fix: the old conditional still raised KeyError when the
             # field was absent entirely; .get covers both missing and
             # falsy values.
             'modo_empleo': protocolo.get('modo_empleo') or ''
             }

    email_login = protocolo['email_login']

    subject = 'Tienes una nueva venta de tu protocolo {}'.format(email['nombre_protocolo'])

    send_mail(subject, email_login, 'email.html', **email)

    return render_template('success.html', **locals())
Ejemplo n.º 8
0
class AirtablePipeline(object):
    """
    Stub pipeline to save to AirTable.

    Future-dated events are flattened, restricted to KEEP_FIELDS,
    value-normalised and upserted into Airtable keyed on the 'id' field;
    events without a start_time, or already in the past, pass through
    unchanged.
    """
    def __init__(self):
        # One Airtable client, reused for every processed item.
        self.airtable = Airtable(AIRTABLE_BASE_KEY, AIRTABLE_DATA_TABLE)

    def process_item(self, item, spider):
        """Save *item* to Airtable and return it for downstream stages."""
        # copy item; airtable-specific munging is happening here that breaks
        # opencivicdata standard

        if item.get('start_time') is None:
            spider.logger.debug(
                'AIRTABLE PIPELINE: Ignoring event without start_time {0}'.
                format(item['id']))
            return item

        # NOTE(review): start_time is assumed to already be a datetime here
        # (elsewhere in this file it is parsed from a string) — confirm.
        dt = item['start_time']
        if dt < datetime.datetime.now(dt.tzinfo):
            spider.logger.debug(
                'AIRTABLE PIPELINE: Ignoring past event {0}'.format(
                    item['id']))
            return item

        time.sleep(randint(0, 3))  # to avoid rate limiting?

        new_item = item.copy()

        # flatten location
        new_item['location_url'] = get_key(new_item, 'location.url')
        new_item['location_name'] = get_key(new_item, 'location.name')
        new_item['location_address'] = get_key(new_item, 'location.address')
        new_item['location_latitude'] = get_key(
            new_item, 'location.coordinates.latitude')
        new_item['location_longitude'] = get_key(
            new_item, 'location.coordinates.longitude')
        new_item['url'] = new_item.get('sources', [{
            'url': ''
        }])[0].get('url', '')

        # Keep only whitelisted fields, normalising each value for Airtable.
        new_item = {
            k: self._format_values(k, v)
            for k, v in new_item.items() if k in KEEP_FIELDS
        }

        try:
            self.save_item(new_item, spider)
            return item
        except HTTPError as e:
            spider.logger.error('HTTP error')
            spider.logger.error(e.response.content)
            spider.logger.exception('Original message')
            spider.logger.error(json.dumps(new_item, indent=4, sort_keys=True))
            raise DropItem('Could not save {0}'.format(new_item['id']))
        except Exception:
            # Fix: previously fell through and implicitly returned None,
            # handing None to any later pipeline stage; log and keep item.
            spider.logger.exception('Unknown error')
            return item

    def _format_values(self, k, v):
        """Normalise a single (key, value) pair for Airtable storage."""
        # Missing values render as 'N/A', except the time fields where an
        # empty cell must stay empty.
        if ((v is None) or v == '') and (k not in ['start_time', 'end_time']):
            return 'N/A'
        if k == 'location_name':
            # Title-case each word of the location name.
            return ' '.join([w.capitalize() for w in v.split(' ')])
        if isinstance(v, bool):
            # Booleans are stored as 0/1.
            return int(v)
        if isinstance(v, datetime.datetime):
            # converts '2018-10-14T00:00:00-05:00' into '2018-10-14T05:00:00+00:00'
            # as required by the Airtable API
            return v.astimezone(utc).isoformat()
        return v

    def save_item(self, item, spider):
        """Insert *item*, or update the row whose 'id' field matches,
        stamping scrape_date_updated (and scrape_date_initial on insert)."""
        now = datetime.datetime.now().isoformat()
        airtable_item = self.airtable.match('id', item['id'])
        if airtable_item:
            spider.logger.debug('AIRTABLE PIPELINE: Updating {0}'.format(
                item['id']))
            item['scrape_date_updated'] = now
            self.airtable.update_by_field('id', item['id'], item)
        else:
            spider.logger.debug('AIRTABLE PIPELINE: Creating {0}'.format(
                item['id']))
            item['scrape_date_updated'] = now
            item['scrape_date_initial'] = now
            self.airtable.insert(item)
Ejemplo n.º 9
0
class GeocoderPipeline(object):
    """Geocode items, preferring the Airtable cache and falling back to
    Mapzen; successful Mapzen results are written back to the cache."""

    def __init__(self, session=None):
        """Create the Mapzen client and the Airtable-backed geocode cache.

        A requests.Session may be injected; a fresh one is created
        otherwise.
        """
        if session is None:
            session = requests.Session()
        self.session = session
        self.client = MapzenAPI(os.environ.get('MAPZEN_API_KEY'))
        self.geocode_database = Airtable(AIRTABLE_BASE_KEY,
                                         AIRTABLE_GEOCODE_TABLE)

    def process_item(self, item, spider):
        """
        Geocodes an item by:
            (1) looking in airtable cache
            (2) making a mapzen query and adding the result
                to the cache if (1) is not found

        Mapzen queries are standardized to end with ', Chicago, IL'.

        If something like '5100 Milwaukee Chicago, IL' is not found,
        '5100 Milwaukee Ave., Chicago, IL' and
        '5100 Milwaukee St., Chicago, IL' are also tried.
        """
        # skip geocoding if event is in the past
        if item.get('start_time') is None:
            spider.logger.debug(
                'GEOCODER PIPELINE: Ignoring event without start_time {0}'.
                format(item['id']))
            return item
        dt = item['start_time']
        if dt < datetime.datetime.now(dt.tzinfo):
            spider.logger.debug(
                'GEOCODER PIPELINE: Ignoring past event {0}'.format(
                    item['id']))
            return item

        query = self._get_mapzen_query(item.get('location', {}))
        if not query:
            spider.logger.debug(
                'GEOCODER PIPELINE: Empty query. Not geocoding {0}'.format(
                    item['id']))
            return item

        # First pass: try the airtable cache with each suffix variant.
        for suffix in ['', ' ave.', ' st.']:
            new_query = query.replace(', chicago, il',
                                      '{0}, chicago, il'.format(suffix))
            time.sleep(randint(0, 3))  # to avoid rate limiting?
            updated_item = self._update_fromDB(new_query, item)
            if updated_item:
                spider.logger.debug(
                    'GEOCODER PIPELINE: Geocoded item from airtable cache.')
                return updated_item

        # Second pass: ask Mapzen, cache any plausible Chicago street
        # address (must contain a digit and not be a bare city/state hit).
        bad_addresses = ['Chicago, IL, USA', 'Illinois, USA', '']
        for suffix in ['', ' ave.', ' st.']:
            new_query = query.replace(', chicago, il',
                                      '{0}, chicago, il'.format(suffix))
            geocoded_item = self._geocode(new_query, item, spider)
            address = geocoded_item['location']['address']
            if (address not in bad_addresses) and address.endswith(
                    'Chicago, IL, USA') and self._hasDigit(address):
                write_item = {
                    'mapzen_query': new_query,
                    'longitude':
                    geocoded_item['location']['coordinates']['longitude'],
                    'latitude':
                    geocoded_item['location']['coordinates']['latitude'],
                    'name': geocoded_item['location']['name'],
                    'address': geocoded_item['location']['address'],
                    'geocode': geocoded_item['geocode'],
                    'community_area': geocoded_item['community_area']
                }
                self._geocodeDB_write(spider, write_item)
                spider.logger.debug(
                    'GEOCODER PIPELINE: Geocoded item from mapzen.')
                return geocoded_item

        spider.logger.exception((
            "GEOCODER PIPELINE: Couldn't geocode using mapzen or airtable cache. "
            "Query: {0}. Item id: {1}").format(query, item['id']))
        return item

    def _geocode(self, query, item, spider):
        """
        Makes a Mapzen query and returns results.

        On any lookup failure returns a placeholder with an empty address
        so the caller's address checks fail cleanly.
        """
        try:
            geocode = self.client.search(query,
                                         boundary_country='US',
                                         format='keys')
        except ValueError:
            spider.logger.debug(
                ('GEOCODER PIPELINE: Could not geocode, skipping. '
                 'Query: {0}. Item id: {1}').format(query, item['id']))
        except Exception as e:
            spider.logger.info(
                ('GEOCODER PIPELINE: Unknown error when geocoding, skipping. '
                 'Query: {0}. Item id: {1}. Message: {2}').format(
                     query, item['id'], str(e)))
        else:
            new_data = {
                'location': {
                    'coordinates': {
                        'longitude':
                        str(geocode['features'][0]['geometry']['coordinates']
                            [0]),
                        'latitude':
                        str(geocode['features'][0]['geometry']['coordinates']
                            [1])
                    },
                    'name':
                    geocode['geocoding']['query']['parsed_text'].get(
                        'query', ''),
                    'address':
                    geocode['features'][0]['properties']['label'],
                    'url':
                    item.get('location', {
                        'url': ''
                    }).get('url', '')
                },
                'geocode':
                json.dumps(geocode, indent=4, sort_keys=True),
                'community_area':
                geocode['features'][0]['properties'].get('neighbourhood', '')
            }
            geocoded_item = item.copy()
            geocoded_item.update(new_data)
            return geocoded_item
        return {'location': {'address': ''}}

    def _hasDigit(self, string):
        """
        Returns True if the string contains a digit.
        """
        return any(char.isdigit() for char in string)

    def _get_mapzen_query(self, location_dict):
        """
        Clean an item's location to make a mapzen query.
        All cleaned queries are lowercase and
        end with ', chicago, il'.
        """
        name = location_dict.get('name', '').strip()
        address = location_dict.get('address', '').strip()
        query = ', '.join([
            name, address
        ]).strip(', ').lower()  # combine '{name}, {address}' and lowercase
        query = query.replace('-',
                              ' ').replace('/',
                                           ' ')  # remove special characters
        query = query.replace('milwukee', 'milwaukee').replace(
            'milwuakee', 'milwaukee')  # fix misspellings
        query = query.replace('n.', 'n. ').replace('s.', 's. ').replace(
            'e.', 'e. ').replace('w.', 'w. ')
        query = re.sub(r' +', ' ', query)  # remove repeated spaces
        query = re.sub(r',* chicago,*( il)* *\d*$', ', chicago, il',
                       query)  # remove zip code, standardize ', chicago, il'
        if not query:
            return ''
        if 'city hall' in query.lower():
            return 'chicago city hall, chicago, il'
        if not query.endswith(', chicago, il'):
            return '{0}, chicago, il'.format(query)
        else:
            return query

    def _update_fromDB(self, query, item):
        """
        Query the geocode database and update item
        with results.
        """
        fetched_item = self._geocodeDB_fetch(query)
        try:
            new_data = {
                'location': {
                    'coordinates': {
                        'longitude': str(fetched_item['longitude']),
                        'latitude': str(fetched_item['latitude'])
                    },
                    'name': fetched_item.get('name', ''),
                    'address': fetched_item['address'],
                    'url': item.get('location', {
                        'url': ''
                    }).get('url', '')
                },
                'geocode': str(fetched_item.get('geocode', '')),
                'community_area': fetched_item.get('community_area', '')
            }
        except (KeyError, TypeError):
            # Fix: was a bare `except:`. A cache miss surfaces here as a
            # TypeError (fetched_item is None) or KeyError (field missing);
            # anything else should propagate instead of being swallowed.
            return {}
        else:
            updated_item = item.copy()
            updated_item.update(new_data)
            return updated_item

    def _geocodeDB_fetch(self, query):
        """
        Fetch from geocode_database; returns the cached fields dict or
        None on a miss.
        """
        try:
            return self.geocode_database.match('mapzen_query', query)['fields']
        except (KeyError, TypeError):
            # Fix: was a bare `except:`; an unmatched query yields a dict
            # without 'fields' (KeyError) — treat only that as a miss.
            return None

    def _geocodeDB_write(self, spider, item):
        """
        Write to geocode_database, stamping geocode_date_updated.
        """
        spider.logger.debug('GEOCODER PIPELINE: Caching {0}'.format(
            item['mapzen_query']))
        item['geocode_date_updated'] = datetime.datetime.now().isoformat()
        airtable_item = self.geocode_database.match('mapzen_query',
                                                    item['mapzen_query'])
        if airtable_item:
            self.geocode_database.update_by_field('mapzen_query',
                                                  item['mapzen_query'], item)
        else:
            self.geocode_database.insert(item)
Ejemplo n.º 10
0
    urls.append(item['fields']['URL'])

# De-duplicate the collected URLs while preserving their order.
urls = list(dict.fromkeys(urls))

events = []

for url in urls:
    events.append(GetClubhouse(url))

with open('_data/events.json', 'w') as outfile:
    json.dump(events, outfile, ensure_ascii=False, indent=2)

for event in events:
    # Fix: the field payload was duplicated between the update and the
    # insert branches; build it once and reuse it.
    fields = {
        'Hosts': ', '.join(event['speakers']),
        'Description': event['description'],
        'Audition Name': event['title']
    }
    update = airtable.update_by_field('URL', event['url'], fields)
    if not update:
        # No existing row matched the URL: create one (the insert must
        # also carry the URL key itself).
        insert = airtable.insert({'URL': event['url'], **fields})
        print('Insert: ', insert)
    else:
        print('Update: ', update)

# close old issue
def main():
    """Pull tasks from an Airtable view, derive scheduling priorities, run
    the task-juggler scheduler, and write the resulting bookings back."""
    logging.basicConfig(level=logging.WARN)

    ARGPARSER = argparse.ArgumentParser()
    ARGPARSER.add_argument(
        '-l',
        '--loglevel',
        dest='loglevel',
        default=DEFAULT_LOGLEVEL,
        action='store',
        required=False,
        choices=["debug", "info", "warn", "error"],
        help=
        'Level for logging (strings from logging python package: "warn", "info", "debug")'
    )
    ARGPARSER.add_argument(
        '-a',
        '--api',
        dest='api',
        default=None,
        action='store',
        required=True,
        choices=['airtable'],
        help='Execute specified API: only "airtable" is currently supported')
    ARGPARSER.add_argument(
        '-k',
        '--api-key',
        dest='apikey',
        default="",
        action='store',
        required=True,
        help='Specify API key where appropriate (e.g. -k keyAnIuYcufa3dD)')
    ARGPARSER.add_argument(
        '-b',
        '--base',
        dest='base',
        default="",
        action='store',
        required=True,
        help='Specify Base ID where appropriate (e.g. -b appA8ZuLosBV4GDSd)')
    ARGPARSER.add_argument(
        '-t',
        '--table',
        dest='table',
        default="",
        action='store',
        required=True,
        help='Specify Table ID where appropriate (e.g. -t Tasks)')
    ARGPARSER.add_argument(
        '-v',
        '--view',
        dest='view',
        default="",
        action='store',
        required=True,
        help='Specify Table View where appropriate (e.g. -v Work)')
    ARGPARSER.add_argument('--dry-run',
                           dest='dryrun',
                           default=False,
                           action='store_true',
                           required=False,
                           help='Do not commit calculation results')
    # ARGPARSER.add_argument('-o', '--output', dest='output', default=DEFAULT_OUTPUT,
    #                       action='store', required=False,
    #                       help='Output .tjp file for task-juggler')
    ARGS = ARGPARSER.parse_args()

    set_logging_level(ARGS.loglevel)

    # PASSWORD = getpass('Enter generic password for {user}: '.format(user=ARGS.username))

    airtable = Airtable(ARGS.base, ARGS.table, api_key=ARGS.apikey)

    # Flatten each Airtable record to its fields dict, then derive a numeric
    # 'priority' for the scheduler: base 100 (low) / 200 (high) /
    # 300 (critical) plus the record's integer 'preference'; an unrecognised
    # priority string maps to 1 and a missing one to preference + 100 (low).
    data = [x["fields"] for x in airtable.get_all(view=ARGS.view)]
    for rec in data:
        preference = 0
        if "preference" in rec:
            preference = int(rec['preference'])
        if "priority" in rec:
            if rec["priority"].lower() == "low":
                pri = preference + 100
            elif rec["priority"].lower() == "high":
                pri = preference + 200
            elif rec["priority"].lower() == "critical":
                pri = preference + 300
            else:
                pri = 1
        else:
            pri = preference + 100  # low
        rec["priority"] = pri
        # A fixed appointment becomes the task's start time, and priority
        # is dropped entirely (see inline note below).
        if 'appointment' in rec:
            rec['start'] = rec['appointment']
            del rec[
                "priority"]  # tasks scheduling is not guaranteed if priority is set
        # 'depends' is a free-form string; extract every token as an
        # integer dependency id.
        if 'depends' in rec:
            rec['depends'] = [
                int(x) for x in re.findall(r"[\w']+", rec["depends"])
            ]
        # Escalate sub-critical tasks by 3 points per day past their
        # deadline (negative day counts are clamped to 0, so nothing
        # changes before the deadline), capping the priority at 250.
        if "priority" in rec and "deadline" in rec and not rec[
                "priority"] >= 300:
            diff_days = (datetime.datetime.now() -
                         dateutil.parser.parse(rec["deadline"])).days
            if diff_days < 0: diff_days = 0
            rec["priority"] = rec["priority"] + diff_days * 3
            if rec["priority"] >= 250: rec["priority"] = 250

    # Run the scheduler over the prepared records.
    JUGGLER = DictJuggler(data)
    JUGGLER.run()

    if ARGS.dryrun: return

    # Write each scheduled task's booking start back to its Airtable row.
    for t in JUGGLER.walk(juggler.JugglerTask):
        airtable.update_by_field("id", t.get_id(), {
            "booking":
            t.walk(juggler.JugglerBooking)[0].decode()[0].isoformat()
        })
Ejemplo n.º 12
0
class AirtablePipeline(object):
    """
    Stub pipeline to save to AirTable.

    Future-dated events are flattened, restricted to FIELDS_WHITELIST and
    upserted into Airtable keyed on the 'id' field; events without a
    start_time, or already in the past, pass through unchanged.
    """
    def __init__(self):
        # One Airtable client, reused for every processed item.
        self.airtable = Airtable(AIRTABLE_BASE_KEY, AIRTABLE_DATA_TABLE)

    def process_item(self, item, spider):
        """Save *item* to Airtable and return it for downstream stages."""
        # copy item; airtable-specific munging is happening here that breaks
        # opencivicdata standard

        if item.get('start_time') is None:
            spider.logger.debug(
                'AIRTABLE PIPELINE: Ignoring event without start_time {0}'.
                format(item['id']))
            return item

        # start_time arrives as a string here and is parsed before the
        # past/future comparison.
        dt = dateutil.parser.parse(item['start_time'])
        if dt < datetime.datetime.now(dt.tzinfo):
            spider.logger.debug(
                'AIRTABLE PIPELINE: Ignoring past event {0}'.format(
                    item['id']))
            return item

        time.sleep(randint(0, 3))  # to avoid rate limiting?

        new_item = item.copy()

        # flatten location
        new_item['location_url'] = get_key(new_item, 'location.url')
        new_item['location_name'] = get_key(new_item, 'location.name')
        new_item['location_address'] = get_key(new_item, 'location.address')
        new_item['location_latitude'] = get_key(
            new_item, 'location.coordinates.latitude')
        new_item['location_longitude'] = get_key(
            new_item, 'location.coordinates.longitude')
        new_item[
            'timezone'] = 'America/Chicago'  # TODO have this passed in by the spiders
        new_item['all_day'] = 'false'
        new_item['agency_name'] = spider.long_name

        # Keep only whitelisted fields.
        new_item = {k: v for k, v in new_item.items() if k in FIELDS_WHITELIST}

        try:
            self.save_item(new_item, spider)
            return item
        except HTTPError as e:
            spider.logger.error('HTTP error')
            spider.logger.error(e.response.content)
            spider.logger.exception('Original message')
            spider.logger.error(json.dumps(new_item, indent=4, sort_keys=True))
            raise DropItem('Could not save {0}'.format(new_item['id']))
        except Exception:
            # Fix: previously fell through and implicitly returned None,
            # handing None to any later pipeline stage; log and keep item.
            spider.logger.exception('Unknown error')
            return item

    def save_item(self, item, spider):
        """Insert *item*, or update the row whose 'id' field matches,
        stamping scrape_date_updated (and scrape_date_initial on insert)."""
        now = datetime.datetime.now().isoformat()
        airtable_item = self.airtable.match('id', item['id'])
        if airtable_item:
            spider.logger.debug('AIRTABLE PIPELINE: Updating {0}'.format(
                item['id']))
            item['scrape_date_updated'] = now
            # Fix: the update must target the row whose 'id' *field* equals
            # item['id']; the old code passed airtable_item['id'] (the
            # Airtable record id, e.g. 'recXXXX'), a value the 'id' field —
            # which was just matched on item['id'] — never holds, so the
            # update silently matched nothing.
            self.airtable.update_by_field('id', item['id'], item)
        else:
            spider.logger.debug('AIRTABLE PIPELINE: Creating {0}'.format(
                item['id']))
            item['scrape_date_updated'] = now
            item['scrape_date_initial'] = now
            self.airtable.insert(item)
Ejemplo n.º 13
0
# Materialise the joined methods frame as a list of row dicts.
methods = joined_methods >> pipe(_.to_dict(orient="records"))

# +
# Index the existing Airtable rows by their 'method' field so each joined
# record can be compared against what is already stored.
raw_map = {x['fields']['method']: x['fields'] for x in raw_methods}

inserts = []
updated = []
for entry in methods:
    # Drop the local 'id' column and strip NaN values before writing.
    fields = strip_dict_nans({k: v for k, v in entry.items() if k != "id"})
    dst_fields = raw_map.get(entry['method'], {})

    if record_needs_update(fields, dst_fields):
        updated.append(fields)

        air_methods.update_by_field("method", entry["method"], fields=fields)

    # A missing id means this row does not exist in Airtable yet.
    if pd.isna(entry["id"]):
        inserts.append(fields)

if inserts:
    air_methods.batch_insert(inserts)

print("Updated methods: ", ", ".join([d['method'] for d in updated]))
print("Inserted methods: ", ", ".join([d['method'] for d in inserts]))

#air_methods.batch_update(list(map(strip_dict_nans, method_inserts)))
#air_backends.batch_insert(backend_inserts)
# -

# ## Link method records to backends