# ===== Example #1 =====
    def handle(self, handler_input):
        # type: (HandlerInput) -> Response
        """Record the spoken repetition count and method to the
        spreadsheet, then confirm back to the user and end the session."""
        number = get_slot_value(handler_input=handler_input, slot_name="number")
        method = get_slot_value(handler_input=handler_input, slot_name="method")

        # Prefer the canonical entity-resolution value for "method"
        # whenever the resolver reports a successful match.
        slots = handler_input.request_envelope.request.intent.slots
        if "method" in slots:
            resolution = slots["method"].resolutions.resolutions_per_authority[0]
            if resolution.status.code == StatusCode.ER_SUCCESS_MATCH:
                method = resolution.values[0].value.name

        # Payload understood by the Apps Script endpoint behind gsheet.
        payload = {
            "function": "appendRecords",
            "parameters": [{
                "query_result": {
                    "number": number,
                    "method": method,
                }
            }],
            "devMode": True,
        }
        gsheet.write_data(gsheet.get_auth(), payload)

        speech_text = str(number) + "回、" + method + "の記録をしました"
        builder = handler_input.response_builder
        builder.speak(speech_text).set_should_end_session(True)
        return builder.response
# ===== Example #2 =====
def parse_settings_on_cloud(afterdate: str,
                            beforedate: str,
                            write_to_sheets=True):
    """
    Same as `parse_settings()` (see below) but without command line interface and showbrowser option.
    Outputs scraped results to a gsheet:Settings_scheduler if `write_to_sheets` is True
    """

    logger.info(f"Parsing settings between {afterdate} and {beforedate}.")

    days = get_days_between_dates(afterdate=afterdate, beforedate=beforedate)
    pulled_settings = make_setting_list(days)
    for setting in pulled_settings:
        persist.rest_setting(setting)

    # maybe make this cleaner in sql? future work
    if not write_to_sheets:
        return

    sheet = gsheet.open_sheet(gsheet.init_sheets(),
                              "Court_scraper_eviction_scheduler",
                              "eviction_scheduler")
    # Narrow to eviction hearings/trials, combine the case columns, and
    # keep only the latest row per case number before writing.
    evictions = gsheet.filter_df(pd.DataFrame(pulled_settings),
                                 'setting_type', 'Eviction')
    hearings = gsheet.filter_df(evictions, 'hearing_type',
                                '(Hearing)|(Trial)')
    combined = gsheet.combine_cols(hearings,
                                   ['case_number', 'setting_style'],
                                   'case_dets')
    deduped = combined.drop_duplicates("case_number", keep="last")
    gsheet.write_data(sheet, gsheet.morning_afternoon(deduped))
# ===== Example #3 =====
def parse_settings(afterdate, beforedate, outfile, showbrowser=False):
    """Gets data for all settings between `afterdate` and `beforedate` and sends results to PostgreSQL database.

    Also mirrors the deduplicated eviction hearings to the scheduler Google
    Sheet and dumps the raw settings as JSON to `outfile`.

    Returns the list of scraped settings so callers
    (e.g. `parse_and_persist_settings` below) can iterate over it.
    """

    # If showbrowser is True, use the default selenium driver
    if showbrowser:
        from selenium import webdriver
        fetch_page.driver = webdriver.Chrome("./chromedriver")

    days_to_pull = get_days_between_dates(afterdate=afterdate,
                                          beforedate=beforedate)
    pulled_settings = make_setting_list(days_to_pull)
    for setting in pulled_settings:
        persist.rest_setting(setting)

    # Sheet payload: evictions only, hearings/trials only, combined case
    # columns, one row per case number (keep the latest).
    evictions = gsheet.filter_df(pd.DataFrame(pulled_settings),
                                 'setting_type', 'Eviction')
    hearings = gsheet.filter_df(evictions, 'hearing_type',
                                '(Hearing)|(Trial)')
    combined = gsheet.combine_cols(hearings,
                                   ['case_number', 'setting_style'],
                                   'case_dets')
    deduped = combined.drop_duplicates("case_number", keep="last")
    gsheet.write_data(
        gsheet.open_sheet(gsheet.init_sheets(),
                          "Court_scraper_eviction_scheduler",
                          "eviction_scheduler"),
        gsheet.morning_afternoon(deduped))

    json.dump(pulled_settings, outfile)
    # Fix: the original returned None, but `parse_and_persist_settings`
    # assigns and iterates over this function's result.
    return pulled_settings
def parse_settings_on_cloud(afterdate, beforedate):
    """Scrape settings between the two dates, persist each one to the REST
    backend, and mirror the filtered eviction hearings to the scheduler
    Google Sheet (cloud variant: no CLI or browser options)."""
    logger.info(f"Parsing settings between {afterdate} and {beforedate}.")

    pulled_settings = make_setting_list(
        get_days_between_dates(afterdate=afterdate, beforedate=beforedate))
    for setting in pulled_settings:
        persist.rest_setting(setting)

    target = gsheet.open_sheet(gsheet.init_sheets(),
                               "Court_scraper_eviction_scheduler",
                               "eviction_scheduler")
    # Keep only eviction hearings/trials, then combine the case columns
    # into a single 'case_dets' field for the sheet.
    evictions_only = gsheet.filter_df(pd.DataFrame(pulled_settings),
                                      'setting_type', 'Eviction')
    hearings_only = gsheet.filter_df(evictions_only, 'hearing_type',
                                     '(Hearing)|(Trial)')
    sheet_rows = gsheet.combine_cols(hearings_only,
                                     ['case_number', 'setting_style'],
                                     'case_dets')
    gsheet.write_data(target, sheet_rows)
def parse_and_persist_settings(afterdate: str,
                               beforedate: str,
                               outfile: str,
                               showbrowser=False):
    """Scrape settings between `afterdate` and `beforedate`, persist each
    one to the REST backend, mirror deduplicated eviction hearings to the
    scheduler Google Sheet, and dump the raw settings as JSON to `outfile`.

    NOTE(review): this relies on `parse_settings` returning the scraped
    settings list — verify its return value; a None return would make the
    loop below raise TypeError. Also note that `parse_settings` performs
    the persist loop, sheet write, and JSON dump itself, so every step
    here may run a second time. Confirm which function is the intended
    entry point.
    """
    pulled_settings = parse_settings(afterdate, beforedate, outfile,
                                     showbrowser)
    for setting in pulled_settings:
        persist.rest_setting(setting)
    # Sheet payload: evictions only, hearings/trials only, combined
    # case_number/setting_style columns, latest row per case number.
    gsheet.write_data(
        gsheet.open_sheet(gsheet.init_sheets(),
                          "Court_scraper_eviction_scheduler",
                          "eviction_scheduler"),
        gsheet.morning_afternoon(
            gsheet.combine_cols(
                gsheet.filter_df(
                    gsheet.filter_df(pd.DataFrame(pulled_settings),
                                     'setting_type', 'Eviction'),
                    'hearing_type', '(Hearing)|(Trial)'),
                ['case_number', 'setting_style'],
                'case_dets').drop_duplicates("case_number", keep="last")))
    json.dump(pulled_settings, outfile)
# ===== Example #6 =====
def dump_to_sheets(sheet, worksheet, tables, filter_evictions=False):
    """Read each table in `tables` from the database, outer-merge them on
    case_number, optionally keep only eviction cases, and write the result
    to the given Google Sheet worksheet.

    Skipped entirely when the LOCAL_DEV environment variable is "true".
    """
    if os.getenv("LOCAL_DEV") == "true":
        logger.info(
            "Not dumping to google sheets because LOCAL_DEV environment variable is 'true'."
        )
        return

    target = gsheet.open_sheet(gsheet.init_sheets(), sheet, worksheet)
    # NOTE(review): `local_dev` is not defined in this function —
    # presumably a module-level flag; confirm it exists at module scope.
    # Hoisted: one connection serves all the sequential reads below.
    conn = connect_to_database.get_database_connection(local_dev)
    dfs = []
    for table in tables:
        # NOTE: table names are interpolated directly into the SQL —
        # callers must pass trusted identifiers only.
        sql = "select * from " + table
        # Group cases with multiple events into the same case number do we want to do this it leads to columns with " , " junk
        # if table=="events": df = df.groupby("case_detail_id").fillna('').agg(', '.join).reset_index()
        dfs.append(pd.read_sql_query(sql, conn))
    # Bug fix: the original passed undefined names `vright`, `von`, `vhow`
    # to pd.merge, which would raise NameError at runtime.
    df = reduce(
        lambda left, right: pd.merge(left, right, on='case_number',
                                     how='outer'), dfs)
    if filter_evictions:
        # Bug fix: the filtered frame was previously discarded, making
        # `filter_evictions` a no-op; keep the result.
        df = gsheet.filter_df(df, 'case_type', 'Eviction')
    gsheet.write_data(target, df)
# ===== Example #7 =====
# -*- coding:utf-8 -*-
# Push collected environment-sensor readings and Remo power-consumption
# data to their respective Google Sheets.
import gsheet
import os

gsheet.write_data(
    spreadsheet_id=os.environ.get('SPREADSHEET_ID'),  # sheet ID for environment measurement records
    values=gsheet.collect_data(),
    service=gsheet.get_authentication(),
    range='sensor!A1')

gsheet.write_data(
    spreadsheet_id=os.environ.get('SPREADSHEET_ID_REMOE'),  # sheet ID for power consumption records
    values=gsheet.collect_remoe_data(),
    service=gsheet.get_authentication(),
    range='energy!A1')
# ===== Example #8 =====
# -*- coding:utf-8 -*-
# Record the motion sensor's last-update timestamp to Google Sheets when
# it has changed since the previous run.
import gsheet
import move_sensor as ms
import datetime as dt
import os.path

# Timestamp string of the sensor's most recent update, in
# '%Y-%m-%dT%H:%M:%SZ' form (presumably UTC, given the +9h JST shift
# below — TODO confirm).
now = ms.move_sensor_last_update()

if ms.move_updated(now):
    # Remember this timestamp so the next run can detect a change.
    with open('move_last_update', mode='w') as f:
        f.write(now)

    # Convert the timestamp to Japan Standard Time (UTC+9).
    now_jst = dt.datetime.strptime(
        now, '%Y-%m-%dT%H:%M:%SZ') + dt.timedelta(hours=9)

    values = [now_jst.strftime('%Y/%m/%d %H:%M:%S')]

    gsheet.write_data(
        spreadsheet_id=os.environ.get('SPREADSHEET_ID'),  # sheet ID for environment measurement records
        values=values,
        service=gsheet.get_authentication(),
        range='move!A1')