Example #1
0
File: parses.py  Project: jazminite/wcl
def main():
    """Build the parse table for the raid's players and push it to the sheet.

    Pulls the configured raid's reports, derives the player list from them,
    assembles the parse rows, and writes everything into the 'add_parse'
    worksheet, printing progress along the way.
    """
    target = wb.worksheet('add_parse')
    raid_reports = get_reports(secrets.raid_id, secrets.c_date)
    print('Reports retrieved')
    roster = get_players(raid_reports)
    parse_rows = get_parses(roster, [])
    print('Table created')
    update_sheet(target, parse_rows)
    print('Worksheet updated')
Example #2
0
def main_script():
    """Duplicate the 'Report' worksheet and fill the copy with Trello cards.

    For every configured Trello board and list, fetches the cards via the
    Trello REST API, prints a progress listing, and appends one row per card
    (board, list, card name, URL, creation time decoded from the card id,
    last-activity timestamp, member names) to the new 'Report <date>'
    worksheet.
    """
    # Duplicate the Report worksheet under a dated name.
    today = dt.datetime.today().strftime('%m-%d-%y')
    DATA = {
        'requests': [{
            'duplicateSheet': {
                'sourceSheetId': int(wks.id),
                'insertSheetIndex': 0,
                'newSheetName': 'Report ' + today
            }
        }]
    }
    service.spreadsheets().batchUpdate(spreadsheetId=secrets.google_sheet_id,
                                       body=DATA).execute()

    # Collect Trello Card info
    table = []
    print('--------------------')
    for board in trello.trello_boards:
        for trello_list in board['lists']:
            list_json = requests.request(
                'GET',
                trello.url_lists + trello_list['id'] + '/cards' +
                trello.tokens).json()
            print(board['name'])
            print('--------------------')
            # enumerate replaces the original hand-rolled counter.
            for x, card in enumerate(list_json, start=1):
                print(str(x) + ' - ' + card['name'])
                # Resolve member ids to display names (empty list when the
                # card has no members). Avoids shadowing the builtin `id`.
                card_members = [
                    get_trello_member(member_id)['name']
                    for member_id in card['idMembers']
                ]
                # The first 8 hex chars of a Trello id encode the card's
                # creation timestamp.
                new_row = [
                    board['name'], trello_list['name'], card['name'],
                    card['shortUrl'],
                    str(dt.datetime.fromtimestamp(int(card['id'][0:8], 16))),
                    str(
                        dt.datetime.strptime(card['dateLastActivity'],
                                             '%Y-%m-%dT%H:%M:%S.%fZ')),
                    ', '.join(card_members)
                ]

                # Append card / ticket to the table
                table.append(new_row)
            print('--------------------')

    # Add card info to the newly created worksheet
    new_wks = wb.worksheet('Report ' + today)
    update_sheet(new_wks, table)

    print('Complete!')
Example #3
0
def main():
    """Pull Oakland County COVID case data by ZIP code and publish it.

    Fetches the ArcGIS feature service that backs the county's cases-by-ZIP
    map, saves the raw JSON source, a full CSV, and a trimmed "freepress"
    CSV, then pushes the trimmed table to a Google Sheet.  Progress is
    appended to log.txt.
    """
    now = datetime.now()

    # Context manager ensures the log file is closed even if a step raises
    # (the original left the handle open forever).
    with open("log.txt", "a") as log:
        log.write("=================================\n")
        log.write("0. " + now.strftime("%Y-%m-%d-%H-%M") + "\n")

        # Finding the data from a map in this page
        # https://www.oakgov.com/covid/casesByZip.html.
        # url3 is the address where the map pulls its data from.
        url3 = 'https://services1.arcgis.com/GE4Idg9FL97XBa3P/arcgis/rest/services/COVID19_Cases_by_Zip_Code_Total_Population/FeatureServer/0/query?f=json&where=1%3D1&returnGeometry=false&spatialRel=esriSpatialRelIntersects&outFields=*&orderByFields=Join_Zip_Code%20asc&resultOffset=0&resultRecordCount=200&cacheHint=true'

        response = get(url3)
        data = response.json()

        log.write("1. Pulled JSON\n")

        # The data is in the list of dictionaries called 'features'; we only
        # need each feature's 'attributes' dict.
        features = data['features']
        new_data = [feature['attributes'] for feature in features]

        Oakland = DataFrame(new_data)
        log.write("2. Created data frame\n")

        #### Output ##################
        utils.save_source_file("Oakland.json", response.text)
        log.write("3. Saved Source file\n")

        # Renamed from `csv` to avoid shadowing the stdlib module name.
        oakland_csv = Oakland.to_csv(index=False)
        utils.save_data_file("Oakland.csv", oakland_csv)
        log.write("4. Saved Data file\n")

        fp = Oakland[[
            "Join_Zip_Code", "Total__COVID19_Cases", "Death_Count",
            "Total_Population_ACS_17_5YR"
        ]].copy(deep=True)

        fp.columns = ["ZIP code", "Cases", "Deaths", "Population"]
        fp.loc[:, "Cases"] = fp["Cases"].astype(float)
        fp.loc[:, "Deaths"] = fp["Deaths"].astype(float)
        fp.loc[:, "Population"] = fp["Population"].astype(float)
        fp.loc[fp["Cases"].isnull(), "Cases"] = 0.0
        fp.loc[fp["Deaths"].isnull(), "Deaths"] = 0.0
        fp.loc[:, "Cases per 100,000"] = 100000.0 * fp["Cases"] / fp["Population"]
        log.write("5. Processed Freepress\n")

        fpcsv = fp.to_csv(index=False)
        utils.save_freepress_file("Oakland.csv", fpcsv)
        log.write("6. Saved Freepress\n")
        ##############################

        #### save csv to sheets ######

        GOOGLE_SHEET_ID = '1wtkFL6BIZQoLnUVfcr8y_2XyjSzlrPimH6A0lMjtlys'
        GOOGLE_SHEET_NAME = 'Oakland County'
        try:
            # we need to convert the dataframe to a list of lists
            # and then update via google sheets api
            data_to_save = [fp.columns.values.tolist()] + fp.values.tolist()
            sheets.update_sheet(GOOGLE_SHEET_ID, GOOGLE_SHEET_NAME, data_to_save)
            log.write("7. Updated google sheet\n")

        except Exception as exc:
            print(exc)
            # Trailing newline added so the error doesn't run into the next
            # log entry (the original omitted it).
            log.write(f'exception writing to sheet: {exc}\n')
Example #4
0
def main():
    """Pull Wayne County COVID case data per municipality and publish it.

    Fetches the county's ArcGIS COVID feature service, merges it with the
    local city_population.csv, saves the raw JSON, a full CSV, and a trimmed
    "freepress" CSV with per-1,000 case rates, then pushes the trimmed table
    to a Google Sheet.  Progress is appended to log.txt.
    """
    now = datetime.now()

    # Context manager ensures the log file is closed on every path — the
    # original leaked the handle, especially on the early returns below.
    with open("log.txt", "a") as log:
        log.write("=================================\n")
        log.write("0. "+now.strftime("%Y-%m-%d-%H-%M")+"\n")

        # read the local "config" csv file with each city/municipality's
        # display name, name (as mapped to gis data) and 2018 population.
        # format of the csv is:
        # display name, name in gis data, 2018 population
        city_population_df = pd.read_csv('city_population.csv')

        url = 'https://www.waynecounty.com/gisserver/rest/services/COVID/COVID_Location/MapServer/0/query?f=json&where=1%3D1&returnGeometry=false&spatialRel=esriSpatialRelIntersects&outFields=*&orderByFields=Confirms%20desc&resultOffset=0&resultRecordCount=43'
        data = {}

        try:
            response = get(url)
            data = response.json()
        except Exception as exc:
            # Trailing newline added so log entries stay one-per-line.
            log.write(f'Failed to fetch data via waynecounty.com/gisserver: {exc}\n')
            return

        log.write("1. Pulled JSON\n")

        # The data is in the list of dictionaries called 'features'
        features = data.get('features')
        if not features:
            log.write('Failed to get "features" from json for wayne county\n')
            return

        new_data = [feature.get('attributes') for feature in features]

        wayne_df = pd.DataFrame(new_data)
        log.write("2. Created data frame\n")

        #### Output ##################
        utils.save_source_file("wayne.json", response.text)
        log.write("3. Saved Source file\n")

        # Typo fixed: was `wayce_csv`.
        wayne_csv = wayne_df.to_csv(index=False)
        utils.save_data_file("wayne.csv", wayne_csv)
        log.write("4. Saved Data file\n")

        # merge the arcgis data with the population csv file (the original
        # rebuilt wayne_df here from the same new_data — redundant).
        wayne_full_df = pd.merge(wayne_df, city_population_df, on='Municipality')

        # some cleanup...converting to floats
        wayne_full_df.loc[:, 'Confirms'] = wayne_full_df['Confirms'].astype(float)
        wayne_full_df.loc[:, 'Deaths'] = wayne_full_df['Deaths'].astype(float)
        wayne_full_df.loc[:, '2018 Population'] = wayne_full_df['2018 Population'].str.replace(',', '').astype(float)
        wayne_full_df.loc[wayne_full_df['Confirms'].isnull(), 'Confirms'] = 0.0
        wayne_full_df.loc[wayne_full_df['Deaths'].isnull(), 'Deaths'] = 0.0

        # cases per 1,000
        wayne_full_df['Cases per 1,000 population'] = 1000.0*wayne_full_df['Confirms']/wayne_full_df['2018 Population']

        # save out a few of the columns to a new dataframe and
        # rename the columns to match what the free press would like
        final_df = wayne_full_df[['Display Name', 'Confirms', 'Deaths', 'Cases per 1,000 population', '2018 Population']].copy(deep=True)
        final_df.columns = ['City/Twp.', 'Cases', 'Deaths', 'Cases per 1,000 population', 'Population']

        log.write("5. Processed Freepress\n")

        # save the dataframe to csv
        df_csv = final_df.to_csv(index=False)
        utils.save_freepress_file("wayne.csv", df_csv)
        log.write("6. Saved Freepress\n")
        ##############################

        #### save csv to sheets ######

        try:
            # we need to convert the dataframe to a list of lists
            # and then update via google sheets api
            data_to_save = [final_df.columns.values.tolist()] + final_df.values.tolist()
            sheets.update_sheet(GOOGLE_SHEET_ID, GOOGLE_SHEET_NAME, data_to_save)
            log.write("7. Updated google sheet\n")

        except Exception as exc:
            print(exc)
            log.write(f'exception writing to sheet: {exc}\n')
            raise exc
Example #5
0
def main():
    """Refresh the 'players' worksheet from the configured raid's reports."""
    target = wb.worksheet('players')
    raid_reports = get_reports(secrets.raid_id, secrets.c_date)
    player_rows = get_player_rows(raid_reports)
    update_sheet(target, player_rows)
Example #6
0
def main():
    """Assemble parse rows for the configured team and write them out."""
    target = wb.worksheet('add_parse')
    parse_table = get_parses(secrets.team_players, [])
    print('Table created')
    update_sheet(target, parse_table)
    print('Worksheet updated')
Example #7
0
def main_script():
    """Duplicate the 'Report' worksheet and fill it with Trello/Zendesk data.

    For every configured Trello board and list, fetches the cards, pairs
    each card with its linked Zendesk ticket (agent, status, last update),
    reports per-card metrics to Datadog, writes the combined table into the
    new 'Report <date>' worksheet, and re-opens Zendesk tickets whose Trello
    card was updated after the ticket while the ticket sat in hold/pending.
    """
    # Duplicate the Report worksheet under a dated name.
    today = dt.datetime.today().strftime('%m-%d-%y')
    DATA = {'requests': [
        {
            'duplicateSheet': {
                'sourceSheetId': int(wks.id),
                'insertSheetIndex': 0,
                'newSheetName': 'Report ' + today
            }
        }
    ]}
    service.spreadsheets().batchUpdate(
        spreadsheetId=secrets.google_sheet_id, body=DATA).execute()

    # Collect Trello Card / Zendesk ticket info
    table = []
    tickets_to_open = []
    print('--------------------')
    for board in trello.trello_boards:
        for trello_list in board['lists']:
            list_json = requests.request(
                'GET',
                trello.url_lists + trello_list['id'] + '/cards' +
                trello.tokens).json()
            print(board['name'])
            print('--------------------')
            # enumerate replaces the original hand-rolled counter.
            for x, card in enumerate(list_json, start=1):
                print(str(x) + ' - ' + card['name'])
                zendesk_url = find_zendesk_url(card)
                # Last path segment of the URL is the ticket id.
                zendesk_id = zendesk_url.split('/').pop()
                new_row = [
                    board['name'],
                    trello_list['name'],
                    card['name'],
                    card['shortUrl'],
                    str(dt.datetime.strptime(card['dateLastActivity'], '%Y-%m-%dT%H:%M:%S.%fZ')),
                    zendesk_url
                ]
                # Ids longer than 6 chars are treated as "no ticket linked".
                if len(zendesk_id) <= 6:
                    zendesk_ticket = get_zendesk_ticket(zendesk_id)
                    try:
                        agent = zendesk_ticket['zAssigneeName']
                        status = zendesk_ticket['zStatus']
                        hold_pending = status == 'hold' or status == 'pending'
                        if card['dateLastActivity'] > zendesk_ticket['zLastUpdated'] and hold_pending:
                            status = u're-opened'
                            tickets_to_open.append(zendesk_id)
                        # BUG FIX: the original check was inverted — it
                        # replaced every *string* assignee name with
                        # 'unknown' and let non-strings (e.g. None) fall
                        # through to crash on .replace() below. Substitute
                        # only when the assignee is NOT a string.
                        if not isinstance(agent, str):
                            agent = u'unknown'
                        new_row.extend((agent, status, str(dt.datetime.strptime(zendesk_ticket['zLastUpdated'], '%Y-%m-%dT%H:%M:%SZ'))))
                        add_to_datadog_api(board['tag'], trello_list['tag'], 'zendesk_agent:' + remove_accents(agent.replace(' ', '_').lower()), 'zendesk_status:' + status)
                    except KeyError:
                        # Ticket payload missing expected keys.
                        new_row.extend(('unknown', 'error', 'x'))
                        add_to_datadog_api(board['tag'], trello_list['tag'], 'zendesk_agent:unknown', 'zendesk_status:error')
                else:
                    new_row.extend(('unknown', 'missing', 'x'))
                    add_to_datadog_api(board['tag'], trello_list['tag'], 'zendesk_agent:unknown', 'zendesk_status:missing')

                # Append card / ticket to the table
                table.append(new_row)
            print('--------------------')

    # Add card / ticket info to the newly created worksheet
    new_wks = wb.worksheet('Report ' + today)
    update_sheet(new_wks, table)

    # Add metrics through datadog api — only the values are needed, so
    # iterate .values() rather than discarding the keys from .items().
    metrics = list(datadog_api.values())

    api.Metric.send(metrics)

    # Open Zendesk tickets where last date Trello Card updated > last Zendesk ticket updated + status = pending or hold
    print('Tickets to open: ', len(tickets_to_open))
    if tickets_to_open:
        confirm = open_zendesk_tickets(tickets_to_open)
        print(confirm)

    print('Complete!')