Example 1
def fetch_tracks(request_from_server=False):
    global memory_cache
    logger.info('fetch_tracks: called')

    if memory_cache:
        logger.info('fetch_tracks: checking memory cache')
    else:
        logger.info('fetch_tracks: checking disk cache')
        memory_cache = cache.fetch(config.LASTFM_CACHE_PATH)

    content = cache.content(memory_cache, config.LASTFM_CACHE_LIFETIME, request_from_server)
    if content:
        return content

    try:
        formatted_tracks = []
        endpoint = 'https://ws.audioscrobbler.com/2.0/?method=user.gettoptracks&period=7day&format=json&limit=5&user=' + '******' + '&api_key=' + config.LASTFM_API_KEY
        with urllib.request.urlopen(endpoint) as url:
            tracks = json.loads(url.read().decode())['toptracks']['track']
            for track in tracks:
                formatted_tracks.append({'name': track['name'], 'count': track['playcount'], 'artist': track['artist']['name']})
        
        memory_cache = cache.save(formatted_tracks, config.LASTFM_CACHE_PATH)
        return formatted_tracks
    except Exception:
        # Don't swallow failures silently; log them and fall back to None.
        logger.exception('fetch_tracks: failed to fetch tracks from Last.fm')
        return None
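
All five fetchers delegate staleness handling to a small cache helper (cache.fetch, cache.content, cache.save) that is not shown here. The sketch below is only a guess at its interface, assuming a JSON file with a saved-at timestamp and treating request_from_server as permission to serve stale content; the real module may well behave differently.

import json
import os
import time


def fetch(path):
    # Load a previously saved entry from disk; None if missing or unreadable.
    # Assumed on-disk shape: {'saved_at': <epoch seconds>, 'content': <payload>}
    if not os.path.exists(path):
        return None
    try:
        with open(path) as f:
            return json.load(f)
    except (OSError, ValueError):
        return None


def content(entry, lifetime, request_from_server=False):
    # Return the cached payload while it is younger than `lifetime` seconds.
    # Assumption: server-driven requests accept stale data rather than block
    # on a slow upstream API, so they skip the age check entirely.
    if not entry:
        return None
    age = time.time() - entry.get('saved_at', 0)
    if request_from_server or age < lifetime:
        return entry.get('content')
    return None


def save(payload, path):
    # Persist the payload and hand back the new in-memory cache entry.
    entry = {'saved_at': time.time(), 'content': payload}
    with open(path, 'w') as f:
        json.dump(entry, f)
    return entry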
Example 2
def fetch_wait_times(request_from_server=False):
    global memory_cache
    logger.info('fetch_wait_times: called')

    if memory_cache:
        logger.info('fetch_wait_times: checking memory cache')
    else:
        logger.info('fetch_wait_times: checking disk cache')
        memory_cache = cache.fetch(config.THEME_PARKS_CACHE_PATH)

    content = cache.content(memory_cache, config.THEME_PARKS_CACHE_LIFETIME,
                            request_from_server)
    if content:
        return content

    rides = []
    try:
        with urllib.request.urlopen(config.THEME_PARKS_DLR_URL) as url:
            for park_rides in list(json.loads(url.read().decode()).values()):
                rides.extend(park_rides)
        with urllib.request.urlopen(config.THEME_PARKS_WDW_URL) as url:
            for park_rides in list(json.loads(url.read().decode()).values()):
                rides.extend(park_rides)
    except Exception:
        # Log the failure; the function still caches whatever rides were fetched.
        logger.exception('fetch_wait_times: failed to fetch ride data')

    logger.info('fetch_wait_times: got {} rides'.format(len(rides)))

    memory_cache = cache.save(rides, config.THEME_PARKS_CACHE_PATH)
    return rides
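
The two theme-park endpoints are assumed to return one JSON object each, keyed by park, with a list of ride dicts per park; the loop above only flattens those lists. A tiny illustration with made-up data (the field names are hypothetical):

import json

# Hypothetical payload in the shape the loop expects: park name -> list of rides.
payload = json.loads("""
{
  "disneyland": [{"name": "Space Mountain", "waitTime": 45}],
  "california_adventure": [{"name": "Radiator Springs Racers", "waitTime": 70}]
}
""")

rides = []
for park_rides in payload.values():
    rides.extend(park_rides)

print(len(rides))  # 2 -- rides from both parks, flattened into one list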
Example 3
def fetch_events(request_from_server=False):
    global memory_cache
    logger.info('fetch_events: called')

    if memory_cache:
        logger.info('fetch_events: checking memory cache')
    else:
        logger.info('fetch_events: checking disk cache')
        memory_cache = cache.fetch(config.CALENDAR_CACHE_PATH)

    content = cache.content(memory_cache, config.CALENDAR_CACHE_LIFETIME,
                            request_from_server)
    if content:
        return content

    store = CalCalendarStore.defaultCalendarStore()
    cals = []
    for cal in store.calendars():
        if cal.title() in config.CALENDAR_CALENDARS:
            cals.append(cal)
        logger.info(cal.title())

    cst = tz.gettz('America/Chicago')
    today = datetime.now().date()
    start_dt = datetime(today.year, today.month, today.day, tzinfo=cst)
    end_dt = start_dt + timedelta(180)

    # timestamp() honors tzinfo; strftime("%s") is platform-dependent and ignores it.
    start_int = int(start_dt.timestamp())
    end_int = int(end_dt.timestamp())
    start = NSDate.dateWithTimeIntervalSince1970_(start_int)
    end = NSDate.dateWithTimeIntervalSince1970_(end_int)

    formatted_results = {}

    for cal in cals:
        events = []
        pred = CalCalendarStore.eventPredicateWithStartDate_endDate_calendars_(
            start, end, [cal])
        for event in store.eventsWithPredicate_(pred):
            s = event._.startDate.timeIntervalSince1970()
            e = event._.endDate.timeIntervalSince1970()
            events.append({'name': event._.title, 'start': s, 'end': e})
        formatted_results[cal.title()] = events

    memory_cache = cache.save(formatted_results, config.CALENDAR_CACHE_PATH)
    return formatted_results
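
The 180-day window computation is independent of the macOS calendar APIs, so it can be checked on its own. A standalone sketch of the same conversion, using timestamp() instead of the platform-dependent strftime("%s"):

from datetime import datetime, timedelta

from dateutil import tz

cst = tz.gettz('America/Chicago')
today = datetime.now().date()

# Midnight today in Central time, and the same moment 180 days later.
start_dt = datetime(today.year, today.month, today.day, tzinfo=cst)
end_dt = start_dt + timedelta(days=180)

# timestamp() honors tzinfo; strftime("%s") is undocumented, platform-dependent,
# and falls back to the machine's local zone.
start_int = int(start_dt.timestamp())
end_int = int(end_dt.timestamp())

print(start_int, end_int)  # epoch seconds ready for NSDate.dateWithTimeIntervalSince1970_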
Example 4
def fetch_playlists(request_from_server=False):
    auth()

    global memory_playlists_cache, sp
    logger.info('fetch_playlists: called')

    if memory_playlists_cache:
        logger.info('fetch_playlists: checking memory cache')
    else:
        logger.info('fetch_playlists: checking disk cache')
        memory_playlists_cache = cache.fetch(
            config.SPOTIFY_PLAYLISTS_CACHE_PATH)

    content = cache.content(memory_playlists_cache,
                            config.SPOTIFY_PLAYLISTS_CACHE_LIFETIME,
                            request_from_server)
    if content:
        return content

    try:
        formatted_playlists = []
        playlists = sp.user_playlists(config.SPOTIFY_USERNAME)
        for playlist in playlists['items']:
            if playlist['owner']['id'] == config.SPOTIFY_USERNAME:
                formatted_playlists.append({
                    'name': playlist['name'],
                    'uri': playlist['uri']
                })

        logger.info('fetch_playlists: got {} playlists'.format(
            len(formatted_playlists)))

        memory_playlists_cache = cache.save(
            formatted_playlists, config.SPOTIFY_PLAYLISTS_CACHE_PATH)
        return formatted_playlists
    except Exception:
        logger.exception('fetch_playlists: failed to fetch playlists from Spotify')
        return []
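
fetch_playlists assumes a module-level auth() that builds the spotipy client bound to the global sp. One plausible sketch, assuming the OAuth flow and hypothetical config names (the real helper may use a different flow or scopes):

import spotipy
from spotipy.oauth2 import SpotifyOAuth

import config  # same local config module the fetchers use

sp = None


def auth():
    # Build the global spotipy client once; later calls are no-ops.
    global sp
    if sp is not None:
        return
    sp = spotipy.Spotify(auth_manager=SpotifyOAuth(
        client_id=config.SPOTIFY_CLIENT_ID,          # hypothetical config names
        client_secret=config.SPOTIFY_CLIENT_SECRET,
        redirect_uri=config.SPOTIFY_REDIRECT_URI,
        scope='playlist-read-private'))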
Example 5
def fetch_homework(request_from_server=False):
    auth()

    global creds, memory_cache
    logger.info('fetch_homework: called')

    if memory_cache:
        logger.info('fetch_homework: checking memory cache')
    else:
        logger.info('fetch_homework: checking disk cache')
        memory_cache = cache.fetch(config.GOOGLE_CACHE_PATH)

    content = cache.content(memory_cache, config.GOOGLE_CACHE_LIFETIME,
                            request_from_server)
    if content:
        return content

    try:
        service = build('drive', 'v3', credentials=creds)
        request = service.files().export_media(
            fileId=config.GOOGLE_HOMEWORK_DOC_ID, mimeType='text/html')

        fh = io.BytesIO()
        downloader = MediaIoBaseDownload(fh, request)
        done = False
        while not done:
            status, done = downloader.next_chunk()
            logger.info("fetch_homework: Download %d%%." %
                        int(status.progress() * 100))

        html = fh.getvalue().decode('UTF-8')
        raw_text = html2text(html)
        classes = raw_text.split('Flow:')[0]

        for key, replacement in config.GOOGLE_HOMEWORK_DOC_REPLACEMENT.items():
            classes = classes.replace(key, replacement)

        formatted_assignments = []
        classes = classes.split('<class>')[1:]
        for class_str in classes:
            classes_split = class_str.split('\n')
            class_name = classes_split[0]

            for raw_assignment in classes_split[1:]:
                raw_assignment = raw_assignment.replace('  * ', '')
                detail_split = raw_assignment.split('] ')
                if len(detail_split) == 2:
                    date = detail_split[0][1:]
                    date_split = date.split('/')
                    date_dt = datetime(2020, int(date_split[0]),
                                       int(date_split[1]))
                    name = detail_split[1]
                    formatted_assignments.append({
                        'name': class_name + ': ' + name,
                        'start': date_dt.timestamp(),
                        'end': date_dt.timestamp()
                    })

        logger.info('fetch_homework: fetched {} assignments'.format(
            len(formatted_assignments)))

        memory_cache = cache.save(formatted_assignments,
                                  config.GOOGLE_CACHE_PATH)
        return formatted_assignments
    except Exception:
        logger.exception('fetch_homework: failed to fetch or parse the doc')
        return []
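
The inner loop of fetch_homework assumes the exported doc renders assignment lines roughly like "  * [3/14] Read chapter 5" under each <class> heading. A stripped-down version of the same parsing, runnable on a made-up line (the input format itself is an assumption):

from datetime import datetime

class_name = 'History'                             # hypothetical class heading
raw_assignment = '  * [3/14] Read chapter 5'       # hypothetical exported line

raw_assignment = raw_assignment.replace('  * ', '')
detail_split = raw_assignment.split('] ')
if len(detail_split) == 2:
    month, day = detail_split[0][1:].split('/')    # drop the leading '[' and split M/D
    date_dt = datetime(2020, int(month), int(day)) # the year is hard-coded upstream, too
    print({
        'name': class_name + ': ' + detail_split[1],
        'start': date_dt.timestamp(),
        'end': date_dt.timestamp(),
    })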