Code Example #1
def make_every_day():
    """
    每天0点执行的计划任务。
    销毁上一天的所有任务安排并从API读取下一天的任务安排,然后创建新的任务。
    :return: None
    """
    print("Make schedule data every day in %s" %
          (datetime.now().strftime("%Y-%m-%d %H:%M:%S"), ))
    sches = schedule.get_schedule(config.api['server'] +
                                  config.api['schedule'])
    if sches is not None:
        sche_year, sche_term, sche_begin, sche_end, sche_items = sches
        sched = task  # 'task' is presumably a module-level scheduler instance
        sche_weeks, sche_weekday = schedule.calc_weeks(
            datetime.strptime(sche_begin, '%Y-%m-%d').date(),
            datetime.now().date())
        for item in sche_items:  # Create one scheduled task for each class-end time in the schedule.
            check = {
                'year': sche_year,
                'term': sche_term,
                'weeks': sche_weeks,
                'weekday': sche_weekday,
                'course_number': item['no'],
                'begin_time': item['begin'],
                'end_time': item['end']
            }
            sched.add_job(schedule.get_attendance_task(config.database, check),
                          'date',
                          run_date=get_datetime(item['end']),
                          misfire_grace_time=60)
            print("Add new schedule task in %s" % (item['end'], ))
    else:
        print(
            "Exception happened while getting schedule data. "
            "Please check the config or server."
        )
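
The sched.add_job(..., 'date', run_date=..., misfire_grace_time=60) call matches APScheduler's scheduler API, so the module-level task object the function picks up is presumably an APScheduler scheduler. A minimal sketch of that assumed wiring (the import and the midnight trigger are illustrative, not from the original):

# Assumed setup: APScheduler is inferred from the add_job signature above;
# 'task' and the cron trigger are illustrative.
from apscheduler.schedulers.blocking import BlockingScheduler

task = BlockingScheduler()
task.add_job(make_every_day, 'cron', hour=0, minute=0)  # re-run at 00:00 daily
task.start()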
Code Example #2
    def do_POST(self):
        content_length = int(self.headers['Content-Length'])
        post_data = self.rfile.read(content_length)
        post_data = {
            k: v
            for k, v in (x.split('=')
                         for x in post_data.decode('utf-8').split('&'))
        }
        error, schedule, warnings = None, None, None

        try:
            # Given start date or left empty
            start_date = date_parse(
                post_data.get("start_date", default_start_date))
        except ValueError as e:
            # Given start date but not valid string; use default
            traceback.print_exc()
            start_date = date_parse(default_start_date)

        try:
            schedule, warnings = get_schedule(
                post_data['username'], parse.unquote(post_data['password']),
                start_date)
        except Exception as e:
            traceback.print_exc()
            error = e
        if schedule:
            self.send_response(200)
            self.send_header('Content-Type', 'text/html')
            self.end_headers()
            encoded_schedule = 'data:application/octet-stream;base64,' + \
                b64encode(str(schedule).encode('utf-8')).decode('utf-8')
            output = '<h1>Schedule created '
            if warnings:
                output += 'with %s warning%s!</h1><ul>' % (
                    len(warnings), '' if len(warnings) == 1 else 's')
                for warning in warnings:
                    output += '<li>' + warning + '</li>'
                output += '</ul>'
            else:
                output += 'successfully!</h1>'
            output += '<h2><a class="button" download="%s" href="%s" title="Download Schedule">Download Schedule</a></h2>' % (
                post_data['username'] + ' - ' +
                post_data.get("start_date", default_start_date) + '.ics',
                encoded_schedule)
            output += '<a href="/">go back</a>'

            self.write_template(output)
        else:
            self.send_response(200)
            self.send_header('Content-Type', 'text/html')
            self.end_headers()
            if not error:
                output = '<h1>Incorrect username or password!</h1>'
            else:
                output = "<h1>An internal error occured</h1><pre>" + str(
                    error) + "</pre>"
                print(error)
            output += '<p><a href="/">go back</a></p>'
            self.write_template(output)
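
The hand-rolled split('&')/split('=') parsing above breaks on values containing '=' or '+'-encoded spaces, which is why the password is unquoted separately. The standard library handles all of that in one step; a small sketch using the field names the handler expects:

# Sketch: parsing the same form body with urllib.parse instead of manual splits.
from urllib.parse import parse_qs

body = 'username=alice&password=p%40ss%3Dword&start_date=2020-01-06'
post_data = {k: v[0] for k, v in parse_qs(body).items()}
# {'username': 'alice', 'password': 'p@ss=word', 'start_date': '2020-01-06'}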
Code Example #3
File: atom.py  Project: Ekasy/AtomBot
def bot_talking(message):
    bot.send_chat_action(message.chat.id, action='typing')
    text = message.text.lower()
    if weather.is_weather(text):
        bot.send_message(message.chat.id, weather.get_weather(text))
    elif news.is_news(text):
        bot.send_message(message.chat.id, news.get_news(text))
    elif schedule.is_schedule(text):
        result = schedule.get_schedule(message.text.upper())
        if isinstance(result, str):
            bot.send_message(message.chat.id, result)
        else:
            # 'Держи!' is Russian for "Here you go!"
            bot.send_photo(message.chat.id, caption='Держи!', photo=result)
    else:
        bot.send_message(message.chat.id,
                         smart_talking.get_smart_talking(message.text))
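
The send_chat_action/send_message/send_photo methods match pyTelegramBotAPI (telebot), so the surrounding wiring presumably looks something like the following (the token placeholder and handler registration are illustrative):

# Assumed setup: pyTelegramBotAPI is inferred from the method names above.
import telebot

bot = telebot.TeleBot('YOUR-BOT-TOKEN')

@bot.message_handler(content_types=['text'])
def handle_text(message):
    bot_talking(message)

bot.polling(none_stop=True)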
Code Example #4
File: team_graph.py  Project: ktarrant/WinCandles
def generate_play_logs(teamid, year=2016):
	sched = get_schedule(year)
	games = sched[(sched.home_team == teamid) | (sched.away_team == teamid)]
	for i in games.index:
		game = games.loc[i]
		game_date = datetime.strptime(str(game.date), '%Y%m%d').date()
		play_logs = get_play_logs(game_date)
		try:
			url_play_log = play_logs[game.home_team]
		except KeyError:
			log.error(
				"KeyError: Failed to find expected '{}' in Play Log list @ {}"
				.format(game.home_team, game_date))
			continue
		yield { 'date': game.date, 'home': teamid, 'away': game.away_team,
				'log': get_play_log_data(url_play_log) }
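
generate_play_logs is a generator, so no game data is fetched until it is iterated. A hypothetical call, using the field names from the snippet:

# Illustrative usage; the team id is made up.
for entry in generate_play_logs('BOS', year=2016):
    print(entry['date'], entry['home'], 'vs', entry['away'])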
Code Example #5
def get_state(time):
	return {
		'time': get_mins(time),
		'patients': get_patients(get_schedule(time), time),
		'sites': config_coll.find_one({'name': 'site'})['data'],
		'optimizer': {
			"active": False,
			"advanceTime": 60,
			"objective": {
				"waitNorm": "l1",
				"overTimeWeight": 10
			},
			"confidenceLevel": 0.7,
			"patientConfidenceLevel": 0.7,
			"numSamples": 100
		}
	}
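
config_coll.find_one({'name': 'site'}) follows PyMongo's query API, so config_coll is presumably a MongoDB collection. A sketch of that assumption (database and collection names are hypothetical):

# Assumed setup for config_coll; names are illustrative only.
from pymongo import MongoClient

config_coll = MongoClient()['clinic']['config']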
Code Example #6
File: test.py  Project: babsk/ZWay-API-Python-Access
        msg += " day: " + str(o['objTimer'][i]['iDay'])
        msg += " time: " + str(o['objTimer'][i]['iTm'])
        msg += " sp: " + str(o['objTimer'][i]['fSP'])
        print (msg)
                    

roomid = int(input("Enter id of room: "))


#data = input ("Enter heatgenius schedule: ")
#print (data)

#processData (data)

cookie = login.send()
r = schedule.get_schedule(cookie, roomid)

displaySchedule(r.json()['data'])

print ("Enter new trigger point:")
day = int(input ("Day: "))
hour = int(input ("Hour: "))
minute = int (input ("Minute: "))
sp = int (input ("SP: "))

trigger = {'day': day, 'id': '***', 'minute': minute, 'hour': hour, 'eventName': '****', 'active': False, 'sp': sp}
sched = r.json()['data']
sched.append(trigger)

data = {"data":sched}
r = schedule.set_schedule(cookie,roomid,data)
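
The r.json() calls imply that schedule.get_schedule returns a requests.Response; under that assumption it might look roughly like the following (the endpoint URL is invented for illustration):

# A plausible shape for schedule.get_schedule, assuming the requests library
# and a made-up endpoint; the real implementation is not shown in the snippet.
import requests

def get_schedule(cookie, roomid):
    return requests.get('https://example.invalid/rooms/%d/schedule' % roomid,
                        cookies=cookie)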
Code Example #7
def make_index_file(date, audio_fname_list, output_fname=None):
    import datetime
    import os

    import schedule
    import utils
    from rsab.listenagain import config, ListenAgainConfigError
    if not config.has_option('DEFAULT', 'output'):
        raise ListenAgainConfigError('No [DEFAULT]/output config defined')

    if output_fname is None:
        output_fname = 'index.html'
    output_fname = os.path.join(config.get('DEFAULT', 'output'), output_fname)
    output_file = open(output_fname, 'w')

    template = Template('player')

    playlist_items = []
    details_for_audio_files = []
    show_name_mapping = {}
    presenter_name_mapping = {}
    for audio_fname in audio_fname_list:
        playlist_items.append(str(make_playlist_item(audio_fname)))
        details_for_audio_files.append(schedule.schedule_from_audio_file_name(audio_fname))

    live_schedule = schedule.get_schedule(date + datetime.timedelta(days=1))
    for details in details_for_audio_files + live_schedule:
        if details is None:
            continue
        show_name = details['show']
        show_title = utils.get_message(show_name, 'show', default=None)
        if show_title is None:
            show_title = utils.get_message(show_name, 'presenter', default=show_name)
        if show_name and show_name not in show_name_mapping:
            show_name_mapping[show_name] = show_title
        for presenter in details.get('presenters', []):
            if not presenter or presenter == show_name:
                continue
            if presenter not in presenter_name_mapping:
                presenter_name_mapping[presenter] = utils.get_message(presenter, 'presenter', default=presenter)

    template.playlist_items = '\n'.join(filter(None, playlist_items))
    hidden_input = '<input type="hidden" disabled="disabled" name="%s" value="%s" />'
    template.show_name_mapping = '\n'.join([
        hidden_input % ('showname', '%s:%s' % pair)
        for pair in show_name_mapping.items()
    ])
    template.presenter_name_mapping = '\n'.join([
        hidden_input % ('presentername', '%s:%s' % pair)
        for pair in presenter_name_mapping.items()
    ])
    template.live_schedule = '\n'.join([
        hidden_input % (
            'live_schedule',
            '%s:%s' % (
                details['start'].strftime('%H:%M'),
                ','.join([details['show']] + details.get('presenters', [])),
            ),
        )
        for details in live_schedule
    ])

    output_file.write(str(template))
    output_file.close()
    return output_fname
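
Template here is not the standard library's string.Template: it is constructed from a name, assigned attributes, and rendered with str(). A hypothetical stand-in consistent with that usage (the real class presumably lives elsewhere in the project):

# Hypothetical stand-in for Template; fills %(name)s slots in an HTML file
# with whatever attributes were assigned. The '.html' suffix is a guess.
class Template(object):
    def __init__(self, name):
        self.__dict__['_name'] = name
        self.__dict__['_fields'] = {}

    def __setattr__(self, key, value):
        self._fields[key] = value

    def __str__(self):
        with open(self._name + '.html') as f:
            return f.read() % self._fields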
Code Example #8
    if not os.path.exists("login.txt"):
        print("Error: create a login.txt which contains the username and password")
        print("       $ echo 'username password' > login.txt")
        return 1
    with open("login.txt") as f:
        for line in f:
            line = line.strip()
            if line.startswith("#"):
                continue
            user, password = line.split()
    browser = SpeakerDeckBrowser(user, password)
    browser.login()
    deck_groups = load_decks()
    #summarize_decks(decks)
    for id, decks in deck_groups.items():
        released = [d for d in decks if d.released]
        if not released:
            continue
        deck = released[-1]
        copy_deck_to_github(deck)
        copy_deck_to_speakerdeck(browser, deck)

DECK_DIR = "/Users/wolever/Dropbox/Apps/pycon-2015-slides/"
GITHUB_DIR = "/Users/wolever/code/pycon-2015-slides/"
SCHEDULE = {}
for item in get_schedule():
    SCHEDULE[item["conf_key"]] = item

if __name__ == '__main__':
    sys.exit(main())
Code Example #9
def main():
    schedules = schedule.get_schedule()
    emails = mails.get_emails()
    forecast = weather.get_weather_forecast()
    mails.send_emails(emails, schedules, forecast)
Code Example #10
File: get_data.py  Project: ZiiCee/minpower
def setup_times(generators_data, loads_data):
    """
    Create a :class:`~schedule.TimeIndex` object
    from the schedule files.

    Also create a unified DataFrame of all the schedules, `timeseries`.

    If there are no schedule files (as in ED,OPF),
    create an index with just a single time.
    """
    fcol = "schedulefilename"
    ncol = "schedulename"

    loads_data[ncol] = None
    generators_data[ncol] = None

    if fcol not in loads_data.columns:
        loads_data[fcol] = None
    if fcol not in generators_data.columns:
        generators_data[fcol] = None

    datadir = user_config.directory

    timeseries = {}

    def filter_notnull(df, col):
        return df[df[col].notnull()]

    for i, load in filter_notnull(loads_data, fcol).iterrows():
        name = "d{}".format(i)
        loads_data.ix[i, ncol] = name
        timeseries[name] = (
            get_schedule(joindir(datadir, load[fcol])) * user_config.load_multiplier + user_config.load_adder
        )

    for i, gen in filter_notnull(generators_data, fcol).iterrows():
        name = "g{}".format(i)
        generators_data.ix[i, ncol] = name
        timeseries[name] = get_schedule(joindir(datadir, gen[fcol]))

    # handle observed and forecast power
    fobscol = "observedfilename"
    obscol = "observedname"
    ffcstcol = "forecastfilename"
    fcstcol = "forecastname"

    obs_name = None
    if fobscol in generators_data:
        generators_data[obscol] = None
        for i, gen in filter_notnull(generators_data, fobscol).iterrows():
            obs_name = "g{}_observations".format(i)
            generators_data.ix[i, obscol] = obs_name
            timeseries[obs_name] = get_schedule(joindir(datadir, gen[fobscol]))
            if user_config.wind_multiplier != 1.0:
                timeseries[obs_name] *= user_config.wind_multiplier

        generators_data = generators_data.drop(fobscol, axis=1)

    fcst_name = None
    if ffcstcol in generators_data:
        generators_data[fcstcol] = None
        for i, gen in filter_notnull(generators_data, ffcstcol).iterrows():
            fcst_name = "g{}_forecast".format(i)
            generators_data.ix[i, fcstcol] = fcst_name
            timeseries[fcst_name] = (
                get_schedule(joindir(datadir, gen[ffcstcol])) * user_config.wind_multiplier
                + user_config.wind_forecast_adder
            )

            if user_config.wind_error_multiplier != 1.0:
                logging.debug("scaling wind forecast error")
                obs_name = "g{}_observations".format(i)
                error = timeseries[fcst_name] - timeseries[obs_name]
                timeseries[fcst_name] = timeseries[obs_name] + error * user_config.wind_error_multiplier

            if (timeseries[fcst_name] < 0).any():
                print(timeseries[fcst_name].describe())
                logging.warning("Wind forecast must always be at least zero.")
                timeseries[fcst_name][timeseries[fcst_name] < 0] = 0

        generators_data = generators_data.drop(ffcstcol, axis=1)

    generators_data = generators_data.drop(fcol, axis=1)
    loads_data = loads_data.drop(fcol, axis=1)

    if len(timeseries) == 0:
        # this is a ED or OPF problem - only one time
        return DataFrame(), just_one_time(), generators_data, loads_data

    timeseries = DataFrame(timeseries)
    times = TimeIndex(timeseries.index)
    timeseries.index = times.strings.values

    if user_config.wind_capacity_factor != 0:
        if len(filter_notnull(generators_data, obscol)) != 1:
            raise NotImplementedError("wind capacity factor only works with one wind generator")

        all_loads = timeseries[filter(lambda col: col.startswith("d"), timeseries.columns)]

        capf_current = timeseries[obs_name].sum() / all_loads.sum(axis=1).sum()

        wind_mult = user_config.wind_capacity_factor / capf_current
        user_config.wind_multiplier = wind_mult

        logging.info(
            "scaling wind from a c.f. of {} to a c.f. of {}".format(capf_current, user_config.wind_capacity_factor)
        )
        timeseries[obs_name] *= wind_mult
        if fcst_name:
            timeseries[fcst_name] *= wind_mult

    return timeseries, times, generators_data, loads_data
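
get_schedule here loads one time series per schedule file. Its format is not shown in this snippet; a minimal stand-in, under the assumption of a single-column, timestamp-indexed CSV, would be:

# Minimal stand-in for get_schedule; the CSV layout is an assumption, not the
# documented minpower format.
import pandas as pd

def get_schedule(filename):
    # one value per interval, indexed by timestamp
    return pd.read_csv(filename, index_col=0, parse_dates=True).iloc[:, 0]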
Code Example #11
def get_schedule(group):
    dayOfWeek = schedule.get_dayNumberOfWeek(day)  # 'day' is presumably set elsewhere
    groupId = schedule.get_groupid(group.text)
    response = ''
    if groupId == "Invalid group":
        response += '<b>нет такой группы</b>'  # "no such group"
    else:
        (date, time_lst, location_lst, auditorium_lst, discipline_lst,
         type_lst, lecturer_lst) = schedule.get_schedule(
            schedule.get_table(group.text, groupId), dayOfWeek)
        response += '\U0001F4C5<b>{}</b>\n'.format(date)
        for time, location, auditorium, discipline, type_, lecturer in zip(
                time_lst, location_lst, auditorium_lst,
                discipline_lst, type_lst, lecturer_lst):
            response += ('\U0000270F<b>{}</b> {}, ауд.{}\n'
                         '<b>{}</b>: {}, {}\n').format(
                time, location, auditorium, type_, discipline, lecturer)
        if len(location_lst) == 0:
            response += '<b>нет пар</b>'  # "no classes"
    JBot.send_message(group.chat.id, response, parse_mode='HTML',
                      reply_markup=types.ReplyKeyboardRemove())
Code Example #12
def application(environ, start_response):
    '''
    if environ['PATH_INFO'] == '/add':
        schedule.add(environ)
    if environ['PATH_INFO'] == '/remove':
        schedule.remove(environ)
    '''
    if environ['PATH_INFO'] == '/schedule':
        start_response('200 OK',
                       [('Content-Type', 'application/json; charset=UTF-8')])
        return schedule.get_schedule()
    if environ['PATH_INFO'] == '/crash':
        crashreports.add(environ)
    if environ['PATH_INFO'] == '/table':
        if is_authorized(environ):
            content = crashreports.build_reports_table(
                cgi.parse_qs(environ.get('QUERY_STRING', '')))
            mapping = {'title': 'Welcome to my Website', 'content': content}
            start_response('200 OK', [('Content-Type', 'text/html')])
            return templates_builder.render('table.html', mapping)
        else:
            start_response('200 OK', [('Content-Type', 'text/html')])
            return templates_builder.render('auth.html', 'text/html')
    if environ['PATH_INFO'] == '/auth':
        start_response('200 OK', [('Content-Type', 'text/html')])
        return templates_builder.render('auth.html', 'text/html')
    if environ['PATH_INFO'] == '/login':
        ip = environ['HTTP_X_FORWARDED_FOR'].split(',')[-1].strip()
        post_input = urllib.parse.parse_qs(
            environ['wsgi.input'].readline().decode(), True)
        m = hashlib.md5()
        m.update((ip + post_input['username'][0] +
                  post_input['password'][0]).encode('utf-8'))
        cookie = SimpleCookie()
        cookie['login'] = m.hexdigest()
        start_response('200 OK',
                       [('Content-Type', 'text/html'),
                        ('Set-Cookie', cookie['login'].OutputString())])
        return "OK"
    if environ['PATH_INFO'] == '/delete':
        if is_authorized(environ):
            parameters = cgi.parse_qs(environ.get('QUERY_STRING', ''))
            if 'id' in parameters:
                crashreports.delete_report_by_id(
                    cgi.escape(parameters['id'][0]))
            content = crashreports.build_reports_table()
            mapping = {'title': 'Welcome to my Website', 'content': content}
            start_response('200 OK', [('Content-Type', 'text/html')])
            return templates_builder.render('table.html', mapping)
        else:
            start_response('200 OK', [('Content-Type', 'text/html')])
            return templates_builder.render('auth.html', 'text/html')
    if environ['PATH_INFO'] == '/current':
        start_response('200 OK', [('Content-Type', 'application/json')])
        return current_stream.get_current_stream_info_json()
    start_response('200 OK', [('Content-Type', 'text/html')])
    return [('''Привет %(subject)s
    #Hello %(subject)s!
    ''' % {
        'subject': '111'
    }).encode()]
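
A WSGI callable like this one can be served locally with the standard library; a minimal sketch (host and port are illustrative):

# Serving the app with wsgiref from the standard library.
from wsgiref.simple_server import make_server

if __name__ == '__main__':
    with make_server('', 8000, application) as httpd:
        httpd.serve_forever()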
Code Example #13
    cors = [  # list head reconstructed; the snippet begins mid-expression
        get_ses(username, password, disco, whatsapp)
        for username, password, disco, whatsapp in users
    ]
    await asyncio.gather(*cors)
    db.load_users(users)
    await notify(wa, [["918592988798", f"{len(sessions)} mouths to feed"]])


if __name__ == "__main__":
    users = user.get_users()
    custom = user.get_courses()
    # user.conn.close()
    lp = asyncio.get_event_loop()
    lp.run_until_complete(init())
    lp.run_until_complete(crawl())
    schedules = db.get_schedule()

    while True:
        # now = pytz.utc.localize(datetime.utcnow()).astimezone(ist)
        now = datetime.now()

        if now.hour == 17 and now.minute >= 10:

            async def close():
                await notif.close()
                for ses in sessions.values():
                    await ses.close()

            lp.run_until_complete(close())
            lp.close()
            user.conn.close()
            break  # stop polling once connections and the loop are closed
Code Example #14
def run():
    from optparse import OptionParser
    import datetime
    import glob
    import os
    import sys
    import time

    import audio
    import html
    import notify
    import utils
    import recorder
    import remote
    import schedule

    option_parser = OptionParser()
    option_parser.add_option(
        "-p",
        "--print",
        dest="print_schedule",
        action="store_true",
        default=False,
        help="Print information about the date but don't do anything",
    )

    option_parser.add_option("-w", "--wav", dest="wavs", action="store_true", default=False, help="Construct WAVs")

    option_parser.add_option("-e", "--encode", dest="encode", action="store_true", default=False, help="Encode MP3s")

    option_parser.add_option(
        "-i", "--index", dest="index", action="store_true", default=False, help="Generate index pages"
    )

    option_parser.add_option(
        "-u", "--upload", dest="upload", action="store_true", default=False, help="Upload data to web server"
    )

    option_parser.add_option(
        "-c", "--config", dest="config_file", help="Specify alternative config file", metavar="FILE"
    )

    option_parser.add_option(
        "-f",
        "--filter",
        dest="filter",
        action="append",
        help="Filter schedule to items containing at least one of the given show/presenter",
        metavar="NAME",
    )

    options, args = option_parser.parse_args()

    task_options = ["print_schedule", "wavs", "encode", "index", "upload"]
    num_task_options_supplied = len([None for option_name in task_options if getattr(options, option_name, False)])
    # No specific do-something options were given.  Do everything.
    if num_task_options_supplied == 0:
        for option_name in task_options:
            setattr(options, option_name, True)

    config_files = utils.default_config_files()
    if options.config_file is not None:
        config_files.append(options.config_file)
    config = utils.init_config(config_files)

    date_string = "yesterday"
    if len(args) == 1:
        date_string = args[0]
    date = utils.interpret_date_string(date_string)

    start_time = time.time()

    recorder.init_module()
    bounds_and_files = recorder.get_bounds_and_files_for_date(date)
    schedule_list = schedule.get_schedule(date, filter_items=options.filter)

    if options.wavs or options.print_schedule:
        if options.filter:
            print("Schedule (filtered):")
        else:
            print("Schedule:")
        schedule.print_schedule(schedule_list)
        print()
        print("Recordings:")
        recorder.print_bounds_and_files(bounds_and_files)

    wav_files = None
    if options.wavs:
        wav_files = audio.make_wav_files(bounds_and_files, schedule_list)

    mp3_files = None
    if options.encode:
        # Always rebuild WAVs list in case any are hanging around from before.
        if True:  # wav_files is None:
            if config.has_option("main", "wavs"):
                wavs_dir = config.get("main", "wavs")
            else:
                wavs_dir = os.getcwd()
            wav_files = glob.glob(os.path.join(wavs_dir, "*.wav"))
        # XXX Delete working WAVs?  Only if MP3 was created for it.
        mp3_files = [audio.encode_file(path) for path in wav_files]
        if True:  # XXX look for no-delete option later
            print("Deleting local copies of WAVs...")
            for (wav, mp3) in zip(wav_files, mp3_files):
                if mp3 is not None and os.path.isfile(wav):
                    os.unlink(wav)
                    print("   ", wav)
            print("done.")
            print()

    ftp_conn = None
    remote_audio_files = []
    if options.upload or options.index:
        if config.has_option("ftp", "keep_days"):
            keep_days = config.getint("ftp", "keep_days")
        else:
            keep_days = 7
        earliest_keep_date = date - datetime.timedelta(days=keep_days - 1)
        ftp_conn = remote.connect()
        remote_audio_files = ftp_conn.get_list_of_audio_files()

        # First make an index with no old files:
        # XXX For now this ignores per-file limits, so will remove everything
        # over N days old from the index temporarily.  If a file has a higher
        # number of days defined, it will be restored to the index later when
        # it's not deleted -- but for a file with a lower number of days
        # defined, it'll disappear later than it should.
        audio_files_for_first_index = [
            fname
            for (fname, details) in [
                (fname, schedule.schedule_from_audio_file_name(fname)) for fname in remote_audio_files
            ]
            if details is not None and details["date"] >= earliest_keep_date
        ]

        index_fname = html.make_index_file(date, audio_files_for_first_index)
        if options.upload:
            ftp_conn.storlines("STOR index.html", open(index_fname, "rb"))

    if options.upload:
        ftp_conn.remove_old_audio(date, keep_days)

        # XXX Here we should delete local copies of MP3s that are more than N
        # days old, in case the upload has failed for more than N days.
        pass

        # Always build the list again as we can pick up files we missed before.
        if True:  # mp3_files is None:
            if config.has_option("main", "mp3s"):
                mp3s_dir = config.get("main", "mp3s")
            else:
                mp3s_dir = os.getcwd()
            mp3_files = glob.glob(os.path.join(mp3s_dir, "*.mp3"))

        try:
            uploaded = remote.upload_audio(ftp_conn, mp3_files)
        except Exception:
            import traceback

            uploaded = []  # nothing was confirmed uploaded; avoids a NameError below
            print("Exception uploading files")
            traceback.print_exc(file=sys.stdout)
            print("Continuing...")

        # Reconnect (or grab the cached connection) in case there were failures
        # during the upload.  A better structure would see us making this
        # completely transparent across all remote calls, but for now we focus
        # on the big upload.
        ftp_conn = remote.connect()

        if True:  # XXX look for no-delete option later
            print("Deleting local copies of MP3s...")
            for mp3_path in mp3_files:
                if os.path.split(mp3_path)[1] in uploaded and os.path.isfile(mp3_path):
                    print("   ", mp3_path)
                    os.unlink(mp3_path)
            print("done.")
            print()

        notify.notify_all(mp3_files)

    if options.index:
        # Second index file: whatever's on the server.
        remote_audio_files = ftp_conn.get_list_of_audio_files()

        index_fname = html.make_index_file(date, remote_audio_files)
        if options.upload:
            ftp_conn.storlines("STOR index.html", open(index_fname, "rb"))
            # XXX Now also sync up anything that's in the www directory
            # (resource files such as JS, CSS, images, jPlayer...).
            pass

    if ftp_conn is not None:
        ftp_conn.quit()

    end_time = time.time()
    if not options.print_schedule:
        duration = end_time - start_time
        print("Took %2.2dm %2.2ds" % divmod(duration, 60))

    return 0