Example 1
    def get(self, account_id):
        meals = (models.Meal
                 .select()
                 .where(models.Meal.account == self.account)
                 .order_by(models.Meal.date.desc(), models.Meal.time.desc()))

        if 'meal-date-from' in request.args:
            if not utils.is_valid_date(request.args['meal-date-from']):
                raise APIError('Invalid meal-date-from format')

            dt = parse_date(request.args['meal-date-from']).date()
            meals = meals.where(models.Meal.date >= dt)

        if 'meal-date-to' in request.args:
            if not utils.is_valid_date(request.args['meal-date-to']):
                raise APIError('Invalid meal-date-to format')

            dt = parse_date(request.args['meal-date-to']).date()
            meals = meals.where(models.Meal.date <= dt)

        if 'meal-time-from' in request.args:
            if not utils.is_valid_time(request.args['meal-time-from']):
                raise APIError('Invalid meal-time-from format')

            meals = meals.where(models.Meal.time >= request.args['meal-time-from'])

        if 'meal-time-to' in request.args:
            if not utils.is_valid_time(request.args['meal-time-to']):
                raise APIError('Invalid meal-time-to format')

            meals = meals.where(models.Meal.time <= request.args['meal-time-to'])

        return self.return_paginated(meals, 'meals', {'account_id': account_id})
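
All of these handlers rely on utils.is_valid_date and utils.is_valid_time, which are not shown in any of the snippets. A minimal sketch of what such validators might look like, assuming ISO-style YYYY-MM-DD dates and HH:MM:SS times:

from datetime import datetime

# Sketch only: utils.is_valid_date / utils.is_valid_time are not included in the
# snippets, so the format strings below are assumptions.
def is_valid_date(value, fmt='%Y-%m-%d'):
    """Return True if value parses as a date in the assumed format."""
    try:
        datetime.strptime(value, fmt)
        return True
    except (ValueError, TypeError):
        return False

def is_valid_time(value, fmt='%H:%M:%S'):
    """Return True if value parses as a time in the assumed format."""
    try:
        datetime.strptime(value, fmt)
        return True
    except (ValueError, TypeError):
        return False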
Example 2
    def buildSubQuery(self, groupid, range=None, ignore_aggregates=False):
        # Default the range per call rather than via a mutable default argument
        # (the list is modified in place below).
        if range is None:
            range = [None, time.strftime("%Y-%m-%d")]
        # Parameters used in Subquery select Clause
        subParameters = self.returnParametersAsList(groupid)
        subGroupBy = self.returnGroupByAsList(groupid)
        # Required parameters for a subquery to do joins (date ranges are done internally)
        # Build Date Ranges for the where clause
        # Check DATES are valid, if not use default
        if not utils.is_valid_date(utils.xstr(range[0])):
            range[0] = ""
        if not utils.is_valid_date(utils.xstr(range[1])):
            range[1] = time.strftime("%Y-%m-%d")
        subDateRange = (
            self.name
            + ".ds>='"
            + utils.xstr(range[0]).split(" ")[0]
            + "' and "
            + self.name
            + ".ds<='"
            + utils.xstr(range[1]).split(" ")[0]
            + "'"
        )
        # Check if DATES have time, if so append server_date.
        if utils.has_valid_time(utils.xstr(range[0])):
            subDateRange += " and " + self.name + ".server_date>='" + utils.xstr(range[0]) + "'"
        if utils.has_valid_time(utils.xstr(range[1])):
            subDateRange += " and " + self.name + ".server_date<='" + utils.xstr(range[1]) + "'"
        subWhere = " where " + subDateRange
        # Connector between the date-range condition and any additional where
        # clauses; initialise it unconditionally so it is defined even when
        # self.wheres is empty.
        logicOp = " and "
        # for where in self.wheres:
        #     subWhere += logicOp
        #     subWhere += where
        #     logicOp = " or "
        for key in self.wheresMap.keys():
            subWhere += logicOp
            counter = 1
            subWhere += "("
            for where in self.wheresMap[key]:
                logicOp = " or "
                subWhere += where
                if counter != len(self.wheresMap[key]):
                    subWhere += logicOp
                counter += 1
            subWhere += ")"
            logicOp = " and "
        query = "select " + subParameters + " from " + utils.xstr(self.name) + subWhere
        # If aggregates are not being ignored, include the group-by parameters
        # (note: the instance attribute is checked, not the ignore_aggregates argument)
        if not self.ignore_aggregates:
            query = query + " group by " + subGroupBy
        return query
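
buildSubQuery assembles its SQL by string concatenation, so any range value that reaches it unvalidated ends up inside the query text. As an alternative, a minimal sketch of the same date-range filter expressed with query parameters, assuming a DB-API style driver that accepts %s placeholders (this is not how the class above works):

def build_date_range_subquery(table, columns, date_range):
    # Sketch only: table and column names still come from trusted code; the
    # date values are passed to the driver as parameters instead of being inlined.
    start = str(date_range[0] or "").split(" ")[0]
    end = str(date_range[1] or "").split(" ")[0]
    sql = ("select " + columns + " from " + table +
           " where " + table + ".ds >= %s and " + table + ".ds <= %s")
    return sql, (start, end)

# usage with a hypothetical cursor:
#   cursor.execute(*build_date_range_subquery("events", "ds, count(*)", rng))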
Example 3
    def post(self):
        if not has_payload(request):
            return "No valid json payload", 400

        # Use .get() so a missing key yields None instead of raising KeyError
        # before the checks below can run.
        payload = request.get_json()
        region = payload.get('region')
        pm = payload.get('pm')
        timestamp = payload.get('timestamp')

        if region is None:
            return None, 400
        region = region.lower()

        if pm is None or len(pm) != 6:
            return None, 400

        timestamp = is_valid_date(timestamp)
        if timestamp is False:
            return None, 400

        db_region = BridgePredictions.query.filter_by(region=region).first()

        if db_region:
            BridgePredictions.query.filter_by(region=region).delete()

        db_region = BridgePredictions(region, *pm, timestamp)
        db.session.add(db_region)
        db.session.commit()

        return None, 201
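
A quick usage sketch of how a client might call this handler. The route and host are assumptions (they are not shown above); only the payload shape comes from the validation code: a region name, a pm list with exactly six values, and a timestamp accepted by is_valid_date.

import requests

# Hypothetical URL; only the payload shape is taken from the handler above.
payload = {
    'region': 'Seattle',
    'pm': [0.7, 0.6, 0.5, 0.4, 0.3, 0.2],   # exactly six prediction values
    'timestamp': '2020-01-01',
}
resp = requests.post('http://localhost:5000/predictions', json=payload)
print(resp.status_code)   # 201 when the record is stored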
Example 4
    def put(self, account_id, meal_id):
        if any(k for k in self.data if k not in ('id', 'account', 'date', 'time', 'description', 'calories')):
            raise APIError('Invalid parameters')

        # skipping id and account fields

        if 'date' in self.data:
            if not utils.is_valid_date(self.data['date']):
                raise APIError('Invalid date')

            self.meal.date = parse_date(self.data['date']).date()

        if 'time' in self.data:
            if not utils.is_valid_time(self.data['time']):
                raise APIError('Invalid time')

            self.meal.time = self.data['time']

        if 'description' in self.data:
            self.meal.description = self.data['description']

        if 'calories' in self.data:
            if not isinstance(self.data['calories'], int) or self.data['calories'] <= 0:
                raise APIError('Invalid calories value')

            self.meal.calories = self.data['calories']

        self.meal.save()
        return self.meal.serialize()
Example 5
	def __parseQueryDateRanges(self,range):
		if not utils.is_valid_date(utils.xstr(range[0])):
			range[0] = ''
		if not utils.is_valid_date(utils.xstr(range[1])):
			range[1] = time.strftime('%Y-%m-%d')
		if utils.has_valid_time(utils.xstr(range[0])):
			start_date, start_time = range[0].split(" ")
		else:
			start_date = range[0]
			start_time = ''
		if utils.has_valid_time(utils.xstr(range[1])):
			end_date, end_time = range[1].split(" ")
		else:
			end_date = range[1]
			end_time = '23:59:59'
		query = self.query
		query = query.replace("#STARTDATE",start_date)
		query = query.replace("#STARTTIME",start_time)
		query = query.replace("#ENDDATE",end_date)
		query = query.replace("#ENDTIME",end_time)
		self.query = query
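
__parseQueryDateRanges fills the #STARTDATE/#STARTTIME/#ENDDATE/#ENDTIME placeholders in self.query. The template itself is not shown; a sketch of the kind of query it might be applied to, with the placeholder names taken from the method and the surrounding SQL assumed:

# The placeholder names come from the method above; the SQL around them is an assumption.
template = ("select ds, count(*) from events "
            "where ds between '#STARTDATE' and '#ENDDATE' "
            "and server_time between '#STARTTIME' and '#ENDTIME' "
            "group by ds")

filled = template
for placeholder, value in [('#STARTDATE', '2020-01-01'), ('#ENDDATE', '2020-01-31'),
                           ('#STARTTIME', '00:00:00'), ('#ENDTIME', '23:59:59')]:
    filled = filled.replace(placeholder, value)
print(filled)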
Example 6
    def post(self, account_id):
        if any(f for f in ('date', 'time', 'description', 'calories') if f not in self.data):
            raise APIError('Missing Parameter(s)')

        if not utils.is_valid_date(self.data['date']):
            raise APIError('Invalid date')

        if not utils.is_valid_time(self.data['time']):
            raise APIError('Invalid time format')

        if not isinstance(self.data['calories'], int) or self.data['calories'] <= 0:
            raise APIError('Invalid calories value')

        meal_dt = parse_date('%s %s' % (self.data['date'], self.data['time']))

        meal = models.Meal(account=self.account, date=meal_dt.date(), time=self.data['time'],
                           description=self.data['description'], calories=self.data['calories'])
        meal.save()

        return created_response('meal', account_id=self.account.id, meal_id=meal.id)
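
For reference, a payload that would pass the validation above; the concrete values are invented, but the required fields and their types come from the handler:

payload = {
    'date': '2020-05-01',      # must satisfy utils.is_valid_date
    'time': '12:30:00',        # must satisfy utils.is_valid_time
    'description': 'lunch',
    'calories': 650,           # positive integer
}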
Example 7
def main():
    parser = argparse.ArgumentParser(
        description='Dump accesses'
    )

    parser.add_argument(
        'issns',
        nargs='*',
        help='ISSNs separated by spaces'
    )

    parser.add_argument(
        '--collection',
        '-c',
        help='Collection Acronym'
    )

    parser.add_argument(
        '--dayly_granularity',
        '-d',
        action='store_true',
        help='Accesses granularity; defaults to monthly if not specified'
    )

    parser.add_argument(
        '--from_date',
        '-b',
        default=FROM,
        help='Start date of the accesses period'
    )

    parser.add_argument(
        '--until_date',
        '-u',
        default=UNTIL,
        help='End date of the accesses period'
    )

    parser.add_argument(
        '--output_format',
        '-f',
        choices=['json', 'csv'],
        default=OUTPUT_FORMAT,
        help='Output format'
    )

    parser.add_argument(
        '--output_file',
        '-r',
        help='File to receive the dumped data'
    )

    parser.add_argument(
        '--logging_file',
        '-o',
        help='Full path to the log file'
    )

    parser.add_argument(
        '--logging_level',
        '-l',
        default='DEBUG',
        choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
        help='Logging level'
    )

    args = parser.parse_args()
    _config_logging(args.logging_level, args.logging_file)
    logger.info('Dumping data for: %s' % args.collection)
 
    issns = None
    if len(args.issns) > 0:
        issns = utils.ckeck_given_issns(args.issns)

    if not utils.is_valid_date(args.from_date):
        logger.error('Invalid from date: %s' % args.from_date)
        exit()

    if not utils.is_valid_date(args.until_date):
        logger.error('Invalid until date: %s' % args.until_date)
        exit()

    dumper = Dumper(args.collection, issns, args.from_date, args.until_date,
        args.dayly_granularity, args.output_format, args.output_file)

    dumper.run()
Example 8
    def process(self, req):
        attr_keys = req.keys()
        if is_debug():
            radiuslog.info("::Received an authentication request")
            radiuslog.info("Attributes: ")
            for attr in attr_keys:
                radiuslog.info("%s: %s" % (attr, req[attr]))

        nasaddr = req.get_nasaddr()
        macaddr = req.get_macaddr()
        nas = service.get_nas(nasaddr)

        # check roster
        if service.in_black_roster(macaddr):
            return self.send_reject(req, nas, 'user in black roster')

        vlanid, vlanid2 = req.get_vlanids()
        username1 = req.get_username()
        domain = None
        username2 = username1
        if "@" in username1:
            username2 = username1[:username1.index("@")]
            req["User-Name"] = username2
            domain = username1[username1.index("@") + 1:]

        if not service.user_exists(username2):
            return self.send_reject(req, nas, 'user not exists')

        user = service.get_user(username2)

        if user.status != 1:
            return self.send_reject(req, nas, 'Invalid user status')

        if domain and domain not in user.domain_code:
            return self.send_reject(req, nas, 'user domain does not match')

        if nasaddr not in service.get_nas_ips(user.node_id):
            return self.send_reject(req, nas, 'node does not match')

        if not utils.is_valid_date(user.auth_begin_date, user.auth_end_date):
            return self.send_reject(req, nas,
                                    'user is not effective or expired')

        userpwd = utils.decrypt(user.password)
        if not req.is_valid_pwd(userpwd):
            return self.send_reject(req, nas, 'user password does not match')

        uproduct = service.get_product(user.product_id)
        if not uproduct:
            return self.send_reject(req, nas, 'user product does not match')

        if uproduct.policy == service.POLICY_TIMING and user.time_length <= 0:
            return self.send_reject(req, nas,
                                    'user does not have the time length')

        if not self.verify_macaddr(user, macaddr):
            return self.send_reject(req, nas, 'user macaddr bind not match')

        valid_vlanid = self.verify_vlan(user, vlanid, vlanid2)
        if valid_vlanid == 1:
            return self.send_reject(req, nas, 'user vlanid does not match')
        elif valid_vlanid == 2:
            return self.send_reject(req, nas, 'user vlanid2 does not match')

        if user.concur_number > 0:
            if user.concur_number <= service.get_online_num(user.user_name):
                return self.send_reject(req, nas, 'user concur_number control')
        return self.send_accept(
            req, nas,
            **dict(ipaddr=user.ip_addr,
                   bandcode=uproduct.bandwidth_code,
                   input_max_limit=str(uproduct.input_max_limit),
                   output_max_limit=str(uproduct.output_max_limit),
                   input_rate_code=uproduct.input_rate_code,
                   output_rate_code=uproduct.output_rate_code,
                   domain_code=user.domain_code))
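
Note that is_valid_date is called here with two arguments (auth_begin_date, auth_end_date), so in this codebase it reads as "is the account currently inside its validity window" rather than a format check. A minimal sketch under that assumption, with the timestamp format also assumed:

import datetime

# Sketch only: the real utils.is_valid_date is not shown; both the range semantics
# and the '%Y-%m-%d %H:%M:%S' format are assumptions.
def is_valid_date(begin_date, end_date, fmt='%Y-%m-%d %H:%M:%S'):
    try:
        begin = datetime.datetime.strptime(begin_date, fmt)
        end = datetime.datetime.strptime(end_date, fmt)
    except (ValueError, TypeError):
        return False
    return begin <= datetime.datetime.now() <= end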
Example 9
def is_valid_user(args):
    return is_name(args['fullname']) and args['gender'].upper() in (
        'M', 'F') and is_valid_date(args['birthdate'])
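
A usage sketch; the field names come from the function, while the values and the exact behaviour of is_name and is_valid_date are assumptions:

args = {'fullname': 'Ada Lovelace', 'gender': 'f', 'birthdate': '1815-12-10'}
print(is_valid_user(args))   # True, assuming is_name and is_valid_date accept these values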
Example 10
def get_cmd_params():
	logger.info('cmd: %s', sys.argv)

	# Values shown for each command-line parameter in the usage message
	cmd_param = {
		P_PROJECT: '<group>' + SEP_CMD_PROJ + '<project>',
		P_UPDATE_CODES: '',
		P_CREATE_LOG: '',
		P_SINCE: '<yyyy-mm-dd>',
		P_BEFORE: '<yyyy-mm-dd>',
		P_ORIGINAL_AUTHOR: '',
		P_SUBTOTAL: '',
		P_DEBUG: '',
		P_OUTPUT: P_OUTPUT_CONSOLE + '/' + P_OUTPUT_FILE,
		P_STAT_BY_MONTH: '',
		P_STAT_TYPE: P_STAT_TYPE_COMMITS + '/' + P_STAT_TYPE_FINAL_LINES,
		P_CHART: '',
		P_SKIPPED_FILES: '',
		P_NOT_UTF8_FILES: '',
		P_ERROR_FILES: ''
	}

	# Build the usage message
	usage = 'Usage: python ' + sys.argv[0]
	for p in cmd_param:
		if cmd_param[p] != '':
			usage += ' [' + p + SEP_CMD_PV + cmd_param[p] + ']'
		else:
			usage += ' [' + p + ']'

	# Set the default value for each command-line parameter
	cmd_pv = {
		P_PROJECT: '',
		P_UPDATE_CODES: False,
		P_CREATE_LOG: False,
		P_SINCE: '',
		P_BEFORE: '',
		P_ORIGINAL_AUTHOR: False,
		P_SUBTOTAL: False,
		P_DEBUG: False,
		P_OUTPUT: P_OUTPUT_CONSOLE,
		P_STAT_BY_MONTH: False,
		P_STAT_TYPE: '',
		P_CHART: False,
		P_SKIPPED_FILES: False,
		P_NOT_UTF8_FILES: False,
		P_ERROR_FILES: True
	}

	group = ''
	proj = ''
	since = ''
	before = ''
	stat_type = ''
	i = 0
	for a in sys.argv:
		# Skip the first argument, i.e. the script name itself
		if i == 0:
			i += 1
			continue

		if P_STAT_TYPE in a:
			stat_type = get_pv(a)
			if stat_type == '':
				logger.error('value of %s is null', P_STAT_TYPE)
				logger.error(usage)
				exit()
			elif not (stat_type in [P_STAT_TYPE_COMMITS, P_STAT_TYPE_FINAL_LINES]):
				logger.error('%s format: %s', P_STAT_TYPE, cmd_param[P_STAT_TYPE])
				exit()
			cmd_pv[P_STAT_TYPE] = stat_type
		elif P_PROJECT in a:
			project = get_pv(a)
			if project == '':
				logger.error('value of %s is null', P_PROJECT)
				logger.error(usage)
				exit()
			elif not (SEP_CMD_PROJ in project):
				logger.error('%s format: %s', P_PROJECT, cmd_param[P_PROJECT])
				exit()
			else:
				group = project.split(SEP_CMD_PROJ)[0]
				proj = project.split(SEP_CMD_PROJ)[1]
				if group == '' or proj == '':
					logger.error('%s: group or project is null', a)
					exit()
			cmd_pv[P_PROJECT] = project
		elif P_UPDATE_CODES == a:
			cmd_pv[P_UPDATE_CODES] = True
		elif P_CREATE_LOG == a:
			cmd_pv[P_CREATE_LOG] = True
		elif P_SINCE in a:
			since = get_pv(a)
		elif P_BEFORE in a:
			before = get_pv(a)
		elif P_ORIGINAL_AUTHOR == a:
			cmd_pv[P_ORIGINAL_AUTHOR] = True
		elif P_SUBTOTAL == a:
			cmd_pv[P_SUBTOTAL] = True
		elif P_DEBUG == a:
			cmd_pv[P_DEBUG] = True
		elif P_OUTPUT in a:
			output = get_pv(a)
			if output == '':
				logger.error('value of %s is null', P_OUTPUT)
				logger.error(usage)
				exit()
			elif not (output in [P_OUTPUT_CONSOLE, P_OUTPUT_FILE]):
				logger.error('%s format: %s', P_OUTPUT, cmd_param[P_OUTPUT])
				exit()
			cmd_pv[P_OUTPUT] = output
		elif P_STAT_BY_MONTH == a:
			cmd_pv[P_STAT_BY_MONTH] = True
		elif P_CHART == a:
			cmd_pv[P_CHART] = True
		elif P_SKIPPED_FILES == a:
			cmd_pv[P_SKIPPED_FILES] = True
		elif P_NOT_UTF8_FILES == a:
			cmd_pv[P_NOT_UTF8_FILES] = True
		elif P_ERROR_FILES == a:
			cmd_pv[P_ERROR_FILES] = True
		else:
			logger.error('%s is invalid', a)
			logger.error(usage)
			exit()

	if stat_type == '':
		logger.error('%s is missing', P_STAT_TYPE)
		logger.info(usage)
		exit()

	# The since/before parameters are only needed when counting commits
	if stat_type == P_STAT_TYPE_COMMITS:
		if since == '' and before == '':
			logger.error('%s or %s is missing', P_SINCE, P_BEFORE)
			logger.info(usage)
			exit()

		if not (since == '') and not utils.is_valid_date(since):
			logger.error('value of %s is not a valid date. format: yyyy-mm-dd', P_SINCE)
			logger.info(usage)
			exit()
		if not (before == '') and not utils.is_valid_date(before):
			logger.error('value of %s is not a valid date. format: yyyy-mm-dd', P_BEFORE)
			logger.info(usage)
			exit()

		# Normalize the date format
		since = utils.normalize_date(since)
		before = utils.normalize_date(before)

		if not (since == '') and not (before == '') and before <= since:
			logger.error('value of %s must be greater than %s', P_BEFORE, P_SINCE)
			logger.info(usage)
			exit()

		cmd_pv[P_SINCE] = since
		cmd_pv[P_BEFORE] = before

	# If the code needs to be updated, the log file must be regenerated; the command-line parameter is ignored in that case
	if cmd_pv[P_UPDATE_CODES]:
		cmd_pv[P_CREATE_LOG] = True

	# Print the command-line parameter values
	for pv in cmd_pv:
		logger.info('%s: %s', pv, cmd_pv[pv])
	logger.info('')

	return cmd_pv
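
utils.normalize_date is used above to standardise the since/before values before they are compared as strings, but it is not shown. A minimal sketch, assuming its job is to zero-pad yyyy-m-d style input to yyyy-mm-dd:

from datetime import datetime

# Sketch only: the real utils.normalize_date is not shown; zero-padding is assumed
# so that plain string comparison of the two dates is safe.
def normalize_date(value):
    if value == '':
        return ''
    return datetime.strptime(value, '%Y-%m-%d').strftime('%Y-%m-%d')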
Example 11
    def process(self,req):
        attr_keys = req.keys()
        if is_debug():
            radiuslog.info("::Received an authentication request")
            radiuslog.info("Attributes: ")        
            for attr in attr_keys:
                radiuslog.info( "%s: %s" % (attr, req[attr]))

        nasaddr = req.get_nasaddr()
        macaddr = req.get_macaddr()
        nas = service.get_nas(nasaddr)

        # check roster 
        if service.in_black_roster(macaddr):
            return self.send_reject(req,nas,'user in black roster')

        vlanid,vlanid2 = req.get_vlanids()
        username1 = req.get_username()
        domain = None
        username2 = username1
        if "@" in username1:
            username2 = username1[:username1.index("@")]
            req["User-Name"] = username2
            domain = username1[username1.index("@")+1:]

        if not service.user_exists(username2):
            return self.send_reject(req,nas,'user not exists')

        user = service.get_user(username2)

        if user.status != 1:
            return self.send_reject(req,nas,'Invalid user status')          

        if domain and domain not in user.domain_code:
            return self.send_reject(req,nas,'user domain does not match')       

        if nasaddr not in service.get_nas_ips(user.node_id):   
            return self.send_reject(req,nas,'node does not match')

        if not utils.is_valid_date(user.auth_begin_date,user.auth_end_date):
            return self.send_reject(req,nas,'user is not effective or expired')

        userpwd = utils.decrypt(user.password)
        if not req.is_valid_pwd(userpwd):
            return self.send_reject(req,nas,'user password does not match')

        uproduct = service.get_product(user.product_id)
        if not uproduct:
            return self.send_reject(req,nas,'user product does not match')

        if uproduct.policy == service.POLICY_TIMING and user.time_length <= 0:
            return self.send_reject(req,nas,'user does not have the time length')

        if not self.verify_macaddr(user,macaddr):
            return self.send_reject(req,nas,'user macaddr bind not match')

        valid_vlanid = self.verify_vlan(user,vlanid,vlanid2)
        if valid_vlanid == 1:
            return self.send_reject(req,nas,'user vlanid does not match')            
        elif valid_vlanid == 2:
            return self.send_reject(req,nas,'user vlanid2 does not match')    

        if user.concur_number > 0:
            if user.concur_number <= service.get_online_num(user.user_name):
                return self.send_reject(req,nas,'user concur_number control')  
        return self.send_accept(req,nas,**dict(ipaddr=user.ip_addr,
                      bandcode=uproduct.bandwidth_code,
                      input_max_limit=str(uproduct.input_max_limit),
                      output_max_limit=str(uproduct.output_max_limit),
                      input_rate_code=uproduct.input_rate_code,
                      output_rate_code=uproduct.output_rate_code,
                      domain_code=user.domain_code))
Example 12
def index():
    todays_date = get_todays_date()
    date_str = request.args.get('date', default=todays_date.isoformat())
    page_num = request.args.get('p', default=0, type=int)

    date_str_split = date_str.split('-')
    if date_str_split[0] == "random":
        random_date = get_random_date()
        if date_str_split[1] == "day":
            return redirect("/?date={}-{}-{}".format(random_date.year, random_date.month, random_date.day))
        elif date_str_split[1] == "month":
            return redirect("/?date={}-{}".format(random_date.year, random_date.month))
        elif date_str_split[1] == "year":
            return redirect("/?date={}".format(random_date.year))

    if date_str_split[0] == "current":
        if date_str_split[1] == "day":
            return redirect("/?date={}-{}-{}".format(todays_date.year, todays_date.month, todays_date.day))
        elif date_str_split[1] == "month":
            return redirect("/?date={}-{}".format(todays_date.year, todays_date.month))
        elif date_str_split[1] == "year":
            return redirect("/?date={}".format(todays_date.year))

    if not is_valid_date(date_str):
        date_str = todays_date.isoformat()
        message = "Date input is invalid, here are stories from today:"
    else:
        message = None

    date_input = DateInput(date_str, todays_date)

    try:
        stories_and_pages = get_stories_and_pages(date_str, page_num)
    except Exception:
        message = "We had trouble fetching stories (our server might be having a rough time). " \
                  "Instead, here are stories from the first day Hacker News was online:"
        date_input = DateInput("2006-10-9", todays_date)
        with open(os.path.dirname(os.path.realpath(__file__)) + '/static/first_day.json') as json_file:
            json_response = json.load(json_file)
            stories = get_stories_and_pages_from_json(json_response)[0]
        return render_template('show_posts.html',
                                date_input=date_input,
                                stories=stories,
                                message=message,
                                page_num=1)

    stories = stories_and_pages[0]
    num_pages = stories_and_pages[1]

    if page_num < num_pages - 1:
        next_page_num = page_num + 1
        next_page_url = "?date={}&p={}".format(date_str, next_page_num)
    else:
        next_page_url = None
    
    return render_template('show_posts.html',
                           date_input=date_input,
                           stories=stories,
                           page_num=page_num,
                           next_page_url=next_page_url,
                           message=message)
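
A quick smoke test of the routes above using Flask's test client; the module the app is imported from is an assumption:

from myapp import app   # hypothetical module that defines the Flask app and index()

client = app.test_client()
print(client.get('/?date=2015-03-14').status_code)   # 200: stories for a specific day
print(client.get('/?date=random-day').status_code)   # 302: redirect to a random day
print(client.get('/?date=not-a-date').status_code)   # 200: falls back to today with a message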
Example 13
def index():
    todays_date = get_todays_date()
    date_str = request.args.get('date', default=todays_date.isoformat())
    page_num = request.args.get('p', default=0, type=int)

    date_str_split = date_str.split('-')
    if date_str_split[0] == "random":
        random_date = get_random_date()
        if date_str_split[1] == "day":
            return redirect("/?date={}-{}-{}".format(random_date.year,
                                                     random_date.month,
                                                     random_date.day))
        elif date_str_split[1] == "month":
            return redirect("/?date={}-{}".format(random_date.year,
                                                  random_date.month))
        elif date_str_split[1] == "year":
            return redirect("/?date={}".format(random_date.year))

    if date_str_split[0] == "current":
        if date_str_split[1] == "day":
            return redirect("/?date={}-{}-{}".format(todays_date.year,
                                                     todays_date.month,
                                                     todays_date.day))
        elif date_str_split[1] == "month":
            return redirect("/?date={}-{}".format(todays_date.year,
                                                  todays_date.month))
        elif date_str_split[1] == "year":
            return redirect("/?date={}".format(todays_date.year))

    if not is_valid_date(date_str):
        date_str = todays_date.isoformat()
        message = "Date input is invalid, here are stories from today:"
    else:
        message = None

    date_input = DateInput(date_str, todays_date)

    try:
        stories_and_pages = get_stories_and_pages(date_str, page_num)
    except Exception:
        message = "We had trouble fetching stories (our server might be having a rough time). " \
                  "Instead, here are stories from the first day Hacker News was online:"
        date_input = DateInput("2006-10-9", todays_date)
        with open(
                os.path.dirname(os.path.realpath(__file__)) +
                '/static/first_day.json') as json_file:
            json_response = json.load(json_file)
            stories = get_stories_and_pages_from_json(json_response)[0]
        return render_template('show_posts.html',
                               date_input=date_input,
                               stories=stories,
                               message=message,
                               page_num=1)

    stories = stories_and_pages[0]
    num_pages = stories_and_pages[1]

    if page_num < num_pages - 1:
        next_page_num = page_num + 1
        next_page_url = "?date={}&p={}".format(date_str, next_page_num)
    else:
        next_page_url = None

    return render_template('show_posts.html',
                           date_input=date_input,
                           stories=stories,
                           page_num=page_num,
                           next_page_url=next_page_url,
                           message=message)