def main():
    logging.basicConfig(format='%(asctime)s %(message)s', level=logging.INFO)

    try:
        logging.info("logging in as %s...", USERNAME)
        token = linky.login(USERNAME, PASSWORD)
        logging.info("logged in successfully!")

        logging.info("retrieving data...")
        today = datetime.date.today()

        # Years
        res_year = linky.get_data_per_year(token)

        # 12 months ago - today
        res_month = linky.get_data_per_month(token,
                                             dtostr(today - relativedelta(months=11)),
                                             dtostr(today))

        # One month ago - yesterday
        res_day = linky.get_data_per_day(token,
                                         dtostr(today - relativedelta(days=1, months=1)),
                                         dtostr(today - relativedelta(days=1)))

        # Yesterday and the day before
        res_hour = linky.get_data_per_hour(token,
                                           dtostr(today - relativedelta(days=2)),
                                           dtostr(today))

        logging.info("got data!")

        ############################################
        # Export the JSON files, with exception handling because the Enedis
        # website is not robust and often returns empty data
        try:
            export_hours_values(res_hour)
        except Exception as exc:
            # logging.info("hours values not exported")
            logging.error(exc)

        try:
            export_days_values(res_day)
        except Exception:
            logging.info("days values not exported")
            sys.exit(70)

        try:
            export_months_values(res_month)
        except Exception:
            logging.info("months values not exported")

        try:
            export_years_values(res_year)
        except Exception:
            logging.info("years values not exported")
        ############################################

    except linky.LinkyLoginException as exc:
        logging.error(exc)
        sys.exit(1)
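
# The main() variants here all assume the same module-level scaffolding: imports,
# credentials, and a dtostr() date-formatting helper. Below is a minimal sketch of
# what that scaffolding might look like; the LINKY_USERNAME / LINKY_PASSWORD
# environment variable names and the dd/mm/YYYY format are assumptions, not
# something confirmed by the code above.
import datetime
import logging
import os
import sys

from dateutil.relativedelta import relativedelta

import linky

# Assumed: credentials come from the environment, like the date range in the last variant.
USERNAME = os.environ['LINKY_USERNAME']
PASSWORD = os.environ['LINKY_PASSWORD']


def dtostr(date):
    """Format a date the way the Enedis API expects (assumed to be dd/mm/YYYY)."""
    return date.strftime("%d/%m/%Y")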
def main():
    logging.basicConfig(format='%(asctime)s %(message)s', level=logging.INFO)

    parser = argparse.ArgumentParser()
    parser.add_argument("-o", "--output-dir", type=str, default="out",
                        help="the directory in which the graphs will be placed")
    args = parser.parse_args()
    outdir = args.output_dir

    # Try to use the French locale; log and continue if it is unavailable
    try:
        locale.setlocale(locale.LC_ALL, 'fr_FR.utf8')
    except locale.Error as exc:
        logging.error(exc)

    try:
        logging.info("logging in as %s...", USERNAME)
        token = linky.login(USERNAME, PASSWORD)
        logging.info("logged in successfully!")

        logging.info("retrieving data...")
        today = datetime.date.today()

        res_year = linky.get_data_per_year(token)

        # 6 months ago - today
        res_month = linky.get_data_per_month(token,
                                             dtostr(today - relativedelta(months=6)),
                                             dtostr(today))

        # One month ago - yesterday
        res_day = linky.get_data_per_day(token,
                                         dtostr(today - relativedelta(days=1, months=1)),
                                         dtostr(today - relativedelta(days=1)))

        # Yesterday - today
        res_hour = linky.get_data_per_hour(token,
                                           dtostr(today - relativedelta(days=1)),
                                           dtostr(today))

        logging.info("got data!")

        logging.info("generating graphs...")
        generate_graph_months(outdir, res_month)
        generate_graph_years(outdir, res_year)
        generate_graph_days(outdir, res_day)
        generate_graph_hours(outdir, res_hour)
        logging.info("successfully generated graphs!")

    except linky.LinkyLoginException as exc:
        logging.error(exc)
        sys.exit(1)
    except linky.LinkyServiceException as exc:
        logging.error(exc)
        sys.exit(1)
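
# The generate_graph_* helpers are not shown here. Below is a minimal illustrative
# sketch of a daily-consumption plot, assuming matplotlib and a hypothetical payload
# shape (a list of {"time", "conso"} points); the project's real helpers and the real
# structure returned by linky.get_data_per_day may differ.
import os

import matplotlib
matplotlib.use('Agg')  # render to image files, no display needed
import matplotlib.pyplot as plt


def generate_graph_days(outdir, res_day):
    # Hypothetical payload shape; adapt to whatever linky.get_data_per_day returns.
    points = res_day.get("data", [])
    labels = [p["time"] for p in points]
    values = [p["conso"] for p in points]

    plt.figure(figsize=(10, 4))
    plt.bar(range(len(values)), values)
    plt.xticks(range(len(labels)), labels, rotation=45, fontsize=6)
    plt.ylabel("kWh")
    plt.title("Consumption per day")
    plt.tight_layout()
    plt.savefig(os.path.join(outdir, "days.png"))
    plt.close()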
def main():
    logging.basicConfig(format='%(asctime)s %(message)s', level=logging.INFO)

    # Optional explicit date range for the hourly export
    try:
        DATEDEBUT = os.environ['LINKY_DATE_DEBUT']
        DATEFIN = os.environ['LINKY_DATE_FIN']
    except KeyError:
        DATEDEBUT = ""
        DATEFIN = ""

    try:
        logging.info("logging in as %s...", USERNAME)
        token = linky.login(USERNAME, PASSWORD)
        logging.info("logged in successfully!")

        logging.info("retrieving data...")
        today = datetime.date.today()

        # TYPEDATA (defined at module level) selects which granularity to export
        if TYPEDATA == "year":
            res_year = linky.get_data_per_year(token)
            try:
                export_years_values(res_year)
            except Exception:
                logging.info("years values not exported")

        elif TYPEDATA == "month":
            # 11 months ago - today
            res_month = linky.get_data_per_month(token,
                                                 dtostr(today - relativedelta(months=11)),
                                                 dtostr(today))
            try:
                export_months_values(res_month)
            except Exception:
                logging.info("months values not exported")

        elif TYPEDATA == "day":
            # One month ago - yesterday
            res_day = linky.get_data_per_day(token,
                                             dtostr(today - relativedelta(days=1, months=1)),
                                             dtostr(today - relativedelta(days=1)))
            try:
                export_days_values(res_day)
            except Exception:
                logging.info("days values not exported")

        elif TYPEDATA == "hour":
            # Default to yesterday - today when no explicit range is given
            if DATEDEBUT == "":
                DATEDEBUT = dtostr(today - relativedelta(days=1))
            if DATEFIN == "":
                DATEFIN = dtostr(today)
            res_hour = linky.get_data_per_hour(token, DATEDEBUT, DATEFIN)
            try:
                export_hours_values(res_hour)
            except Exception as exc:
                logging.error(exc)

        logging.info("ok")

    except linky.LinkyLoginException as exc:
        logging.error(exc)
        sys.exit(1)
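
# The export_*_values helpers are not shown either. Below is a minimal sketch of one
# of them, assuming they simply dump the raw payload to a JSON file; the file name and
# format are assumptions, not the project's actual implementation. TYPEDATA in the
# variant above is likewise assumed to be defined at module level (e.g. read from an
# environment variable, which is not shown here).
import json


def export_hours_values(res_hour, path="export_hours_values.json"):
    # Hypothetical: write the payload as-is so downstream tooling can consume it.
    with open(path, "w") as out:
        json.dump(res_hour, out, ensure_ascii=False, indent=2)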