Example 1
    def testDateDeltaSub(self):
        d = datetime.datetime(year=2011, month=12, day=9)
        dd = datedelta(months=3)
        r = d - dd
        log.debug(r)
        self.assertEqual(r, datetime.datetime.strptime("9/9/2011", "%d/%m/%Y"))

        d = datetime.datetime(year=2011, month=12, day=9)
        dd = datedelta(years=1)
        r = d - dd
        log.debug(r)
        self.assertEqual(r, datetime.datetime.strptime("9/12/2010",
                                                       "%d/%m/%Y"))
Example 2
    def testDateDeltaAddYear(self):
        d = datetime.datetime(year=2011, month=12, day=9)
        dd = datedelta(years=8)
        r = d + dd
        log.debug(r)
        self.assertEqual(r, datetime.datetime.strptime("9/12/2019",
                                                       "%d/%m/%Y"))

        d = datetime.datetime(year=2011, month=12, day=9)
        dd = datedelta(years=-8)
        r = d + dd
        log.debug(r)
        self.assertEqual(r, datetime.datetime.strptime("9/12/2003",
                                                       "%d/%m/%Y"))
Example 3
    def testDateDeltaStr(self):
        dd = datedelta(years=12)
        self.assertTrue("144 months" in str(dd))

        dd = datedelta(months=12)
        self.assertTrue("12 months" in str(dd))

        dd = datedelta(years=2, months=12)
        self.assertTrue("36 months" in str(dd), str(dd))

        dd = datedelta(years=2, months=6, days=3)
        self.assertTrue("30 months, 3 days" in str(dd), str(dd))

        dd = datedelta(years=2, months=1, seconds=15)
        self.assertTrue("25 months, 0:00:15" in str(dd), str(dd))
Example 4
    def testDateDeltaAddMonth(self):
        d = datetime.datetime(year=2011, month=12, day=9)
        dd = datedelta(months=3)
        r = d + dd
        log.debug(r)
        self.assertEqual(r, datetime.datetime.strptime("9/3/2012", "%d/%m/%Y"))

        d = datetime.datetime(year=2011, month=5, day=9)
        dd = datedelta(months=3)
        r = d + dd
        log.debug(r)
        self.assertEqual(r, datetime.datetime.strptime("9/8/2011", "%d/%m/%Y"))

        d = datetime.datetime(year=2011, month=6, day=9)
        dd = datedelta(months=7)
        r = d + dd
        log.debug(r)
        self.assertEqual(r, datetime.datetime.strptime("9/1/2012", "%d/%m/%Y"))

        d = datetime.datetime(year=2011, month=12, day=9)
        dd = datedelta(months=-3)
        r = d + dd
        log.debug(r)
        self.assertEqual(r, datetime.datetime.strptime("9/9/2011", "%d/%m/%Y"))

        d = datetime.datetime(year=2011, month=5, day=9)
        dd = datedelta(months=-6)
        r = d + dd
        log.debug(r)
        self.assertEqual(r, datetime.datetime.strptime("9/11/2010",
                                                       "%d/%m/%Y"))

        d = datetime.datetime(year=2011, month=5, day=9)
        dd = datedelta(months=-5)
        r = d + dd
        log.debug(r)
        self.assertEqual(r, datetime.datetime.strptime("9/12/2010",
                                                       "%d/%m/%Y"))
Example 5
def main(argv=None):  # IGNORE:C0111
    '''Command line options.'''
    if argv is None:
        argv = sys.argv
    else:
        sys.argv.extend(argv)

    program_name = os.path.basename(sys.argv[0])
    program_version_message = '%%(prog)s %s' % (__version__)
    program_shortdesc = "LINKY Power consumption toolbox (from ENEDIS)"
    program_license = '''%s

  Created by lmolinier on %s.

  Distributed on an "AS IS" basis without warranties
  or conditions of any kind, either express or implied.

USAGE
''' % (program_shortdesc, str(__date__))

    try:
        config = MyLinkyConfig()

        # Setup argument parser
        parser = ArgumentParser(description=program_license,
                                formatter_class=RawDescriptionHelpFormatter)
        parser.add_argument('-v',
                            '--verbose',
                            action='count',
                            default=0,
                            help="set verbosity level [default: %(default)s]")
        parser.add_argument('-V',
                            '--version',
                            action='version',
                            version=program_version_message)
        parser.add_argument('-c', "--config", help="configuration file")

        group = parser.add_argument_group("importer",
                                          "Data importer parameters")
        group.add_argument("--importer",
                           choices=["enedis"],
                           help="select importer [default: %(default)s]",
                           default="enedis")

        enedis = group.add_argument_group("enedis",
                                          "ENEDIS Access and Data paremeters")
        enedis.add_argument('-u', '--username', help="Enedis username")
        enedis.add_argument('-p', '--password', help="Enedis password")
        enedis.add_argument("--timesheet",
                            action="append",
                            type=timesheet_converter,
                            help="enter new HP/HC timesheet")

        parser.add_argument("--type",
                            choices=Enedis.RESOURCE.keys(),
                            default="hourly",
                            help="query data source (default: %(default)s)")

        date = parser.add_argument_group("date range", "select the date range")
        date.add_argument("--to",
                          help="to/end query data range (format DD/MM/YYYY)",
                          type=datetime_converter,
                          default=datetime.datetime.now().replace(
                              tzinfo=pytz.timezone("Europe/Paris")))
        group = date.add_mutually_exclusive_group()
        group.add_argument(
            "--from",
            help="from/start query date range (format DD/MM/YYYY)",
            type=datetime_converter)
        group.add_argument(
            "--last",
            help="query for last days/months/year depending 'type'",
            type=datedelta_converter)

        subparsers = parser.add_subparsers(help='exporter help',
                                           dest="exporter")

        subparser = subparsers.add_parser("influxdb",
                                          help="Export to InfluxDB")
        subparser.add_argument("--host", help="Database hostname")
        subparser.add_argument("--db", help="Database name")
        subparser.add_argument("--dbuser", help="Database username")
        subparser.add_argument("--dbpassword", help="Database password")

        subparser = subparsers.add_parser("stdout", help="Export to STDOUT")
        subparser.add_argument("--pretty", help="enable pretty printing")

        subparser = subparsers.add_parser("csv", help="Export to STDOUT")
        subparser.add_argument("--filename", help="csv filename")
        subparser.add_argument(
            "--mode",
            help="open mode (default %(default)s - overwrite)",
            default="w")

        args = parser.parse_args()
        kwargs = vars(args)

        log.info('setting log level to {}'.format(args.verbose))
        levels = [
            logging.CRITICAL, logging.ERROR, logging.WARNING, logging.INFO,
            logging.DEBUG
        ]
        level = levels[max(0, min(args.verbose - 1, len(levels) - 1))]
        log.info('effective verbose value set to {}'.format(level))
        logging.getLogger().setLevel(level)

        log.debug("args: %s" % args)
        log.debug("kwargs: %s" % kwargs)
        if args.config:
            config.load_from_file(args.config)
        config.override_from_args(kwargs)
        log.debug("config: %s" % config.data)

        enedis = Enedis(timesheets=config["enedis"]["timesheets"])
        enedis.login(config["enedis"]["username"],
                     config["enedis"]["password"])

        startDate = kwargs["from"]
        endDate = kwargs["to"]
        if startDate is None:
            delta = args.last
            if not delta:
                delta = {
                    "hourly": datedelta(days=1),
                    "monthly": datedelta(months=1),
                    "yearly": datedelta(years=1)
                }[args.type]
            startDate = endDate - delta

        data = enedis.getdata(args.type, startDate=startDate, endDate=endDate)

        if args.exporter == "influxdb":
            influx = InfluxdbExporter(
                host=config["influxdb"]["host"],
                port=config["influxdb"]["port"],
                database=config["influxdb"]["database"],
                username=config["influxdb"]["username"],
                password=config["influxdb"]["password"],
                prefix=config["influxdb"]["measurement-prefix"])

            influx.save_data(args.type, data)

        elif args.exporter == "stdout":
            pprint.pprint(data)

        elif args.exporter == "csv":
            csv = CsvExporter(fname=args.filename, mode=args.mode)
            csv.save_data(args.type, data)

        return 0

    except Exception as e:
        if DEBUG or TESTRUN:
            raise
        indent = len(program_name) * " "
        sys.stderr.write(program_name + ": " + repr(e) + "\n")
        sys.stderr.write(indent + "  for help use --help\n")
        return 2

    return 0
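
main() appends any argv it is given to sys.argv before parsing, so it can also be driven programmatically. A hedged sketch, placed at the bottom of the same module; the option values and credentials are placeholders:

if __name__ == "__main__":
    sys.exit(main([
        "-v",                      # raise verbosity once
        "--type", "hourly",        # one of Enedis.RESOURCE.keys()
        "-u", "user@example.com",  # placeholder Enedis username
        "-p", "secret",            # placeholder Enedis password
        "stdout",                  # exporter subcommand: print data to STDOUT
    ]))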
Example 6
def run(events, context):
    response = []

    s3 = boto3.resource("s3")
    log.setLevel(logging.getLevelName(events.get("log", "INFO")))

    resource = events.get("resource", "hourly")
    if PASSWORD_TYPE == "kms":
        username = boto3.client('kms').decrypt(CiphertextBlob=b64decode(ENEDIS_USERNAME))['Plaintext'].decode()
        password = boto3.client('kms').decrypt(CiphertextBlob=b64decode(ENEDIS_PASSWORD))['Plaintext'].decode()
    elif PASSWORD_TYPE == "s3:clear":
        with tempfile.TemporaryFile() as f:
            s3.Bucket(BUCKET).download_fileobj(Key="creds.json", Fileobj=f)
            f.seek(0)
            creds = json.load(f)
            username = creds["username"]
            password = creds["password"]

    # Load the state from s3
    state = {}
    try:
        with tempfile.TemporaryFile() as f:
            s3.Bucket(BUCKET).download_fileobj(Key="state.json", Fileobj=f)
            f.seek(0)
            state = json.load(f)
    except Exception as e:
        log.info("running with empty sate, because of error: %s" % e)
    if resource not in state:
        state[resource] = {}

    if "state" in events:
        state.update(events["state"])
    log.info("State: %s" % state)

    # Load holes from s3
    holes = []
    try:
        with tempfile.TemporaryFile() as f:
            s3.Bucket(BUCKET).download_fileobj(Key="holes.json", Fileobj=f)
            f.seek(0)
            holes = json.load(f)
    except Exception as e:
        log.info("running with empty holes, because of error: %s" % e)

    # Load configuration
    config = MyLinkyConfig()
    try:
        with tempfile.TemporaryFile() as f:
            s3.Bucket(BUCKET).download_fileobj(Key="config.yml", Fileobj=f)
            f.seek(0)
            config.load_from_fileobj(f)
    except Exception as e:
        log.info("running with empty config, because of error: %s" % e)

    config.load_from_dict(events)
    config.override_from_args({"username": username, "password": password})
    log.info("Configuration: %s" % config.data)

    enedis = Enedis(timesheets=config["enedis"]["timesheets"])
    enedis.login(config["enedis"]["username"], config["enedis"]["password"])

    startDate = (datetime.datetime.strptime(state[resource]["last"], "%d/%m/%Y")
                 .replace(tzinfo=pytz.timezone("Europe/Paris")) + datedelta(days=1)
                 if "last" in state[resource] else None)
    endDate = datetime.datetime.now().replace(
        hour=0, minute=0, second=0, microsecond=0,
        tzinfo=pytz.timezone("Europe/Paris"))
    if startDate is None:
        # Max retention in ENEDIS is 1 year
        startDate = endDate - datedelta(years=1)
    if startDate == endDate:
        response.append({
            'error': "same date start/end %s" % endDate
        })
        return response
    
    data = enedis.getdata(resource, startDate=startDate, endDate=endDate)

    if len(data) == 0:
        log.info("Empty data")
    else:
        if startDate < data[0]["date"]:
            holes.append({"start": startDate, "end": data[0]["date"]})
            s3.Bucket(BUCKET).put_object(
                Key="holes.json",
                Body=json.dumps(holes, default=json_converter).encode("utf-8"))

        s3.Bucket(BUCKET).put_object(
            Key="%s/%s.json" % (resource, endDate.strftime("%Y-%m-%d")),
            Body=json.dumps(data, default=json_converter).encode("utf-8"))

        # Save the state
        lastDate = data[-1]["date"]
        state[resource]["last"] = lastDate.strftime("%d/%m/%Y")
        s3.Bucket(BUCKET).put_object(
            Key="state.json", Body=json.dumps(state).encode("utf-8"))

    response.append({
        'startDate': str(startDate),
        'endDate': str(endDate),
        'items': len(data)
    })

    return response
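
For local testing outside AWS Lambda, the handler can be called with an event dict that mirrors the keys run() reads ("log", "resource", optionally "state"). A hedged sketch; the S3 bucket, credentials and PASSWORD_TYPE globals still need to be configured in the module and environment:

if __name__ == "__main__":
    event = {
        "log": "DEBUG",        # forwarded to log.setLevel()
        "resource": "hourly",  # same values as the state/resource keys above
        # optionally pre-seed the state instead of reading state.json from S3:
        # "state": {"hourly": {"last": "01/01/2020"}},
    }
    print(run(event, context=None))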
Example 7
class Data:
    URL = 'https://espace-client-particuliers.enedis.fr/group/espace-particuliers/suivi-de-consommation'

    RESOURCE_HOURLY = "urlCdcHeure"
    RESOURCE_MONTHLY = "urlCdcMois"
    RESOURCE_YEARLY = "urlCdcAn"

    STEPS = {
        RESOURCE_HOURLY: datedelta(minutes=30),
        RESOURCE_MONTHLY: datedelta(months=1),
        RESOURCE_YEARLY: datedelta(years=1),
    }

    def __init__(self, cookies, timesheets=None, url=None):
        self.url = url if url is not None else Data.URL
        self.session = requests.Session()
        self.timesheets = timesheets

        if cookies is not None:
            if type(cookies) != list:
                cookies = [cookies]
            for cookie in cookies:
                self.session.cookies.set_cookie(cookie)

        log.debug("Creating data (%s) with session %s" %
                  (self.url, self.session))

    def get_data(self,
                 resource,
                 startDate=datetime.datetime(year=1970, month=1, day=1),
                 endDate=None):
        # A default of datetime.datetime.today() would be evaluated once at
        # import time; compute it per call instead.
        if endDate is None:
            endDate = datetime.datetime.today()
        raw = self._query_data(resource, startDate, endDate)

        data = self._transform_data(resource, raw, (startDate, endDate))
        return data

    def _get_type(self, startdate):
        daytime = startdate.time()
        if self.timesheets is None:
            return "normale"

        for (stime, etime) in self.timesheets:
            if stime < etime:
                # not crossing a day, this is
                # the normal case
                # e.g.: 00:00 --> 06:00
                if daytime >= stime and daytime < etime:
                    return "creuse"
            else:
                # we are crossing a day
                # e.g.: 22:00 --> 06:30
                if daytime >= stime or daytime < etime:
                    return "creuse"

        return "pleine"

    def _transform_data(self, resource, raw, bounds=None):
        start = datetime.datetime.strptime(
            raw["periode"]["dateDebut"],
            "%d/%m/%Y").replace(tzinfo=pytz.timezone("Europe/Paris"))
        end = datetime.datetime.strptime(
            raw["periode"]["dateFin"],
            "%d/%m/%Y").replace(tzinfo=pytz.timezone("Europe/Paris"))
        step = Data.STEPS[resource]

        if resource == Data.RESOURCE_YEARLY:
            start = start.replace(day=1, month=1)
            end = end.replace(day=1, month=1)

        data = []
        offset = raw["decalage"] if raw["decalage"] > 0 else 0
        for item in raw["data"]:
            rank = int(item["ordre"]) - 1

            # apparently, the yearly and monthly data are a bit different and start their 'ordre' at 0 instead of 1...
            if resource in [Data.RESOURCE_YEARLY, Data.RESOURCE_MONTHLY]:
                rank += 1

            # correct the rank with the 'decalage' offset when the graph is incomplete
            if rank < offset:
                continue
            rank = rank - offset
            value = float(item["valeur"])

            begin = start + (rank * step)
            end = start + ((rank + 1) * step)
            duration = end - begin

            if bounds and (begin < bounds[0] or begin >= bounds[1]):
                continue

            # Value is given in kW
            #  - if value is '-2', there is no value --> drop
            #  - if value is '-1', TODO
            if item["valeur"] < 0:
                continue
            d = {
                'date': begin,
                'duration': duration.total_seconds(),
                'value': value
            }
            if resource == Data.RESOURCE_HOURLY:
                d.update({'type': self._get_type(start + (rank * step))})
            data.append(d)

        return data

    def _query_data(self, resource, startDate, endDate):

        self.session.headers.update({'User-agent': "mylinky"})

        # note: payload is useless for yearly resource
        payload = {
            '_lincspartdisplaycdc_WAR_lincspartcdcportlet_dateDebut':
            startDate.strftime("%d/%m/%Y"),
            '_lincspartdisplaycdc_WAR_lincspartcdcportlet_dateFin':
            endDate.strftime("%d/%m/%Y")
        }

        params = {
            'p_p_id': 'lincspartdisplaycdc_WAR_lincspartcdcportlet',
            'p_p_lifecycle': 2,
            'p_p_state': 'normal',
            'p_p_mode': 'view',
            'p_p_resource_id': resource,
            'p_p_cacheability': 'cacheLevelPage',
            'p_p_col_id': 'column-1',
            'p_p_col_pos': 1,
            'p_p_col_count': 3
        }

        log.info("Sending data request for resource %s (%s - %s)" %
                 (resource, startDate, endDate))
        resp = self.session.post(self.url,
                                 data=payload,
                                 params=params,
                                 allow_redirects=False)

        if 300 <= resp.status_code < 400:
            # The first attempt frequently returns a 302 even when the request
            # is correct, so retry once.
            resp = self.session.post(self.url,
                                     data=payload,
                                     params=params,
                                     allow_redirects=False)

        log.debug("resp: %s" % dump.dump_response(resp).decode('utf-8'))
        body = resp.json()

        if body["etat"]["valeur"] == "erreur":
            raise DataException("Error on server when retrieving data: %s" %
                                body["etat"]["erreurText"] if "erreurText" in
                                body["etat"] else "n/a")

        if body["etat"]["valeur"] not in ["termine"]:
            raise DataException("Invalid response state code '%s'" %
                                body["etat"]["valeur"])

        return body["graphe"]
Example 8
    def testDateDeltaMul(self):
        dd = datedelta(years=3)
        dd = dd * 5
        log.debug(dd)
        self.assertEqual(dd.months, 3 * 12 * 5)