Example #1
class Exporter:
    def __init__(self):
        # Assumes module-level constants (API_KEY, API_SECRET, EXPORT_DIR,
        # START_DATE, END_DATE) and the Client class import from the
        # surrounding script; os, json, and uuid are standard library.
        # Note the original passes API_KEY as the first two Client arguments.
        self.client = Client(API_KEY, API_KEY, API_SECRET)
        self.series_filename = os.path.join(EXPORT_DIR, "series_info")

        if not os.path.exists(EXPORT_DIR):
            print("Making export directory")
            os.makedirs(EXPORT_DIR)

    def export_metadata(self):
        if os.path.isfile(self.series_filename):
            print("series_info exists, skipping series discovery")
            return

        print("Exporting series metadata")
        all_series = self.client.list_series()

        with open(self.series_filename, 'w') as outfile:
            for series in all_series.data:
                line = self.series_to_string(series) + "\n"
                outfile.write(line.encode("utf-8"))

    def series_to_string(self, series):
        local_id = uuid.uuid4()
        j = {"uuid": str(local_id)}  # Generate series UUID since keys
        for p in series.properties:  # could be inconvenient filenames
            j[p] = getattr(series, p)

        return json.dumps(j, ensure_ascii=False)

    def export_all_series(self):
        if not os.path.isfile(self.series_filename):
            print "ERROR: No series_info file found, can't export series data"
            return

        with open(self.series_filename, 'r') as series_list:
            for text in series_list:
                series = json.loads(text)
                self.export_single_series(series.get('key'),
                                          series.get('uuid'))

    def export_single_series(self, key, series_uuid):
        # The parameter is named series_uuid so it does not shadow the
        # uuid module used in series_to_string; callers pass it positionally.
        filename = os.path.join(EXPORT_DIR, series_uuid + ".csv")

        if os.path.isfile(filename):
            print("Data file exists for series " + key + ", skipping")
            return

        print("Exporting series " + key + " to " + filename)
        response = self.client.read_data(key=key.encode('utf-8'),
                                         start=START_DATE,
                                         end=END_DATE)

        with open(filename, 'w') as outfile:
            for dp in response.data:
                line = dp.t.isoformat() + "," + str(dp.v) + "\n"
                outfile.write(line)
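
A minimal way to drive this class might look like the sketch below; the __main__ guard and the call order are assumptions, but the constants are the ones the example itself references.

# Hypothetical runner for the Exporter above: discover series metadata
# first, then export each series to its own CSV file. Assumes the same
# module-level constants (API_KEY, API_SECRET, EXPORT_DIR, START_DATE,
# END_DATE) as the example.
if __name__ == "__main__":
    exporter = Exporter()
    exporter.export_metadata()     # writes series_info, one JSON object per line
    exporter.export_all_series()   # writes one <uuid>.csv per series
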
Example #2
class Exporter:
    def __init__(self):
        # Same exporter as Example #1, except that the list_series() and
        # read_data() results are iterated directly here rather than
        # through a .data attribute.
        self.client = Client(API_KEY, API_KEY, API_SECRET)
        self.series_filename = os.path.join(EXPORT_DIR, "series_info")

        if not os.path.exists(EXPORT_DIR):
            print("Making export directory")
            os.makedirs(EXPORT_DIR)
 
    def export_metadata(self):
        if os.path.isfile(self.series_filename):
            print("series_info exists, skipping series discovery")
            return

        print("Exporting series metadata")
        all_series = self.client.list_series()
 
        with open(self.series_filename, 'w') as outfile:
            for series in all_series:
                line = self.series_to_string(series) + "\n"
                outfile.write(line.encode("utf-8"))
 
    def series_to_string(self, series):
        local_id = uuid.uuid4()
        j = {"uuid": str(local_id)}    # Generate series UUID since keys
        for p in series.properties:    # could be inconvenient filenames
            j[p] = getattr(series, p)
 
        return json.dumps(j, ensure_ascii=False)
 
    def export_all_series(self):
        if not os.path.isfile(self.series_filename):
            print "ERROR: No series_info file found, can't export series data"
            return
 
        with open(self.series_filename, 'r') as series_list:
            for text in series_list:
                series = json.loads(text)
                self.export_single_series(series.get('key'),
                                          series.get('uuid'))
 
    def export_single_series(self, key, series_uuid):
        # As in Example #1, named series_uuid to avoid shadowing the uuid module.
        filename = os.path.join(EXPORT_DIR, series_uuid + ".csv")
 
        if os.path.isfile(filename):
            print("Data file exists for series " + key + ", skipping")
            return

        print("Exporting series " + key + " to " + filename)
        response = self.client.read_data(key=key.encode('utf-8'),
                                         start=START_DATE,
                                         end=END_DATE)
 
        with open(filename, 'w') as outfile:
            for dp in response:
                line = dp.t.isoformat() + "," + str(dp.v) + "\n"
                outfile.write(line)
Example #3
class Migrator:
    def __init__(self, scheme, create_devices=True,
                 write_data=True,
                 start_date="2000-01-01T00:00:00Z",
                 end_date="2014-12-31T00:00:00Z",
                 pool_size=3):
        # Assumes the surrounding script imports time, logging, gevent
        # (JoinableQueue, Lock) and the TDBClient, TIQClient, and
        # HTTPEndpoint classes. The scheme object carries credentials and
        # filters for both the source and destination services.
        self.scheme = scheme
        self.create_devices = create_devices
        self.should_write_data = write_data
        self.start_date = start_date
        self.end_date = end_date
        # Note db_key is passed as the first two TDBClient arguments.
        self.tdb = TDBClient(scheme.db_key, scheme.db_key,
                             scheme.db_secret,
                             base_url=scheme.db_baseurl)

        iq_endpoint = HTTPEndpoint(scheme.iq_baseurl,
                                   scheme.iq_key,
                                   scheme.iq_secret)
        self.tiq = TIQClient(iq_endpoint)
        self.queue = JoinableQueue()  # work queue feeding the worker pool
        self.lock = Lock()
        self.dp_count = 0             # shared datapoint/request counters
        self.req_count = 0
        self.dp_reset = time.time()
        # Spawn a pool of green threads; each worker pulls series off the
        # queue and migrates them concurrently.
        for i in range(pool_size):
            gevent.spawn(self.worker)

    def worker(self):
        while True:
            series = self.queue.get()
            try:
                self.migrate_series(series)
            finally:
                self.queue.task_done()

    def migrate_all_series(self, start_key="", limit=None):
        start_time = time.time()

        (keys, tags, attrs) = self.scheme.identity_series_filter()
        series_set = self.tdb.list_series(keys, tags, attrs)

        # Keep our own state of whether we passed the resume point, so we don't
        # need to assume client and server sort strings the same.
        found_first_series = False

        series_count = 0

        for series in series_set:
            if not found_first_series and series.key < start_key:
                continue
            else:
                found_first_series = True

            if limit and series_count >= limit:
                print("Reached limit of %d devices, stopping." % (limit))
                break

            if self.scheme.identity_series_client_filter(series):
                # If the series looks like an identity series,
                # queue it to be processed by the threadpool
                self.queue.put(series)
                series_count += 1

        self.queue.join()

        end_time = time.time()
        print("Exporting {} devices took {} seconds".format(series_count, end_time - start_time))

    def migrate_series(self, series):
        print("  Beginning to migrate series: %s" % (series.key))
        error = False
        try:
            if self.create_devices:
                error = self.create_device(series)

            if self.should_write_data and not error:
                error = self.write_data(series)
        except Exception as e:
            logging.exception(e)
            error = True

        if not error:
            print("COMPLETED migrating for series %s" % (series.key))
        else:
            print("ERROR migrating series %s" % (series.key))