from datetime import datetime
import calendar
import csv

from riak import RiakClient


def changetime(stime):
    dt = datetime.strptime(stime, '%Y-%m-%dT%H:%M:%S')
    #print dt
    return calendar.timegm(dt.timetuple()) * 1000
            
c = RiakClient()
c.ping()


#to load data in the table

totalcount = 0
batchcount = 0
batchsize = 100
ds = []
t = c.table('aarhus')
print t


with open('./demo-data-extract.csv', 'rU') as infile:
    r = csv.reader(infile)
    for l in r:
        if l[0] != 'status':
            newl = [l[0], str(l[3]), datetime.strptime(l[5], '%Y-%m-%dT%H:%M:%S'),
                    int(l[1]), int(l[2]), int(l[4]), int(l[6])]
            totalcount = totalcount + 1
            #print count
            ds.append(newl)
            batchcount = batchcount + 1
            if batchcount == batchsize:
                #add the records to the table
                print "Count at  ", totalcount
from riak import RiakClient
from riak.util import unix_time_millis, datetime_from_unix_time_millis

from datetime import datetime
import calendar
import csv

c = RiakClient()
c.ping()

#to load data in the table

totalcount = 0
batchcount = 0
batchsize = 100  #most efficient batch size per documentation and testing
ds = []
t = c.table('aarhus13-4')
print t

# adjust the path below to point at wherever the source CSV file lives
with open('./traffic_feb_june/all-data-2.csv', 'rU') as infile:
    r = csv.reader(infile)
    for l in r:
        if l[0] != 'status':
            newl = [
                l[0],
                str(l[3]),
                datetime.strptime(l[5], '%Y-%m-%dT%H:%M:%S'),
                int(l[1]),
                int(l[2]),
                int(l[4]),
print("   Use TTB: {}".format(use_ttb))
print("Batch Size: {}".format(batchsz))
print("      CPUs: {}".format(cpu_count()))
print("      Rows: {}".format(len(rows)))
print()

tbl = 'GeoCheckin'
h = 'riak-test'
n = [
    {'host': h, 'pb_port': 10017},
    {'host': h, 'pb_port': 10027},
    {'host': h, 'pb_port': 10037},
    {'host': h, 'pb_port': 10047},
    {'host': h, 'pb_port': 10057}
]
client = RiakClient(nodes=n, protocol='pbc',
                    transport_options={'use_ttb': use_ttb})
table = client.table(tbl)

with benchmark.measure() as b:
    for run in (1, 2, 3):
        with b.report('populate-%d' % run):
            for i in range(0, rowcount, batchsz):
                x = i
                y = i + batchsz
                r = rows[x:y]
                ts_obj = table.new(r)
                result = ts_obj.store()
                if result is not True:
                    raise AssertionError("expected success")
from datetime import datetime
import calendar
import csv

from riak import RiakClient


def changetime(stime):
    dt = datetime.strptime(stime, '%Y-%m-%dT%H:%M:%S')
    #print dt
    return calendar.timegm(dt.timetuple()) * 1000


c = RiakClient()
c.ping()

#to load data in the table

totalcount = 0
batchcount = 0
batchsize = 100
ds = []
t = c.table('aarhus')
print t

with open('./demo-data-extract.csv', 'rU') as infile:
    r = csv.reader(infile)
    for l in r:
        if l[0] != 'status':
            newl = [
                l[0],
                str(l[3]),
                datetime.strptime(l[5], '%Y-%m-%dT%H:%M:%S'),
                int(l[1]),
                int(l[2]),
                int(l[4]),
                int(l[6])
            ]
Example #5
from datetime import datetime
import calendar
import csv

from riak import RiakClient


def changetime(stime):
    dt = datetime.strptime(stime, '%Y-%m-%dT%H:%M:%S')
    #print dt
    return calendar.timegm(dt.timetuple()) * 1000
            
c = RiakClient()
c.ping()


#to load data in the table

totalcount = 0
batchcount = 0
batchsize = 5000
ds = []
t = c.table('aarhus2')
print t


with open('./traffic_feb_june/all-data-2.csv', 'rU') as infile:
    r = csv.reader(infile)
    for l in r:
        if l[0] != 'status':
            newl = [l[0], str(l[3]), datetime.strptime(l[5], '%Y-%m-%dT%H:%M:%S'),
                    int(l[1]), int(l[2]), int(l[4]), int(l[6]), int(l[7]), int(l[8])]
            totalcount = totalcount + 1
            #print count
            ds.append(newl)
            batchcount = batchcount + 1
            if batchcount == batchsize:
                #add the records to the table
                print "Count at  ", totalcount
    data_set.append(new_row)
    
    # Update date and values to fake progression of time
    epoch_date += 60000
    if row_count % 2 == 0:
        waterPressure = 41.0
        gallonsPerHour = 2.5
    elif row_count % 3 == 0:
        waterPressure = 42.0
        gallonsPerHour = 3.0
    elif row_count % 5 == 0:
        waterPressure = 39.0
        gallonsPerHour = 3.5
    else:
        waterPressure = 40.0
        gallonsPerHour = 2.0
    totalGallons += gallonsPerHour
    
    # Add one hour to the reading date
    reading_date += timedelta(hours=1)
    
    print str(row_count) + " " + str(new_row)
    row_count += 1

try:   
    # Create new tsObject and save to the database with .store()
    table_object = client.table(table).new(data_set)
    result = table_object.store()
    print "Records written: " + str(result)    
except Exception as e:
    print "Error: " + str(e)
'''
    ListAllKeys.py
    Demonstrates how to use ts_stream_keys to return all of the primary keys in a table.
    However, listing all of the keys is an expensive operation that shouldn't be run
    against a production cluster for performance reasons.

    For more information see the Python Client API documentation at: 
    http://docs.basho.com/riakts/latest/developing/python/

    Note: This example uses the table created in
    CreateTable.py and the data written in WriteTo.py.
'''
from riak import RiakClient
client = RiakClient()

mytable = "waterMeterData"
table = client.table(mytable)

key_count = 0

stream = client.ts_stream_keys(table)
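# the stream yields batches (lists) of keys, hence the nested loops below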
for list_of_keys in stream:
    for key in list_of_keys:
        key_count += 1
        print key
stream.close()

print "Total Keys: " + str(key_count)
'''
    WriteTo.py
    Demonstrates how to use the Riak TS Client .ts_put method 
    to write a single record.
    For more information see the Python Client API documentation at: 
    http://docs.basho.com/riakts/latest/developing/python/

    Note: This example uses the table created in
    CreateTable.py and will fail if that code hasn't been successfully
    executed against your Riak TS cluster first.
'''
from datetime import datetime
from riak import RiakClient
client = RiakClient()
table = "waterMeterData"

record = [ ['CUSTOMER-0001', 'METER-0001', datetime(2016, 4, 11, 2, 0), 40.0, 2.0, 1000.0] ]

table_object = client.table(table).new(record)

try:
    result = client.ts_put(table_object)
    print "Record written: " + str(result)
except Exception as e:
    print "Error: " + str(e)
Example #10
import datetime
import traceback

from riak import RiakClient
# base class for Carbon database plugins (assumed import path)
from carbon.database import TimeSeriesDatabase


class RiakTSWriter(TimeSeriesDatabase):
  plugin_name = 'graphite_riakts'

  def __init__(self, option):
    print "Here we be initializing yon connections and things: %r" % option
    self.riak = RiakClient(host=option['RIAKTS_HOST'], pb_port=int(option['RIAKTS_PORT']))
    self.nodes_table = option['RIAKTS_NODES_TABLE']
    self.table_name = option['RIAKTS_TABLE_NAME']
    self.family = option['RIAKTS_FAMILY']
    self.nodes = self.riak.bucket(self.nodes_table)

  def write(self, metric, datapoints):
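    # convert each (timestamp, value) datapoint into a TS row of
    # [family, metric, time, value] and store the whole batch in one call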
    try:

      #print "Here we be writing to %s the value of %r" % ( metric, datapoints )
      table = self.riak.table(self.table_name)

      rows = []
      for point in datapoints:
        time = datetime.datetime.utcfromtimestamp(point[0])
        rows.append([self.family, metric, time, float(point[1])])

      #print rows
      ts_obj = table.new(rows)
      #print vars(ts_obj)
      res = ts_obj.store()
      #print res
      return
    except Exception:
      raise RuntimeError("write error: %s" % traceback.format_exc())


  def exists(self, metric):
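    # If the metric has no node record yet, register it (and its parent
    # branches) in the nodes bucket, then report the metric as existing.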
    try:
      key = "node-%s" % metric
      #print "Here we be checking if yon metric %s is existing" % metric
      o = self.nodes.get(key)
      if o.exists:
        #print "Yar, there be existing the metric of %s" % metric
        return True
      else:
        #print "Lo! The metric of %s be not here!" % metric
        o = self.nodes.new(key)
        branch = '.'.join(metric.split('.')[:-1])
        o.data = { "node_s": metric, "type_s": "node"}
#        print "Adding LEAF: %s" % metric
        o.store()
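        # walk up the dotted metric path, creating a branch record for each
        # ancestor that does not already exist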
        while len(branch):
          bkey = "branch-%s" % branch
          b = self.nodes.get(bkey)
          if not b.exists:
            b.data = { "branch_s": branch, "type_s": "branch"}
#            print "Adding BRANCH: %s" % branch
            b.store()
          branch = '.'.join(branch.split('.')[:-1])

        return True
    except Exception:
      raise RuntimeError("exists error: %s" % traceback.format_exc())

  def create(self, metric, **options):
    try:
      #print "Here we be attempting yon creation of %s metric, with options: %r" % ( metric, options )
      # o = self.nodes.new(metric)
      # o.data = { "node_t" : metric }
      # o.store()
      return metric
    except Exception:
      raise RuntimeError("create error: %s" % traceback.format_exc())

  def get_metadata(self, metric, key):
    try:
      #print "Here we be getting yon metadata of metric %s and key %s" % ( metric, key )
      return True
    except Exception:
      raise RuntimeError("get_metadata error: %s" % traceback.format_exc())

  def set_metadata(self, metric, key, value):
    try:
      #print "Here we be setting yon metadata of metric %s and key %s with value %r" % ( metric, key, value )
      return True
    except Exception:
      raise RuntimeError("set_metadata error: %s" % traceback.format_exc())
  # def getFilesystemPath(self, metric):
  #   try:
  #     print "Here we be getting yon file system path of here metric %s" % metric
  #     return metric
  #   except (Exception) as e:
  #     raise RuntimeError("getFilesystemPath error: %s" % (traceback.format_exc(e),))
from riak import RiakClient
client = RiakClient()

__doc__ = """\
DescribeTable.py
Demonstrates how to use the DESCRIBE command in Riak TS via the Python Client API
to view a table's schema or simply verify that a table has been created.

For more information see the Python Client API documentation at:
http://docs.basho.com/riakts/latest/developing/python/

Note: This example uses the table created in
CreateTable.py and will fail if that code hasn't been successfully
executed against your Riak TS cluster first.
"""

table = "waterMeterData"

try:    
    description = client.table(table).describe()
    for column_desc in description.rows:
        print(column_desc)
except Exception as e:
    print(e)
Example #12
from datetime import datetime
import calendar
import csv

from riak import RiakClient


def changetime(stime):
    dt = datetime.strptime(stime, '%Y-%m-%dT%H:%M:%S')
    #print dt
    return calendar.timegm(dt.timetuple()) * 1000


c = RiakClient()
c.ping()

#to load data in the table

totalcount = 0
batchcount = 0
batchsize = 100
ds = []
t = c.table('aarhus2')
print t

with open('./demo-data-extract.csv', 'rU') as infile:
    r = csv.reader(infile)
    for l in r:
        if l[0] != 'status':
            newl = [
                l[0],
                str(l[3]),
                datetime.strptime(l[5], '%Y-%m-%dT%H:%M:%S'),
                int(l[1]),
                int(l[2]),
                int(l[4]),
                int(l[6]),
                int(l[7]),