Example #1
    def __init__(self, api=MongoStampedAPI(), logsQuery=logsQuery()):
        self.stamp_collection = api._stampDB._collection
        self.acct_collection = api._userDB._collection
        self.query = logsQuery
        self.writer = statWriter("dashboard")
        conn = SDBConnection(keys.aws.AWS_ACCESS_KEY_ID, keys.aws.AWS_SECRET_KEY)
        self.domain = conn.get_domain("dashboard")
Example #2
class SimpleDBBackend:
    
    def __init__(self):
        self.connection = SDBConnection(aws_credentials.accessKey,
                                        aws_credentials.secretKey)
        self.domain = self.connection.get_domain(TABLE_NAME)      


    def put(self, key, value):
        sampler.begin()
        try:
            self.domain.put_attributes(key, {VALUE:value})        
        finally:
            sampler.end()


    def get(self, key):
        sampler.begin()
        try:
            # First try an eventually consistent read.
            result = self.domain.get_attributes(key, consistent_read=False)
            return result[VALUE]
        except KeyError:
            # The eventually consistent read failed. Try a strongly consistent
            # read.
            result = self.domain.get_attributes(key, consistent_read=True)
            return result[VALUE]
        finally:
            sampler.end()
        

    def incRefCount(self, key):
        # Not implemented.
        pass
        
        
    def decRefCount(self, key):
        # Not implemented.
        pass
    
    
    def nuke(self):
        # Delete and re-create the table.
        self.connection.delete_domain(TABLE_NAME)
        self.domain = self.connection.create_domain(TABLE_NAME)
        
                    
    def flush(self):
        pass # No-op.
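
A minimal usage sketch for the backend above (assuming the project's aws_credentials, TABLE_NAME, VALUE and sampler names are importable; the key and value shown are made up):

backend = SimpleDBBackend()
backend.put("session:42", "serialized-payload")    # writes a single attribute named VALUE
print backend.get("session:42")                    # eventually-consistent read, retried with consistent_read=True on a miss
backend.nuke()                                     # drop and re-create the SimpleDB domain
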
Example #3
def submit():
    if len(sys.argv) < 3:
        print "Usage:"
        print "  %s -seed <seed> [<spawn coords>]" % sys.argv[0]
        return

    # TODO use less crappy command line parsing
    seed = sys.argv[2]

    if len(sys.argv) == 4:
        spawn = [int(x) for x in sys.argv[3].split(",")]
        assert(len(spawn) == 3)
        print "Generate world with this seed (\"%s\") with spawn %r [y/N]?" % (seed, spawn)
    else:
        spawn = None
        print "Generate world with this seed (\"%s\") [y/N]?" % seed
    if raw_input().lower() == 'y':
        uid = uuid.uuid4()

        print "Submitting job %s to queue..." % uid
        sqs = SQSConnection()
        sdb = SDBConnection()
        queue = sqs.get_queue("overviewer-genfromseed")
        db = sdb.get_domain("overviewerdb")
        print queue
        print db

        data = dict()
        data['uuid'] = str(uid)
        data['seed'] = seed
        data['generated'] = False
        if spawn:
            data['target_spawn'] = spawn
        if not db.put_attributes(uid, data):
            print "***Error: Failed to update the db"
            return 1
        
        msg = Message()
        msg.set_body(str(uid))
        if not queue.write(msg):
            print "***Error: Failed to enqueue"
            return 1

        print "Ok, job enqueued"


    else:
        print "Ok, not submitting.  Bye"
        return
Example #4
def submit():
    if len(sys.argv) < 3:
        print "Usage:"
        print "  %s -submit <world uuid>" % sys.argv[0]
        return

    sdb = SDBConnection()
    db = sdb.get_domain("overviewerdb")

    # TODO use less crappy command line parsing
    world_uuid = uuid.UUID(sys.argv[2])

    world_item = db.get_item(world_uuid)
    if not world_item:
        print "Can't find that world!"
        return 1

    print "Submit this world for rendering? [y/N]"
    if raw_input().lower() != 'y':
        return "Ok, nevermind."
        return 0

    from boto.sqs.connection import SQSConnection 
    sqs = SQSConnection()
    
    queue = sqs.get_queue("overviewer-render")
    
    render_uuid = uuid.uuid4()
    print "Render UUID:", render_uuid
    data = dict()
    data['uuid'] = str(render_uuid)
    data['rendered'] = False
    data['world_uuid'] = str(world_uuid)

    if not db.put_attributes(str(render_uuid), data):
        print "***Error: Failed to update the db"
        return 1

    msg = Message()
    msg.set_body(str(render_uuid))

    if not queue.write(msg):
        print "***Error: Failed to enqueue"
        return 1
    print "Ok, job enqueued"
    return 0
Example #5
def add_from_url():
    if len(sys.argv) < 3:
        print "Usage:"
        print "  %s -url <url>" % sys.argv[0]
        return

    url = sys.argv[2]
    print "Generate world with this url (\"%s\") [y/N]?" % url
    if raw_input().lower() != 'y':
        print "Ok, nevermind"
        return
    
    uid = uuid.uuid4()
    data = dict()
    data['uuid'] = uid
    data['world_url'] = url

    sdb = SDBConnection()
    db = sdb.get_domain("overviewerdb")
    if not db.put_attributes(uid, data):
        print "***Error: Failed to update the db"
        return 1
    print "Ok. DB updated"
    print uid
Example #6
class statWriter(object):
    
    def __init__(self,domain=None):
        self.conn = SDBConnection(keys.aws.AWS_ACCESS_KEY_ID, keys.aws.AWS_SECRET_KEY)
        self.domain_name = domain
        
    def write(self,stat,key=None,domain=None):
        if domain is None:
            if self.domain_name is None:
                return False
            else:
                domain = self.domain_name
        
        if key is None:
            key = uuid.uuid1()
            
        if len(stat) > 0:
            try:
                domain = self.conn.get_domain(domain)
            except SDBResponseError:
                domain = self.conn.create_domain(domain)
            domain.put_attributes(key, stat, replace=False)
        
        return True
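
A small usage sketch for statWriter, assuming the keys.aws credentials used by the constructor are configured; the stat payload and item name shown are made up:

writer = statWriter("dashboard")
writer.write({'metric': 'active_users', 'value': '1234'})            # item name defaults to uuid.uuid1()
writer.write({'metric': 'active_users'}, key='2012-06-01-active')    # explicit item name; the domain is fetched or created on demand
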
Example #7
#!/usr/bin/python2

import sys
import os
from pprint import pprint

try:
    import boto
except ImportError:
    sys.path.append(os.path.expanduser("~/devel/boto"))
    import boto


from boto.sdb.connection import SDBConnection 

uid = sys.argv[1]
print "Looking up", uid

sdb = SDBConnection()
db = sdb.get_domain("overviewerdb")
data = db.get_item(uid)
pprint(data)
Example #8
class SimpleDB(object):
    
    def __init__(self, domain=None):
        self.conn = SDBConnection(keys.aws.AWS_ACCESS_KEY_ID, keys.aws.AWS_SECRET_KEY)
        self.domain_name = None
        self.domains = {}

        if domain is None and is_ec2():
            stack = get_stack()
            stack_name = str(stack['instance']['stack'])
            self.domain_name = stack_name
        elif domain is not None:
            self.domain_name = domain

    def addStat(self, stat):
        if self.domain_name is None:
            return

        try:
            # Only add specific parameters
            data = {}

            if 'user_id' in stat:
                data['uid'] = stat['user_id']

            if 'path' in stat:
                data['uri'] = stat['path']

            if 'method' in stat:
                data['mtd'] = stat['method']

            if 'form' in stat:
                try:
                    for k, v in stat['form'].items():
                        try:
                            if not isinstance(v, basestring):
                                v = str(v)
                            if len(v.encode('utf-8')) > 1024:
                                v = '<INPUT TOO LONG>'
                            data['frm_%s' % k] = v
                        except Exception as e:
                            print e
                except Exception as e:
                    print e

            if 'result' in stat:
                data['cde'] = str(stat['result'])

            if 'begin' in stat:
                data['bgn'] = stat['begin'].isoformat()

            if 'finish' in stat:
                data['end'] = stat['finish'].isoformat()

            if 'node' in stat:
                data['nde'] = stat['node']
                
            if 'client_id' in stat:
                data['cid'] = stat['client_id']

            if 'duration' in stat:
                data['dur'] = "{0:10d}".format(stat['duration'])

            if len(data) > 0:
                statId = str(ObjectId())
                if data.get('uri') not in ('/v1/ping.json', '/v1/temp/ping.json'):
                    suffix = '0%s' % (sha1(statId).hexdigest()[0])
                    if suffix in self.domains:
                        domain = self.domains[suffix]
                    else:
                        try:
                            domain = self.conn.get_domain('%s_%s' % (self.domain_name, suffix))
                        except SDBResponseError:
                            domain = self.conn.create_domain('%s_%s' % (self.domain_name, suffix))
                        self.domains[suffix] = domain
                    domain.put_attributes(statId, data, replace=False)


        except Exception as e:
            print e
            raise
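
A hypothetical call into the class above; the values are made up, and it assumes keys.aws credentials plus the module-level ObjectId and sha1 imports are available (addStat shards each record into <domain>_00 … <domain>_0f by a sha1 of the generated item id):

from datetime import datetime   # isoformat() is applied to begin/finish below

db = SimpleDB(domain='stats_dev')
db.addStat({
    'user_id':  '4e57048accc2175fca000005',   # stored as 'uid'
    'path':     '/v1/stamps/create.json',     # stored as 'uri'
    'method':   'POST',                       # stored as 'mtd'
    'result':   200,                          # stored as 'cde'
    'begin':    datetime.utcnow(),
    'finish':   datetime.utcnow(),
    'duration': 123,                          # stored right-aligned to width 10 as 'dur'
})
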
Example #9
def add_from_path():
    if len(sys.argv) < 3:
        print "Usage:"
        print "  %s -path <path>" % sys.argv[0]
        return
    
    path = sys.argv[2]

    if os.path.isdir(path):
        print "You've specified a directory.  I'll tar it up before uploading"
        print "OK? [y/N] ", 
        if raw_input().lower() != 'y':
            print "Ok, nevermind."
            return

        tmpdir = tempfile.mkdtemp(prefix="mc_gen")
        print "tmpdir is", tmpdir
        print "Making tarball..."
        p = subprocess.Popen(["tar", "-cf", os.path.join(tmpdir, "world.tar"), "."],
                cwd=path)
        p.wait()
        if p.returncode != 0:
            print "***Error: tar failed"
            return

        print "OK."
        print "Compressing..."
        p = subprocess.Popen(["bzip2", "world.tar"],
                shell=False,
                cwd=tmpdir)
        p.wait()
        if p.returncode != 0:
            print "***Error: compress failed"
            return
        print "OK."

        print "Checking filesize..."
        s = os.stat(os.path.join(tmpdir, "world.tar.bz2"))
        if s.st_size > 10*1024*1024:
            print "***Error: Compressed world is too big"
            return 1
        print "OK."


        uid = uuid.uuid4()
        print uid
        s3 = S3Connection()
        bucket = s3.get_bucket("overviewer-worlds")
        k = Key(bucket)
        k.key = "%s.tar.bz2" % uid
        print "Uploading to S3..."
        k.set_contents_from_filename(os.path.join(tmpdir, "world.tar.bz2"), reduced_redundancy=True)
        print "OK."
        k.make_public()

        urlbase = "https://s3.amazonaws.com/overviewer-worlds/"
        url = urlbase + k.key
        print "World is now available at:", url
   
        data = dict()
        data['uuid'] = uid
        data['world_url'] = url
        sdb = SDBConnection()
        db = sdb.get_domain("overviewerdb")
        if not db.put_attributes(uid, data):
            print "***Error: Failed to update the db"
            return 1
        print "Ok. DB updated"
    elif os.path.isfile(path):
        print "You've specified a file. I'll upload it without modification"
        print "OK? [y/N] ", 
        if raw_input().lower() != 'y':
            print "Ok, nevermind."
            return
        
        uid = uuid.uuid4()
        print uid
        s3 = S3Connection()
        bucket = s3.get_bucket("overviewer-worlds")
        k = Key(bucket)
        k.key = "%s.tar.bz2" % uid
        print "Uploading to S3..."
        k.set_contents_from_filename(path, reduced_redundancy=True)
        print "OK."
        k.make_public()

        urlbase = "https://s3.amazonaws.com/overviewer-worlds/"
        url = urlbase + k.key
        print "World is now available at:", url
   
        data = dict()
        data['uuid'] = uid
        data['world_url'] = url
        sdb = SDBConnection()
        db = sdb.get_domain("overviewerdb")
        if not db.put_attributes(uid, data):
            print "***Error: Failed to update the db"
            return 1
        print "Ok. DB updated"
    else:
        print "Sorry, I can't find that."
        return 1
Example #10
def generate():
    from boto.sqs.connection import SQSConnection
    sqs = SQSConnection()

    queue = sqs.get_queue("overviewer-genfromseed")
    sdb = SDBConnection()
    db = sdb.get_domain("overviewerdb")

    message = queue.read(visibility_timeout=15)
    if not message:
        print "Nothing in the queue.  Please try again later"
        return 0
    uid = message.get_body()
    print "Got a job for %r" % uid
    data = db.get_item(uid)

    if 'target_spawn' in data:
        data['target_spawn'] = map(int, data['target_spawn'])
    print data

    # this script generates maps from seeds;
    # if this map is already generated, just update the db and skip re-generation
    if data['generated'] == 'True':
        print "---Warning: I was asked to generate this map, but it's already generated"
        print "            I'm going to update the db, but not re-generate"
        data['generated'] = True
        data.save()
        queue.delete_message(message)
        return 


    # check config options
    if not os.path.isfile(config.minecraft_server):
        raise Exception("minecraft_server isn't configured")

    # with a generous amount of rounding up, assume it'll take 5 minutes to generate the map
    message.change_visibility(5*60)

    tmpdir = tempfile.mkdtemp(prefix="mc_gen")

    # create a server.properties file with our seed
    with open(os.path.join(tmpdir, "server.properties"), "w") as f:
        f.write("level-seed=%s" % data['seed'])

    p = subprocess.Popen(["java", "-jar",
        config.minecraft_server, "-noGUI"],
        shell=False,
        stdin=subprocess.PIPE,
        cwd=tmpdir)

    p.stdin.write("stop\n")
    p.stdin.close()
    p.wait()
    print ""
    print "Minecraft server exited with %r" % p.returncode
    print "World resided in %r" % tmpdir

    # if we want a specific spawn, we need to rewrite the level.dat file,
    # remove the old region files, and restart the server
    if 'target_spawn' in data:
        s = data['target_spawn']
        leveldat = redstone.NBT.parse_from_file(os.path.join(tmpdir, "world", "level.dat"))
        root = leveldat.root
        root['Data']['SpawnX'].set_integer(int(s[0]))
        root['Data']['SpawnY'].set_integer(int(s[1]))
        root['Data']['SpawnZ'].set_integer(int(s[2]))
        leveldat.write_to_file(os.path.join(tmpdir, "world", "level.dat"))

        shutil.rmtree(os.path.join(tmpdir,"world","region"))
        p = subprocess.Popen(["java", "-jar",
            config.minecraft_server, "-noGUI"],
            shell=False,
            stdin=subprocess.PIPE,
            cwd=tmpdir)

        p.stdin.write("stop\n")
        p.stdin.close()
        p.wait()
        print "Minecraft server exited with %r" % p.returncode

    message.change_visibility(5*60)
    print "Making tarball..."
    p = subprocess.Popen(["tar", "-cf", "world.tar", "world/"],
        cwd=tmpdir)
    p.wait()
    if p.returncode != 0:
        print "***Error: tar failed"
        return
    print "OK."
    print "Compressing..."
    p = subprocess.Popen(["bzip2", "world.tar"],
            shell=False,
            cwd=tmpdir)
    p.wait()
    if p.returncode != 0:
        print "***Error: compress failed"
        return
    print "OK."

    s3 = S3Connection()
    bucket = s3.get_bucket("overviewer-worlds")
    k = Key(bucket)
    k.key = "%s.tar.bz2" % uid
    print "Uploading to S3..."
    k.set_contents_from_filename(os.path.join(tmpdir, "world.tar.bz2"), reduced_redundancy=True)
    print "OK."
    k.make_public()

    urlbase = "https://s3.amazonaws.com/overviewer-worlds/"
    url = urlbase + k.key
    print "World is now available at:", url

    data['generated'] = True
    data['world_url'] = url
    data.save()
    print "Database updated."

    queue.delete_message(message)

    print "All done!"
Example #11
class weeklyScore(object):
    
    def __init__(self,api):
        self.api = api
        self.conn = SDBConnection(keys.aws.AWS_ACCESS_KEY_ID, keys.aws.AWS_SECRET_KEY)
        self.domains = {}
        self.statDict = {}

        for i in range (0,16):
            suffix = '0'+hex(i)[2]
            self.domains[suffix] = self.conn.get_domain('stats_dev_%s' % (suffix))
    
    
    
    def scoreQuery(self,domain,t0,t1,uri,points):
        
        query = 'select uri,uid,cde from `%s` where uri = "%s" and bgn >= "%s" and bgn <= "%s"' % (domain.name, uri, t0.isoformat(), t1.isoformat())
        stats = domain.select(query)
        
        for stat in stats:
            
            if 'uid' not in stat or 'uri' not in stat or 'cde' in stat:
                continue
            key = stat['uid']
            try:
                self.statDict[key] += points
            except KeyError:
                self.statDict[key] = points

    
    def segmentationReport(self,t0,t1,bMonth,percentage=False):
        self.statDict = {}
        
        scores = {
            #2 points for likes
            '/v1/stamps/likes/create.json': 2,
            '/v1/stamps/likes/remove.json': -2,
            
            #2 points for todos
            '/v1/todos/create.json': 2,
            '/v1/todos/remove.json': -2,
            
            #3 points for comments
            '/v1/comments/create.json': 3,
            '/v1/comments/remove.json': -3,
            
            #3 points for adding a friend
            '/v1/friendships/create.json': 3,
            '/v1/friendships/remove.json': -3,
            
            #2 points for taking an action
            '/v1/actions/complete.json': 2,
            
            #5 points for a stamp
            '/v1/stamps/create.json': 5,
            '/v1/stamps/remove.json': -5,
            
            #5 points for an invite
            '/v1/friendships/invite.json': 5,
            
            }
        
        pool = Pool(30)
        
        for i in range (0,16):
            suffix = '0'+hex(i)[2]
            for uri in scores:
                pool.spawn(self.scoreQuery,self.domains[suffix],t0,t1,uri,scores[uri])
            
        pool.join()
        
        power = 0
        active = 0
        irregular = 0
        dormant = 0
        
        agg_score = 0
        
        if bMonth:
            POWER = 100
            AVERAGE = 10
        else: #Looking at a week
            POWER = 25
            AVERAGE = 5
            
        for uid in self.statDict:
            agg_score += self.statDict[uid]
            
            if self.statDict[uid] >= POWER:
                power += 1
            elif self.statDict[uid] >= AVERAGE:
                active += 1
            elif self.statDict[uid] > 0:
                irregular += 1
            else:
                dormant +=1

           
        users = len(self.statDict)
        
        if users == 0:
            return 0,0,0,0,0,0
        
        if percentage:
            power = float(power)/users*100
            active = float(active) / users*100
            irregular = float(irregular)/users*100
            dormant = float(dormant)/users*100

        mean_score = float(agg_score) / users
               
        
        return users,power,active,irregular,dormant,mean_score
        
        
        
        
Example #12
class SimpleDBConnection(object):
    def __init__(self, stack):
        try:
            self.conn = SDBConnection(keys.aws.AWS_ACCESS_KEY_ID, keys.aws.AWS_SECRET_KEY)
        except SDBResponseError:
            print "SimpleDB Connection Refused"
            raise

        self._stack = stack

    @lazyProperty
    def domains(self):
        result = []
        for i in range(16):
            suffix = "0" + hex(i)[2]
            name = "%s_%s" % (self._stack, suffix)
            result.append(self.conn.get_domain(name))
        return result

    def _queryParams(self, params, bgn, end):
        queryParams = []
        if params is not None:
            for k, v in params.items():
                queryParams.append("%s='%s'" % (k, v))

        if bgn is not None:
            queryParams.append("end >= '%s'" % bgn.isoformat())

        if end is not None:
            queryParams.append("end < '%s'" % end.isoformat())

        return queryParams

    def _deserialize(self, item):
        def isoparse(s):
            # Credit: http://blog.client9.com/2010/02/fast-iso-datetime-format-parsing-in.html
            try:
                return datetime(int(s[0:4]), int(s[5:7]), int(s[8:10]), int(s[11:13]), int(s[14:16]), int(s[17:19]))
            except:
                return None

        # Convert datetimes
        if "bgn" in item:
            item["bgn"] = isoparse(item["bgn"])

        if "end" in item:
            item["end"] = isoparse(item["end"])

        # Convert numbers
        if "dur" in item:
            item["dur"] = int(item["dur"])

        return item

    def query(self, params=None, fields=None, bgn=None, end=None):

        queryFields = "*"
        if fields is not None:
            queryFields = ",".join(fields)

        queryParams = self._queryParams(params, bgn, end)

        pool = Pool(len(self.domains))

        data = []

        def run(domain, query):
            data.extend(domain.select(query))

        for domain in self.domains:

            # Build query
            query = "select %s from `%s`" % (queryFields, domain.name)
            if len(queryParams) > 0:
                query += " where %s" % (" and ".join(queryParams))

            # Run it
            pool.spawn(run, domain, query)

        pool.join()

        results = []
        for item in data:
            results.append(self._deserialize(item))

        return results

    def count(self, params=None, bgn=None, end=None):

        queryParams = self._queryParams(params, bgn, end)

        pool = Pool(len(self.domains))

        data = []

        def run(domain, query):
            data.extend(domain.select(query))

        for domain in self.domains:

            # Build query
            query = "select count(*) from `%s`" % domain.name
            if len(queryParams) > 0:
                query += " where %s" % (" and ".join(queryParams))

            # Run it
            pool.spawn(run, domain, query)

        pool.join()

        count = 0
        for item in data:
            count += int(item.pop("Count", 0))
        return count
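
A hypothetical query against the sharded domains of the class above, assuming a stack named "bowser" whose bowser_00 … bowser_0f domains exist; the URI and date window are made up:

from datetime import datetime

sdb = SimpleDBConnection("bowser")
hits = sdb.count(params={'uri': '/v1/stamps/create.json'},
                 bgn=datetime(2012, 6, 1), end=datetime(2012, 6, 8))
rows = sdb.query(params={'uri': '/v1/stamps/create.json'},
                 fields=['uid', 'dur', 'bgn', 'end'],
                 bgn=datetime(2012, 6, 1), end=datetime(2012, 6, 8))
print hits, len(rows)
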
Example #13
def render():
    
    # check config options
    if not os.path.isdir(config.overviewer_root):
        raise Exception("overviewer_root isn't configured")
    if not os.path.isfile(config.upload_ssh_key):
        raise Exception("upload_ssh_key isn't configured")

    sqs = SQSConnection()
    queue = sqs.get_queue("overviewer-render")
    sdb = SDBConnection()
    db = sdb.get_domain("overviewerdb")
    
    message = queue.read(visibility_timeout=15)
    if not message:
        print "Nothing in the queue.  Please try again later"
        return 0

    render_uuid = message.get_body()
    print "render uuid:", render_uuid
    
    render_item = db.get_item(str(render_uuid))
    if not render_item:
        print "***Error can't find a world with that UUID"
        return 1
    if render_item.get("rendered") != "False":
        print "***Error: this render has already been started"
        print "          state", render_item.get("rendered")
        return 1


    world_uuid = render_item.get("world_uuid")
    world_item = db.get_item(str(world_uuid))
    print "world uuid:", world_uuid


    url = world_item.get("world_url", None)
    if not url:
        print "***Error: can't find worldurl"
        return 1

    message.change_visibility(3*60)
    render_item['rendered'] = "inprogress"
    render_item.save()

    print "Getting map..." 
    map_url = urllib2.urlopen(url)
    print "OK."

    tmpdir = tempfile.mkdtemp(prefix="mc_gen")
    fobj = open(os.path.join(tmpdir, "world.tar.bz2"), "w")
    print "Downloading map to %s..." % tmpdir
    shutil.copyfileobj(map_url, fobj)
    fobj.close()
    print "OK."

    print "Uncompressing..."
    os.mkdir(os.path.join(tmpdir, "world"))
    p = subprocess.Popen(["tar", "-jxf", os.path.join(tmpdir,"world.tar.bz2")],
            cwd=os.path.join(tmpdir,"world"))
    p.wait()
    if p.returncode != 0:
        print "***Error: decompressing"
        return 1

    # find the exact directory containing level.dat
    def findLevel(start):
        # os.walk already descends into every subdirectory, so a single pass
        # over the tree is enough to locate level.dat anywhere under start
        for root, dirs, files in os.walk(start):
            if "level.dat" in files:
                return root
        raise Exception("Failed to find level.dat")

    real_world_dir = findLevel(os.path.join(tmpdir, "world"))

    # TODO message.change_visibility(10*60)
    p = subprocess.Popen(["python2", 
        os.path.join(config.overviewer_root, "overviewer.py"),
        real_world_dir,
        os.path.join(tmpdir, "output_dir"),
        "--rendermode=smooth-lighting"])
    p.wait()
    if p.returncode != 0:
        print "***Error: rendering"
        return 1

    print "Making tarball..."
    p = subprocess.Popen(["tar", "-cf", 
        os.path.join(tmpdir, "render.tar"),
        "."],
        cwd=os.path.join(tmpdir, "output_dir"))

    p.wait()
    if p.returncode != 0:
        print "***Error: tar failed"
        return
    print "OK."
    print "Compressing..."
    p = subprocess.Popen(["bzip2", "render.tar"],
            shell=False,
            cwd=tmpdir)
    p.wait()
    if p.returncode != 0:
        print "***Error: compress failed"
        return
    print "OK."


    message.change_visibility(5*60)
    print "Uploading to overviewer.org..."
    p = subprocess.Popen(["ssh",
        "-l", "upload",
        "-i", config.upload_ssh_key,
        "new.overviewer.org",
        render_uuid],
        stdin=subprocess.PIPE)
    fobj = open(os.path.join(tmpdir, "render.tar.bz2"))
    shutil.copyfileobj(fobj, p.stdin)
    p.stdin.close()
    p.wait()
    if p.returncode != 0:
        print "***Error: uploading"
        return 1
    print "OK"

    render_item['rendered'] = "True"
    render_item.save()
    print "Database updated"
    queue.delete_message(message)
Example #14
class SimpleDBConnection(object):
    
    def __init__(self):
        try:
            self.conn = SDBConnection(keys.aws.AWS_ACCESS_KEY_ID, keys.aws.AWS_SECRET_KEY)
        except SDBResponseError:
            print "SimpleDB Connection Refused"
            raise
        
        self.statList = []
        self.statCount = 0
        
        self.v1_prod_domains = {}
        self.v2_prod_domains = {}
        self.v2_dev_domains = {}
        self.v2_stress_domains = {}
        
        for i in range (0,16):
            suffix = '0'+hex(i)[2]
            
            self.v1_prod_domains[suffix] = self.conn.get_domain('stats_prod_%s' % suffix)
            self.v2_prod_domains[suffix] = self.conn.get_domain('bowser_%s' % suffix)
            self.v2_dev_domains[suffix] = self.conn.get_domain('stats_dev_%s' % suffix)
            self.v2_stress_domains[suffix] = self.conn.get_domain('stress_%s' % suffix)
    
    # Perform a query and write to an optional list, set or dict (key is first field specified)
    def execute(self, domain, query_dict, fields, bgn=None, end=None, destination=None, limit=None, duration=None):

        query = 'select %s from `%s`' % (fields,domain.name)
        transition = 'where'

        for key,value in query_dict.items():
            if "!=" in value:
                query = '%s %s %s != "%s"' %  (query, transition, key, value[2:])
            else:
                query = '%s %s %s="%s"' % (query, transition, key, value)
            transition = 'and'
        
        if bgn is not None: 
            query = '%s %s bgn > "%s"' % (query, transition, bgn.isoformat())
            transition = 'and'
        
        if end is not None:
            query = '%s %s bgn < "%s"' % (query, transition, end.isoformat())
            transition = 'and'
            
        init_results = domain.select(query)
        
        results = reveal(init_results,limit=limit,duration=duration)
        
        if destination is not None:
            if fields == 'count(*)':
                # ints are immutable, so accumulate on the shared instance counter;
                # rebinding the local name would never reach the caller
                self.statCount += results[0]
            else:
                destination.extend(results)
        
        return results
    
    def query (self, stack, query_dict, fields='*', bgn=None, end=None, limit=None, duration=None):
        
        if fields == 'count(*)':
            self.statCount = 0
            destination = self.statCount
        else:
            self.statList = []
            destination = self.statList
            
        if stack == 'bowser':
            domains = self.v2_prod_domains
        else:
            domains = self.v2_dev_domains
        
        pool = Pool(16)
        
        for i in range (0,16):
            suffix = '0'+hex(i)[2]
            
            pool.spawn(self.execute, domains[suffix], query_dict, fields=fields, bgn=bgn, end=end, destination=destination, limit=limit/16 + 1 if limit is not None else None, duration=duration)

        pool.join()
        
        if fields == 'count(*)':
            return self.statCount
        else:
            return self.statList[:limit]
        
Example #15
class logsQuery(object):
    
    def __init__(self,domain_name=None):
        self.conn = SDBConnection(keys.aws.AWS_ACCESS_KEY_ID, keys.aws.AWS_SECRET_KEY)
        self.domains = {}
        self.statSet  = set()
        self.statDict = {}
        self.errDict = {}
        self.statCount = 0
        self.statCountByNode = {}
        self.statTimeByNode = {}

        if domain_name is None:
            domain_name = 'stress'
            
        for i in range (0,16):
            suffix = '0'+hex(i)[2]
            self.domains[suffix] = self.conn.get_domain('%s_%s' % (domain_name,suffix))
            
            

    def performQuery(self,domain,fields,uri,t0,t1,byNode=False):
        
        if uri is not None:
            query = 'select %s from `%s` where uri = "%s" and bgn >= "%s" and bgn <= "%s"' % (fields, domain.name, uri, t0.isoformat(), t1.isoformat())
        else:
            query = 'select %s from `%s` where bgn >= "%s" and bgn <= "%s"' % (fields, domain.name, t0.isoformat(), t1.isoformat())

        stats = domain.select(query)
        
        for stat in stats:
            
            try:
                if fields == 'count(*)':
                    self.statCount += int(stat['Count'])
                elif byNode:
                    bgn = stat['bgn'].split('T')
                    end = stat['end'].split('T')
                    if end[0] == bgn[0]:
                        bgn = bgn[1].split(':')
                        end = end[1].split(':')
                        hours = float(end[0]) - float(bgn[0])
                        minutes = float(end[1]) - float(bgn[1])
                        seconds = float(end[2]) - float(bgn[2])
                        diff = seconds + 60*(minutes + 60*hours)

                        try:
                            self.statCountByNode[stat['nde']] += 1
                            self.statTimeByNode[stat['nde']] += diff
                        except KeyError:
                            self.statCountByNode[stat['nde']] = 1
                            self.statTimeByNode[stat['nde']] = diff
                else:
                    self.statSet.add(stat[fields])
            except KeyError:
                pass
        
        
    def activeUsers(self,t0, t1):
        self.statSet = set()
        
        pool = Pool(16)
        
        for i in range (0,16):
            suffix = '0'+hex(i)[2]
            #Just use collections inbox for speed
            pool.spawn(self.performQuery,self.domains[suffix],'uid',"/v1/activity/unread.json",t0,t1)
        
        pool.join()
        
        return len(self.statSet)
    
    def latencyQuery(self,domain,t0,t1,uri,blacklist,whitelist):
        if uri is None:
            query = 'select uri,frm_scope,bgn,end,cde,uid from `%s` where uri like "/v1/%%" and bgn >= "%s" and bgn <= "%s"' % (domain.name,t0.isoformat(),t1.isoformat())
        else:
            query = 'select uri,frm_scope,bgn,end,cde,uid from `%s` where uri = "%s" and bgn >= "%s" and bgn <= "%s"' % (domain.name,uri,t0.isoformat(),t1.isoformat())
        stats = domain.select(query)
        
        for stat in stats:
            if 'uid' in stat and stat['uid'] in blacklist:
                continue
            elif len(blacklist) == 0 and len(whitelist) > 0 and 'uid' in stat and stat['uid'] not in whitelist:
                continue
            bgn = stat['bgn'].split('T')
            end = stat['end'].split('T')
            if end[0] == bgn[0]:
                bgn = bgn[1].split(':')
                end = end[1].split(':')
                hours = float(end[0]) - float(bgn[0])
                minutes = float(end[1]) - float(bgn[1])
                seconds = float(end[2]) - float(bgn[2])
                diff = seconds + 60*(minutes + 60*hours)
                key = stat['uri']

                if 'frm_scope' in stat:
                    key = "%s?scope=%s" % (stat['uri'], stat['frm_scope'])
                
                if 'cde' in stat:
                    errType = stat['cde'][0]
                    try:
                        self.errDict['%s-%s' % (key,errType)] +=1
                    except KeyError:
                        self.errDict['%s-%s' % (key,errType)] = 1
                else:
                    try:
                        self.statDict[key].append(diff)
                    except KeyError:
                        self.statDict[key] = [diff]
            
    def latencyReport(self,t0,t1,uri=None,blacklist=[],whitelist=[]):
        self.statDict = {}
        self.errDict = {}
        
        pool = Pool(16)
        
        for i in range (0,16):
            suffix = '0'+hex(i)[2]
            pool.spawn(self.latencyQuery,self.domains[suffix],t0,t1,uri,blacklist,whitelist)
            
        pool.join()
        
        for uri in self.statDict:
            sum = 0
            max = 0
            for num in self.statDict[uri]:
                sum += num
                if num > max:
                    max = num
            mean = float(sum) / len(self.statDict[uri])
            sorte = sorted(self.statDict[uri])
            median = percentile(sorte,.5)
            ninetieth = percentile(sorte,.9)
            n = len(self.statDict[uri])
            errors4 = 0
            errors5 = 0
            if uri+'-4' in self.errDict:
                errors4 = self.errDict[uri+'-4']
            if uri+'-5' in self.errDict:
                errors5 = self.errDict[uri+'-5']
            
            self.statDict[uri] = '%.3f' % mean,'%.3f' % median,'%.3f' % ninetieth, '%.3f' % max, n, errors4,errors5
            
        return self.statDict
    
    
    def dailyLatencyReport(self,t0,t1,uri,blacklist,whitelist):
        self.statDict = {}
        diff = (t1 - t0).days + 1
        output = []
        for i in range (0,diff):
            t2 = today(t0+datetime.timedelta(days=i+1))
            t3 = today(t0+datetime.timedelta(days=i+2))
            self.statDict = {}
            self.errDict = {}
            
            pool = Pool(16)
            
            for k in range (0,16):
                suffix = '0'+hex(k)[2]
                pool.spawn(self.latencyQuery,self.domains[suffix],t2,t3,uri,blacklist,whitelist)
                
            pool.join()
            
            for uri in self.statDict:
                sum = 0
                max = 0
                for num in self.statDict[uri]:
                    sum += num
                    if num > max:
                        max = num
                mean = float(sum) / len(self.statDict[uri])
                sorte = sorted(self.statDict[uri])
                median = percentile(sorte,.5)
                ninetieth = percentile(sorte,.9)
                n = len(self.statDict[uri])
                errors4 = 0
                errors5 = 0
                if uri+'-4' in self.errDict:
                    errors4 = self.errDict[uri+'-4']
                if uri+'-5' in self.errDict:
                    errors5 = self.errDict[uri+'-5']
                
                output.append((t2.date().isoformat(),'%.3f' % mean,'%.3f' % median,'%.3f' % ninetieth, '%.3f' % max, n, errors4,errors5))
                
        return output
    
    def qpsReport(self,time,interval,total_seconds):
        blacklist=[]
        whitelist=[]
        

        count_report = {}
        mean_report = {}
        t0 = time - datetime.timedelta(0,total_seconds)
        for i in range (0,total_seconds/interval):
            self.statCountByNode = {}
            self.statTimeByNode = {}
            
            t1 = t0 + datetime.timedelta(0,i*interval)
            t2 = t0 + datetime.timedelta(0,(i+1)*interval)
            
            pool = Pool(32)
        
            for j in range (0,16):
                suffix = '0'+hex(j)[2]
                
                pool.spawn(self.performQuery,self.domains[suffix],'nde,bgn,end',None,t1,t2,byNode=True)
    
            pool.join()
            

            for node in self.statCountByNode:
                count = self.statCountByNode[node]
                mean = float(self.statTimeByNode[node])/count
                try:
                    while len(count_report[node]) < i:
                        count_report[node].insert(0,0)
                        mean_report[node].insert(0,0)
                    count_report[node].insert(0,"%.3f" % (float(count)/interval))
                    mean_report[node].insert(0,"%.3f" % (mean))
                except KeyError:
                    count_report[node] = [0]*i
                    mean_report[node] = [0]*i
                    count_report[node].insert(0,"%.3f" % (float(count)/interval))
                    mean_report[node].insert(0,"%.3f" % (mean))
                    
        for node in count_report:
            while len(count_report[node]) < total_seconds/interval:
                count_report[node].insert(0,0)
                mean_report[node].insert(0,0)
        
        return count_report,mean_report
        
    def customQuery(self,t0,t1,fields,uri):
        
        if fields == 'count(*)':
            self.statCount = 0
        else:
            self.statSet = set()
        
        pool = Pool(16)
        
        for i in range (0,16):
            suffix = '0'+hex(i)[2]
            
            pool.spawn(self.performQuery,self.domains[suffix],fields,uri,t0,t1)

        pool.join()
        
        if fields == 'count(*)':
            return self.statCount
        else:
            return len(self.statSet)
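
A hypothetical run of the logsQuery helper above against the default 'stress' domains; the one-hour window and the URI are made up:

import datetime

q = logsQuery()
t1 = datetime.datetime.utcnow()
t0 = t1 - datetime.timedelta(hours=1)
print q.activeUsers(t0, t1)                                        # distinct uids seen on the unread-activity endpoint
print q.customQuery(t0, t1, 'count(*)', '/v1/stamps/create.json')  # request count for one URI
print q.latencyReport(t0, t1, uri='/v1/stamps/create.json')        # mean/median/90th/max latency, count, 4xx/5xx
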
Example #16
class Catalog:
    DOMAIN = "cloudvision"
    CATALOG_ITEM = "catalog"
    NEXT_TABLE_ID_ATTR = "next_table_id"
    TABLES_ATTR = "tables"
    TABLES_DELIM = ","
    TABLES_ASSIGN = ":"
    TABLE_NAME_ATTR = "name"

    def __init__(self):
        self.conn = None
        self.dom = None
    
    def connect(self):
        self.conn = SDBConnection()
        self.dom = self.conn.get_domain(self.DOMAIN)
    
    def get_tables(self):
        tables = [ ]
        tables_str = self.get_tables_assign()
        for t in tables_str:
            tname, tid = t.split(self.TABLES_ASSIGN)
            tid = int(tid)
            tables.append(Table(tid, tname))
        return tables
    
    def get_tables_assign(self):
        catalog = self.dom.get_item(self.CATALOG_ITEM)
        if len(catalog[self.TABLES_ATTR]) > 0:
            return catalog[self.TABLES_ATTR].split(self.TABLES_DELIM)
        return [ ]
                
    def get_table_by_name(self, name):
        assert name is not None
        tables = self.get_tables()
        for t in tables:
            if t.get_name() == name:
                return t
        return None
    
    def get_table_by_id(self, id):
        assert id is not None
        tables = self.get_tables()
        for t in tables:
            if t.get_id() == id:
                return t
        return None
    
    def put_table(self, name):
        assert name is not None
        catalog = self.dom.get_item(self.CATALOG_ITEM)
        tables = self.get_tables_assign()
        for t in tables:
            tname, tid = t.split(self.TABLES_ASSIGN)
            assert tname != name
        id = int(catalog[self.NEXT_TABLE_ID_ATTR])
        tables.append(self.TABLES_ASSIGN.join([name, str(id)]))
        self.dom.put_attributes(self.CATALOG_ITEM,
                                { self.TABLES_ATTR : self.TABLES_DELIM.join(tables),
                                  self.NEXT_TABLE_ID_ATTR : str(id + 1) })
        self.dom.put_attributes(str(id), { })
        return Table(id, name)
        
    def del_table(self, table):
        assert table is not None
        catalog = self.dom.get_item(self.CATALOG_ITEM)
        tables = self.get_tables_assign()
        for i in range(len(tables)):
            tname, tid = tables[i].split(self.TABLES_ASSIGN)
            tid = int(tid)
            if tname == table.get_name():
                del tables[i]
                break
        else:
            assert False
        self.dom.put_attributes(self.CATALOG_ITEM,
                                { self.TABLES_ATTR : self.TABLES_DELIM.join(tables) })
        if table.get_id() == int(catalog[self.NEXT_TABLE_ID_ATTR]) - 1:
            self.dom.put_attributes(self.CATALOG_ITEM, 
                                    { self.NEXT_TABLE_ID_ATTR : str(table.get_id()) })
        t = self.dom.get_item(str(table.get_id()))
        assert t is not None
        self.dom.delete_item(t)
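
A hypothetical session with the Catalog class above, assuming the cloudvision domain and its catalog item (with next_table_id and tables attributes) already exist and that the Table class provides the get_id/get_name accessors used above:

catalog = Catalog()
catalog.connect()

t = catalog.put_table("events")                # registers "events" under the next free id
print catalog.get_table_by_name("events").get_id()
print [x.get_name() for x in catalog.get_tables()]
catalog.del_table(t)                           # removes the mapping and the per-table item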