def _persist_to_sdb(self, spider, stats):
    """Write the spider's crawl stats to SimpleDB as a single item.

    The item name combines the spider's domain with the crawl timestamp,
    so repeated crawls of the same domain produce distinct items.
    """
    ts = self._get_timestamp(spider).isoformat()
    item_name = "%s_%s" % (spider.domain_name, ts)
    # Convert every stat value to an SDB-safe representation.
    attrs = dict((k, self._to_sdb_value(v, k)) for k, v in stats.iteritems())
    attrs["domain"] = spider.domain_name
    attrs["timestamp"] = self._to_sdb_value(ts)
    connect_sdb().put_attributes(self._sdbdomain, item_name, attrs)
def main(domain, key, deployment): sdbconn = boto.connect_sdb() maxtime = int(time()) + 5 * 60 chef_attribs = json.load(open('/root/chef_attribs.json')) traits = chef_attribs.get('traits', []) if isinstance(traits, basestring): traits = [traits] lockr_attribs = dict(((_, key) for _ in traits)) state = {} while 1: if not state.get('deregistered', False): try: r = sdbconn.delete_attributes(domain, key) if r is True: state['deregistered'] = True except boto.exception.SDBResponseError, e: if e.status == 404: state['deregistered'] = True else: raise e except socket.gaierror, e: logging.warn("socket gaierror: %s" % (e, )) except socket.error, e: logging.warn("socket error: %s" % (e, ))
def load(self, id):
    """Populate this object from the SimpleDB item named *id*.

    Returns without modifying the object when no such item exists.
    Fixed: the original `if not item: pass` did nothing, so a missing
    item still reached `_load_from_dict` as None and failed downstream.
    """
    sdb = boto.connect_sdb(**aws_config)
    domain = sdb.get_domain('awsapp')
    item = domain.get_item(id)
    if not item:
        return
    self._load_from_dict(item)
def delete(self):
    """Delete every item of this class from the 'awsapp' domain.

    This needs expanding.
    """
    sdb = boto.connect_sdb(**aws_config)
    domain = sdb.get_domain('awsapp')
    # NOTE(review): self.name is interpolated straight into the select
    # expression -- assumed to come from trusted code, not user input.
    query = "SELECT * FROM `awsapp` WHERE `__classname__` = '%s'" % self.name
    for item in domain.select(query):
        domain.delete_item(item)
def __init__(self, config=None):
    """Initialize the cluster, optionally copying known keys from *config*.

    Missing keys default to the empty string via defaultdict.
    """
    if config:
        config = defaultdict(str, config)
        for key in Cluster.keys:
            setattr(self, key, config[key])
    self.sdb = boto.connect_sdb(ACCESS_KEY_ID, SECRET_ACCESS_KEY)
    self.domain = util.getAWS(self.sdb, "clusters")
def __init__(self):
    """Connect to SimpleDB and prepare the tweet-processing state."""
    # Credentials and target domain come from the settings module.
    self.sdb = boto.connect_sdb(setting.AWS_KEY, setting.AWS_SECRET)
    self.db_tweets = self.sdb.get_domain(setting.SDB_DOMAIN)
    self.__keywords__ = get_filter_keywords(self.sdb)
    self.__cores__ = cpu_count()
    self.tweets_queue = Queue()
    self.__buffer__ = ""
def __init__(self, domain):
    """Read AWS credentials from the .boto config and connect to SimpleDB."""
    parser = ConfigParser.RawConfigParser()
    parser.read('.boto')
    access_key = parser.get('Credentials', 'aws_access_key_id')
    secret = parser.get('Credentials', 'aws_secret_access_key')
    self.conn = boto.connect_sdb(access_key, secret)
    self.domain = domain
def commentsubmit(req):
    """Store a submitted comment in SimpleDB and queue it for processing.

    Returns a JSON document indicating whether the SQS hand-off succeeded,
    plus the generated comment key.
    """
    import uuid
    from time import strftime

    sdb = boto.connect_sdb(AWSKey, AWSSecret)
    domain = sdb.get_domain('comment')
    form = req.form
    imagekey = form['imagekey']
    user = form['commentuser']
    cmt = form['comment']
    guid = str(uuid.uuid1())
    # Persist the comment itself, flagged as still being processed.
    item = domain.new_item(guid)
    item['submituser'] = user
    item['imagekey'] = imagekey
    item['comment'] = cmt
    item['status'] = "processing"
    item['submitdate'] = strftime("%Y-%m-%dT%H:%M:%S")
    item.save()
    # Hand the comment off to the asynchronous processing queue.
    sqsconn = SQSConnection(AWSKey, AWSSecret)
    q = sqsconn.get_queue('commentprocess')
    request = {}
    request['commentkey'] = guid
    request['submitdate'] = strftime("%Y-%m-%dT%H:%M:%S")
    request['comment'] = str(cmt)
    request['submituser'] = str(user)
    m = RawMessage()
    m.set_body(json.write(request))
    status = q.write(m)
    # boto returns the message object itself on a successful write.
    response = {}
    if status == m:
        response['complete'] = True
        response['commentkey'] = guid
    else:
        response['complete'] = False
    return json.write(response)
def connect(self):
    """
    Connect to AWS SDB

    Use this method first before attempting any other

    Fixed: the bare `except:` clauses also swallowed SystemExit and
    KeyboardInterrupt; narrowed to `except Exception`.

    @raise SDB_Connect:
    """
    try:
        if self.aws_access_key is None:
            self.sdbconn = boto.connect_sdb()
        else:
            self.sdbconn = boto.connect_sdb(self.aws_access_key,
                                            self.aws_secret_key)
    except Exception:
        # Fall back to the environment/boto-config credentials.
        try:
            self.sdbconn = boto.connect_sdb()
        except Exception:
            raise SDB_Connect()
def __init__(self):
    """Look up (or lazily create) the SimpleDB domain holding user OpenIDs."""
    self.domain_name = config["app_conf"]["sdb_user_openid_domain"]
    sdb = boto.connect_sdb()
    try:
        self.domain = sdb.get_domain(self.domain_name)
    except boto.exception.SDBResponseError:
        # Domain does not exist yet -- create it on first use.
        self.domain = sdb.create_domain(self.domain_name)
def main(): GLOBAL ENRICHED_ZMQ, SURROGATE_ZMQ args = parse_args() conf_file = args.conf_file news_file = args.news_file key_id = args.key_id secret = args.secret file_dir = args.file_dir ENRICHED_ZMQ = args.enrich_zmq_port SURROGATE_ZMQ = args.surrogate_zmq_port conn = boto.connect_sdb(key_id,secret) if file_dir is not None: files = os.listdir(file_dir) for f in files: f_name = file_dir + "/" + f enrich(conn,conf_file,f_name) print f_name, " Done" pass else: enrich(conn,conf_file,news_file) if conn: conn.close()
def insertYearData(freqTable, year, awskey, awssecret):
    """Store up to 255 word frequencies for *year* in the 'poplyrics' domain.

    SECURITY NOTE(review): this prints the AWS secret key to stdout.
    """
    print("using awskey:" + awskey + " and awssecret:" + awssecret)
    sdb = boto.connect_sdb(awskey, awssecret)
    print(sdb)
    if sdb:
        exists = sdb.lookup('poplyrics', validate=True)
        if not exists:
            domain = sdb.create_domain('poplyrics')
        else:
            domain = sdb.get_domain('poplyrics')
        print(domain)
        yearitems = domain.get_item(year)
        if not yearitems:
            item = domain.new_item(year)
            if freqTable:
                # SimpleDB items max out at 256 attributes; stop at 255.
                cnt = 0
                for key in freqTable:
                    if cnt > 254:
                        break
                    print(key + ": " + str(freqTable[key]))
                    item[key] = freqTable[key]
                    cnt += 1
                item.save()
        else:
            print("year already exists in domain")
def scrapeStation( station, url, post, timezone ):
    """Scrape a station's what's-on feed and store any new plays in SDB."""
    sdb = boto.connect_sdb()
    domain = sdb.create_domain("%s-whatson" % station )
    try:
        websvc_source = urllib.urlopen(url, post)
    except IOError as error:
        print("Error reading URL:", error)
        sys.exit(1)
    source = websvc_source.read()
    websvc_source.close()
    times = get_timestamps( source, timezone )
    last_song_time = get_last_song_time( domain )
    # print(last_song_time)
    # Filter anything older than last song time
    # TODO OrderedDict list comprehension
    filtered_times = collections.OrderedDict()
    for key, value in times.items():
        if key > last_song_time:
            filtered_times[key] = value
    # NOTE(review): this checks `times`, not `filtered_times` -- getPlays
    # may be called with an empty filtered dict; confirm that is intended.
    if times:
        plays = getPlays( filtered_times, url, post )
        if plays:
            store_in_cloud_db( domain, plays )
def connect_db_s3():
    """Populate module-level SimpleDB/S3 handles and cached user/book lists."""
    # connect to simple db
    global sdb
    sdb = boto.connect_sdb(AWS_ACCESS_KEY, AWS_SECRET_KEY)
    global domain_user
    domain_user = sdb.get_domain(USER_TABLE)
    # item.name=username user_type user_pass
    global domain_book
    domain_book = sdb.get_domain(BOOK_TABLE)
    # item.name = isbn book_name book_author book_desc
    #get list of registered users
    global username_list
    username_list = [item.name for item in domain_user]
    #get list of available books
    global book_isbn_list
    book_isbn_list = [item.name for item in domain_book]
    global book_entries
    book_entries = [dict(isbn=item.name, bookname=item['book_name'],
                         author=item['book_author'],
                         description=item['book_desc'])
                    for item in domain_book]
    # connect to s3
    global s3
    s3 = boto.connect_s3(AWS_ACCESS_KEY, AWS_SECRET_KEY)
    global bucket
    bucket = s3.get_bucket(BUCKET_NAME)
    session['admin_logged_in'] = False
def setUp(self):
    """Create fresh AWS service connections and helpers for each test."""
    self.s3_connection = connect_s3()
    self.sdb_connection = connect_sdb()
    self.sns_connection = connect_sns()
    self.sqs_connection = connect_sqs()
    self.gpg = GPG()
    self.event_handler = LockboxEventHandler()
def recent(req):
    """Return up to 10 recent pictures as JSON, with a paging cursor.

    Fixed: `nextsubmitdate` was only assigned inside the `count >= 10`
    branch, so queries yielding ten or fewer rows raised NameError at the
    comparison below. It now starts at the "no further page" sentinel.

    SECURITY NOTE(review): AWS credentials are hard-coded below; move them
    to configuration.
    """
    sdb = boto.connect_sdb('AKIAJHJXHTMTVQYVZJOA', '2YVZfFXQ7mhdFeUnMjcMOJ8uc5GBjz5LXhmh8LiM')
    domain = sdb.get_domain('picture')
    nextSubmitDate = repr(2900)
    form = util.FieldStorage(req, keep_blank_values=1)
    if form.has_key("nextsubmitdate"):
        nextSubmitDate = form.get("nextsubmitdate", None)
    query = "SELECT * FROM picture WHERE submitdate <= '" + nextSubmitDate + "' ORDER BY submitdate desc"
    result = domain.select(query)
    response = {}
    response['images'] = []
    count = 0
    nextsubmitdate = "2900"  # sentinel: no further page available
    for item in result:
        if count < 10:
            response['images'].append({})
            response['images'][-1]['imagekey'] = item.name
            response['images'][-1]['thumburl'] = "http://theimageproject.s3.amazonaws.com/" + item.name + "t.jpg"
            response['images'][-1]['submituser'] = item.get('submituser')
            response['images'][-1]['submitdate'] = item.get('submitdate')
            response['images'][-1]['description'] = item.get('description')
            response['images'][-1]['rating'] = float(item.get('rating')) / 100
            count = count + 1
        else:
            # The 11th row's date becomes the cursor for the next page.
            nextsubmitdate = item.get('submitdate')
            break
    if nextsubmitdate != "2900":
        response['nextsubmitdate'] = nextsubmitdate
    else:
        response['nextsubmitdate'] = ""
    req.content_type = "text/plain"
    req.send_http_header()
    return json.write(response)
def get_domain():
    """Get the storage domain (table) for the Bloomberg data.

    SECURITY NOTE(review): AWS credentials are hard-coded here; move them
    to configuration or the environment.
    """
    keyId = "AKIAJZ2N4UOI4TP4YBRQ"
    secretkey = "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"
    conn = boto.connect_sdb(keyId,secretkey)
    conn.create_domain(DOMAIN) # you can create repeatedly
    return conn.get_domain(DOMAIN)
def clear_items_simpledb():
    """Remove every item from the 'ec2_clients' SimpleDB domain."""
    sdb = boto.connect_sdb()
    dom = sdb.get_domain('ec2_clients')
    for item in dom:
        dom.delete_item(item)
def main():
    """Consume messages from the subscribe queue and process durations.

    Fixed: the final bare `except:` also trapped SystemExit; narrowed to
    `except Exception` (KeyboardInterrupt is already handled above it).
    """
    ap = args.get_parser()
    ap.add_argument('--test', action="store_true",
                    help="Test Flag, if contain this argument, it means a test case")
    arg = ap.parse_args()
    assert arg.sub, 'Need a queue to subscribe to'
    assert arg.pub, 'Need a queue to publish to'
    logs.init(arg)
    queue.init(arg)
    test_flag = arg.test
    conn = boto.connect_sdb()
    with queue.open(arg.sub, 'r') as inq:
        for m in inq:
            try:
                durationProcess(conn, m, arg.pub, test_flag)
            except KeyboardInterrupt:
                log.info('GOT SIGINT, exiting!')
                break
            except EmbersException as e:
                log.exception(e.value)
            except Exception:
                log.exception("Unexpected exception in process")
def deletearchive(args):
    """Delete a Glacier archive and, when bookkeeping is on, its SDB records.

    Fixed: the bookkeeping select/delete ran unconditionally, so with
    bookkeeping disabled `domain` was never bound and the function raised
    NameError after deleting the archive. The SDB cleanup is now guarded.
    """
    region = args.region
    vault = args.vault
    archive = args.archive
    bookkeeping = args.bookkeeping
    bookkeeping_domain_name = args.bookkeeping_domain_name
    domain = None
    if bookkeeping:
        sdb_conn = boto.connect_sdb(aws_access_key_id=args.aws_access_key,
                                    aws_secret_access_key=args.aws_secret_key)
        try:
            domain = sdb_conn.get_domain(bookkeeping_domain_name, validate=True)
        except boto.exception.SDBResponseError:
            domain = sdb_conn.create_domain(bookkeeping_domain_name)
    glacierconn = glaciercorecalls.GlacierConnection(args.aws_access_key,
                                                     args.aws_secret_key,
                                                     region=region)
    gv = glaciercorecalls.GlacierVault(glacierconn, vault)
    parse_response(gv.delete_archive(archive))
    if bookkeeping:
        # TODO: can't find a method for counting right now
        query = 'select * from `%s` where archive_id="%s"' % (
            bookkeeping_domain_name, archive)
        for item in domain.select(query):
            domain.delete_item(item)
def store_to_simpledb(instance_id):
    """Record an EC2 instance's metadata in the 'ec2_clients' SDB domain."""
    __platform__ = {'ami-60c77761': 'Ubuntu 12.04 LTS 64-bit',
                    'ami-44328345': 'Ubuntu 11.10 64-bit',
                    'ami-942f9995': 'Ubuntu 10.04 LTS 64-bit'}
    sdb = boto.connect_sdb()
    dom = sdb.get_domain('ec2_clients')
    # establish connection to ec2
    conn = boto.connect_ec2()
    reservations = conn.get_all_instances([instance_id])
    instance = reservations[0].instances[0]
    # The instance id doubles as the SDB item name.
    item_name = instance.id
    item_attrs = {'hostname': instance.private_dns_name,
                  'instance_id': instance.id,
                  'state': instance.state,
                  'ami_id': instance.image_id,
                  'platform': __platform__[instance.image_id],
                  'type': instance.instance_type,
                  'key_pair': instance.key_name,
                  'public_dns': instance.public_dns_name,
                  'private_ip': instance.private_ip_address,
                  'launch_time': launch_time(instance.launch_time),
                  'root_device': instance.root_device_name,
                  'region': instance.region.name,
                  'tag': instance.tags.keys()}
    dom.put_attributes(item_name, item_attrs)
def get_domain_connection(cls):
    """Return the (domain, connection) pair for the manager's database."""
    connection = boto.connect_sdb(aws_access_key_id = cls._manager.db_user,
                                  aws_secret_access_key = cls._manager.db_passwd,
                                  is_secure = cls._manager.enable_ssl)
    # validate=False skips the round-trip that checks the domain exists.
    domain = connection.lookup(cls._manager.db_name, validate=False)
    return domain, connection
def main():
    """Finds any entries that have a NULL LFID"""
    print(datetime.datetime.now())
    try:
        stations_yaml = open('stations.yaml')
    except IOError:
        print("Failed to load station list", file=sys.stderr)
        sys.exit(-1)
    # NOTE(review): yaml.load without an explicit Loader is unsafe on
    # untrusted input; stations.yaml is assumed to be trusted here.
    stations = yaml.load(stations_yaml)
    sdb = boto.connect_sdb()
    null_lfid = []
    for station_id in stations.keys():
        find_null_lfid( station_id, sdb, null_lfid )
    results = yaml.safe_dump(sorted(null_lfid), default_flow_style=False)
    print(results)
    print(datetime.datetime.now())
def main():
    """Predict events for each currency and publish them over ZMQ."""
    args = parse_args()
    predict_date = args.predict_day
    conf_f = args.conf_f
    cur_list = args.currency_list
    key_id = args.key_id
    secret = args.secret
    zmq_port = args.zmq_port
    conn = boto.connect_sdb(key_id, secret)
    all_config = json.load(open(conf_f))
    # Get the latest version of CONFIG.
    latest_version = max([int(k) for k in all_config.keys()])
    CONFIG = all_config[str(latest_version)]
    if cur_list is None:
        cur_list = CONFIG["currency_list"]
    with queue.open(zmq_port, 'w', capture=False) as outq:
        for currency in cur_list:
            prediction = predict(conn, currency, predict_date, CONFIG)
            # "0000" encodes "no event"; skip publishing those.
            if prediction and prediction["eventType"] != "0000":
                # push message to ZMQ
                outq.write(prediction)
def main(domain, key, deployment): sdbconn = boto.connect_sdb() maxtime = int(time()) + 5 * 60 chef_attribs = json.load(open('/root/chef_attribs.json')) traits = chef_attribs.get('traits', []) if isinstance(traits, basestring): traits = [traits] lockr_attribs = dict(((_, key) for _ in traits)) state = {} while 1: if not state.get('deregistered', False): try: r = sdbconn.delete_attributes(domain, key) if r is True: state['deregistered'] = True except boto.exception.SDBResponseError, e: if e.status == 404: state['deregistered'] = True else: raise e except socket.gaierror, e: logging.warn("socket gaierror: %s" % (e,)) except socket.error, e: logging.warn("socket error: %s" % (e,))
def sdb_revert_upload(uuid):
    """Delete every SDB item belonging to the upload identified by *uuid*."""
    sdb = boto.connect_sdb()
    cs = sdb.get_domain(SDB_DOMAIN_TO_USE)
    query = "SELECT * FROM `%s` WHERE `upload`='%s'" % (SDB_DOMAIN_TO_USE, uuid)
    for row in cs.select(query):
        row.delete()
def sdb_clear_cf():
    """Delete every item of type 'CF' from the working SDB domain."""
    sdb = boto.connect_sdb()
    cs = sdb.get_domain(SDB_DOMAIN_TO_USE)
    query = "SELECT * FROM `%s` WHERE `type`='CF'" % (SDB_DOMAIN_TO_USE)
    for row in cs.select(query):
        row.delete()
def sdb(self):
    """Lazily create and cache the boto SimpleDB connection."""
    if not hasattr(self, '_sdb'):
        settings = self.connection.settings_dict
        self._sdb = boto.connect_sdb(
            aws_access_key_id=settings['AWS_ACCESS_KEY_ID'],
            aws_secret_access_key=settings['AWS_SECRET_ACCESS_KEY'])
    return self._sdb
def connect_database(config_filename):
    """ connect to AWS SimpleDB using a configuration file.

    @param config_filename: a configuration filename
    @type config_filename: C{string}
    """
    global sdb
    cfg = ConfigParser.SafeConfigParser()
    cfg.read(config_filename)
    if cfg.has_section('AWS_authentication'):
        sdb = boto.connect_sdb(cfg.get('AWS_authentication', 'public_key'), \
                               cfg.get('AWS_authentication', 'secret_key'))
    else:
        raise Exception('Bad configuration file!')
    # Proxy settings are optional; any error while reading them disables
    # the proxy entirely rather than aborting the connection.
    if cfg.has_section('proxy_settings'):
        try:
            sdb.use_proxy = cfg.getboolean('proxy_settings', 'enable_proxy')
            sdb.protocol = cfg.get('proxy_settings', 'protocol')
            sdb.proxy = cfg.get('proxy_settings', 'proxy')
            sdb.proxy_port = cfg.getint('proxy_settings', 'port')
            sdb.proxy_user = cfg.get('proxy_settings', 'username')
            sdb.proxy_pass = cfg.get('proxy_settings', 'passwd')
        except Exception:
            sdb.use_proxy = False
            print 'Faulty proxy configuration. Proxy disabled.'
    else:
        sdb.use_proxy = False
    return cfg
def deletearchive(args):
    """Delete a Glacier archive and, when bookkeeping is on, its SDB records.

    Fixed: the bookkeeping select/delete ran unconditionally, so with
    bookkeeping disabled `domain` was never bound and the function raised
    NameError after deleting the archive. The SDB cleanup is now guarded.
    """
    region = args.region
    vault = args.vault
    archive = args.archive
    bookkeeping = args.bookkeeping
    bookkeeping_domain_name = args.bookkeeping_domain_name
    domain = None
    if bookkeeping:
        sdb_conn = boto.connect_sdb(aws_access_key_id=args.aws_access_key,
                                    aws_secret_access_key=args.aws_secret_key)
        try:
            domain = sdb_conn.get_domain(bookkeeping_domain_name, validate=True)
        except boto.exception.SDBResponseError:
            domain = sdb_conn.create_domain(bookkeeping_domain_name)
    glacierconn = glaciercorecalls.GlacierConnection(args.aws_access_key,
                                                     args.aws_secret_key,
                                                     region=region)
    gv = glaciercorecalls.GlacierVault(glacierconn, vault)
    parse_response(gv.delete_archive(archive))
    if bookkeeping:
        # TODO: can't find a method for counting right now
        query = 'select * from `%s` where archive_id="%s"' % (
            bookkeeping_domain_name, archive)
        for item in domain.select(query):
            domain.delete_item(item)
def save_rule_for_user(user_id, rule):
    """Store a rule in the DB to be used when matching

    A random unique ID i.e., item name, will tried to be generated,
    however if there are too many items in the DB then this may fail.

    Args:
        user_id: String of the user-ID to associate the rule with in the
            DB. For instances of the User class from Stormpath, this is
            usually obtained by calling `user.get_id()`.
        rule: Rule instance to be stored in the DB.

    Raises:
        UniqueItemNameError if a unique ID could not be generated.
    """
    sdb = boto.connect_sdb()
    # Save time by not sending a request that would ensure the domain exists.
    domain = sdb.get_domain('arrestnotify_rule', validate=False)
    for attempt in xrange(3):
        # Try to generate a unique item name.
        item_name = _generate_item_name()
        if domain.get_item(item_name) is None:
            break
        # TODO(bwbaugh|2014-06-15): Log that a retry had to be made.
    else:
        # Couldn't generate a unique name.
        raise UniqueItemNameError('Could not generate a unique ID.')
    item = domain.new_item(item_name)
    item['user_id'] = user_id
    item.update(rule._asdict())
    item.save()
def connect_domain(key, secret, name):
    ''' Return a connection to a simpledb domain for atlases.
    '''
    #reg = [reg for reg in regions() if reg.name == 'us-west-1'][0]
    sdb = connect_sdb(key, secret)  #, region=reg)
    return sdb.get_domain(name)
def sdb(self):
    """ A connection to SDB (created lazily and cached on the instance). """
    if self._sdb is None:
        self._sdb = boto.connect_sdb(self.aws.access_key_id,
                                     self.aws.secret_access_key)
    return self._sdb
def Lookup(domain, column, name, proto_name):
    """Fetch *name* from *domain*, decode *column* and print it as a proto."""
    conn = boto.connect_sdb()
    d = conn.get_domain(domain)
    item = d.get_item(name)
    data = simpledb._DecodeItem(item, column)
    # GetProtoByName returns a message class; instantiate and parse it.
    p = protodb.GetProtoByName(proto_name)()
    p.ParseFromString(data)
    print p
def mdb_connect(userId, secretKey, ipAddress):
    """Connect to an M/DB (SimpleDB-compatible) endpoint at *ipAddress*."""
    mdbRegion = boto.sdb.regioninfo.SDBRegionInfo(name='mdb', endpoint=ipAddress)
    return boto.connect_sdb(userId, secretKey, is_secure=False,
                            region=mdbRegion, path='/mdb/request.mgwsi')
def search(args, print_results=True): region = args.region vault = args.vault search_term = args.search_term if BOOKKEEPING: sdb_conn = boto.connect_sdb(aws_access_key_id=args.aws_access_key, aws_secret_access_key=args.aws_secret_key) domain_name = BOOKKEEPING_DOMAIN_NAME try: domain = sdb_conn.get_domain(domain_name, validate=True) except boto.exception.SDBResponseError: domain = sdb_conn.create_domain(domain_name) else: raise Exception(u"You have to enable bookkeeping in your settings \ before you can perform search.") search_params = [] table_title = "" if region: search_params += ["region='%s'" % (region, )] else: table_title += "Region\t" if vault: search_params += ["vault='%s'" % (vault, )] else: table_title += "Vault\t" table_title += "Filename\tArchive ID" search_params += [ "(filename like '" + search_term + "%' or description like '" + search_term + "%')" ] search_params = " and ".join(search_params) query = 'select * from `%s` where %s' % (BOOKKEEPING_DOMAIN_NAME, search_params) items = domain.select(query) if print_results: print table_title for item in items: # print item, item.keys() item_attrs = [] if not region: item_attrs += [item[u'region']] if not vault: item_attrs += [item[u'vault']] item_attrs += [item[u'filename']] item_attrs += [item[u'archive_id']] if print_results: print "\t".join(item_attrs) if not print_results: return items
def inventory(args):
    """Retrieve (or request) a Glacier vault inventory.

    With --force, just kicks off a new inventory-retrieval job. Otherwise
    renders the newest completed inventory, optionally books it into SDB,
    and re-requests a fresh one when the data is older than a day.
    """
    region = args.region
    vault = args.vault
    force = args.force
    BOOKKEEPING = args.bookkeeping
    BOOKKEEPING_DOMAIN_NAME = args.bookkeeping_domain_name
    glacierconn = glaciercorecalls.GlacierConnection(args.aws_access_key,
                                                     args.aws_secret_key,
                                                     region=region)
    gv = glaciercorecalls.GlacierVault(glacierconn, vault)
    if force:
        job = gv.retrieve_inventory(format="JSON")
        return True
    try:
        gv.list_jobs()
        # Collect the completed inventory-retrieval jobs.
        inventory_retrievals_done = []
        for job in gv.job_list:
            if job['Action'] == "InventoryRetrieval" and job[
                    'StatusCode'] == "Succeeded":
                d = dateutil.parser.parse(
                    job['CompletionDate']).replace(tzinfo=pytz.utc)
                job['inventory_date'] = d
                inventory_retrievals_done += [job]
        if len(inventory_retrievals_done):
            # Newest completed inventory first.
            list.sort(inventory_retrievals_done,
                      key=lambda i: i['inventory_date'], reverse=True)
            job = inventory_retrievals_done[0]
            print "Inventory with JobId:", job['JobId']
            job = glaciercorecalls.GlacierJob(gv, job_id=job['JobId'])
            inventory = json.loads(job.get_output().read())
            if BOOKKEEPING:
                sdb_conn = boto.connect_sdb(
                    aws_access_key_id=args.aws_access_key,
                    aws_secret_access_key=args.aws_secret_key)
                domain_name = BOOKKEEPING_DOMAIN_NAME
                try:
                    domain = sdb_conn.get_domain(domain_name, validate=True)
                except boto.exception.SDBResponseError:
                    domain = sdb_conn.create_domain(domain_name)
                d = dateutil.parser.parse(
                    inventory['InventoryDate']).replace(tzinfo=pytz.utc)
                item = domain.put_attributes("%s" % (d, ), inventory)
            # Re-request a fresh inventory when the data is over a day old.
            # NOTE(review): `d` here is the booked InventoryDate when
            # bookkeeping is on, otherwise the last job's CompletionDate.
            if ((datetime.datetime.utcnow().replace(tzinfo=pytz.utc)
                    - d).days > 1):
                gv.retrieve_inventory(format="JSON")
            render_inventory(inventory)
        else:
            # No finished retrieval yet -- start one.
            job = gv.retrieve_inventory(format="JSON")
    except Exception, e:
        print "exception: ", e
        print json.loads(e[1])['message']
def __init__(self, domain):
    """Load AWS credentials from .boto and open the SimpleDB connection."""
    self.writeFile("GetChords Init called")
    parser = ConfigParser.RawConfigParser()
    parser.read('.boto')
    access_key = parser.get('Credentials', 'aws_access_key_id')
    secret_key = parser.get('Credentials', 'aws_secret_access_key')
    self.conn = boto.connect_sdb(access_key, secret_key)
    self.writeFile("GetChords connection done")
    self.domain = domain
def __init__(self, key1="", key2=""):
    """Connect to SimpleDB, reading credentials from keys.txt when not given.

    SECURITY NOTE(review): this prints both keys to stdout.
    """
    if key1 == "" and key2 == "":
        # keys.txt lines look like "<label> <key>"; take the second field.
        keyfile = open('keys.txt')
        key1 = (keyfile.readline()[:-1]).split(' ')[1]
        key2 = (keyfile.readline()[:-1]).split(' ')[1]
        keyfile.close()
    print(key1, key2)
    self.sdb = boto.connect_sdb(key1, key2)
    self.domains = self.sdb.get_all_domains()
    self.activeDomain = self.domains[0]
def setUp(self):
    """Point boto's SDB client at the local test server."""
    super(_BotoTests, self).setUp()
    local_region = boto.regioninfo.RegionInfo(name='local',
                                              endpoint='localhost')
    self.conn = boto.connect_sdb('', '', region=local_region,
                                 is_secure=False, port=self.port)
def _connect(self): self._sdb = boto.connect_sdb(aws_access_key_id=self.db_user, aws_secret_access_key=self.db_passwd, is_secure=self.enable_ssl) # This assumes that the domain has already been created # It's much more efficient to do it this way rather than # having this make a roundtrip each time to validate. # The downside is that if the domain doesn't exist, it breaks self._domain = self._sdb.lookup(self.db_name, validate=False) if not self._domain: self._domain = self._sdb.create_domain(self.db_name)
def _aws_sdb_connection(cls):
    """
    Lazy-loading of the SimpleDB boto connection. Refer to this
    instead of referencing cls.__aws_sdb_connection directly.

    :returns: A boto connection to Amazon's SimpleDB interface.
    """
    if not cls.__aws_sdb_connection:
        cls.__aws_sdb_connection = boto.connect_sdb(
            settings.AWS_ACCESS_KEY_ID,
            settings.AWS_SECRET_ACCESS_KEY)
    return cls.__aws_sdb_connection
def __init__(self, aws_access_id=None, aws_secret_key=None):
    """Connect to SimpleDB, falling back to settings for credentials.

    Fixed: `except Exception, e: raise(e)` re-raised with a fresh
    traceback; a bare `raise` preserves the original one.
    """
    # Used to validate short URLs
    self.charSpace = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'
    if not (aws_access_id and aws_secret_key):
        aws_access_id = settings.aws_access_id
        aws_secret_key = settings.aws_secret_key
    try:
        self.conn = boto.connect_sdb(aws_access_id, aws_secret_key)
        self.domain = self._get_domain(settings.sdb_domain)
    except Exception:
        raise
def __init__(self, awsaccesskey, awssecretkey, domainname):
    u"""
    Opens the connection or initializes a new domain on SimpleDB if it doesn't exist.
    """
    self.CONNECTION = boto.connect_sdb(awsaccesskey, awssecretkey)
    try:
        self.DOMAIN = self.CONNECTION.get_domain(domainname)
        print 'Established connection to SimpleDB domain %s' % domainname
    except SDBResponseError, e:
        # get_domain validates by default; a response error means the
        # domain is missing, so create it now.
        print u'SimpleDB domain doesn’t exist yet, creating domain with name %s' % domainname
        self.DOMAIN = self.CONNECTION.create_domain(domainname)
def connect_sdb(self):
    """ Connect to SDB and return boto connection object (or False).

    Fixed: the bare `except:` also caught KeyboardInterrupt/SystemExit;
    narrowed to `except Exception` (the error is still logged with a
    traceback and False is returned).
    """
    self.logger.debug("connect to SDB...")
    try:
        sdbconn = boto.connect_sdb(self.aws_accesskey, self.aws_secretkey)
        return sdbconn
    except Exception:
        self.logger.error("Error while connecting to SDB.")
        self.logger.error("Traceback:\n%s" % traceback.format_exc())
        return False
def Dump(domain, column, writer):
    """Dump the decoded *column* of every row in *domain* through *writer*,
    reporting the record count and the largest record size."""
    conn = boto.connect_sdb()
    d = conn.get_domain(domain)
    rows = d.select('select * from `%s`' % domain)
    n = 0
    max_size = 0
    for item in rows:
        n += 1
        data = simpledb._DecodeItem(item, column)
        writer.Write(data)
        max_size = max(max_size, len(data))
    print 'Dumped %d records.' % n
    print 'Max record size: %d bytes' % max_size
def _connect(self):
    """Connect to our domain"""
    if not self._db:
        import boto
        sdb = boto.connect_sdb()
        if not self.domain_name:
            # Fall back through the boto config: sequence_db, then
            # db_name, then the literal "default".
            self.domain_name = boto.config.get("DB", "sequence_db",
                                               boto.config.get("DB", "db_name", "default"))
        try:
            self._db = sdb.get_domain(self.domain_name)
        except SDBResponseError, e:
            # 400 here means the domain doesn't exist yet -- create it.
            if e.status == 400:
                self._db = sdb.create_domain(self.domain_name)
            else:
                raise
def initSDB(dbname='jobs', quiet=False):
    """ Connect to SimpleDB """
    global sdbHandle
    sdbHandle = boto.connect_sdb(SimpleDBAccess['akey'], SimpleDBAccess['skey'])
    global alldomains, jobdomains
    if dbname == 'jobs' and len(jobdomains) == 0:
        # Cache every PandaJobs* domain for later queries.
        alldomains = sdbHandle.get_all_domains()
        for d in alldomains:
            if d.name.startswith('PandaJobs'):
                jobdomains.append(d)
    elif dbname != 'jobs':
        print "Open domain", dbname
        # NOTE(review): curdomain is a local and is discarded on return;
        # only the get_domain call itself has any (validation) effect.
        curdomain = sdbHandle.get_domain(dbname)
def dump_to_sdb(self, domain_name, item_name):
    """Serialize each config section as a JSON attribute of one SDB item."""
    import simplejson
    sdb = boto.connect_sdb()
    domain = sdb.lookup(domain_name)
    if not domain:
        domain = sdb.create_domain(domain_name)
    item = domain.new_item(item_name)
    item.active = False
    # One attribute per section, JSON-encoding the option/value mapping.
    for section in self.sections():
        d = {}
        for option in self.options(section):
            d[option] = self.get(section, option)
        item[section] = simplejson.dumps(d)
    item.save()
def connect_sdb(self, aws_access_key_id=None, aws_secret_access_key=None,
                **kwargs):
    """Return an SDB connection, defaulting credentials from the config.

    Credentials fall back from the DB section to the Credentials section.
    Fixed idiom: compare against None with `is`, not `==`.
    """
    if aws_access_key_id is None:
        aws_access_key_id = self.config.get(
            "DB", "db_user",
            self.config.get("Credentials", "aws_access_key_id"))
    if aws_secret_access_key is None:
        aws_secret_access_key = self.config.get(
            "DB", "db_passwd",
            self.config.get("Credentials", "aws_secret_access_key"))
    return boto.connect_sdb(aws_access_key_id, aws_secret_access_key, **kwargs)
def iter_servers(self):
    """Yield chef-registered servers, optionally filtered by region."""
    if not hasattr(self, 'sdb_domain'):
        sdb = boto.connect_sdb()
        # Quiet boto's debug chatter before running the select.
        log = logging.getLogger('boto')
        log.setLevel(logging.INFO)
        self.sdb_domain = sdb.get_domain('chef')
    query = 'select fqdn,ec2_public_hostname from chef where fqdn is not null'
    region = self.get_setting('region')
    if region:
        query += " and ec2_region = '%s'" % region
    logger.debug('looking for peers with the query: %s' % query)
    servers = self.sdb_domain.select(query)
    for server in servers:
        yield server
def main(): DOMAIN = "benchmark" conn = boto.connect_sdb() domain = conn.get_domain(DOMAIN) # Prepare item list items = [] now = time.time() for i in domain: items.append(i) elapsed = time.time() - now if not items: print "No items found." return msg = "Fetched manifest of %d items in %f seconds, proceeding." print msg % (len(items), elapsed) # THE REAL MEAT: # Prepare the pool print "Initializing pool." def toolbox_factory(): return SDBToolBox(DOMAIN) def worker_factory(job_queue): return EquippedWorker(job_queue, toolbox_factory) pool = WorkerPool(size=20, worker_factory=worker_factory) print "Starting to fetch items..." now = time.time() # Insert jobs results_queue = Queue() for i in items: j = SdbJob(results_queue, boto.sdb.domain.Domain.get_item, [i]) pool.put(j) # Fetch results r = [results_queue.get() for i in items] elapsed = time.time() - now print "Fetched %d items paralleled in %f seconds." % (len(r), elapsed) pool.shutdown()
def _connect(self):
    """Connect to SDB (matching db_host to a known region when possible)
    and bind the domain, creating it if the lookup returns nothing."""
    args = dict(aws_access_key_id=self.db_user,
                aws_secret_access_key=self.db_passwd,
                is_secure=self.enable_ssl)
    try:
        region = [x for x in boto.sdb.regions()
                  if x.endpoint == self.db_host][0]
        args['region'] = region
    except IndexError:
        # db_host matched no known region endpoint; use boto's default.
        pass
    self._sdb = boto.connect_sdb(**args)
    # This assumes that the domain has already been created
    # It's much more efficient to do it this way rather than
    # having this make a roundtrip each time to validate.
    # The downside is that if the domain doesn't exist, it breaks
    self._domain = self._sdb.lookup(self.db_name, validate=False)
    if not self._domain:
        self._domain = self._sdb.create_domain(self.db_name)
def load_from_sdb(self, domain_name, item_name):
    """Load config sections from the JSON-encoded attributes of one SDB item.

    Fixed idiom: compare against None with `is`, not `==`.
    """
    import simplejson
    sdb = boto.connect_sdb()
    domain = sdb.lookup(domain_name)
    item = domain.get_item(item_name)
    for section in item.keys():
        if not self.has_section(section):
            self.add_section(section)
        d = simplejson.loads(item[section])
        for attr_name in d.keys():
            attr_value = d[attr_name]
            if attr_value is None:
                # Nulls round-trip as the string 'None'.
                attr_value = 'None'
            if isinstance(attr_value, bool):
                self.setbool(section, attr_name, attr_value)
            else:
                self.set(section, attr_name, attr_value)
def Dump(domain, column, proto):
    """Print every row's *column* decoded (base64 + protobuf) as a *proto*
    message, skipping rows that lack the column."""
    conn = boto.connect_sdb()
    d = conn.get_domain(domain)
    rows = d.select('select * from `%s`' % domain)
    n = 0
    for item in rows:
        if column not in item:
            print 'invalid: ', item.name, item
            print
            continue
        p = proto()
        p.ParseFromString(base64.b64decode(item[column]))
        print p
        n += 1
    print 'total items:', n
def iter_servers(self):
    """Yield an evenly spaced sample of chef servers, rotated across calls.

    Servers are ordered by fqdn starting just after this host: entries
    that sort before our own fqdn are appended after those that sort
    after it, so the ring always begins with our successor.
    """
    if not hasattr(self, 'sdb_domain'):
        sdb = boto.connect_sdb()
        # Quiet boto's debug chatter before running the select.
        log = logging.getLogger('boto')
        log.setLevel(logging.INFO)
        self.sdb_domain = sdb.get_domain('chef')
    query = 'select fqdn,ec2_public_hostname from chef where fqdn is not null'
    region = self.get_setting('region')
    if region:
        query += " and ec2_region = '%s'" % region
    query += " order by fqdn"
    logger.debug('looking for peers with the query: %s' % query)
    hostname = self.config.get('core', 'hostname')
    # `collection` and `remainder` deliberately alias the SAME list until
    # our own fqdn is seen; after the switch, appends go to `top` while the
    # pre-switch entries stay reachable through `remainder`.
    collection = remainder = []
    top = []
    for server in self.sdb_domain.select(query):
        if server['fqdn'] == hostname:
            collection = top
            continue
        collection.append(server)
    if collection is top:
        # Rotate: servers that sorted before us go after those after us.
        collection.extend(remainder)
        del remainder
    datapoints = self.get_setting('datapoints', opt_type=int)
    if datapoints >= len(collection):
        interval = 1
        begin = 0
    else:
        # Stride through the ring; the starting offset advances each call
        # so successive calls sample different servers.
        interval = len(collection) / datapoints
        if not hasattr(self, '_interval_set'):
            self._interval_set = 0
        elif self._interval_set >= interval:
            self._interval_set = 0
        else:
            self._interval_set += 1
        begin = self._interval_set
    for i in xrange(begin, datapoints * interval, interval):
        yield collection[i]
def prime_simpledb(): print "prime simpledb database" print "access key is " + ACCESS_KEY print "secret key is " + SECRET_KEY sdb = boto.connect_sdb(ACCESS_KEY, SECRET_KEY) while True: if not domain_exists(sdb, APPSCALE_DOMAIN): break try: sdb.delete_domain(APPSCALE_DOMAIN) print "trying to delete domain" except: print "delete - got an exception" time.sleep(5) while True: if domain_exists(sdb, APPSCALE_DOMAIN): break print "creating domain" sdb.create_domain(APPSCALE_DOMAIN) time.sleep(5) db = appscale_datastore.DatastoreFactory.getDatastore("simpledb") db.create_table(USERS_TABLE, USERS_SCHEMA) db.create_table(APPS_TABLE, APPS_SCHEMA) users_schema = db.get_schema(USERS_TABLE) apps_schema = db.get_schema(APPS_TABLE) if len(users_schema) > 1 and len(apps_schema) > 1: print "CREATE TABLE SUCCESS FOR USER AND APPS" print users_schema print apps_schema return 0 else: print "FAILED TO CREATE TABLE FOR USER AND APPS" return 1
def __init__(self, domain_name=None, aws_access_key_id=None,
             aws_secret_access_key=None, debug=0):
    """Bind to a SimpleDB domain for persistence, creating it on demand.

    When no domain name is supplied, DefaultDomainName is used; if that
    is also unset, persistence stays disabled (sdb/domain remain None).
    """
    self.domain_name = domain_name
    self.aws_access_key_id = aws_access_key_id
    self.aws_secret_access_key = aws_secret_access_key
    self.domain = None
    self.sdb = None
    self.s3 = None
    if not self.domain_name:
        self.domain_name = self.DefaultDomainName
        if self.domain_name:
            boto.log.info('No SimpleDB domain set, using default_domain: %s' % self.domain_name)
        else:
            boto.log.warning('No SimpleDB domain set, persistance is disabled')
    if self.domain_name:
        self.sdb = boto.connect_sdb(aws_access_key_id=self.aws_access_key_id,
                                    aws_secret_access_key=self.aws_secret_access_key,
                                    debug=debug)
        self.domain = self.sdb.lookup(self.domain_name)
        if not self.domain:
            self.domain = self.sdb.create_domain(self.domain_name)