def __startRecordingAndStreaming():
    # Connect a client socket to my_server:8000 (change my_server to the
    # hostname of your server)
    if isMonitoringWorking():
        return

    global __monitoringWorking
    __monitoringWorking = True
    LedChanger.lightPhotoLedOn()
    print("Monitoring has been started")

    try:
        with picamera.PiCamera() as camera:
            global __camera
            __camera = camera
            camera.resolution = (1640, 1232)
            # camera.framerate = 23
            import DataManager
            from DataManager import deviceName, videoDir
            DataManager.createVideoDirIfNotExists()
            videoPath = str(videoDir) + str(deviceName) + "_" + str(
                datetime.datetime.now()) + '.h264'
            print("a")
            camera.start_recording(videoPath, resize=(1024, 768))
            print("b")
            __currentMonitoringPeriodicTask.launchMonitoringPeriodicTask(
                camera, videoPath)
    except Exception as e:
        utils.printException(e)

    onMonitoringStopped()
    print("__startRecordingAndStreaming finished")
def _parse_entity(self, item):
    # TODO: This is completely out of keeping with how every other source does this. The rest all return raw
    # XML here and then map fields in the ResolverObject class, whereas this actually converts to an Entity here
    # and then has a resolver class that just passes through to the entity. Ugh.
    def makeBasicEntityMini(title, mini_type=BasicEntityMini):
        result = mini_type()
        result.title = title
        return result

    translators = [
        TheTVDB.SingleFieldTranslator("SeriesName", "title"),
        TheTVDB.SingleFieldTranslator("Overview", "desc"),
        TheTVDB.SingleFieldTranslator("IMDB_ID", ["sources", "imdb_id"]),
        TheTVDB.SingleFieldTranslator("id", ["sources", "thetvdb_id"]),
        TheTVDB.SingleFieldTranslator("ContentRating", "mpaa_rating"),
        TheTVDB.SingleFieldTranslator("Network", "networks", lambda n: [makeBasicEntityMini(n)]),
        TheTVDB.SingleFieldTranslator(
            "Actors", "cast",
            lambda n: map(
                lambda _: makeBasicEntityMini(_, mini_type=PersonEntityMini),
                filter(lambda _: len(_) > 0, n.split("|")),
            ),
        ),
        TheTVDB.SingleFieldTranslator("Genre", "genres", lambda n: filter(lambda _: len(_) > 0, n.split("|"))),
        TheTVDB.SingleFieldTranslator("Runtime", "length", lambda n: 60 * int(n)),
        TheTVDB.SingleFieldTranslator("FirstAired", "release_date", parseDateString),
    ]

    try:
        entity = MediaCollectionEntity()
        entity.types = ["tv"]

        for translator in translators:
            translator.translateFieldToEntity(item, entity)

        if entity.title is None:
            return None

        images = ["poster", "fanart", "banner"]
        entity.images = []

        for image in images:
            image = item.find(image)
            if image is not None and image.text is not None:
                image = image.text.strip()
                if len(image) > 0:
                    inner_img_schema = ImageSizeSchema()
                    inner_img_schema.url = "http://thetvdb.com/banners/%s" % image
                    img_schema = ImageSchema()
                    img_schema.sizes = [inner_img_schema]
                    entity.images = [img_schema]
                    break

        return entity
    except:
        utils.printException()
        return None
def getLocalSearchResults(self, query, latLng=None, params=None, transform=True):
    response, url = self._getLocalSearchResponse(query, latLng, params)

    if response is None:
        return None

    if 200 != response["responseStatus"]:
        utils.log('[GoogleLocal] unexpected return status "' + \
                  str(response["responseStatus"]) + ' (' + url + ')"')
        return None

    if not transform:
        return response

    output = []
    try:
        results = response['responseData']['results']
        for result in results:
            output.append(self._parseEntity(result))
    except:
        utils.printException()
        raise

    return output
def _parseLocationPage(self, pool, region_name, href):
    utils.log("[%s] parsing region '%s' (%s)" % (self, region_name, href))

    try:
        soup = utils.getSoup(href)
    except:
        utils.printException()
        utils.log("[%s] error downloading page %s" % (self, href))
        return

    try:
        # find all cities within this state
        # note: could be none if zagat has not rated any cities within a given state (such as Alaska)
        cityLists = soup.find("div", {"id" : "loc_allCities"}).findAll("div", {"class" : "letterBlock"})
    except AttributeError:
        # no cities found within this region; return gracefully
        return

    # asynchronously parse each city within this region
    for cityList in cityLists:
        cityList = cityList.find('ul')
        cities = cityList.findAll('a')

        for city in cities:
            city_name = city.getText().strip()
            city_href = self.base + city.get("href")

            pool.spawn(self._parseCityPage, pool, region_name, city_name, city_href)
def _parseAllRestaurantsInCityPage(self, pool, region_name, city_name, href):
    utils.log("[%s] parsing all restaurants in city '%s.%s' (%s)" % (self, region_name, city_name, href))

    try:
        soup = utils.getSoup(href)
    except:
        utils.printException()
        utils.log("[%s] error downloading page %s" % (self, href))
        return

    # parse all zagat-rated restaurants on this page
    restaurants = soup.findAll("li", {"class" : "zr"})

    if restaurants is not None:
        for restaurant in restaurants:
            a = restaurant.find('a')
            restaurant_name = a.getText().strip()
            restaurant_href = self.base + a.get("href")

            # asynchronously parse the current restaurant
            pool.spawn(self._parseRestaurantPage, pool, region_name, city_name,
                       restaurant_name, restaurant_href)

    try:
        # parse next page
        next_page = soup.find("li", {"class" : re.compile("pager-next")}).find("a", {"class" : "active"})

        if next_page is not None:
            next_page_href = self.base + next_page.get("href")
            self._parseAllRestaurantsInCityPage(pool, region_name, city_name, next_page_href)
    except AttributeError:
        # no next paginated page for restaurants within this city
        pass
def _getLocalSearchResponse(self, query, latLng=None, optionalParams=None):
    params = {
        'v'   : '1.0',
        'q'   : query,
        'rsz' : 8,
        'mrt' : 'localonly',
        'key' : 'ABQIAAAAwHbLTrUsG9ibtIA3QrujsRRB6mhcr2m5Q6fm3mUuDbLfyI5H4xTNn-E18G_3Zu-sDQ3-BTh9hK2BeQ',
    }

    if latLng is not None:
        params['sll'] = self._geocoder.getEncodedLatLng(latLng)

    self._handleParams(params, optionalParams)

    url = "http://ajax.googleapis.com/ajax/services/search/local?%s" % urllib.urlencode(params)
    utils.log('[GoogleLocal] ' + url)

    try:
        # GET the data and parse the response as json
        request = urllib2.Request(url, None, {'Referer' : 'http://www.stamped.com'})
        return json.loads(utils.getFile(url, request)), url
    except:
        utils.log('[GoogleLocal] unexpected error searching "' + url + '"')
        utils.printException()
        return None, url

    return None, url
def send_reset_email(request, schema, **kwargs):
    email = schema.email
    api   = globalMongoStampedAPI()

    if not utils.validate_email(email):
        msg = "Invalid format for email address"
        logs.warning(msg)
        raise StampedInvalidEmailError("Invalid email address")

    # verify account exists
    try:
        user = stampedAPIProxy.checkAccount(email)

        if user is None:
            raise
    except Exception:
        utils.printException()
        logs.error("ERROR: invalid email '%s'" % email)

        ### TODO: Display appropriate error message
        errorMsg = 'No account information was found for that email address.'
        raise StampedHTTPError(404, msg="Email address not found", kind='invalid_input')

    account = stampedAPIProxy.getAccount(user['user_id'])
    auth_service = account['auth_service']

    if auth_service != 'stamped':
        raise StampedInputError("Account password not managed by Stamped for user '%s' (primary account service is '%s')" %
                                (account['screen_name'], auth_service))

    # send email
    logs.info("sending email to '%s' (user: '%s')" % (email, user['screen_name']))
    result = g_stamped_auth.forgotPassword(email)

    return transform_output(result)
def main():
    for collection in collections:
        print "RUN %s" % collection

        if collection == "tempentities":
            print "PASS"
        elif collection == "logs":
            print "PASS"
        else:
            ret = mongoExportImport(collection)

            if 0 == ret:
                print "COMPLETE"
            else:
                print "ERROR restoring collection '%s'" % collection

        print

    convertEntities()

    try:
        utils.runMongoCommand('db.runCommand( {createCollection:"logs", capped:true, size:500000} )')
    except:
        utils.printException()
def stopCameraRecording():
    global __camera
    if __camera is not None:
        try:
            __camera.stop_recording()
        except Exception as e:
            utils.printException(e)
def refreshServerAddressFromFile():
    global serverUrl
    try:
        with open(lastKnownServerAddressFileName, 'r') as file:
            serverUrl = file.readline()
            return
    except Exception as e:
        utils.printException(e)
def parse_work(title, genre, link, output):
    utils.log("parsing work %s) %s (%s)" % (title, genre, link))

    try:
        soup = utils.getSoup(link)
    except Exception, e:
        utils.log("error parsing work %s) %s (%s) - %s" % (title, genre, link, e))
        utils.printException()
def __onDestroyTask(self):
    try:
        self.__stopCameraMonitoringStreaming()
    except Exception as e:
        utils.printException(e)

    try:
        self.__streaming_thread.stop()
    except Exception as e:
        utils.printException(e)
def parseFileUpload(schema, request, fileName='image', **kwargs):
    ### Parse Request
    try:
        if request.method != 'POST':
            raise

        rawData = request.POST

        # Build the dict because django sucks
        data = {}
        for k, v in rawData.iteritems():
            data[k] = v

        # Extract file
        if fileName in request.FILES:
            f = request.FILES[fileName]
            max_size = 1048576  # 1 MB

            if f.size > max_size:
                msg = "Uploaded file is too large (%s) (max size is %d)" % (f.size, max_size)
                logs.warning(msg)
                raise Exception(msg)

            data[fileName] = f.read()
            logs.attachment(fileName, f.size)

        data.pop('oauth_token', None)
        data.pop('client_id', None)
        data.pop('client_secret', None)

        logData = data.copy()
        obfuscate = kwargs.pop('obfuscate', [])
        obfuscate.append('password')

        for item in obfuscate:
            if item in logData:
                logData[item] = '*****'

        if fileName in logData:
            logData[fileName] = 'FILE (SIZE: %s)' % f.size

        logs.form(logData)

        if schema == None:
            if len(data) > 0:
                raise
            return

        schema.dataImport(data)
        schema.validate()

        logs.debug("Parsed request data")
        return schema
    except Exception as e:
        msg = u"Unable to parse form (%s)" % e
        logs.warning(msg)
        utils.printException()
        raise e
def makeHeartbeatCall():
    from startServer import app
    with app.test_request_context():
        try:
            res = pyrequests.post(DataManager.getHeartbeatEndpoint(),
                                  headers=jsonHeaders,
                                  data=DataManager.getHeartbeatJson())
            if res.status_code != 200:
                LedChanger.lightErrorLedOn()
        except Exception as e:
            LedChanger.lightErrorLedOn()
            utils.printException(e)
def config(self):
    configFilePath = self.options.config

    try:
        with open(configFilePath, "rb") as fp:
            source = fp.read()
            return eval(source)
    except Exception:
        utils.log("Error parsing config file '%s'" % self.options.config)
        utils.printException()
def removeOldestVideos(numberOfVideos):
    list_of_files = os.listdir(str(videoDir))
    full_path = [str(videoDir) + "{0}".format(x) for x in list_of_files]

    for _ in range(0, numberOfVideos):
        try:
            oldest_file = min(full_path, key=os.path.getctime)
            os.remove(oldest_file)
            full_path.remove(oldest_file)
        except Exception as e:
            utils.printException(e)
def __thread_startCameraMonitoringStreaming(self):
    try:
        address = ('', videoStreamPort)
        stream_server = StreamingServer(address, StreamingHandler)
        print("Before serve forever")
        stream_server.serve_forever()
        print("After serve forever")
    except Exception as e:
        self.__streaming_stopped = True
        utils.printException(e)
        self.__onDestroyTask()
def main():
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('-d', '--drop', action="store_true", default=False,
                        help="drop existing collections before importing")
    parser.add_argument("-s", "--source", default=None, type=str,
                        help="db to import from")
    parser.add_argument("-t", "--target", default=None, type=str,
                        help="db to import to")
    args = parser.parse_args()

    host, port = utils.get_db_config(args.source)
    utils.log("SOURCE: %s:%s" % (host, port))

    old_host = host
    old_connection = pymongo.Connection(host, port)
    old_database = old_connection['stamped']
    collections = old_database.collection_names()

    new_host = args.target
    if new_host is None:
        dest = MongoDBConfig.getInstance()
        new_host = dest.host
    utils.log("DEST: %s:%s" % (new_host, port))

    if not os.path.isdir('/stamped/tmp/stamped/'):
        os.makedirs('/stamped/tmp/stamped')

    ignore = set([
        'tempentities',
        'logs',
        'logstats',
    ])

    for collection in collections:
        print 'RUN %s' % collection

        if collection in ignore:
            print 'PASS'
        else:
            ret = mongoExportImport(collection, old_host, new_host)

            if 0 == ret:
                print 'COMPLETE'
            else:
                print "ERROR restoring collection '%s'" % collection

        print

    try:
        utils.runMongoCommand('db.runCommand( {createCollection:"logs", capped:true, size:500000} )')
    except:
        utils.printException()
def _parseRestaurantPage(self, pool, region_name, city_name, restaurant_name, href):
    utils.log("[%s] parsing restaurant '%s.%s.%s' (%s)" % (self, region_name, city_name, restaurant_name, href))

    try:
        soup = utils.getSoup(href)
    except:
        utils.printException()
        utils.log("[%s] error downloading page %s" % (self, href))
        return

    # parse the address for the current restaurant
    addr    = soup.find('div', {'class' : 'address'})
    street  = addr.find('span', {'class' : 'street'}).getText().strip()
    geo     = addr.find('span', {'class' : 'geo'}).getText().strip()
    address = "%s, %s" % (street, geo)

    # add the current restaurant to the output for this crawler
    entity = Entity()
    entity.subcategory = "restaurant"
    entity.title   = restaurant_name
    entity.address = address
    entity.sources.zagat = {
        'zurl' : self.base + href,
    }

    #self._globals['soup'] = soup

    # parse cuisine
    header = soup.find('div', {'id' : "block-zagat_restaurants-14"})
    if header is not None:
        header = header.find('ul').find('li', {'class' : 'first'})

        if header is not None:
            entity.cuisine = header.getText()

    # parse website
    site = soup.find('span', {'class' : 'website'})
    if site is not None:
        site = site.find('a')

        if site is not None:
            entity.site = site.get('href')

    # parse preview image
    img = soup.find('div', {'id' : 'content'}).find('div', {'class' : 'photo'})
    if img is not None:
        img = img.find('img')

        if img is not None:
            entity.image = img.get('src')

    self._output.put(entity)
def importAll(self, sink, limit=None):
    url = self.getNextURL()

    while url is not None and len(url) > 0 and ((not self.options.test) or len(self.entities) < 30):
        try:
            entities = self.getEntitiesFromURL(url)

            if not sink.addEntities(entities):
                utils.log("Error storing %d entities to %s from %s" % \
                          (utils.count(entities), str(sink), url))
        except:
            utils.log("Error crawling " + url + "\n")
            utils.printException()
def render(self, context):
    try:
        return unicode(
            self._library.render(self._asset_path, self._asset_type, context))
    except Exception, e:
        print "%s error (%s): %s" % (self, self._asset_path, e)
        utils.printException()

        if settings.DEBUG:
            raise
        else:
            return ''
def __monitoringPeriodicTask(self):
    while self.__periodic_task_should_run:
        try:
            _video_path = self.__splitCurrentRecording()
            DataSender.handleVideoAsynchronously(_video_path)

            start_to_stream_start_time = time.time()
            self.__tryToStreamMonitoring()
            start_to_stream_execution_time = time.time() - start_to_stream_start_time

            if start_to_stream_execution_time < videoLengthSeconds:
                time.sleep(videoLengthSeconds - start_to_stream_execution_time)
        except Exception as e:
            utils.printException(e)
            self.__onDestroyTask()

    self.__task_launched = False
    print("MonitoringPeriodicTask has just quit")
def eliminate_temporary(ast):
    try:
        _eliminate_multres(ast)

        slots, unused = _collect_slots(ast)
        _eliminate_temporary(slots)

        # _remove_unused(unused)
    except Exception as e:
        utils.printException(e)
        print("--eliminate_temporary exception: " + repr(e))

    _cleanup_invalid_nodes(ast)

    return ast
def _try_ping_webServer(self, node):
    url = "http://%s/index.html" % node.public_dns
    retries = 0

    while retries < 5:
        try:
            response = urllib2.urlopen(url)
        except urllib2.HTTPError, e:
            utils.log(url)
            utils.printException()
        else:
            return

        retries += 1
        time.sleep(retries * retries)
def thread_handle_sending_and_deleting_video(videoPath):
    try:
        with open(videoPath, 'rb') as img:
            videoBasename = os.path.basename(videoPath)
            files = {'video': (videoBasename, img, 'multipart/form-data')}

            with pyrequests.Session() as s:
                print("DataSender, starting to post video to server: " + str(videoPath))
                r = s.post(DataManager.getVideoReceiveEndpoint(), files=files)
                print("DataManager.getVideoReceiveEndpoint()" + str(DataManager.getVideoReceiveEndpoint())
                      + ", status code: " + str(r.status_code))

                if r.status_code == 200:
                    DataManager.deleteFile(videoPath)
    except Exception as e:
        utils.printException(e)

    DataManager.makeStorageCheck()
def _try_ping_apiServer(self, node):
    url = "https://%s/v0/ping.json" % node.public_dns_name
    retries = 0

    while retries < 5:
        try:
            response = urllib2.urlopen(url)
        except urllib2.HTTPError, e:
            utils.log(url)
            utils.printException()
        else:
            return

        retries += 1
        time.sleep(retries * retries)
def __splitCurrentRecording(self):
    try:
        import DataManager
        from DataManager import deviceName, videoDir
        DataManager.createVideoDirIfNotExists()

        _video_path = str(videoDir) + str(deviceName) + "_" + str(datetime.datetime.now()).replace(" ", "_") + '.h264'
        print("c")
        self.__camera.split_recording(_video_path)
        print("d")

        path_to_return = self.__previous_monitoring_video_path
        self.__previous_monitoring_video_path = _video_path
        return path_to_return
    except Exception as e:
        utils.printException(e)
        return None
def _parseLocationsPage(self, pool, href):
    try:
        soup = utils.getSoup(href)
    except:
        utils.printException()
        utils.log("[%s] error downloading page %s" % (self, href))
        return

    # find all links to domestic zagat regions (states)
    locations = soup.find("div", {"id" : "loc_domestic"}).findAll("a")

    # parse each individual location page (state)
    for location in locations:
        name = location.getText().strip()
        href = self.base + location.get("href")

        pool.spawn(self._parseLocationPage, pool, name, href)
def _parseCityPage(self, pool, region_name, city_name, href):
    utils.log("[%s] parsing city '%s.%s' (%s)" % (self, region_name, city_name, href))

    try:
        soup = utils.getSoup(href)
    except:
        utils.printException()
        utils.log("[%s] error downloading page %s" % (self, href))
        return

    # use the 'all' link on the zagat search homepage for this city to parse all
    # restaurants within this city
    restaurant_list_link = soup.find("div", {"class" : "upper-links"}).find("a")
    restaurant_list_href = self.base + restaurant_list_link.get("href")

    self._parseAllRestaurantsInCityPage(pool, region_name, city_name, restaurant_list_href)
def makeNgrokAddressesCall():
    from startServer import app
    with app.test_request_context():
        try:
            res = pyrequests.post(DataManager.getNgrokAddressesEndpoint(),
                                  headers=jsonHeaders,
                                  data=DataManager.getNgrokAddressesAsJson(),
                                  timeout=10)
            if res.status_code != 200:
                if res.status_code == 404:
                    # Hotfix!!
                    return True
                LedChanger.lightErrorLedOn()
                return False
            return True
        except Exception as e:
            LedChanger.lightErrorLedOn()
            utils.printException(e)
            return False
def _init_client(self, **kwargs):
    retries = 5

    while True:
        try:
            self._client = pyes.ES(**kwargs)
            self._client.collect_info()
            utils.log("[%s] pyes: %s" % (self, pformat(self._client.info)))
            self.update()
            break
        except Exception:
            retries -= 1
            if retries <= 0:
                raise

            utils.printException()
            time.sleep(1)
def _parseResultsPage(self, queue, url, name, depth):
    try:
        soup = utils.getSoup(url)
    except:
        utils.printException()
        utils.log("[%s] error downloading page %s (%s)" % (self, name, url))
        return

    if depth < self.max_depth:
        # extract and parse subcategory pages
        category_ul = soup.find('ul', {'id' : 'zg_browseRoot'})

        if category_ul is not None:
            while True:
                temp_ul = category_ul.find('ul')
                if temp_ul is None:
                    break
                else:
                    category_ul = temp_ul

            categories = category_ul.findAll('a')
            for category in categories:
                href = category.get('href')
                name = utils.normalize(category.getText())

                queue.put_nowait((href, name, depth + 1))

    self._globals['books'] = soup

    rss_link = soup.find('div', {'id' : 'zg_rssLinks'})
    if rss_link is None:
        return

    rss_link = rss_link.findAll('a')[1].get('href')
    if rss_link in self.seen:
        return

    self.seen.add(rss_link)

    entity = Entity()
    entity.title = rss_link
    entity.subcategory = 'book'

    self._output.put(entity)
def _parse_entry(entities, entry):
    try:
        # We skip pre-orders because we can't actually look them up by ID. This is actually a pretty fatal
        # error because right now we don't even use the data in the feed itself; we immediately re-crawl based
        # on the iTunes ID. Anyway, there doesn't seem to be any big advantage to having preorders, so this is
        # hardly the most pressing problem.
        if "id" in entry and "label" in entry["id"]:
            if "/preorder/" in entry["id"]["label"]:
                logs.info("Skipping preorder!")
                return
        else:
            logs.info("WARNING: Missing id.label!")

        entity = self._parse_entity(entry)
        if entity is not None:
            entities.append(entity)
    except:
        utils.printException()
def add_artist(entity, appleAPI, sink, pool, all_artists, all_albums, all_songs):
    assert entity.subcategory == 'artist'

    if int(entity.aid) in all_artists:
        return

    all_artists.add(int(entity.aid))
    utils.log("adding artist %s" % entity.title)

    results = appleAPI.lookup(id=entity.aid, media='music', entity='album', limit=200, transform=True)
    results = filter(lambda r: r.entity.subcategory == 'album', results)

    for result in results:
        add_album(result.entity, appleAPI, sink, pool, all_artists, all_albums, all_songs)

    if len(results) > 0:
        albums = []
        for result in results:
            schema = ArtistAlbumsSchema()
            schema.album_name = result.entity.title
            schema.album_id   = result.entity.aid
            albums.append(schema)

        entity.albums = albums

        images = results[0].entity.images
        for k in images:
            entity[k] = images[k]

    results = appleAPI.lookup(id=entity.aid, media='music', entity='song', limit=200, transform=True)
    results = filter(lambda r: r.entity.subcategory == 'song', results)

    #for result in results:
    #    add_song(result.entity, appleAPI, sink, pool, all_artists, all_albums, all_songs)

    songs = []
    for result in results:
        schema = ArtistSongsSchema()
        schema.song_id   = result.entity.aid
        schema.song_name = result.entity.title
        songs.append(schema)

    entity.songs = songs

    try:
        sink._processItem(entity)
    except Exception, e:
        utils.printException()
        pprint(entity)
def makePhoto():
    LedChanger.lightPhotoLedOn()
    _imagePath = None

    global __camera
    if __camera is None:
        __camera = picamera.PiCamera()

    try:
        current_time = datetime.datetime.now()
        import DataManager
        from DataManager import deviceName, photoDir
        DataManager.createPhotoDirIfNotExists()
        _imagePath = str(photoDir) + str(deviceName) + "_" + str(
            current_time) + '.jpeg'
        __camera.capture(_imagePath, use_video_port=True)
    except PiCameraError as e:
        LedChanger.lightErrorLedOn()
        utils.printException(e)

    LedChanger.lightPhotoLedOff()
    return _imagePath
def get_prod_stacks():
    if not is_ec2():
        return None

    path = os.path.join(os.path.dirname(os.path.abspath(__file__)), ".prod_stack.txt")

    if os.path.exists(path):
        try:
            f = open(path, "r")
            names = set(json.loads(f.read()))
            f.close()

            if len(names) > 1:
                return names
        except:
            utils.log("error getting cached prod stack info; recomputing")
            utils.printException()

    conn  = Route53Connection(keys.aws.AWS_ACCESS_KEY_ID, keys.aws.AWS_SECRET_KEY)
    zones = conn.get_all_hosted_zones()
    name  = None
    host  = None

    for zone in zones["ListHostedZonesResponse"]["HostedZones"]:
        if zone["Name"] == u"stamped.com.":
            host = zone
            break

    names = set()

    if host is not None:
        records = conn.get_all_rrsets(host["Id"][12:])

        for record in records:
            if record.name == "api.stamped.com." or record.name == "api1.stamped.com.":
                names.add(record.alias_dns_name.split("-")[0].strip())

    if len(names) > 0:
        f = open(path, "w")
        f.write(json.dumps(list(names)))
        f.close()

    return names
def draw(self):
    canvas = self.canv
    canvas.saveState()
    canvas.setFillColor(colors.HexColor(0xFFFFFF))

    self.backdrop.drawOn(canvas, 185, 0)

    if self.profile_image is not None and os.path.exists(self.profile_image):
        try:
            Image(self.profile_image, 132, 132).drawOn(canvas, 194, 9)
        except Exception:
            utils.printException()

    if self.logo_image is not None and os.path.exists(self.logo_image):
        try:
            Image(self.logo_image).drawOn(canvas, 250, 100)
        except Exception:
            utils.printException()

    canvas.restoreState()
def _add(tree):
    try:
        orig_name = tree.full()
        if 0 == len(orig_name):
            return

        name = encode_s3_name(orig_name)
        if 0 == len(name) or name in names:
            return

        names.add(name)
        out_name = orig_name.encode("ascii", "replace")

        if out_name == orig_name:
            out.write(orig_name + "\n")
            return
    except:
        utils.printException()
        time.sleep(1)
        return
def _sample(self, iterable, func, print_progress=True, progress_delta=5, max_retries=0, retry_delay=0.05):
    progress_count = 100 / progress_delta
    ratio = self.options.sampleSetRatio
    count = 0
    index = 0

    try:
        count = len(iterable)
    except:
        try:
            count = iterable.count()
        except:
            count = utils.count(iterable)

    for obj in iterable:
        if print_progress and (count < progress_count or 0 == (index % (count / progress_count))):
            utils.log("%s : %s" % (self.__class__.__name__, utils.getStatusStr(index, count)))

        if random.random() < ratio:
            noop = self.options.noop
            retries = 0

            while True:
                try:
                    self.options.noop = (retries < max_retries) or noop
                    func(obj)
                    break
                except Exception, e:
                    utils.printException()
                    retries += 1

                    if noop or retries > max_retries:
                        prefix = "ERROR" if noop else "UNRESOLVABLE ERROR"
                        utils.log("%s: %s" % (prefix, str(e)))
                        break

                    time.sleep(retry_delay)
                    retry_delay *= 2
                finally:
                    self.options.noop = noop
def validate(self, value):
    if value is None:
        if self.required:
            raise InvalidArgument("invalid value for arg %s" % str(value))
        else:
            return value

    if not isinstance(value, dict):
        raise InvalidArgument("invalid value for arg %s" % str(value))

    output = AttributeDict()

    # validate resource arguments
    for arg in value:
        if arg not in self:
            raise InvalidArgument("Unexpected argument %s" % (arg, ))
        elif arg in output:
            raise InvalidArgument("Duplicate argument %s" % (arg, ))
        else:
            try:
                resourceArg = self[arg]
                sub_value   = value[arg]
                #print "%s) %s" % (type(resourceArg), sub_value)

                sub_value   = resourceArg.validate(sub_value)
                output[arg] = sub_value
                #utils.log("added '%s'='%s' to resource '%s'" % (arg, str(sub_value), str(self)))
            except InvalidArgument:
                utils.log("Error initializing argument '%s'" % (arg, ))
                utils.printException()
                raise

    for key in self:
        if not key in output:
            if self[key].required:
                raise Fail("Required argument '%s' not found" % (key, ))

            output[key] = self[key].default

    return output
def _elasticsearch(self):
    try:
        import pyes
    except:
        utils.printException()

    es_port = 9200
    retries = 5

    if libs.ec2_utils.is_ec2():
        stack = libs.ec2_utils.get_stack()

        if stack is None:
            logs.warn("error: unable to find stack info")
            return None

        es_servers = filter(lambda node: "search" in node.roles, stack.nodes)
        es_servers = map(lambda node: str("%s:%d" % (node.private_ip_address, es_port)), es_servers)

        if len(es_servers) == 0:
            logs.warn("error: no elasticsearch servers found")
            return None
    else:
        es_servers = "%s:%d" % ("localhost", es_port)

    while True:
        try:
            es = pyes.ES(es_servers)
            info = es.collect_info()
            utils.log("[%s] pyes: %s" % (self, pformat(info)))
            return es
        except Exception:
            retries -= 1
            if retries <= 0:
                raise

            utils.printException()
            time.sleep(1)
def _create_instance(i):
    cur_conf = conf.copy()
    cur_conf["name"] = "%s%d" % (add, top + i)

    # TODO: this assumes nodes were previously evenly distributed
    # instead, calculate minimal placement each iteration
    placement = placements[i % len(placements)][0]
    cur_conf["placement"] = placement

    # create and bootstrap the new instance
    utils.log(
        "[%s] creating instance %s in availability zone %s" %
        (self, cur_conf["name"], cur_conf["placement"])
    )
    instance = AWSInstance(self, cur_conf)

    try:
        instance.create()
        instances.append(instance)
    except Exception:
        utils.printException()
        utils.log("error adding instance %s" % instance)
        raise
def get_bootstrap_image(self):
    if hasattr(self, "_bootstrap_image") and self._bootstrap_image is not None:
        return self._bootstrap_image

    images = self.conn.get_all_images(owners=[AWS_AMI_USER_ID])

    if 0 == len(images):
        utils.log("[%s] unable to find custom AMI to use" % self)

    recent = None

    # return the latest image (empirically the last one returned from amazon,
    # though as far as i can tell, there is no guarantee this is the latest)
    for i in xrange(len(images)):
        try:
            image = images[-(i + 1)]

            # stamped.base.ami (2011-12-7 22.47.9)
            if image.state == u"available":
                match = self._ami_re.match(image.name)

                if match is not None:
                    groups = map(lambda s: int(s), match.groups())
                    date = datetime(*groups)

                    if recent is None or date > recent[0]:
                        recent = (date, image)
            elif image.state == u"pending":
                utils.log("[%s] warning: recent AMI %s still pending; falling back to earlier image" % (self, image))
            else:
                utils.log("[%s] warning: found AMI %s with unexpected state (%s)" % (self, image, image.state))
        except Exception:
            utils.printException()

    if recent is not None:
        self._bootstrap_image = recent[1]
        return recent[1]

    return None
def export_stamps(request, schema, **kwargs):
    login = schema.login

    try:
        if "@" in login:
            account = stampedAPIProxy.getAccountByEmail(login)
        else:
            account = stampedAPIProxy.getAccountByScreenName(login)
    except Exception:
        utils.printException()
        account = None

    if account is None:
        raise StampedInputError("invalid account")

    kwargs.setdefault('content_type', 'application/pdf')
    kwargs.setdefault('mimetype', 'application/pdf')

    user_id     = account['user_id']
    screen_name = account['screen_name']
    logs.info("screen_name: %s" % screen_name)

    exporter = api.DataExporter.DataExporter(globalMongoStampedAPI())
    tmpfile  = '/tmp/%s.pdf' % user_id
    logs.info("tmpfile: %s" % tmpfile)

    with open(tmpfile, 'w') as fout:
        exporter.export_user_data(user_id, fout)

    logs.info("resulting tmpfile: %s" % tmpfile)
    f = open(tmpfile, "rb")

    response = HttpResponse(f, **kwargs)
    response['Content-Disposition'] = 'attachment; filename="%s_stamps.pdf"' % screen_name

    return response
def _eliminate_temporary(slots):
    simple = []
    massive = []
    tables = []
    iterators = []

    for info in slots:
        try:
            assignment = info.assignment

            if not isinstance(assignment, nodes.Assignment):
                assert isinstance(assignment, (nodes.IteratorWarp,
                                               nodes.NumericLoopWarp,
                                               nodes.FunctionDefinition))
                src = info.references[1].identifier
                simple.append((info.references, src))
                continue

            #zzy: may assert failed???
            assert len(assignment.expressions.contents) == 1

            is_massive = len(assignment.destinations.contents) > 1

            if is_massive:
                _fill_massive_refs(info, simple, massive, iterators)
            else:
                _fill_simple_refs(info, simple, tables)
        except Exception as e:
            utils.printException(e)
            print("--_eliminate_temporary error")

    _eliminate_simple_cases(simple)
    _eliminate_into_table_constructors(tables)
    _eliminate_mass_assignments(massive)
    _eliminate_iterators(iterators)
def deleteFile(filepath):
    try:
        import os
        os.remove(filepath)
    except Exception as e:
        utils.printException(e)
def saveServerAddress(newServerUrl):
    try:
        with open(lastKnownServerAddressFileName, 'w') as file:
            file.write(newServerUrl)
    except Exception as e:
        utils.printException(e)
def __stopCameraMonitoringStreaming(self):
    try:
        self.__camera.stop_recording(splitter_port=2)
    except Exception as e:
        utils.printException(e)

    self.__streaming_stopped = True