def check_update(self, testing=False):
    """Check the update feed and decide whether attention is required.

    Returns a (update_required, notify, update_info) tuple where
    update_info is a set of human-readable update descriptions.
    """
    sc = SC()
    self.current_version = sc.dict['Server']['update']['software_version']

    latest = self.get_update_info()
    update_required = False
    notify = False
    update_info = set()

    if self.check_new_update(latest['version'], self.current_version) is True:
        update_required = True
        update_info.add(latest['info'])

        if self.check_new_update(latest['version'], self.current_update) is True:
            # record the newest known update version in sc.conf json
            sc = SC()
            sc.dict['Server']['update']['update_version'] = latest['version']
            if testing is not True:
                sc.save_dict()
            notify = True

    return update_required, notify, update_info
def __init__(self):
    """Load update-related settings from the ShakeCast configuration."""
    conf = SC()
    update_conf = conf.dict['Server']['update']
    self.json_url = update_conf['json_url']
    self.current_version = update_conf['software_version']
    self.current_update = update_conf['update_version']
    self.admin_notified = update_conf['admin_notified']
    self.sc_root_dir = root_dir()
def docker_init():
    """Adjust the configuration for running inside the Docker stack."""
    copy_backups()
    conf = SC()
    conf.dict['host'] = 'sc-server'
    conf.dict['web_port'] = 5000
    conf.save_dict()
def build_pdf_html(shakemap, name=None, template_name='default', web=False, config=None):
    """Render the PDF report HTML for a shakemap with the chosen template."""
    manager = TemplateManager()
    template_name = (template_name or 'default').lower()
    if not config:
        config = manager.get_configs('pdf', name=name, sub_dir=template_name)
    template = manager.get_template('pdf', name=name, sub_dir=template_name)

    shakemap.sort_facility_shaking('weight')
    fac_details = shakemap.get_impact_summary()

    # hex values consumed by the template's impact color scale
    colors = {
        'red': '#FF0000',
        'orange': '#FFA500',
        'yellow': '#FFFF00',
        'green': '#50C878',
        'gray': '#AAAAAA',
    }

    return template.render(
        shakemap=shakemap,
        facility_shaking=shakemap.facility_shaking,
        fac_details=fac_details,
        sc=SC(),
        config=config,
        web=web,
        colors=colors,
    )
def can_process_event(event, scenario=False):
    """Decide whether an event may be processed right now.

    Small earthquakes are deferred until daytime; scenario runs are
    always allowed through.
    """
    clock = Clock()
    conf = SC()

    if clock.nighttime() is True and scenario is False:
        # only process overnight if the quake is big enough to matter
        return event.magnitude >= conf.night_eq_mag_cutoff

    return True
def ci_init():
    """Populate SMTP settings from SC_SMTP_* environment variables.

    Intended for CI environments; reads each variable (falling back to an
    empty string) and persists the values into the ShakeCast config.
    """
    sc = SC()
    smtp = sc.dict['SMTP']
    smtp['username'] = os.environ.get('SC_SMTP_USERNAME', '')
    smtp['password'] = os.environ.get('SC_SMTP_PASSWORD', '')
    smtp['server'] = os.environ.get('SC_SMTP_SERVER', '')
    smtp['from'] = os.environ.get('SC_SMTP_FROM', '')
    smtp['security'] = os.environ.get('SC_SMTP_SECURITY', '')
    # BUG FIX: int('') raises ValueError when SC_SMTP_PORT is unset or
    # empty; fall back to the standard SMTP submission port instead
    smtp['port'] = int(os.environ.get('SC_SMTP_PORT') or 587)
    sc.save_dict()
def notify_admin(self, update_info=None, testing=False):
    """Email the admin about available updates and record the result."""
    notified = self.send_update_notification(update_info=update_info)

    if notified is True:
        # remember that the admin has already been told about this update
        conf = SC()
        conf.dict['Server']['update']['admin_notified'] = True
        if testing is not True:
            conf.save_dict()
def __init__(self):
    """Pull SMTP connection settings out of the ShakeCast config."""
    conf = SC()
    smtp = conf.dict['SMTP']

    # fall back to the login username when no explicit From address is set
    self.me = smtp['from'] or smtp['username']
    self.username = conf.smtp_username
    self.password = conf.smtp_password
    self.server_name = conf.smtp_server
    self.server_port = int(smtp['port'])
    self.security = smtp['security']
    self.log = ''
    self.notify = conf.dict['Notification']['notify']
def update_configs(new):
    """Merge new configuration JSON with the user's existing settings.

    User changes are kept intact. The resulting version number will be
    wrong and must be overwritten later in the update process.
    """
    conf = SC()
    incoming = json.loads(new)

    # layer the old configs on top of the new ones to retain user settings
    merge_dicts(incoming, conf.dict)

    conf.dict = incoming
    conf.save_dict()
def get_event_map(event):
    """Download and cache a static map image for an event.

    Does nothing when image.png already exists in the event's directory.
    """
    image_loc = os.path.join(event.directory_name, 'image.png')
    if os.path.exists(image_loc):
        return

    sc = SC()
    # download the static map image centered on the epicenter
    url_opener = URLOpener()
    gmap = url_opener.open(
        'https://api.mapbox.com/styles/v1/mapbox/streets-v10/static/pin-s+F00(%s,%s)/%s,%s,5/200x200?access_token=%s'
        % (event.lon, event.lat, event.lon, event.lat, sc.map_key))

    # FIX: use a context manager so the file handle is closed even if
    # the write raises (the original leaked the handle on failure)
    with open(image_loc, 'wb') as image:
        image.write(gmap)
def sc_config(new_configs=None):
    """Apply a dict of configuration overrides and save when valid.

    Nested dicts are merged one level deep; non-dict values replace the
    existing entry outright. Unknown top-level keys are ignored.
    """
    # FIX: avoid the shared mutable-default-argument pitfall
    if new_configs is None:
        new_configs = {}

    sc = SC()
    sc_config = json.loads(sc.json)

    # FIX: .items() instead of the Python-2-only .iteritems()
    for key, value in new_configs.items():
        if key in sc_config:
            if not isinstance(value, dict):
                sc_config[key] = value
            else:
                for i_key, i_value in value.items():
                    sc_config[key][i_key] = i_value

    sc.json = json.dumps(sc_config)
    if sc.validate(sc.json) is True:
        sc.save(sc.json)
def build_insp_html(shakemap, name=None, web=False, config=None):
    """Render the inspection-priority HTML for a shakemap."""
    manager = TemplateManager()
    template_name = (name or 'default').lower()
    if not config:
        config = manager.get_configs('inspection', name=template_name)
    template = manager.get_template('inspection', name=template_name)

    shakemap.sort_facility_shaking('weight')
    fac_details = shakemap.get_impact_summary()

    return template.render(
        shakemap=shakemap,
        facility_shaking=shakemap.facility_shaking,
        fac_details=fac_details,
        sc=SC(),
        config=config,
        web=web,
    )
def db_migration(engine):
    """Run any outstanding database migrations.

    Migration callables are named like '...to<N>'; each one newer than the
    configured db_version is applied in order and the config is updated.
    Returns the (possibly rebuilt) engine and a scoped session factory.
    """
    from db_migrations import migrations
    from util import SC

    sc = SC()
    for migration in migrations:
        # the target schema version is encoded in the function name
        mig_version = int(migration.__name__.split('to')[1])
        cur_version = sc.dict['Server']['update']['db_version']
        if mig_version > cur_version:
            # run the migration; it may hand back a new engine
            engine = migration(engine)

            # update the configs
            sc.dict['Server']['update']['db_version'] = mig_version

    # FIX: build the session factory once, after migrations have settled
    # on a final engine; doing this only inside the migration branch left
    # Session unbound (NameError) when no migration was needed
    session_maker = sessionmaker(bind=engine)
    Session = scoped_session(session_maker)
    sc.save_dict()

    return engine, Session
def build_new_event_html(events=None, notification=None, group=None, name=None, web=False, config=None):
    """Render the new-event notification HTML."""
    manager = TemplateManager()
    template_name = (name or 'default').lower()
    if not config:
        config = manager.get_configs('new_event', name=template_name)
    template = manager.get_template('new_event', name=template_name)

    return template.render(
        events=events,
        group=group,
        notification=notification,
        sc=SC(),
        config=config,
        web=web,
    )
def update(self, testing=False):
    """Download and install the files listed in the update feed.

    Returns (success, failed): the file descriptors that were applied
    and those that raised during download/install. The recorded software
    version only advances for files that installed successfully.
    """
    update = self.get_update_info()
    version = self.current_version
    sc = SC()
    failed = []
    success = []

    # files are concatenated if the user is multiple updates behind
    for file_ in update.get('files', []):
        try:
            # download the replacement file
            url_opener = URLOpener()
            text_file = url_opener.open(file_['url'])

            # build the absolute, OS-native path to the target file
            file_path = os.sep.join([root_dir()] + file_['path'].split('/'))
            norm_file_path = os.path.normpath(file_path)

            if 'sc.json' not in file_['path']:
                # FIX: context manager closes the handle even if the
                # write fails (the original leaked it on error)
                with open(norm_file_path, 'w') as file_to_update:
                    file_to_update.write(text_file)
            else:
                # json configs require a merge to keep user settings
                self.update_configs(text_file)

            # track the highest version successfully applied
            if self.check_new_update(file_['version'], version):
                version = file_['version']

            success += [file_]
        except Exception:
            # deliberate best-effort: record the failure, keep going
            failed += [file_]

    sc.dict['Server']['update']['software_version'] = version
    if testing is not True:
        sc.save_dict()

    return success, failed
def __init__(self, req_products=None, data_dir=''):
    # Initialize the product grabber from the ShakeCast configuration:
    # feed URL, networks to ignore, and required/preferred product lists.
    sc = SC()
    self.req_products = req_products
    self.pref_products = []
    self.server_address = ''
    self.json_feed_url = sc.geo_json_web
    self.ignore_nets = sc.ignore_nets.split(',')
    self.json_feed = ''
    self.earthquakes = {}
    # NOTE(review): the data_dir argument is never assigned here --
    # a non-empty data_dir skips get_data_path() below while
    # self.data_dir stays ''; confirm whether that is intended
    self.data_dir = ''
    self.delim = ''
    self.log = ''
    self.query_period = 'day'
    # fall back to configured defaults when no products were requested
    if not self.req_products:
        self.req_products = sc.eq_req_products
    # NOTE(review): pref_products was just set to [], so this branch
    # always runs; the preceding assignment appears redundant
    if not self.pref_products:
        self.pref_products = sc.dict['Services']['eq_pref_products']
    if data_dir == '':
        self.get_data_path()
def db_init():
    # SETUP DATABASE
    # Build the engine (sqlite or mysql per configuration), create any
    # missing schema, run migrations, and seed an admin user if none
    # exists. Returns (engine, Session).
    sc = SC()
    # name the database, but switch to a test database if run from test.py
    db_name = sc.dict['DBConnection']['database'] + '.db'
    testing = False
    insp = inspect_mod.stack()
    if 'tests' in str(insp):
        db_name = 'test.db'
        testing = True

    if sc.dict['DBConnection']['type'] == 'sqlite' or testing is True:
        engine = create_engine('sqlite:///%s' % os.path.join(get_db_dir(), db_name))
    elif sc.dict['DBConnection']['type'] == 'mysql':
        try:
            # NOTE(review): this try body is only a string format and
            # cannot raise the "db doesn't exist" error the except block
            # expects -- the connection attempt presumably belonged here.
            # If the except ever did fire before db_str was bound, the
            # finally block would raise NameError; confirm intent.
            db_str = 'mysql://{}:{}@{}/{}'.format(
                sc.dict['DBConnection']['username'],
                sc.dict['DBConnection']['password'],
                sc.dict['DBConnection']['server'],
                sc.dict['DBConnection']['database'])
        except Exception:
            # db doesn't exist yet, let's create it
            server_str = 'mysql://{}:{}@{}'.format(
                sc.dict['DBConnection']['username'],
                sc.dict['DBConnection']['password'],
                sc.dict['DBConnection']['server'])
            engine = create_engine(server_str)
            engine.execute('CREATE DATABASE {}'.format(
                sc.dict['DBConnection']['database']))
        finally:
            # try to get that connection going again
            engine = create_engine(db_str, pool_recycle=3600)
            engine.execute('USE {}'.format(
                sc.dict['DBConnection']['database']))

    # if we're testing, we want to drop all existing database info to test
    # from scratch
    if testing is True:
        metadata.drop_all(engine)

    # create database schema that doesn't exist
    try:
        metadata.create_all(engine, checkfirst=True)
    except Exception:
        # another service might be initializing the db,
        # wait a sec for it to be done occurs during
        # docker init
        time.sleep(5)
        metadata.create_all(engine, checkfirst=True)

    engine, Session = db_migration(engine)

    # create scadmin if there are no other users
    session = Session()
    us = session.query(User).filter(User.user_type.like('admin')).all()
    if not us:
        u = User()
        u.username = '******'
        u.password = generate_password_hash('scadmin', method='pbkdf2:sha512')
        u.user_type = 'ADMIN'
        u.updated = time.time()
        u.updated_by = 'shakecast'
        session.add(u)
        session.commit()
    Session.remove()

    return engine, Session
def get_new_events(self, session=None, scenario=False):
    """
    Checks the json feed for new earthquakes

    Filters the fetched earthquakes against network/magnitude cutoffs
    and group boundaries, merges duplicates of previously seen events,
    and commits the keepers. Returns (new_events, event_str) where
    event_str is a human-readable summary line per kept event.
    """
    sc = SC()
    event_str = ''
    new_events = []
    for eq_id in self.earthquakes.keys():
        eq = self.earthquakes[eq_id]
        # ignore info from unfavorable networks and low mag eqs
        if (eq['properties']['net'] in self.ignore_nets or
                eq['properties']['mag'] < sc.new_eq_mag_cutoff):
            continue

        # get event id and all ids
        event = Event()
        event.all_event_ids = eq['properties']['ids']
        if scenario is False:
            event.event_id = eq_id
        else:
            # scenario events get a distinct id so they never collide
            # with the real event they are based on
            event.event_id = eq_id + '_scenario'
            event.all_event_ids = event.event_id

        # use id and all ids to determine if the event is new and
        # query the old event if necessary
        old_shakemaps = []
        old_notifications = []
        if event.is_new() is False:
            event.status = 'processed'
            ids = event.all_event_ids.strip(',').split(',')
            old_events = [(session.query(Event).filter(
                Event.event_id == each_id).first()) for each_id in ids]

            # remove older events, carrying their shakemaps and
            # notifications over to this fresher record
            for old_event in old_events:
                if old_event is not None:
                    old_notifications += old_event.notifications
                    old_shakemaps += old_event.shakemaps

                    # if one of these old events hasn't had
                    # notifications sent, this event should be sent
                    if old_event.status == 'new':
                        event.status = 'new'
                    session.delete(old_event)
        else:
            event.status = 'new'

        # Fill the rest of the event info
        event.title = self.earthquakes[eq_id]['properties']['title']
        event.place = self.earthquakes[eq_id]['properties']['place']
        # feed timestamps are in milliseconds; store seconds
        event.time = self.earthquakes[eq_id]['properties']['time'] / 1000.0
        event.magnitude = eq['properties']['mag']
        event_coords = self.earthquakes[eq_id]['geometry']['coordinates']
        event.lon = event_coords[0]
        event.lat = event_coords[1]
        event.depth = event_coords[2]
        event.type = 'scenario' if scenario is True else 'event'

        # determine whether or not an event should be kept
        # based on group definitions. Should always be true
        # for scenario runs
        keep_event = scenario
        groups = session.query(Group).all()
        if len(groups) > 0:
            for group in groups:
                if group.point_inside(event):
                    keep_event = True
        else:
            # with no groups defined, keep everything
            keep_event = True

        if keep_event is False:
            continue

        # re-attach history salvaged from the deleted duplicates
        if old_shakemaps:
            event.shakemaps = old_shakemaps
        if old_notifications:
            event.notifications = old_notifications

        session.add(event)
        session.commit()

        self.get_event_map(event)

        # add the event to the return list and add info to the
        # return string
        new_events += [event]
        event_str += 'Event: %s\n' % event.event_id

    # print event_str
    return new_events, event_str
# name the database, but switch to a test database if run from test.py db_name = 'shakecast.db' testing = False insp = inspect_mod.stack() for stack in insp: for entry in stack: if 'test.py' in str(entry): db_name = 'test.db' testing = True # logging from DB #logging.basicConfig(level=logging.DEBUG) #logging.getLogger('sqlalchemy.engine.base').setLevel(logging.DEBUG) # SETUP DATABASE sc = SC() if sc.dict['DBConnection']['type'] == 'sqlite' or testing is True: engine = create_engine('sqlite:///%s' % os.path.join(directory, db_name)) elif sc.dict['DBConnection']['type'] == 'mysql': try: db_str = 'mysql://{}:{}@{}/pycast'.format( sc.dict['DBConnection']['username'], sc.dict['DBConnection']['password'], sc.dict['DBConnection']['server']) engine = create_engine(db_str) engine.execute('USE pycast') except Exception: # db doesn't exist yet, let's create it server_str = 'mysql://{}:{}@{}'.format( sc.dict['DBConnection']['username'], sc.dict['DBConnection']['password'],