def activation_key_expired(self): """ Determine whether this ``RegistrationProfile``'s activation key has expired, returning a boolean -- ``True`` if the key has expired. Key expiration is determined by a two-step process: 1. If the user has already activated, the key will have been reset to the string constant ``ACTIVATED``. Re-activating is not permitted, and so this method returns ``True`` in this case. 2. Otherwise, the date the user signed up is incremented by the number of days specified in the setting ``REGISTRATION_API_ACCOUNT_ACTIVATION_DAYS`` (which should be the number of days after signup during which a user is allowed to activate their account); if the result is less than or equal to the current date, the key has expired and this method returns ``True``. """ # utils imported here to avoid circular import import utils expiration_date = datetime.timedelta( days=utils.get_settings('REGISTRATION_API_ACCOUNT_ACTIVATION_DAYS')) return self.activation_key == self.ACTIVATED or \ (getattr(self.user, utils.get_settings('REGISTRATION_API_USER_REGISTER_DATE_FIELD')) + expiration_date <= datetime_now())
def download():
    """Download every file listed in data/info.jsonlines that is not already
    present in data/files/, via a scrapy crawler process."""
    info_path = os.path.join(os.getcwd(), 'data/info.jsonlines')
    if not os.path.isfile(info_path):
        print('Use `update` command first.')
        return
    save_dir = os.path.join(os.getcwd(), 'data/files/')
    if not os.path.isdir(save_dir):
        os.mkdir(save_dir)
    download_urls = []
    with open(info_path, 'rt') as file:
        for line in file:
            article_info = json.loads(line)
            file_name = article_info['file_name']
            download_url = article_info['download_url']
            # Skip incomplete records and files that already exist on disk.
            if not file_name or not download_url or os.path.isfile(
                    os.path.join(save_dir, file_name)):
                continue
            download_urls.append({
                'url': article_info['download_url'],
                'file_name': file_name
            })
    print('Total files: %d' % len(download_urls))
    cp = crawler.CrawlerProcess(get_settings())
    cr = crawler.Crawler(DownloadSpider, get_settings())
    # Fire the `downloaded` callback for each finished file.
    cr.signals.connect(downloaded, signal=signals.file_downloaded)
    cp.crawl(cr, save_dir=save_dir, download_list=download_urls)
    cp.start()
def settings():
    """Display and update the current admin's settings.

    GET renders the settings form; a valid POST persists the settings and
    redirects to 'hunts' (or 'new_hunt' the first time settings are
    completed).  An invalid POST logs the errors and re-renders the form.
    """
    errors = None  # NOTE(review): never assigned/read below — confirm dead
    admin_settings = get_settings(g.db, admin_id=current_user.admin_id) or Setting()
    form = SettingForm(request.form)
    if request.method == 'POST':
        if form.validate():
            # Whether settings were already complete decides the redirect.
            already_completed = finished_setting(admin_settings)
            form.populate_obj(admin_settings)
            admin_settings.admin_id = current_user.admin_id
            g.db.session.add(admin_settings)
            g.db.session.commit()
            url = 'hunts' if already_completed else 'new_hunt'
            flash('Settings have been updated successfully', 'success')
            return make_response(redirect(url_for(url)))
        else:
            logger.info(
                '%s attempted to submit settings information'
                ' resulting in errors: %s', current_user.email, form.errors)
    # GET, or POST with validation errors: render the form.
    return make_response(
        render_template('settings.html',
                        login=admin_settings.login,
                        form=form,
                        password=admin_settings.password,
                        wax_site=admin_settings.wax_site))
def __init__(self, parent=None):
    """Paparazzi Center main window.

    Builds the Configuration/Operation tab pair, the status bar, the
    cross-panel signal wiring, restores the persisted window size, then
    initializes both panels.
    """
    QMainWindow.__init__(self, parent=parent)
    self.setWindowTitle("Paparazzi Center")
    icon = QtGui.QIcon(os.path.join(utils.PAPARAZZI_HOME, "data",
                                    "pictures", "penguin_logo.svg"))
    self.setWindowIcon(icon)
    self.addMenu()
    # Two top-level tabs: configuration and operation.
    self.tabwidget = QTabWidget(parent=self)
    self.setCentralWidget(self.tabwidget)
    self.configuration_panel = ConfigurationPanel(self.tabwidget)
    self.operation_panel = OperationPanel(self.tabwidget)
    self.tabwidget.addTab(self.configuration_panel, "Configuration")
    self.tabwidget.addTab(self.operation_panel, "Operation")
    self.status_msg = QLabel()
    self.statusBar().addWidget(self.status_msg)
    self.fill_status_bar()
    self.statusBar().show()
    # Surface configuration-panel errors in this window.
    self.configuration_panel.msg_error.connect(self.handle_error)
    self.configuration_panel.clear_error.connect(self.clear_error)
    # Lock/unlock configuration sets while session programs are running.
    self.operation_panel.session.program_spawned.connect(
        self.configuration_panel.disable_sets)
    self.operation_panel.session.programs_all_stopped.connect(
        self.configuration_panel.enable_sets)
    # Keep the session's aircraft in sync with the configuration panel.
    self.configuration_panel.ac_changed.connect(
        self.operation_panel.session.set_aircraft)
    self.configuration_panel.splitter.splitterMoved.connect(
        self.update_left_pane_width)
    # Restore persisted window size (default 1000x600).
    settings = utils.get_settings()
    window_size = settings.value("ui/window_size",
                                 QtCore.QSize(1000, 600), QtCore.QSize)
    self.resize(window_size)
    self.configuration_panel.init()
    self.operation_panel.session.init()
def index_items(hunt_id):
    """Show the item list for a hunt.

    Requires the participant's email in the session; otherwise renders the
    hunt's welcome page and records the intended URL for after sign-in.
    Aborts with 404 when the hunt does not exist.
    """
    hunt = Hunt.find_by_id(g.db, hunt_id)
    if hunt:
        email = session.get('email')
        if email:
            admin_settings = get_settings(g.db, hunt_id=hunt_id)
            # Fetch the participant's saved progress state from the LRS.
            lrs = WaxCommunicator(
                admin_settings, request.host_url, hunt, None,
                {'email': email, 'name': session.get('name')})
            state = lrs.get_state()
            logger.info(
                'preparing to render items from hunt_id, %s, for user, %s',
                hunt_id, email)
            return make_response(render_template(
                'items.html', state=state, hunt=hunt,
                num_remaining=num_items_remaining(state, hunt.items)))
        # Not signed in: remember where they were headed, then welcome them.
        session['intended_url'] = '/hunts/{}/items'.format(hunt_id)
        return make_response(
            render_template('welcome.html',
                            hunt=hunt,
                            welcome=hunt.welcome_message,
                            action_url="/get_started/hunts/{}".format(
                                hunt_id)))
    logger.info('Someone attempted to visit the items list for hunt with id, '
                '%s, but this hunt does not exist', hunt_id)
    abort(404)
def init_all_words(cls):
    """Build a word container from the configured Excel workbook.

    Sheets named in ``settings.partly_dict`` are processed; each entry is
    either a list of column-range dicts, or a single dict with 'start',
    'end' and an enabling 'flag'.

    Returns the populated container instance.
    """
    wb = load_workbook(CONF["pathOfExcel"])
    obj = cls(wb)
    settings = get_settings()
    for sheet in wb:
        if sheet.title not in settings.partly_dict:
            continue
        entry = settings.partly_dict[sheet.title]
        # BUG FIX: the original evaluated entry["flag"] *before* checking
        # isinstance(entry, list), so any list-valued entry raised
        # TypeError; test for the list shape first.
        if isinstance(entry, list):
            for d in entry:
                row, col = 0, parse_letter_to_num(d["start"])
                end = parse_letter_to_num(d["end"])
                PartlyWordGenerator.fill(sheet, row, col, end, obj)
        elif entry["flag"]:
            row, col = 0, parse_letter_to_num(entry["start"])
            end = parse_letter_to_num(entry["end"])
            PartlyWordGenerator.fill(sheet, row, col, end, obj)
    return obj
def __init__(self) -> None:
    """Initialization"""
    # Non-zero until the example completes successfully.
    self.return_code = -1
    # Begin at the first actionable state after START.
    self.state = self.State(self.State.START.value + 1)
    # Credentials and fields for the user this example creates and updates.
    self.new_user = dict(
        username="******",
        password="******",
        full_name="John Doe",
        role=AuthenticationMessages.Role.OPERATOR.value,
        updated_full_name="John J. Doe",
        updated_password="******",
        updated_role=AuthenticationMessages.Role.ADMIN.value,
    )
    self.settings = get_settings()
    self.logger = setup_log("AuthenticationExample", self.settings.log_level)
    # Websocket client with callbacks for the authentication connection.
    self.client = Connections(
        hostname=self.settings.hostname,
        logger=self.logger,
        authentication_on_open=self.authentication_on_open,
        authentication_on_message=self.authentication_on_message,
        authentication_on_error=self.authentication_on_error,
        authentication_on_close=self.authentication_on_close,
    )
    self.authentication = AuthenticationMessages(
        self.logger, self.settings.protocol_version)
def parse_mapper(self, _, rast_s3key):
    """
    Given a line containing a s3 keyname of a raster,
    download the mentioned file and split it into pixels in the format:

        point_wkt, {'val': <val>, 'date': <date>}

    (where the point_wkt is the centroid of the pixel)
    """
    job = os.environ.get('LT_JOB')
    # Download the raster and its (optional) companion mask.
    rast_fn = utils.rast_dl(rast_s3key)
    mask_key = rast_s3key.replace(s.RAST_TRIGGER, s.MASK_TRIGGER)
    try:
        mask_fn = utils.rast_dl(mask_key)
    except Exception:
        mask_fn = None  # don't worry about mask
    # calculate index
    index_eqn = utils.get_settings(job)['index_eqn']
    index_rast = utils.rast_algebra(rast_fn, index_eqn)
    # figure out date from filename
    datestring = utils.filename2date(rast_fn)
    # pull down grid
    grid_fn = utils.get_file(s.OUT_GRID % job)
    # IDIOM FIX: parenthesized print form — a single-expression print()
    # behaves identically on Python 2 and also parses under Python 3.
    print('Serializing %s...' % os.path.basename(rast_fn))
    pix_generator = utils.apply_grid(index_rast, grid_fn,
                                     {'date': datestring}, mask_fn=mask_fn)
    for point_wkt, pix_data in pix_generator:
        yield point_wkt, pix_data
def __init__(self) -> None:
    """Initialization"""
    self.return_code = -1
    self.state = self.State(self.State.START.value + 1)
    # Worker threads for each websocket connection (created later).
    self.authentication_thread = None
    self.metadata_thread = None
    self.realtime_situation_thread = None
    # Node-count bookkeeping for realtime-situation loading progress.
    self.total_node_count = 0
    self.loaded_node_count = 0
    self.settings = get_settings()
    self.logger = setup_log("ComponentsInformationExample",
                            self.settings.log_level)
    # Register callbacks for the three websocket connections used here.
    self.client = Connections(
        hostname=self.settings.hostname,
        logger=self.logger,
        authentication_on_open=self.authentication_on_open,
        authentication_on_message=self.authentication_on_message,
        authentication_on_error=self.authentication_on_error,
        authentication_on_close=self.authentication_on_close,
        metadata_on_open=self.metadata_on_open,
        metadata_on_message=self.metadata_on_message,
        metadata_on_error=self.metadata_on_error,
        metadata_on_close=self.metadata_on_close,
        realtime_situation_on_open=self.realtime_situation_on_open,
        realtime_situation_on_message=self.realtime_situation_on_message,
        realtime_situation_on_error=self.realtime_situation_on_error,
        realtime_situation_on_close=self.realtime_situation_on_close,
    )
    self.messages = Messages(self.logger, self.settings.protocol_version)
def settings():
    """Render and persist the current admin's settings.

    A valid POST saves the record and redirects ('hunts', or 'new_hunt'
    when settings were not previously complete); any other request path
    falls through to rendering the form.
    """
    errors = None
    record = get_settings(
        g.db, admin_id=current_user.admin_id) or Setting()
    form = SettingForm(request.form)
    if request.method == 'POST':
        if form.validate():
            had_complete_settings = finished_setting(record)
            form.populate_obj(record)
            record.admin_id = current_user.admin_id
            g.db.session.add(record)
            g.db.session.commit()
            flash('Settings have been updated successfully', 'success')
            destination = 'hunts' if had_complete_settings else 'new_hunt'
            return make_response(redirect(url_for(destination)))
        logger.info(
            '%s attempted to submit settings information'
            ' resulting in errors: %s', current_user.email, form.errors)
    return make_response(render_template(
        'settings.html',
        login=record.login,
        form=form,
        password=record.password,
        wax_site=record.wax_site))
def processor(bot, update):
    """Top-level Telegram message dispatcher.

    Order of handling: group-chat guard, constant (always-available)
    buttons, the keyboard of the user's current menu action, and finally
    the free-form action_manager fallback.
    """
    user = update.message.from_user
    text = update.message.text
    action, params = db_user.get_action(user.id)
    settings = utils.get_settings()
    db_user.update(user)
    # Check if it is group chat
    if update.message.chat.id < 0:
        # Only reply to slash-commands in groups; ignore everything else.
        if text.startswith('/'):
            log_message(
                update.message.reply_text(utils.get_constant('not_chat')))
        return
    # logging
    logger.info('{} {} ({}:@{}): {}'.format(user.first_name, user.last_name,
                                            user.id, user.username, text))
    # Constant behavior
    for button in settings['constant_behavior']:
        if utils.get_constant(button) == text:
            behave(settings['constant_behavior'][button], bot, update)
            return
    # Checking menu
    if action in settings:
        for button in settings[action]['keyboard']:
            if get_button_text(button) == text:
                behave(button, bot, update)
                # NOTE(review): no return/break after a match — remaining
                # buttons are still checked; confirm this is intentional.
    else:
        action_manager(bot, update, action)
def to_database(abbreviations, purge, safe):
    """
    Sync YAML files to DB.

    For each state abbreviation, loads people (active + retired) and
    committee YAML files inside one atomic transaction.  In *safe* mode
    the transaction is deliberately rolled back via CancelTransaction so
    no changes are committed.
    """
    init_django()
    if not abbreviations:
        abbreviations = get_all_abbreviations()
    settings = get_settings()
    for abbr in abbreviations:
        click.secho('==== {} ===='.format(abbr), bold=True)
        directory = get_data_dir(abbr)
        jurisdiction_id = get_jurisdiction_id(abbr)
        # Active and retired people are loaded together.
        person_files = (glob.glob(os.path.join(directory, 'people/*.yml')) +
                        glob.glob(os.path.join(directory, 'retired/*.yml')))
        committee_files = glob.glob(
            os.path.join(directory, 'organizations/*.yml'))
        if safe:
            click.secho('running in safe mode, no changes will be made',
                        fg='magenta')
        state_settings = settings[abbr]
        try:
            with transaction.atomic():
                create_posts(jurisdiction_id, state_settings)
                load_directory(person_files, 'person', jurisdiction_id,
                               purge=purge)
                load_directory(committee_files, 'organization',
                               jurisdiction_id, purge=purge)
                # Raising inside the atomic block rolls everything back.
                if safe:
                    click.secho('ran in safe mode, no changes were made',
                                fg='magenta')
                    raise CancelTransaction()
        except CancelTransaction:
            pass
def settings():
    "Get the settings from file 'settings.json' in this directory."
    result = utils.get_settings(BASE_URL="http://localhost:5001",
                                USER_USERNAME=None,
                                USER_APIKEY=None)
    # Set up requests session with API key.
    result["session"] = session = requests.Session()
    session.headers.update({"x-apikey": result["USER_APIKEY"]})
    # Fetch each JSON schema the tests rely on; every GET must return OK.
    schema_endpoints = [
        ("root_schema", "root"),
        ("dbs_schema", "dbs"),
        ("db_schema", "db"),
        ("table_schema", "table"),
        ("rows_schema", "rows"),
        ("view_schema", "view"),
        ("view_create_schema", "view/create"),
    ]
    for result_key, endpoint in schema_endpoints:
        response = session.get(f"{result['BASE_URL']}/api/schema/{endpoint}")
        assert response.status_code == http.client.OK
        result[result_key] = response.json()
    yield result
    # Teardown: close the session after the dependent tests finish.
    result["session"].close()
def call_train_and_predict(settings_file, verbose=False):
    """Run ./train.py then ./predict.py with *settings_file*.

    Both subprocesses have stdout/stderr discarded.  Issues a UserWarning
    and stops early if either step exits non-zero.  Always returns None.

    Note: *verbose* is accepted for interface compatibility but is not
    used by this variant.
    """
    settings = utils.get_settings(settings_file)
    # BUG FIX: the original leaked the devnull handle on the early-warning
    # returns, and had unreachable close() calls after the final return
    # (one of them on an undefined name, `out_file`).  A context manager
    # closes the handle on every path.
    with open(os.devnull, 'w') as null:
        train_retcode = subprocess.call(['./train.py', '-s', settings_file],
                                        stdout=null, stderr=null)
        # Raise a warning if it was non-zero and return
        if train_retcode != 0:
            warnings.warn("train.py -s {0} did not complete successfully".format(
                settings_file))
            return None
        # Start ./predict proc
        predict_retcode = subprocess.call(['./predict.py', '-s', settings_file],
                                          stdout=null, stderr=null)
        # Raise warning if predict failed and return
        if predict_retcode != 0:
            warnings.warn("predict.py -s {0} did not complete successfully".format(
                settings_file))
            return None
    return None
def user_network():
    """
    Start iteration process for load data to user_network.

    Runs forever: each pass copies up to LOAD_ROWS rows newer than the
    LOAD_DATE_START high-water mark from the iptable connection, persists
    the new mark between iterations, and sleeps when no new data arrives.
    """
    log = get_logger()
    with get_connect() as con, get_connect_iptable() as con_ip:
        cur = con.cursor()
        cur_ip = con_ip.cursor()
        # get settings
        settings = get_settings(cur)
        load_date_start = datetime.strptime(settings['LOAD_DATE_START'],
                                            '%Y.%m.%d %H:%M:%S.%f')
        load_pause = int(settings['LOAD_PAUSE'])
        load_pause_empty = int(settings['LOAD_PAUSE_EMPTY'])
        cnt_rows = int(settings['LOAD_ROWS'])
        while True:
            date = user_network_load(cur_ip, cur, load_date_start, cnt_rows)
            log.info("Load data. load_date_start: %s", date)
            # save date between iterations
            if date > load_date_start:
                sql = "update load_settings set value=%s where param=%s"
                date_str = datetime.strftime(date, '%Y.%m.%d %H:%M:%S.%f')
                cur.execute(sql, [date_str, 'LOAD_DATE_START'])
            else:
                # sleep if not new data
                log.debug("No new data sleep( %s )", load_pause_empty)
                time.sleep(load_pause_empty)
            # Optional fixed pause between load iterations.
            if load_pause:
                log.debug("sleep between loads sleep( %s )", load_pause)
                time.sleep(load_pause)
            load_date_start = date
def __init__(self) -> None:
    """Initialization"""
    self.return_code = -1
    self.state = self.State(self.State.START.value + 1)
    # Worker threads for each websocket connection (created later).
    self.authentication_thread = None
    self.metadata_thread = None
    self.settings = get_settings()
    self.logger = setup_log("BuildingExample", self.settings.log_level)
    # Register callbacks for the authentication and metadata connections.
    self.client = Connections(
        hostname=self.settings.hostname,
        logger=self.logger,
        authentication_on_open=self.authentication_on_open,
        authentication_on_message=self.authentication_on_message,
        authentication_on_error=self.authentication_on_error,
        authentication_on_close=self.authentication_on_close,
        metadata_on_open=self.metadata_on_open,
        metadata_on_message=self.metadata_on_message,
        metadata_on_error=self.metadata_on_error,
        metadata_on_close=self.metadata_on_close,
    )
    self.messages = BuildingMessages(self.logger,
                                     self.settings.protocol_version)
def parse_mapper(self, _, rast_s3key):
    """
    Given a line containing a s3 keyname of a raster,
    download the mentioned file and split it into pixels in the format:

        point_wkt, {'val': <val>, 'date': <date>}

    (where the point_wkt is the centroid of the pixel)
    """
    job = os.environ.get('LT_JOB')
    # Download the raster and its (optional) companion mask.
    rast_fn = utils.rast_dl(rast_s3key)
    mask_key = rast_s3key.replace(s.RAST_TRIGGER, s.MASK_TRIGGER)
    try:
        mask_fn = utils.rast_dl(mask_key)
    except Exception:
        mask_fn = None  # don't worry about mask
    # calculate index
    index_eqn = utils.get_settings(job)['index_eqn']
    index_rast = utils.rast_algebra(rast_fn, index_eqn)
    # figure out date from filename
    datestring = utils.filename2date(rast_fn)
    # pull down grid
    grid_fn = utils.get_file(s.OUT_GRID % job)
    # Python 2 print statement (this module is Python 2 only).
    print 'Serializing %s...' % os.path.basename(rast_fn)
    pix_generator = utils.apply_grid(
        index_rast, grid_fn, {'date': datestring}, mask_fn=mask_fn)
    for point_wkt, pix_data in pix_generator:
        yield point_wkt, pix_data
def get_256_colors(container="gateone"):
    """
    Returns the rendered 256-color CSS.  If *container* is provided it will
    be used as the ``{{container}}`` variable when rendering the template (
    defaults to "gateone").
    """
    terminal_app_path = os.path.join(GATEONE_DIR, 'applications', 'terminal')
    colors_json_path = os.path.join(terminal_app_path, '256colors.json')
    # Using get_settings() as a cool hack to get the color data as a nice dict:
    color_map = get_settings(colors_json_path, add_default=False)
    # Setup our 256-color support CSS:
    colors_256 = ""
    for i in xrange(256):  # Python 2 module (xrange)
        i = str(i)  # the JSON color map is keyed by string
        # Foreground, background, and their reverse-video variants.
        fg = u"#%s span.✈fx%s {color: #%s;}" % (
            container, i, color_map[i])
        bg = u"#%s span.✈bx%s {background-color: #%s;} " % (
            container, i, color_map[i])
        fg_rev = (
            u"#%s span.✈reverse.fx%s {background-color: #%s; color: "
            u"inherit;}" % (container, i, color_map[i]))
        bg_rev = (
            u"#%s span.✈reverse.bx%s {color: #%s; background-color: "
            u"inherit;} " % (container, i, color_map[i]))
        colors_256 += "%s %s %s %s\n" % (fg, bg, fg_rev, bg_rev)
    return colors_256
def update():
    """Rebuild data/info.jsonlines by running every article spider."""
    print('Updating articles info. Please wait...')
    save_dir = os.path.join(os.getcwd(), 'data')
    if not os.path.isdir(save_dir):
        os.mkdir(save_dir)
    # Start from a clean output file each run.
    save_path = os.path.join(save_dir, 'info.jsonlines')
    if os.path.isfile(save_path):
        os.remove(save_path)
    settings = get_settings()
    feed_options = {
        'format': 'jsonlines',
        'overwrite': True,
        'item_export_kwargs': {
            'sort_keys': True
        }
    }
    settings.set('FEEDS', {'file:///' + save_path: feed_options})
    process = crawler.CrawlerProcess(settings)
    for spider_cls in spider_classes:
        spider_crawler = crawler.Crawler(spider_cls, settings)
        # Report progress per scraped item.
        spider_crawler.signals.connect(item_scraped,
                                       scrapy_signals.item_scraped)
        process.crawl(spider_crawler)
    process.start()
    print('Updating finished.')
def settings():
    "Get the settings from the file 'settings.json' in this directory."
    config = utils.get_settings(BASE_URL="http://localhost:5002",
                                ADMIN_USERNAME=None,
                                ADMIN_PASSWORD=None)
    # Normalize the base URL by stripping any trailing slash.
    config["BASE_URL"] = config["BASE_URL"].rstrip("/")
    return config
def __init__(self):
    """Load API connection settings from settings.yaml next to this file."""
    self.dir_name = os.path.dirname(os.path.abspath(__file__))
    self.filename = os.path.join(self.dir_name, 'settings.yaml')
    self.settings = get_settings(self.filename)
    api_conf = self.settings['API']
    self.url = api_conf['URL']
    self.apikey = api_conf['APIKEY']
def handle_finished(self, result):
    """Persist the dialog's fields to settings when it was accepted."""
    if not result:
        return
    settings = utils.get_settings()
    settings.setValue("text_editor", self.text_editor_edit.text())
    settings.setValue("terminal_emulator",
                      self.terminal_emulator_edit.text())
    settings.setValue("always_keep_changes",
                      self.keep_changes_checkbox.isChecked())
def main():
    """Entry point: run the GUI updater, or the CLI login/update flow."""
    args = arg_parser.parse_args()
    if args.gui:
        root = tk.Tk()
        root.wm_title("Inara Updater")
        settings = utils.get_settings()
        app = tk_gui = gui.UpdateWindow(root, settings)  # keep a reference alive
        root.minsize(250, 150)
        root.mainloop()
    else:
        settings = utils.get_settings()
        if settings is None:
            # BUG FIX: the original called util.update_settings — no `util`
            # name is in scope here (the helper module is used as `utils`
            # elsewhere in this function), so this path raised NameError.
            utils.update_settings(_settings_prompt_cli, settings)
            # NOTE(review): `settings` is still None after this call —
            # confirm whether update_settings mutates shared state or its
            # return value should be rebound here.
        inara_session = actions.do_logins(settings)
        actions.update_inara(inara_session)
        print("Inara updated!")
def __init__(self):
    """Page container that reacts to page-change signals."""
    super(PageContent, self).__init__()
    PageSignal.changed.connect(self.on_change_page)
    self.settings = get_settings()
    content_layout = QVBoxLayout()
    content_layout.setMargin(0)
    content_layout.addWidget(HomeFeed())
    self.layout = content_layout
    self.setLayout(content_layout)
def init(self):
    """Populate sessions and tools, then restore the last-used session."""
    self.sessions = parse_sessions()
    self.tools = parse_tools()
    self.init_tools_menu()
    self.sessions_combo.addItems([session.name for session in self.sessions])
    previous_session = utils.get_settings().value("ui/last_session", None, str)
    if previous_session is not None:
        self.sessions_combo.setCurrentText(previous_session)
def main():
    """Train an EM-routing capsule network and report best test accuracy."""
    global args
    # Training settings
    parser = utils.get_settings()
    args = parser.parse_args()
    args.cuda = not args.no_cuda and torch.cuda.is_available()
    torch.manual_seed(1337)
    if args.cuda:
        torch.cuda.manual_seed(1337)
    device = torch.device("cuda" if args.cuda else "cpu")
    # IDIOM FIX: collapsed the redundant if/else that copied args.cuda.
    cuda = args.cuda
    # datasets
    number_of_output_classes, training_dataset, testing_dataset = \
        utils.load_dataset(args)
    # architecture size
    A, B, C, D = 64, 8, 16, 16
    # A, B, C, D = 32, 32, 32, 32
    model = capsules(A=A, B=B, C=C, D=D, E=number_of_output_classes,
                     iters=args.em_iters, cuda=cuda).to(device)
    criterion = SpreadLoss(number_of_output_classes=number_of_output_classes,
                           m_min=0.2, m_max=0.9, cuda=cuda)
    optimizer = optim.Adam(model.parameters(), lr=0.01)
    scheduler = optim.lr_scheduler.ReduceLROnPlateau(optimizer, 'max',
                                                     patience=1)
    # Baseline accuracy of the untrained model.
    best_accuracy = test(testing_dataset, model, criterion, device)
    for epoch in range(1, args.epochs + 1):
        accuracy = train(training_dataset, model, criterion, optimizer,
                         epoch, device)
        accuracy /= len(training_dataset)
        scheduler.step(accuracy)
        best_accuracy = max(best_accuracy,
                            test(testing_dataset, model, criterion, device))
    # BUG FIX: removed the duplicated post-loop evaluation that re-ran the
    # entire test pass a second time with no effect on the tracked maximum.
    print('best test accuracy: {:.6f}'.format(best_accuracy))
    utils.save_model(model, args)
def additional_attributes(user, settings_dir=None): """ Given a *user* dict, return a dict containing any additional attributes defined in Gate One's attribute repositories. """ # Doesn't do anything yet if not settings_dir: settings_dir = os.path.join(GATEONE_DIR, 'settings') settings = get_settings(settings_dir) return user
def load(self):
    """Return persisted settings, creating and saving defaults on first run."""
    if os.path.exists(SETTINGS_FILE):
        return get_settings()
    default_settings = {
        'file_storage_location':
            '{}/Downloads'.format(os.path.expanduser('~')),
    }
    self.save(default_settings)
    return default_settings
def __init__(self, parent=None):
    """Settings dialog: populate form widgets from persisted values."""
    QWidget.__init__(self, parent=parent)
    self.setupUi(self)
    stored = utils.get_settings()
    self.text_editor_edit.setText(stored.value("text_editor", "", str))
    self.terminal_emulator_edit.setText(
        stored.value("terminal_emulator", "", str))
    self.keep_changes_checkbox.setChecked(
        stored.value("always_keep_changes", False, bool))
    # Persist the fields back when the dialog finishes.
    self.finished.connect(self.handle_finished)
def init_all_words(cls):
    """Collect words from every enabled sheet, reading odd columns only."""
    wb = load_workbook(CONF["pathOfExcel"])
    word_db = cls(wb)
    settings = get_settings()
    for sheet in wb:
        # Skip sheets that are absent from, or disabled in, the settings.
        if sheet.title not in settings.sheets or not settings.sheets[sheet.title]:
            continue
        for col in range(1, sheet.max_column + 1, 2):
            for row in range(1, sheet.max_row + 1):
                word_db.append(sheet.cell(row, col))
    return word_db
def activate(request, activation_key=None):
    """
    Given an activation key, look up and activate the user account
    corresponding to that key (if possible).

    Redirects to the configured success URL when one is set; otherwise
    implicitly returns None.
    """
    utils.activate_user(activation_key)
    # if not activated
    success_url = utils.get_settings('REGISTRATION_API_ACTIVATION_SUCCESS_URL')
    if success_url is not None:
        return HttpResponseRedirect(success_url)
def set_token_auth(req):
    """Attach a token-auth header to *req* when token auth is configured.

    The token comes from settings['token_auth']['token']; if empty and a
    'url' is configured, a fresh token is requested (failures are logged
    and an empty token is used).
    """
    settings = get_settings()
    token_conf = settings.get('token_auth')
    if not token_conf:
        return
    auth_token = token_conf.get('token', '')
    if not auth_token and token_conf.get('url'):
        try:
            auth_token = request_token()
        except Exception as e:
            log.error(e)
    header = token_conf.get('header')
    # ROBUSTNESS: only add the header when a header name is configured;
    # the original passed None to add_header when 'header' was missing.
    if header:
        req.add_header(header, auth_token)
def generic_menu(menu_name, update: Update):
    """Show the keyboard for *menu_name* and record it as the user's action."""
    settings = utils.get_settings()
    if menu_name not in settings:
        return
    menu = settings[menu_name]
    labels = [get_button_text(button) for button in menu['keyboard']]
    markup = ReplyKeyboardMarkup(utils.generate_markup(labels))
    log_message(
        update.effective_message.reply_text(menu['text'],
                                            reply_markup=markup))
    db_user.set_action(user_id=update.effective_user.id, action=menu_name)
def __init__(self) -> None:
    """Initialization"""
    self.return_code = -1
    self.state = self.State(self.State.START.value + 1)
    # Network id needs to point to a valid network with at least one sink
    # online
    self.network_id = 777555
    self.sink_ids = None
    # Uncomment the line below for setting appconfig for specific sinks only
    # self.sink_ids = [1, 101]
    # When running more than once for the same network, the diagnostics
    # interval or application data needs to be changed, as the WNT server
    # will not try to set the application configuration if it is already
    # the same.
    self.diagnostics_interval = 60
    self.application_data = "00112233445566778899AABBCCDDEEFF"
    self.is_override_on = False
    # Worker threads for each websocket connection (created later).
    self.authentication_thread = None
    self.metadata_thread = None
    self.realtime_situation_thread = None
    # Node-count bookkeeping for realtime-situation loading progress.
    self.total_node_count = 0
    self.loaded_node_count = 0
    self.settings = get_settings()
    self.logger = setup_log(
        "ApplicationConfigurationExample", self.settings.log_level
    )
    # Register callbacks for the three websocket connections used here.
    self.client = Connections(
        hostname=self.settings.hostname,
        logger=self.logger,
        authentication_on_open=self.authentication_on_open,
        authentication_on_message=self.authentication_on_message,
        authentication_on_error=self.authentication_on_error,
        authentication_on_close=self.authentication_on_close,
        metadata_on_open=self.metadata_on_open,
        metadata_on_message=self.metadata_on_message,
        metadata_on_error=self.metadata_on_error,
        metadata_on_close=self.metadata_on_close,
        realtime_situation_on_open=self.realtime_situation_on_open,
        realtime_situation_on_message=self.realtime_situation_on_message,
        realtime_situation_on_error=self.realtime_situation_on_error,
        realtime_situation_on_close=self.realtime_situation_on_close,
    )
    self.messages = Messages(self.logger, self.settings.protocol_version)
def __init__(self):
    """Load SMTP mail settings from settings.yaml next to this file."""
    self.dir_name = os.path.dirname(os.path.abspath(__file__))
    self.filename = os.path.join(self.dir_name, 'settings.yaml')
    self.settings = get_settings(self.filename)
    mail_conf = self.settings['MAIL']
    self.from_addr = mail_conf['FROM_ADDR']
    self.password = mail_conf['PASSWORD']
    self.to_addr = mail_conf['TO_ADDR']
    self.smtp_server = mail_conf['SMTP_SERVER_ADDR']
    self.port = mail_conf['SMTP_SERVER_PORT']
def activate(request, activation_key=None):
    """
    Given an activation key, look up and activate the user account
    corresponding to that key (if possible).

    Renders the activation-complete template when a success URL is
    configured; otherwise implicitly returns None.
    """
    # BUG FIX: removed leftover debug output (print 'activate').
    utils.activate_user(activation_key)
    # if not activated
    success_url = utils.get_settings('REGISTRATION_API_ACTIVATION_SUCCESS_URL')
    if success_url is not None:
        return render_to_response('registration/activation_complete.html')
def settings():
    """GET renders the settings page; POST validates and saves RPC settings."""
    if request.method != 'POST':
        current = get_settings()
        return render_template("settings.html", settings=current)
    rpc_settings = dict(request.form)
    if test_rpc(rpc_settings):
        update_settings(rpc_settings)
        flash_success('Settings updated')
        return redirect(url_for('wallet'))
    flash_error('Invalid settings')
    return redirect(url_for('settings'))
def __init__(self, song=None):
    """Album/release page header: genre + song-count info on the left,
    download buttons (when present) on the right."""
    super(Header, self).__init__()
    self.song = song
    self.settings = get_settings()
    self.layout = QGridLayout()
    self.layout.setMargin(0)
    self.layout.setContentsMargins(0, 0, 0, 25)
    self.setObjectName(u'header')
    self.setAttribute(Qt.WA_StyledBackground, True)
    self.setStyleSheet(
        css('''
            #header {
                border-bottom: 1px solid {{backgroundColor}};
            }
        ''', backgroundColor=colors.SECONDARY_COLOR))
    # Left half: informational text.
    self.left_container = QWidget()
    self.left_container.setStyleSheet(
        css('color: {{color}};', color=colors.GREY_COLOR))
    self.left_container_layout = QHBoxLayout(alignment=Qt.AlignLeft)
    self.left_container_layout.setMargin(0)
    self.left_container.setLayout(self.left_container_layout)
    # Right half: action buttons.
    self.right_container = QWidget()
    self.right_container_layout = QHBoxLayout(alignment=Qt.AlignRight)
    self.right_container_layout.setMargin(0)
    self.right_container.setLayout(self.right_container_layout)
    self.layout.addWidget(self.left_container, 0, 0)
    self.layout.addWidget(self.right_container, 0, 1)
    # Info on the left
    # For partial releases only released songs are counted.
    songs_amount = len(
        self.song['songlist']) if self.song['full_release'] else len(
            [s for s in self.song['songlist'] if s['released']])
    songs_text = pluralize('song', 'songs', songs_amount)
    info_text = '{} · {}'.format(self.song['genre'], songs_text).upper()
    self.left_container_layout.addWidget(QLabel(info_text))
    # Download buttons on the right
    if 'download_links' in self.song:
        for item in self.song['download_links']:
            btn = IconButton(text=item['label'], icon='download',
                             on_click=self.download(item))
            self.right_container_layout.addWidget(btn)
    self.setLayout(self.layout)
def __init__(self, command, options):
    """Store the command/options and resolve project directories.

    Exits via safe_exit when PROJECT_NAME is missing or empty; every
    directory falls back to a /var/<project>/... default.
    """
    self.command = command
    self.options = options
    self.settings = get_settings()
    # IDIOM FIX: dict.has_key was removed in Python 3, and the falsy
    # get() test already covers the missing-key case, so one check does.
    if not self.settings.get("PROJECT_NAME"):
        safe_exit("You must specify PROJECT_NAME in your settings file.\n", 1)
    self.PROJECT_NAME = self.settings.get("PROJECT_NAME")
    self.AVAILABLE_DIR = self.settings.get(
        "AVAILABLE_DIR", "/var/%s/available" % (self.PROJECT_NAME))
    self.ENABLED_DIR = self.settings.get(
        "ENABLED_DIR", "/var/%s/enabled" % (self.PROJECT_NAME))
    self.LOG_DIR = self.settings.get(
        "LOG_DIR", "/var/%s/log" % (self.PROJECT_NAME))
    self.DATA_DIR = self.settings.get(
        "DATA_DIR", "/var/%s/data" % (self.PROJECT_NAME))
def main():
    """Log into the ED companion API and print basic commander info."""
    settings = utils.get_settings(use_gui=False)
    flags = flag_parser.parse_args()
    companion.login(settings.get("ed_companion", "username"),
                    settings.get("ed_companion", "password"))
    data = companion.get_data()
    # Now we have the data!
    if flags.dump:
        pprint(data)
    else:
        # IDIOM FIX: parenthesized print form — identical output on
        # Python 2 (single expression) and also valid Python 3.
        print("Commander %s" % data["commander"]["name"])
        print("Credits: %s" % data["commander"]["credits"])
        print("Location: %s" % data["lastSystem"]["name"])
def __init__(self) -> None:
    """Initialization"""
    # Server-side ids of the uploaded image and thumbnail (set later).
    self.floor_plan_image_id = None
    self.floor_plan_image_thumbnail_id = None
    self.return_code = -1
    self.state = self.State(self.State.START.value + 1)
    # Worker threads for each websocket connection (created later).
    self.authentication_thread = None
    self.metadata_thread = None
    self.settings = get_settings()
    self.logger = setup_log("FloorPlanAreaExample", self.settings.log_level)
    # Register callbacks for the authentication and metadata connections.
    self.client = Connections(
        hostname=self.settings.hostname,
        logger=self.logger,
        authentication_on_open=self.authentication_on_open,
        authentication_on_message=self.authentication_on_message,
        authentication_on_error=self.authentication_on_error,
        authentication_on_close=self.authentication_on_close,
        metadata_on_open=self.metadata_on_open,
        metadata_on_message=self.metadata_on_message,
        metadata_on_error=self.metadata_on_error,
        metadata_on_close=self.metadata_on_close,
    )
    self.messages = Messages(self.logger, self.settings.protocol_version)
    # Asset paths are resolved relative to this script's directory.
    script_path = os.path.dirname(os.path.realpath(__file__))
    self.floor_plan_image_file_path = os.path.join(
        script_path, "assets/floor_plan.png"
    )
    self.floor_plan_image_thumbnail_file_path = os.path.join(
        script_path, "assets/floor_plan_thumbnail.png"
    )
    # Pixel dimensions of the floor plan image.
    self.floor_plan_image_width = 8989
    self.floor_plan_image_height = 4432
    # Temporary paths used when downloading the images back again.
    self.temp_floor_plan_image_file_path = (
        self.floor_plan_image_file_path + ".tmp.png"
    )
    self.temp_floor_plan_image_thumbnail_file_path = (
        self.floor_plan_image_thumbnail_file_path + ".tmp.png"
    )
def call_train_and_predict(settings_file, verbose=False):
    """Run ../train.py then ../predict.py for *settings_file*.

    Subprocess stdout goes to batch_out/<RUN_NAME>_batch_AUC_scores;
    stderr is discarded unless *verbose*.  Issues a UserWarning and stops
    early if either step exits non-zero.  Always returns None.
    """
    settings = utils.get_settings(settings_file)
    batch_out_dir = "batch_out"
    out = open(os.path.join(
        batch_out_dir,
        "{0}_batch_AUC_scores".format(settings['RUN_NAME'])), 'w')
    null = None
    # BUG FIX: the original closed `out`/`null` only on the fully
    # successful non-verbose path, leaking both handles on the early
    # warning returns and leaking `out` whenever verbose was True.
    try:
        err = None
        # Dump all stderr to /dev/null if we aren't wanting verbose output
        if not verbose:
            null = open(os.devnull, 'w')
            err = null
        print_verbose('**Training {0}**'.format(settings_file), flag=verbose)
        # Start train process
        train_retcode = subprocess.call(['../train.py', '-s', settings_file],
                                        stdout=out, stderr=err)
        # Raise a warning if it was non-zero and return
        if train_retcode != 0:
            warnings.warn("train.py -s {0} did not complete successfully".format(
                settings_file))
            return None
        print_verbose('##Trained {0}##'.format(settings_file), flag=verbose)
        print_verbose('**Predicting {0}**'.format(settings_file), flag=verbose)
        # Start predict proc
        predict_retcode = subprocess.call(['../predict.py', '-s', settings_file],
                                          stdout=out, stderr=err)
        # Raise warning if predict failed and return
        if predict_retcode != 0:
            warnings.warn("predict.py -s {0} did not complete successfully".format(
                settings_file))
            return None
        print_verbose('##Predicted {0}##'.format(settings_file), flag=verbose)
        return None
    finally:
        if null is not None:
            null.close()
        out.close()
def module_settings_update(module_name):
    """Apply posted form values to a module's settings.

    Compares each known setting against the submitted form; any value
    that is present and differs from the stored one is collected and
    persisted via ``utils.set_settings``.  Returns its result, or the
    literal string ``"ok"`` when nothing changed.
    """
    config = utils.get_settings(module_name)
    changed = []
    for entry in config['settings']:
        key = entry['name']
        # Settings absent from the form are simply skipped.
        try:
            submitted = request.form[key]
        except KeyError:
            continue
        if submitted != entry['value']:
            changed.append({'name': key, 'value': submitted})
    if changed:
        return utils.set_settings(module_name, changed)
    return "ok"
def analysis_reducer(self, point_wkt, pix_datas):
    """
    Given a point wkt and a list of pix datas in the format:
    [
        {'date': '2011-09-01', 'val': 160.0},
        {'date': '2012-09-01', 'val': 180.0},
        ...
    ]
    perform the landtrendr analysis and change labeling.

    Yields out the change labels and trendline data for the given point:
    first one ('trendline/<label>', {...}) pair per trendline output,
    then one ('<label>_<key>', {...}) pair per change label and key.

    Note: uses dict.iteritems(), so this is Python 2 code.
    """
    sys.stdout.write('.')  # for viewing progress
    sys.stdout.flush()

    # The job name comes from the environment (set by the MR driver).
    job = os.environ.get('LT_JOB')
    settings = utils.get_settings(job)

    pix_datas = list(pix_datas)  # save iterator to a list
    pix_trendline = utils.analyze(
        pix_datas,
        settings['line_cost'],
        utils.parse_date(settings['target_date'])
    )

    # write out pix trendline
    for label, val in pix_trendline.mr_label_output().iteritems():
        # prepend 'trendline/' to label name so it is written to a sub folder
        yield (
            'trendline/%s' % label,
            {'pix_ctr_wkt': point_wkt, 'value': val}
        )

    label_rules = [
        classes.LabelRule(lr) for lr in settings['label_rules']
    ]

    change_labels = utils.change_labeling(pix_trendline, label_rules)

    # write out change labels, one record per (label, attribute) pair
    for label_name, data in change_labels.iteritems():
        for key in ['class_val', 'onset_year', 'magnitude', 'duration']:
            label_key = '%s_%s' % (label_name, key)
            yield label_key, {'pix_ctr_wkt': point_wkt, 'value': data[key]}
def broadcast(team):
    """Relay an incoming Slack outgoing-webhook message to all other sites.

    Validates the request token for *team*, skips messages from
    ``slackbot``, translates ``<@user>``/``<#channel>`` references for
    each destination site, and POSTs the message to every other site's
    ``publish_hook``.

    Returns:
        '' on success (or for skipped bot messages); an ``HTTPError``
        with status 401 for bad tokens, or 502 when a publish fails.
    """
    settings = get_settings()
    data = request.forms.decode()
    if team not in settings or data.get('token') != settings[team]['token']:
        return HTTPError(status=401)

    # Skip bot messages
    if data.get('user_name') == 'slackbot':
        return ''

    # Build up message payload
    text = data.get('text')
    username = data.get('user_name')
    avatar = get_user(team, data.get('user_id'))['profile']['image_192']

    for site, info in settings.items():
        if site == team:
            continue  # Skip ourselves

        def replace_id(match):
            # Translate a channel (#) or user (@) mention for `site`.
            if match.group(1) == '#':
                return '#{}'.format(get_channel(team, match.group(2)))
            else:
                return translate_user(team, site, match.group(2))

        text_translated = re.sub(r'<([@#])([^\|>]+)(\|[^>]+)?>',
                                 replace_id, text)
        message = {
            "username": username,
            "icon_url": avatar,
            "text": text_translated,
        }
        query = Request(info['publish_hook'],
                        data=json.dumps(message,
                                        ensure_ascii=False).encode('utf-8'))
        query.add_header('Content-Type', 'application/json')
        try:
            urlopen(query)
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are no longer swallowed; any publish failure is still reported
        # as a 502 after logging the traceback.
        except Exception:
            traceback.print_exc()
            return HTTPError(status=502)
    return ''
def new_hunt():
    """Create a new scavenger hunt for the current admin.

    GET renders the creation form; POST validates it and persists the
    hunt, responding with JSON.  Requires the admin's settings to be
    complete first.

    Returns:
        Redirect to settings if they are incomplete; on POST, JSON with
        the new ``hunt_id`` (or form/uniqueness errors with status 400);
        otherwise the rendered ``new_hunt.html`` form.
    """
    setting = get_settings(g.db, admin_id=current_user.admin_id)
    if not finished_setting(setting):
        flash('You must complete your settings information before'
              ' creating a hunt', 'warning')
        return redirect(url_for('settings'))

    hunt = Hunt()
    form = HuntForm(request.form)
    if request.method == 'POST':
        if form.validate():
            hunt = initialize_hunt(form, hunt, current_user.admin_id, request)
            try:
                g.db.session.add(hunt)
                g.db.session.commit()
            # Hunt names are unique; a duplicate raises IntegrityError.
            except IntegrityError as e:
                logger.warning(
                    'Exception found while creating hunt with an existing '
                    ' name: %s\n Form data: %s ', e, form.data)
                return jsonify(
                    {'hunt name': [{'name': ['hunt name already taken']}]}), 400
            else:
                flash('New scavenger hunt added', 'success')
                logger.info('hunt, %s, created for admin with id, %s',
                            hunt.name, hunt.admin_id)
                # Re-query for the freshly committed row to get its id.
                saved_hunt = g.db.session.query(Hunt).order_by(
                    Hunt.hunt_id.desc()).first()
                return jsonify({'hunt_id': saved_hunt.hunt_id})
        else:
            logger.warning('Error creating hunt.\nForm errors: %s\nForm data: '
                           '%s ', form.errors, form.data)
            return jsonify(form.errors), 400

    # GET: prefill the domain from the admin's email address.
    domain = current_user.email.split('@')[-1]
    return make_response(
        render_template('new_hunt.html', form=form, domain=domain))
def run():
    """CLI entry point: validate options, then run CI for each docker image.

    Python 2 code (uses ``print >>``).  Exits with status 1 when either
    the repo path or the settings file is missing/nonexistent.
    """
    # Imported here to read the package version for --version output.
    from __init__ import __version__
    parser = optparse.OptionParser(version=__version__)
    parser.add_option('-r', '--repo-path',
                      help="the path to source repository")
    parser.add_option('-s', '--settings',
                      help="the path to local-ci configuration file (.local-ci.yml)")
    opts, args = parser.parse_args()
    if not opts.repo_path or not os.path.exists(opts.repo_path):
        print >> sys.stderr, '[ERROR] The path to source repo is not specified or does not exist'
        sys.exit(1)
    if not opts.settings or not os.path.exists(opts.settings):
        print >> sys.stderr, '[ERROR] The path to configuration file is not specified or does not exist'
        sys.exit(1)
    repo_dispatcher = get_repo_dispatcher(opts.repo_path,
                                          get_settings(opts.settings))
    for image in repo_dispatcher.docker_images():
        if not image:
            print >> sys.stderr, "[WARNING] Unknown image, %s" % image
        else:
            print >> sys.stderr, "[INFO] The image, %s" % image
            repo_dispatcher.run(image)
def main(settingsfname, verbose=False):
    """Train and evaluate a per-subject discrimination model with CV.

    Loads feature data and a model pipeline from the settings file,
    runs stratified shuffle-split cross-validation per subject with
    sample weighting, prints per-subject and global accuracy, and
    writes the scores out via ``utils.output_auc_scores``.

    Args:
        settingsfname: path to the JSON settings file.
        verbose: when True, print progress messages.

    Returns:
        dict mapping subject name (plus 'all') -> accuracy score.
    """
    settings = utils.get_settings(settingsfname)

    subjects = settings['SUBJECTS']

    data = utils.get_data(settings, verbose=verbose)
    metadata = utils.get_metadata()

    # Keep only the features whose HDF5s actually parsed.
    features_that_parsed = [feature for feature in
                            settings['FEATURES'] if feature in list(data.keys())]
    settings['FEATURES'] = features_that_parsed

    utils.print_verbose("=====Feature HDF5s parsed=====", flag=verbose)

    # get model
    model_pipe = utils.build_model_pipe(settings)

    utils.print_verbose("=== Model Used ===\n"
                        "{0}\n==================".format(model_pipe),
                        flag=verbose)

    # dictionaries to store per-subject results
    subject_predictions = {}
    accuracy_scores = {}

    for subject in subjects:
        utils.print_verbose(
            "=====Training {0} Model=====".format(str(subject)),
            flag=verbose)

        # initialise the data assembler
        assembler = utils.DataAssembler(settings, data, metadata)
        X, y = assembler.test_train_discrimination(subject)

        # get the CV iterator
        cv = utils.sklearn.cross_validation.StratifiedShuffleSplit(
            y,
            random_state=settings['R_SEED'],
            n_iter=settings['CVITERCOUNT'])

        # initialise lists for cross-val results
        predictions = []
        labels = []
        allweights = []

        # run cross validation and report results
        for train, test in cv:
            # calculate the weights
            weights = utils.get_weights(y[train])
            # fit the model to the training data
            model_pipe.fit(X[train], y[train], clf__sample_weight=weights)
            # append new predictions
            predictions.append(model_pipe.predict(X[test]))
            # append test weights to store (used to calculate accuracy below)
            weights = utils.get_weights(y[test])
            allweights.append(weights)
            # store true labels
            labels.append(y[test])

        # stack up the results
        predictions = utils.np.hstack(predictions)
        labels = utils.np.hstack(labels)
        weights = utils.np.hstack(allweights)

        # calculate the total accuracy
        accuracy = utils.sklearn.metrics.accuracy_score(
            labels,
            predictions,
            sample_weight=weights)

        print("Accuracy score for {1}: {0:.3f}".format(accuracy, subject))

        # add accuracy scores to a subj dict
        accuracy_scores.update({subject: accuracy})

        # store results from each subject
        subject_predictions[subject] = (predictions, labels, weights)

    # stack subject results (don't worrry about this line)
    predictions, labels, weights = map(utils.np.hstack,
                                       zip(*list(subject_predictions.values())))

    # calculate global accuracy
    accuracy = utils.sklearn.metrics.accuracy_score(labels, predictions,
                                                    sample_weight=weights)

    print(
        "predicted accuracy score over all subjects: {0:.2f}".format(accuracy))

    # output scores to file
    accuracy_scores.update({'all': accuracy})
    settings['DISCRIMINATE'] = 'accuracy_scores.csv'
    # settings['AUC_SCORE_PATH'] = 'discriminate_scores'
    utils.output_auc_scores(accuracy_scores, settings)

    return accuracy_scores
#!/usr/bin/python2.7 from examples.door_sync import check_door from examples.termopar_1_sync import read_temp from examples.temperature_humidity_sync import get_humidity_temperature from utils import check_network, check_carbon import time import os from utils import get_settings settings = get_settings(__file__, directory="examples") carbon_server = settings.get("server", "carbon_server") carbon_port = int(settings.get("server", "carbon_port")) def loop(fn, msg): count = 0 while count < 10: print(msg, fn()) time.sleep(.5) count += 1 print("[OK] NETWORK" if check_network() else "[ERROR] NETWORK") print("[OK] CARBON SERVER" if check_carbon(carbon_server, carbon_port) else "[ERROR] CARBON SERVER") loop(check_door, "PUERTA") loop(read_temp, "TERMO PAR") loop(get_humidity_temperature, "HUMIDITY, TEMP")
def main():
    """Repeatedly fit an SVC pipeline per subject and summarize CV stats.

    Runs 10 iterations of fit/evaluate per subject (seed is cleared, so
    each iteration differs), accumulates the per-iteration summary stats
    into ``global_results``, prints mean/variance per subject and metric,
    and pickles the final results to ``summary_stats.pickle``.
    """
    metadata = utils.get_metadata()
    settings = utils.get_settings('probablygood.gavin.json')
    # Clear the seed so each of the 10 iterations samples differently.
    settings['R_SEED'] = None
    # settings['SUBJECTS'] = ['Patient_2']
    scaler = sklearn.preprocessing.StandardScaler()
    thresh = sklearn.feature_selection.VarianceThreshold()
    # selector = sklearn.feature_selection.SelectKBest()
    classifier = sklearn.svm.SVC(probability=True)

    pipe = sklearn.pipeline.Pipeline([('scl', scaler),
                                      ('thr', thresh),
                                      # ('sel', selector),
                                      ('cls', classifier)])
    output = {}
    data = utils.get_data(settings)
    da = utils.DataAssembler(settings, data, metadata)

    # One results dict per subject, plus a 'global' slot.
    global_results = {}
    for subject in list(settings['SUBJECTS']) + ['global']:
        global_results[subject] = {}

    for i in range(10):
        print("iteration {0}".format(i))
        for subject in settings['SUBJECTS']:
            print(subject)
            X, y = da.build_training(subject)
            # cv = utils.Sequence_CV(da.training_segments, metadata)
            train, test, train_results, test_results = \
                fit_and_return_parts_and_results(da, metadata, pipe, X, y)
            output.update({subject: {'train': train,
                                     'test': test,
                                     'train_results': train_results,
                                     'test_results': test_results}})
        # with open('raw_cv_data.pickle', 'wb') as fh:
        #     pickle.dump(output, fh)
        summary_stats = mean_var_calc(output)

        # Append this iteration's stats to the running per-subject lists.
        for subject in settings['SUBJECTS']:
            for t in summary_stats[subject]:
                try:
                    global_results[subject][t] += [summary_stats[subject][t]]
                except KeyError:
                    global_results[subject][t] = [summary_stats[subject][t]]

    print(global_results)
    for subject in settings['SUBJECTS']:
        for t in global_results[subject]:
            meanscore = np.mean(global_results[subject][t])
            varscore = np.var(global_results[subject][t])
            print("For {0} mean {1} was "
                  "{2} with sigma {3}".format(subject, t, meanscore, varscore))

    with open('summary_stats.pickle', 'wb') as fh:
        pickle.dump(global_results, fh)
def register_participant():
    """Register the posting participant for the hunt given by ``hunt_id``.

    Validates the form and the hunt's participation rule, creates the
    participant record if new, stores name/email in the session, sends a
    "began hunt" statement to the LRS, and redirects to the participant's
    intended URL.

    Returns:
        Redirect on success; the rule-validation error message when the
        participant is not allowed; 400 when the hunt does not exist.
        NOTE(review): when the form fails validation the function falls
        through and implicitly returns None — confirm intended.
    """
    hunt_id = request.args['hunt_id']
    hunt = Hunt.find_by_id(g.db, hunt_id)
    if hunt:
        form = ParticipantForm(request.form)
        if form.validate():
            email = form.email.data
            logger.info(
                'Participant registration form validated for hunt, "%s", and'
                ' email, %s.\nPreparing to validate participant against hunt'
                ' participation rules.', hunt.name, email)
            participant_valid, err_msg = validate_participant(
                g.db, email, hunt_id, hunt.participant_rule)
            if participant_valid:
                logger.info('The registering participant, %s, has been'
                            ' validated against the hunt participation rules.'
                            ' Preparing to find email in participant database'
                            ' table.', email)
                # Only create a record the first time this email registers.
                if not get_participant(g.db, email, hunt_id):
                    logger.info(
                        'Preparing to save new participant with email, %s,'
                        ' to hunt, %s', email, hunt.name)
                    create_new_participant(g.db, form, hunt_id)

                scavenger_info = {'email': email, 'name': form.name.data}
                session.update(scavenger_info)

                admin_settings = get_settings(g.db, hunt_id=hunt_id)
                logger.info(
                    "Retrieved settings associated with hunt with id, %s: %s",
                    hunt_id, admin_settings)

                try:
                    lrs = WaxCommunicator(
                        admin_settings, request.host_url, hunt, None,
                        scavenger_info=scavenger_info)
                except Exception as e:
                    logger.exception(
                        "Error instantiating WaxCommunicator while registering"
                        " participant: %s", e)
                    raise e

                try:
                    lrs.send_began_hunt_statement()
                except Exception as e:
                    logger.exception(
                        "Error sending began hunt statement: %s", e)
                    raise e

                logger.info(
                    "name and email set to %s, and %s\n"
                    "preparing requested item information.",
                    session['name'], email)

                redirect_url = get_intended_url(session, hunt_id)
                return make_response(redirect(redirect_url))
            else:
                logger.info('participant attempted to register for'
                            ' hunt with invalid form information.\n'
                            'Error message: %s\n. Form data: %s',
                            err_msg, request.form)
                return err_msg
    else:
        # i don't think this can happen ever in the app
        logger.warning('A user attempted to register for hunt with id, %s,'
                       ' but the hunt could not be found. '
                       'Form data: %s',
                       hunt_id, request.form)
        abort(400)
def find_item(hunt_id, item_id):
    """Handle a participant visiting an item page within a hunt.

    For a registered participant: records the find with the LRS state
    API, detects hunt completion, and renders the item page with
    progress counts.  Unregistered visitors are shown the welcome page
    with their intended URL stashed in the session.  404s when settings
    are incomplete or the item does not exist.
    """
    logger.info(
        'Participant is visiting route: /hunts/%s/items/%s',
        hunt_id, item_id)
    admin_settings = get_settings(g.db, hunt_id=hunt_id)
    # admin_settings found through hunt_id means hunt exists
    logger.info("Settings retrieved for hunt with id, %s", hunt_id)
    if finished_setting(admin_settings):
        logger.info(
            "Settings are complete. Preparing to retrieve item with id, %s",
            item_id)
        item = get_item(g.db, item_id, hunt_id)
        if item:
            logger.info(
                "Item found. Preparing to retrieve hunt with id, %s ",
                hunt_id)
            hunt = Hunt.find_by_id(g.db, hunt_id)
            if participant_registered(g.db, session.get('email'), hunt_id):
                logger.info(
                    "Participant, %s, has registered. Preparing to"
                    " retrieve data from the state api.",
                    session.get('email'))
                lrs = WaxCommunicator(
                    admin_settings, request.host_url, hunt, item,
                    scavenger_info={
                        'email': session.get('email'),
                        'name': session.get('name')
                    })

                # State maps found item ids (as strings) to True.
                state = lrs.get_state()
                found_again = str(item_id) in state
                lrs.send_found_item_statement(found_again=found_again)

                updated_state = {str(item.item_id): True}
                hunt_previously_completed = state.get('hunt_completed')

                # TODO: Don't send the whole state object, as discussed
                state.update(updated_state)
                if hunt_requirements_completed(state, hunt):
                    logger.info(
                        'Requirements for hunt, "%s", have been completed.',
                        hunt.name)
                    # Only send the completion statement once per hunt.
                    if not hunt_previously_completed:
                        lrs.send_completed_hunt_statement()
                        updated_state['hunt_completed'] = True
                        state.update(updated_state)

                lrs.update_state_api_doc(updated_state)

                found_ids = found_ids_list(state)
                return make_response(render_template(
                    'items.html', item=item, hunt=hunt,
                    username=session.get('name'),
                    found_ids=found_ids,
                    hunt_now_completed=state.get('hunt_completed'),
                    num_found=len(found_ids),
                    num_items=len(hunt.items),
                    num_remaining=num_items_remaining(state, hunt.items),
                    found_again=found_again,
                    previously_completed=hunt_previously_completed))
            else:
                logger.info(
                    "Page visitor is not yet registered for this hunt."
                    " Preparing to redirect to the getting started page.")
                # Remember where they were headed so registration can
                # redirect back here afterwards.
                session['intended_url'] = '/hunts/{}/items/{}'.format(
                    hunt_id, item_id)
                return make_response(render_template(
                    'welcome.html', hunt=hunt,
                    welcome=hunt.welcome_message,
                    action_url="/get_started/hunts/{}".format(hunt_id)))
    abort(404)
def run(self):
    """Main loop of the tracking service.

    Starts the configured data sources (GPS, file, Munic.Box), creates
    the configured sinks (database, message queue, RVI), then consumes
    data from the shared queue forever, forwarding each record to every
    enabled sink.  SIGABRT/SIGTERM/SIGINT trigger ``self.cleanup``.

    NOTE(review): the ``== True`` comparisons assume the config values
    are actual booleans (a string like 'True' would compare unequal) —
    confirm against get_settings().
    """
    # Execution starts here
    logger.info('%s: Starting...', self.__class__.__name__)

    # get configuration
    conf = get_settings()

    # setup gps source
    if conf['TRACKING_GPS_ENABLE'] == True:
        logger.info('%s: GPS Source enabled.', self.__class__.__name__)
        self.gps_source = GPSSource(conf, logger, self.queue)
        self.gps_source.start()

    # setup file source
    if conf['TRACKING_FILE_ENABLE'] == True:
        logger.info('%s: File Source enabled.', self.__class__.__name__)
        self.file_source = FileSources(conf, logger, self.queue)
        self.file_source.start()

    # setup municbox source
    if conf['TRACKING_MUNICBOX_ENABLE'] == True:
        logger.info('%s: Munic.Box Source enabled.', self.__class__.__name__)
        self.munic_source = MunicSource(conf, logger, self.queue)
        self.munic_source.start()

    # setup database sink
    if conf['TRACKING_DB_PUBLISH'] == True:
        logger.info('%s: Publishing to database enabled.',
                    self.__class__.__name__)
        db_sink = DBSink(conf, logger)

    # setup message queue sink
    if conf['TRACKING_MQ_PUBLISH'] == True:
        logger.info('%s: Publishing to message queue enabled.',
                    self.__class__.__name__)
        mq_sink = MQSink(conf, logger)

    # setup RVI sink
    if conf['TRACKING_RVI_PUBLISH'] == True:
        logger.info('%s: Publishing to RVI enabled.',
                    self.__class__.__name__)
        rvi_sink = RVISink(conf, logger)

    # catch signals for proper shutdown
    for sig in (SIGABRT, SIGTERM, SIGINT):
        signal(sig, self.cleanup)

    # main execution loop
    while True:
        try:
            # get data from queue; a 60 s timeout just logs and retries
            try:
                data = self.queue.get(True, 60)
            except Exception as e:
                if isinstance(e, KeyboardInterrupt):
                    break
                else:
                    logger.info("%s: Queue timeout", self.__class__.__name__)
                    continue

            # vin is required but not all data sources may provide it
            if (not 'vin' in data):
                data[u'vin'] = conf['VIN_DEFAULT']

            logger.info("%s: Got data: %s", self.__class__.__name__, data)

            # forward the record to every enabled sink
            if conf['TRACKING_DB_PUBLISH'] == True:
                db_sink.log(data)
            if conf['TRACKING_MQ_PUBLISH'] == True:
                mq_sink.log(data)
            if conf['TRACKING_RVI_PUBLISH'] == True:
                rvi_sink.log(data)
        except KeyboardInterrupt:
            print ('\n')
            break
def module_settings(module_name):
    """Render the settings page for *module_name*, or 404 if unknown."""
    module_conf = utils.get_settings(module_name)
    if module_conf is None:
        abort(404)
    return render_template("module_settings.html", **module_conf)
#!/usr/bin/python2.7 import sys import os sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))) from sensor_sync import SyncDataFromMemory from utils import get_settings import os settings = get_settings(__file__) CARBON_HOST = settings.get("server", "carbon_server") SENSOR_NAME = "temperature_low_one" DEVICE_NUMBER = settings.get("sensor_termopar", "device_number") os.system('modprobe w1-gpio') os.system('modprobe w1-therm') base_dir = '/sys/bus/w1/devices/' device_folder = base_dir + DEVICE_NUMBER device_file = device_folder + '/w1_slave' def read_temp_raw(): with open(device_file,'r') as f: lines = f.readlines() return lines def read_temp(): lines = read_temp_raw() while lines[0].strip()[-3:] != 'YES': time.sleep(0.2) lines = read_temp_raw()
def main(path=None):
    """Start a blocking UDP server on localhost at the configured port.

    Uses *path* as the settings file when given (and truthy); otherwise
    falls back to the command-line ``args.settings`` value.
    """
    settings_path = path or args.settings
    config = get_settings(settings_path)
    address = ("localhost", config.udp_port)
    server = SocketServer.UDPServer(address, SimpleUDPHandler)
    server.serve_forever()