def main():
    """The main function. Call this to start the bot."""
    try:
        settings.load()
    except (FileNotFoundError, IOError, json.decoder.JSONDecodeError):
        # note: `except (A or B or C)` only caught the first class; a tuple catches all three
        settings.save()
    try:
        settings.manager["OAUTH_TOKEN_DISCORD"]
    except KeyError:
        print("Please enter an OAuth token for this bot, so it can sign into Discord.")
        settings.manager["OAUTH_TOKEN_DISCORD"] = input("> ")
    try:
        settings.manager["SUPPORT_GUILD"]
    except KeyError:
        print("Please enter a support guild link for this bot. You may leave it blank.")
        settings.manager["SUPPORT_GUILD"] = input("> ")
    logger.info("Warming up...")
    extensions = settings.manager.get("EXTENSIONS", settings.DEFAULT_EXTENSIONS)
    for extension in extensions:
        logger.info(f"Loading extension {extension}")
        try:
            bot.load_extension(extension)
            logger.info(f"Extension {extension} loaded")
        except Exception as error:
            logger.warning(f"Extension {extension} seems to be broken")
            logger.warning(error)
    bot.run(settings.manager["OAUTH_TOKEN_DISCORD"])
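A minimal sketch of the JSON-backed settings module the snippet above appears to assume (a load()/save() pair, a manager dict, and DEFAULT_EXTENSIONS). All names and the file location are illustrative guesses, not the project's actual module:

# Hypothetical settings module matching the calls above; names are assumptions.
import json

SETTINGS_PATH = "settings.json"  # assumed location
DEFAULT_EXTENSIONS = []          # assumed default extension list

manager = {}

def load():
    """Populate `manager` from disk; raises if the file is missing or corrupt."""
    global manager
    with open(SETTINGS_PATH) as f:
        manager = json.load(f)

def save():
    """Write `manager` back to disk, creating the file if needed."""
    with open(SETTINGS_PATH, "w") as f:
        json.dump(manager, f, indent=2)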
def startup():
    settings.load('settings.ini')
    db.setup()
    logging.getLogger().info("Checking database integrity.")
    #db.connection.execute("VACUUM")
    logging.getLogger().info("Integrity check complete. Continuing.")
def main():
    root = tk.Tk()
    settings.load()
    root.geometry(settings.get('main_window_geometry'))
    app = MainWindow(root)
    db.init()
    root.mainloop()
def finish(self):
    newres = not self.resbuttons.buttonmap[settings.size].selected
    newwin = not self.wbuttons.buttonmap[settings.fullscreen].selected
    for buttonset in self.buttonsets:
        for button in buttonset.buttons:
            if button.selected:
                button.confirm()
    context.pop()
    settings.save()
    settings.load()
    if newres or newwin:
        vista.init()
def load():
    print "main.load"
    settings.load()
    window = mc.GetActiveWindow()
    window.GetControl(1).SetVisible(False)
    focus_control = __restore_menu(window, __menu)
    if focus_control:
        print "SETTING focus in LOAD"
        focus(focus_control)
    else:
        __set_title('Main')
        window.GetList(MAIN_LIST_ID).SetVisible(True)
        window.GetControl(1).SetVisible(True)
def evaluate(individual):
    #sim_uuid = uuid.uuid4()
    #print("Beginning pyloric circuit simulation {0}...".format(sim_uuid))
    settings.load(individual)
    y = expEuler.run(settings.func, settings.y0, settings.t, settings.params)
    volts = [y[:, 8], y[:, 21], y[:, 34]]
    featuresOf = features.extract_circuit_features(y, settings.tmin, settings.tmax,
                                                   settings.trange, settings.h)
    errs = objectives.errFunc(featuresOf)
    errs2nd = objectives2nd.errFunc(featuresOf)
    return [volts, featuresOf, errs, errs2nd]
def main():
    # start the game running
    try:
        # poke_types.load_data()  # load pokemon type data
        # pokemon.load_data()
        map.load_data()
        settings.load(g)
        g.save = savegame.SaveGame(g)  # initialize a new savegame manager
        g.reset()  # reset the game
        g.mainloop()  # start the main loop
    except error.QuitException:  # if it was just a forced quit
        pass
    except Exception as e:  # if it's any other exception
        error.exception_handler(g, e)  # pass it to the exception handler
    g.keeprunning = False
def send_reset_email(self):
    expires = datetime.datetime.now() + reset_password_timeout
    url = self.generate_reset_link()
    body = ("A password reset for {} has been requested.\r\n".format(self.username),
            "Navigate to {} to complete reset.".format(url),
            "Expires on {}".format(expires.isoformat()))
    message = MIMEText('\r\n'.join(body))
    message['Subject'] = "Password Reset Link for CASCADE on {}".format(
        settings.load()['server']['hostname'])
    message['From'] = 'cascade@' + settings.load()['server']['hostname']
    message['To'] = self.email
    server = smtplib.SMTP(settings.load()['links']['smtp'])
    server.set_debuglevel(1)
    server.sendmail(message['From'], [self.email], message.as_string())
    server.quit()
def run(self):
    self.config = settings.load(self, "jame.ini")
    query = "SELECT VERSION()"
    print "MySQL version: %s" % dbase.db.connect(self, query)[0]
    for i in xrange(-9, 9):
        for j in xrange(-9, 9):
            terrain.gen.createTile(self, "grass", i, j, 0)
def __init__(self, settings=None, midi_port=None):
    self.midi_out = rtmidi.MidiOut(b'in')
    if not settings:
        self.settings = default_settings.load()
    # Default to first port
    if midi_port is None:
        # TODO: check to see if there are any ports at all first!
        midi_port = self.midi_out.ports[0]
    self.midi_enabled = True
    self.midi_out.open_port(midi_port)
    # Control Change is signalled by 0xb0, see:
    # https://www.midi.org/specifications/item/table-1-summary-of-midi-message
    # Also good to stick to the typically undefined range:
    # http://nickfever.com/music/midi-cc-list
    # TODO: don't hardcode cc!
    self.cc_hex = 0xb0
    self.x_cc = 86
    self.y_cc = 87
    self.z_cc = 88
    self.metrics = {"x": 0, "y": 0, "z": 0}
    self.tracker = Tracker()
def set_properties(self):
    # Load the settings
    settings = Settings()
    settings.load()
    # Get the checkboxes
    x11_forward = self.builder.get_object("chk_enable_x11")
    request_compression = self.builder.get_object("chk_request_compress")
    force_ipv4 = self.builder.get_object("radio_force_ipv4")
    force_ipv6 = self.builder.get_object("radio_force_ipv6")
    # Set the config
    x11_forward.set_active(bool(settings.x11_forward))
    request_compression.set_active(bool(settings.request_compression))
    force_ipv4.set_active(bool(settings.force_ipv4))
    force_ipv6.set_active(bool(settings.force_ipv6))
def __main():
    """
    Configures run options based off of arguments and starts lane_detection

    :return: void
    """
    if camera is not None:  # use camera as input
        source = camera
    elif file is not None:  # use file as input
        source = file
    else:  # use default as input
        source_settings = settings.load(settings.SettingsCategories.INPUT,
                                        settings.InputSettings.SOURCES)
        defaultInput = source_settings.default.input
        if defaultInput == 'file':
            source = __get_abs_path(source_settings.default.file_source)
        elif defaultInput == 'camera':
            camera_type = source_settings.default.camera_source
            source = settings.cameras[camera_type]
        else:
            raise Exception('Invalid default input type')
    hough = HoughTransform(source, n_consumers=1)
    histopeak = HistogramPeakDetection(source, n_consumers=1)
    lane_departure = LaneDeparture(hough)
def create_user():
    """
    This API route is used by the create new account template to add a new
    user into Mongo
    """
    if not settings.load()['config'].get('allow_account_creation', False):
        return JSONResponse(status=httplib.FORBIDDEN)
    if isinstance(request.json, dict):
        args = request.json
        if args.get('username') and args.get('password'):
            try:
                user = users.create_user(args['username'], args['password'],
                                         args.get('email'), args.get('full_name'))
            except users.PasswordPolicyError as error:
                regex, rules = error.args
                return JSONResponse({'violation': {'regex': regex, 'rules': rules}},
                                    httplib.BAD_REQUEST)
            if user is not None:
                response = Response(status=httplib.CREATED)
                response.set_cookie('user-token', user.generate_token(),
                                    max_age=datetime.timedelta(days=7))
                return response
            else:
                return JSONResponse({'message': 'Username already exists!'},
                                    status=httplib.BAD_REQUEST)
    return JSONResponse({'message': 'Username, email and password are required'},
                        status=httplib.BAD_REQUEST)
def _refresh(self):
    channels = []
    cpaths = glob.glob(path.join(locations.CHAN_PATH, CHANID_GLOB))
    #print "cpath=>"
    #print cpaths
    for p in cpaths:
        try:
            #print "p=>"
            #print p
            channels.append(Channel(p, False))
        except ImportError:
            pass
    cpaths = glob.glob(path.join(locations.PLUGIN_PATH, CHANID_GLOB))
    #print "plugin path=>"
    #print cpaths
    for p in cpaths:
        try:
            channels.append(Channel(p, True))
        except ImportError:
            pass
    # Ignore channels with no image
    #print "channels"
    #print channels
    channels = filter(lambda chan: chan.imageExists(), channels)
    self.channels = sorted(channels, key=lambda chan: chan.getTitle().upper())
    self.settings = settings.load("channels")
def _set_subs_lang(self, subs):
    if subs is not None:
        sub_settings = settings.load("subtitles")
        if 'lang' in sub_settings:
            subs['lang'] = sub_settings['lang']
        else:
            subs['lang'] = 'eng'
    return subs
def set_password(self, value):
    password_config = settings.load().get('password', None)
    if password_config and password_config.get('enforce'):
        regex = password_config['regex']
        rules = password_config.get('rules', [])
        if re.match(regex, value) is None:
            raise PasswordPolicyError(regex, rules)
    self.sha256_hash = sha256_crypt.encrypt(value)
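For illustration, a hypothetical configuration fragment that would activate the policy check in set_password above. The keys mirror what the method reads; the regex and rule text are invented examples:

# Hypothetical config for set_password above; values are illustrative only.
example_settings = {
    'password': {
        'enforce': True,
        'regex': r'^(?=.*[A-Za-z])(?=.*\d).{8,}$',  # 8+ chars with a letter and a digit
        'rules': ['at least 8 characters',
                  'at least one letter and one digit'],
    }
}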
def main():
    settings.load()
    midi.setupMidiPorts()
    octatrack_pages.setup()
    setupLaunchpad()
    triggerOctatackMidiDump()
    gotoSavedSelectedPage()
    try:
        while True:
            pass
    except KeyboardInterrupt:
        pass
    finally:
        print('\n')
        print('save settings . . .')
        settings.save()
        print('exit.')
def get_meid(mvid):
    import sqlalchemy
    sett = settings.load()
    cstr = sett['epi_conn_str']
    eng = sqlalchemy.create_engine(cstr)
    meid = pd.read_sql(
        """
        SELECT modelable_entity_id
        FROM epi.model_version
        WHERE model_version_id = %s""" % mvid, eng)
    return meid.values[0][0]
def _refresh(self):
    channels = []
    cpaths = glob.glob(path.join(locations.CHAN_PATH, CHANID_GLOB))
    for p in cpaths:
        channels.append(Channel(p, False))
    cpaths = glob.glob(path.join(locations.PLUGIN_PATH, CHANID_GLOB))
    for p in cpaths:
        channels.append(Channel(p, True))
    self.channels = sorted(channels, key=lambda chan: chan.getTitle().upper())
    self.settings = settings.load("channels")
def getpath(force_select=False):
    filename = settings.load().get("tzpath", "")
    if not filename or force_select:
        FILE_OPTS = dict()
        FILE_OPTS["title"] = u"Locate the 桶裝出貨表 file."
        FILE_OPTS["defaultextension"] = ".xls"
        FILE_OPTS["filetypes"] = [("Excel files", ".xls"), ("all files", ".*")]
        FILE_OPTS["initialdir"] = u"T:\\Users\\chairman\\Documents\\"
        FILE_OPTS["initialfile"] = u"桶裝出貨表.xls"
        filename = os.path.normpath(tkFileDialog.askopenfilename(**FILE_OPTS))
        settings.update(tzpath=filename)
    return filename
def run():
    """
    TODO: write docstring
    """
    # Set environment variables
    settings.load()
    # Get PostgreSQL database credentials
    psql_credentials = settings.get_psql()
    # Create SQLAlchemy engine from database credentials
    engine = create_connection_from_dict(psql_credentials, 'postgresql')
    # Get data to process from postgres
    quants_df = execute_sql('select * from features.quants;', engine,
                            read_file=False, return_df=True)
    data_dir = settings.get_data_dir()
    labeled_fishing_dir = data_dir / 'labeled_data' / 'fishing'
    labeled_nonfishing_dir = data_dir / 'labeled_data' / 'nonfishing'
    cnn_split_dir = data_dir / 'cnn_split'
    if cnn_split_dir.exists():
        shutil.rmtree(cnn_split_dir, ignore_errors=False, onerror=None)
    cnn_split_dir.mkdir(parents=True, exist_ok=True)
    # Create labeled data
    print('Creating labeled data.')
    fishy_stuff = fishing_prefilter(quants_df)
    nonfish = nonfishing_dataframe_creator(quants_df, fishy_stuff)
    dataset = sampler(fishy_stuff, nonfish)
    trajectory_separator(dataset, data_dir)
    # Create train / test split
    print("Creating train/test split")
    split_data(labeled_fishing_dir, labeled_nonfishing_dir, cnn_split_dir,
               binary_name='fishing', set_seed=223)
    # Train the cnn
    run_cnn(cnn_split_dir, batchsize=256, epochs=50, color_mode='rgb',
            start_filters=8, depth=2, dense_count=2, dense_neurons=256,
            bnorm=False)
def get_directories(mvid):
    """
    Define and create directories for cascade outputs and logs
    """
    sett = settings.load()
    logdir = '%s/%s' % (sett['log_dir'], mvid)
    try:
        os.makedirs(logdir)
    except OSError:  # directory already exists
        pass
    root_dir = '%s/%s' % (sett['cascade_ode_out_dir'], mvid)
    try:
        os.makedirs(root_dir)
    except OSError:  # directory already exists
        pass
    os.chmod(logdir, 0o775)
    os.chmod(root_dir, 0o775)
    return {'logdir': logdir, 'root_dir': root_dir}
def main():
    args = args_parser()
    sets = settings.load(args.setting)
    if len(sets) == 0:
        print("No servers found.")
        exit(1)
    loop = asyncio.get_event_loop()
    loop.set_default_executor(ThreadPoolExecutor())
    servers = [s.build(loop) for s in sets]
    servers.append(core.idle(loop))
    try:
        loop.run_until_complete(asyncio.gather(*servers))
    except KeyboardInterrupt:
        pass
    loop.close()
def load(self) -> bool:
    """
    Loads the region of interest from disk for the pipeline's source

    :return: bool (the success status of the load operation)
    """
    rois = settings.load(settings.SettingsCategories.INPUT,
                         settings.InputSettings.ROI, must_exist=False)
    if rois is None or rois.get(self._pipeline.source, None) is None:
        if self._when_missing_open_editor:
            return self.editor(self._pipeline.frame)
        else:
            return False  # failed to load and could not open editor (ROI does not exist)
    else:
        self._roi = rois[self._pipeline.source]
        return True
def main(_):
    settings = load('settings.yaml')

    # Detect number of training examples
    images_path = settings['dirs']['raw_data'] + '/data/images'
    num_examples = len([name for name in os.listdir(images_path)
                        if os.path.isfile(os.path.join(images_path, name))])

    # Randomly subdivide examples into training and evaluation sets
    ratio = settings['training_ratio']
    random.seed()
    training_ids = random.sample(range(1, num_examples + 1), int(ratio * num_examples))
    eval_ids = list(set(range(1, num_examples + 1)) - set(training_ids))

    # Put images into their directories
    for id in training_ids:
        src = images_path + '/' + str(id) + '.jpg'
        dest = settings['dirs']['test_images'] + '/' + str(id) + '.jpg'
        shutil.copyfile(src, dest)
    for id in eval_ids:
        src = images_path + '/' + str(id) + '.jpg'
        dest = settings['dirs']['eval_images'] + '/' + str(id) + '.jpg'
        shutil.copyfile(src, dest)

    # Generate TFRecord for training examples
    writer = tf.python_io.TFRecordWriter(FLAGS.training_output)
    examples = create_tf_examples(training_ids,
                                  settings['dirs']['test_images'],
                                  settings['dirs']['raw_data'] + '/data/database/training_examples.sqlite3')
    for e in examples:
        writer.write(e.SerializeToString())
    writer.close()

    # Generate TFRecord for evaluation examples
    writer = tf.python_io.TFRecordWriter(FLAGS.evaluation_output)
    examples = create_tf_examples(eval_ids,
                                  settings['dirs']['eval_images'],
                                  settings['dirs']['raw_data'] + '/data/database/training_examples.sqlite3')
    for e in examples:
        writer.write(e.SerializeToString())
    writer.close()
def setUp(self):
    settings.load(conf_path)
    response = Response(response=body, status=status, headers=headers)
    if not sid_is_valid:
        curr_data = dict(request.session)
        request.session = sessions.new()
        request.session.update(curr_data)
        request.session.modified = True
    if request.session.should_save:
        sessions.save(request.session)
        cookie_path = settings.get_str('global', 'cookie_path_prefix', '/')
        response.set_cookie(sessions.get_cookie_name(), request.session.sid,
                            path=cookie_path)
    return response(environ, start_response)


settings.load(path=CONF_PATH)

if settings.get('global', 'manatee_path', None):
    sys.path.insert(0, settings.get('global', 'manatee_path'))

# please note that some environments may provide umask setting themselves
if settings.get('global', 'umask', None):
    os.umask(int(settings.get('global', 'umask'), 8))

if not settings.get_bool('global', 'maintenance'):
    application = KonTextWsgiApp()
else:
    application = MaintenanceWsgiApp()

robots_path = os.path.join(os.path.dirname(__file__), 'files/robots.txt')
if os.path.isfile(robots_path):
parser.add_argument('-r', '--replace', metavar='REPLACE', action='store_const', const=True,
                    help='Remove possible existing record first')
args = parser.parse_args()
if args.kontext_conf:
    conf_path = args.kontext_conf
else:
    conf_path = os.path.join(os.getcwd(), 'conf', 'config.xml')
    logging.getLogger(__name__).info(
        'No config.xml path specified - assuming ./conf/config.xml')
conf = Config(registry_dir_path=args.registry_dir, kontext_conf_path=conf_path)
settings.load(conf_path)
jsonpath = args.jsonpath.rstrip('/')
conf.update_missing(load_default_conf(jsonpath))
db_conf = find_db_reg_paths(settings)
backend = WritableBackend(db_conf)
if os.path.isfile(jsonpath):
    file_list = [jsonpath]
elif os.path.isdir(jsonpath):
    file_list = get_conf_list(jsonpath)
process_corpora(file_list, backend=backend, reg_dir=conf.registry_dir_path,
                variant=args.variant, replace=args.replace)
backend.commit()
from settings import load
import sys
import csv
from math import ceil

try:
    output = sys.argv[2]
    import matplotlib
    matplotlib.use("Agg")
except:
    output = None
import pylab

settings = load(sys.argv[1])

with open(settings["output"] + "/" + "densities.csv", "rb") as f:
    # load data
    data = list(map(float, densities) for densities in csv.reader(f, delimiter=','))
    # fill in width
    dataFilled = []
    for tick in data:
        newTick = []
        length = len(data)
        for pos in range(length):
            newTick.append(sum(tick[pos:min(pos + settings['fatness'], length)]))
        dataFilled.append(newTick)
    data = dataFilled
class HistogramPeakDetection(Pipeline):
    """
    Explain Histogram Peak Detection here
    """

    DEGREE_TO_FIT_TO_LINES: int = 1
    settings: config_dict = settings.load(settings.SettingsCategories.PIPELINES,
                                          settings.PipelineSettings.HISTOGRAM_PEAK_DETECTION)

    def __init__(self, source: str, *, n_consumers: int = 0, should_start: bool = True,
                 show_pipeline: bool = True, debug: bool = False):
        """
        Calls superclass __init__ (see Pipeline.__init__ for more details)

        :param source: the filename or device that the pipeline should be run on
        :param should_start: a flag indicating whether or not the pipeline should start as soon as it is instantiated
        :param debug: a flag indicating whether or not the user is debugging the pipeline. In debug, the pipeline is
                      shown and debug statements are enabled
        """
        super().__init__(source, n_consumers=n_consumers, image_mask_enabled=True,
                         should_start=should_start, show_pipeline=show_pipeline, debug=debug)

    def _init_pipeline(self, first_frame):
        super()._init_pipeline(first_frame)
        self._historic_fill = HistoricFill(self.fps,
                                           HistogramPeakDetection.DEGREE_TO_FIT_TO_LINES,
                                           max_consecutive_autofills=5)

    def _add_knot(self, name: str, image: numpy.array, hls: bool = False):
        if hls:
            image = cv2.cvtColor(image, cv2.COLOR_HLS2RGB)
        return super()._add_knot(name, image)

    def filter_thresholds(self, image: numpy.array,
                          thresholds: Union[tuple[int, int], list[tuple[int, int]]]) -> numpy.array:
        if isinstance(thresholds, tuple):
            thresholds = [thresholds]
        filter_mask = numpy.zeros_like(image)
        for threshold in thresholds:
            lower_threshold, upper_threshold = threshold
            filter_mask[(lower_threshold <= image) & (image <= upper_threshold)] = 255
        return filter_mask

    def fill_filter_close_regions(self, filter_image: numpy.array,
                                  max_fill_distance: int = 20) -> numpy.array:
        filter = numpy.copy(filter_image)
        for row_index, row in enumerate(filter):
            last_filled_index = None
            for pixel_index, pixel in enumerate(row):
                if pixel == 255:
                    if last_filled_index is not None and pixel_index - last_filled_index <= max_fill_distance:
                        filter[row_index, last_filled_index + 1:pixel_index - 1] = 255
                    last_filled_index = pixel_index
        return filter

    def get_histogram_peak_points(self, detected_image: numpy.array,
                                  window_height: int = settings.sliding_window.height,
                                  n_max_per_window: int = settings.sliding_window.n_max_per_window) \
            -> tuple[numpy.array, numpy.array]:
        height, width, *_ = detected_image.shape
        height -= general.pipeline_settings.lanes.bottom_offset + general.pipeline_settings.lanes.top_offset
        if height % window_height != 0:
            raise FloatingPointError("window_height must evenly divide the detected_image height")
        num_vertical_windows = height // window_height
        # 3rd dimension: 0th entry is left side, 1st entry is right side
        vertical_averages = numpy.empty((constants.NUM_LANES_TO_DETECT, num_vertical_windows,
                                         width // constants.NUM_LANES_TO_DETECT), numpy.float)
        for i in range(num_vertical_windows):
            window = detected_image[general.pipeline_settings.lanes.top_offset + i * window_height:
                                    general.pipeline_settings.lanes.top_offset + (i + 1) * window_height + 1, :]
            vertical_window_average = numpy.average(window, axis=0)
            vertical_averages[0, i, :] = vertical_window_average[0:width // constants.NUM_LANES_TO_DETECT]
            vertical_averages[1, i, :] = vertical_window_average[width // constants.NUM_LANES_TO_DETECT:]

        window_maxes = numpy.iinfo(numpy.uint16).max * numpy.ones(
            (constants.NUM_LANES_TO_DETECT, num_vertical_windows, n_max_per_window), numpy.uint16)
        for i in range(num_vertical_windows):
            for lane in range(constants.NUM_LANES_TO_DETECT):
                n_max_indices = numpy.argpartition(vertical_averages[lane, i, :],
                                                   -n_max_per_window)[-n_max_per_window:]
                maxes_greater_than_threshold_indices = \
                    vertical_averages[lane, i, n_max_indices] >= \
                    HistogramPeakDetection.settings.sliding_window.active_threshold
                n_max_indices = n_max_indices[maxes_greater_than_threshold_indices]
                window_maxes[lane, i, 0:n_max_indices.shape[0]] = n_max_indices
                # if numpy.max(vertical_averages[lane, i, :]) >= 150:
                #     window_maxes[lane, i] = numpy.argmax(vertical_averages[lane, i, :])

        if self.debug:
            plot.bar(numpy.arange(width), numpy.average(detected_image, axis=0))
            plot.show()
            time.sleep(7.5)

        points: list[list[tuple[int, int]]] = [[] for i in range(constants.NUM_LANES_TO_DETECT)]
        for window_index in range(num_vertical_windows):
            y_cord = general.pipeline_settings.lanes.top_offset + window_index * window_height + window_height // 2
            for lane in range(constants.NUM_LANES_TO_DETECT):
                for i in range(n_max_per_window):
                    if window_maxes[lane, window_index, i] != numpy.iinfo(numpy.uint16).max:
                        points[lane].append((window_maxes[lane, window_index, i] +
                                             lane * width // constants.NUM_LANES_TO_DETECT, y_cord))

        left_points, right_points = points
        return numpy.array(left_points), numpy.array(right_points)

    def filter_points(self, points: numpy.array, reject_outside_n_std_devs: float = 1.5,
                      max_iterations: int = 5, max_residual: int = 20) -> numpy.array:
        assert len(points) >= 2
        assert numpy.all(numpy.diff(points[:, 1]) >= 0)  # sorted by y value

        def line_of_best_fit(data: numpy.array) -> tuple[numpy.poly1d, numpy.array, float]:
            assert len(points) > 1  # there are points to fit a line on
            best_fit_coef = numpy.polyfit(data[:, 1], data[:, 0],
                                          deg=HistogramPeakDetection.DEGREE_TO_FIT_TO_LINES)
            best_fit = numpy.poly1d(best_fit_coef)
            residuals = numpy.linalg.norm((best_fit(data[:, 1]) - data[:, 0]).reshape(len(data), 1),
                                          axis=1)
            residuals_std_dev = numpy.std(residuals)
            mean_centered_residuals = residuals - numpy.mean(residuals)
            return best_fit, mean_centered_residuals, residuals_std_dev

        for iter in range(max_iterations):
            best_fit, mean_centered_residuals, residuals_std_dev = line_of_best_fit(points)
            points_to_keep = mean_centered_residuals <= reject_outside_n_std_devs * residuals_std_dev
            if self.debug:
                plot.bar(numpy.arange(mean_centered_residuals.shape[0]), mean_centered_residuals)
                plot.show()
                time.sleep(7.5)
            if numpy.all(points_to_keep):  # no points to remove, break early
                break
            else:  # we have points to remove -> remove them
                points = points[points_to_keep]

        best_fit, mean_centered_residuals, residuals_std_dev = line_of_best_fit(points)
        points = points[mean_centered_residuals <= max_residual]  # enforce a hard limit on the maximum residual
        return points

    def fit_lane_to_points(self, points,
                           polynomial_degree: int = DEGREE_TO_FIT_TO_LINES) -> numpy.array:
        # predict x from y coordinate
        left_points, right_points = points
        left_lane = numpy.polyfit(left_points[:, 1], left_points[:, 0], deg=polynomial_degree)
        right_lane = numpy.polyfit(right_points[:, 1], right_points[:, 0], deg=polynomial_degree)
        polyfit_lanes = numpy.array([left_lane, right_lane])

        # find the inverse function - i.e. convert so that y is the dependent variable
        # since there is no generic solution for finding an inverse function, we do it on a case by case basis
        # only polynomial_degree == 1 is implemented as a higher order polynomial is never fitted to the data
        if polynomial_degree == 1:
            lanes = numpy.ones_like(polyfit_lanes)
            for i, lane in enumerate(polyfit_lanes):
                lanes[i] /= lane[0]       # m_new = 1/m_old
                lanes[i][1] *= -lane[1]   # b_new = -b_old/m_old
        else:
            raise NotImplementedError(
                'Finding the inverse function for polynomial of degree {deg} has not been implemented yet'
                .format(deg=polynomial_degree))
        return lanes

    def _display_points(self, image, points, radius: int = 5,
                        color: tuple[int, int, int] = (255, 0, 255),
                        display_overlay: bool = True,
                        overlay_name: str = 'Points') -> numpy.array:
        points_image = numpy.zeros_like(image)
        for point in points:
            cv2.circle(points_image, tuple(point), radius, color, -1)
        if display_overlay:
            self._add_knot(overlay_name, points_image, hls=False)
        return points_image

    def _run(self, frame):
        """
        Histogram Peak Detection is run on the frame to detect the lane lines

        This method handles detecting the lines, drawing them, and passing them to the lane departure algorithm

        *insert more info about Histogram Peak Detection here*

        :param frame: the current frame of the capture
        :return: void
        """
        frame = numpy.copy(frame)
        self._add_knot('Raw', frame)

        hls = cv2.cvtColor(frame, cv2.COLOR_RGB2HLS)
        # apply gaussian blur, reducing noise in grayscale image, reducing the effect of undesired lines
        blurred = cv2.GaussianBlur(hls,
                                   HistogramPeakDetection.settings.gaussian_blur.kernel_size,
                                   HistogramPeakDetection.settings.gaussian_blur.deviation)
        self._add_knot('Gaussian Blur', blurred, hls=True)

        hls_64f = blurred.astype(numpy.float)
        h_channel = hls_64f[:, :, 0]
        l_channel = hls_64f[:, :, 1]
        s_channel = hls_64f[:, :, 2]

        # Negative slopes will be ignored if using uint8, thus, we run the sobel filter over a
        # float image and convert later
        sobelx_64f = cv2.Sobel(l_channel, cv2.CV_64F, 1, 0, ksize=5)
        abs_sobelx_64f = numpy.absolute(sobelx_64f)
        sobelx = numpy.uint8(abs_sobelx_64f)
        sobelx_scaled = numpy.uint8(255 * (sobelx / numpy.max(sobelx)))
        self._add_knot('Sobel X', sobelx_scaled, hls=False)

        sobel_filter_mask = self.filter_thresholds(sobelx_scaled, (125, 255))
        self._add_knot('Sobel Filter Mask', sobel_filter_mask)
        sobel_filtered = cv2.bitwise_and(sobelx_scaled, sobel_filter_mask)
        self._add_knot('Sobel Filtered', sobel_filtered)

        s_channel = s_channel.astype(numpy.uint8)
        self._add_knot('Saturation Channel', s_channel)
        saturation_filter_mask = self.filter_thresholds(s_channel, (6, 85))
        self._add_knot('Saturation Filter Mask', saturation_filter_mask)
        saturation_filtered = cv2.bitwise_and(s_channel, saturation_filter_mask)
        self._add_knot('Saturation Filtered', saturation_filtered)

        combined_filter = sobel_filter_mask & saturation_filter_mask
        self._add_knot('Combined Filters', combined_filter)

        # fill contours in combined filters
        contours = cv2.findContours(combined_filter, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
        contours = contours[0] if len(contours) == 2 else contours[1]
        for contour in contours:
            cv2.drawContours(combined_filter, [contour], -1, (255, 255, 255), -1)
        self._add_knot('Filled Contours Combined Filters', combined_filter)

        masked = region_of_interest(self, combined_filter)
        self._add_knot('Region Of Interest Mask', masked)

        # do the 'Histogram Peak Detection'
        left_points, right_points = self.get_histogram_peak_points(masked)
        combined_points = [*left_points, *right_points]

        # display the detected points on the image
        masked_image_rgb = cv2.cvtColor(masked, cv2.COLOR_GRAY2RGB)
        self._display_points(masked_image_rgb, left_points, color=(255, 255, 0),
                             overlay_name='Detected Left Points')
        self._display_points(masked_image_rgb, right_points, color=(0, 255, 255),
                             overlay_name='Detected Right Points')
        points_image = self._display_points(masked_image_rgb, combined_points,
                                            display_overlay=False)
        detected_points_result = cv2.addWeighted(masked_image_rgb, 0.5, points_image, 1, 0)
        self._add_knot('Detected Points Result', detected_points_result)

        # filter the points to remove 'incorrect' detections
        left_points = self.filter_points(left_points)
        right_points = self.filter_points(right_points)
        combined_points = [*left_points, *right_points]

        left_points_image = self._display_points(masked_image_rgb, left_points,
                                                 color=(255, 255, 0),
                                                 overlay_name='Filtered Left Points')
        right_points_image = self._display_points(masked_image_rgb, right_points,
                                                  color=(0, 255, 255),
                                                  overlay_name='Filtered Right Points')
        points_image = self._display_points(masked_image_rgb, combined_points,
                                            display_overlay=False)
        detected_points_result = cv2.addWeighted(masked_image_rgb, 0.5, points_image, 1, 0)
        self._add_knot('Filtered Points Result', detected_points_result)

        lanes = self.fit_lane_to_points((left_points, right_points))

        display_lanes(frame, numpy.array([lanes[0]]), self._add_knot, overlay_name='Left Lane')
        display_lanes(frame, numpy.array([lanes[1]]), self._add_knot, overlay_name='Right Lane')
        lane_image = display_lanes(frame, lanes, self._add_knot)
        detected_lanes_result = cv2.addWeighted(frame, 0.75, lane_image, 1, 0)
        detected_lanes_result = cv2.addWeighted(detected_lanes_result, 1, left_points_image, 1, 0)
        detected_lanes_result = cv2.addWeighted(detected_lanes_result, 1, right_points_image, 1, 0)
        self._add_knot('Detected Lanes Result', detected_lanes_result)

        lanes = self._historic_fill.get(lanes)
        lane_image = display_lanes(frame, lanes, self._add_knot, overlay_name='Historic Filtered')
        detected_lanes_result = cv2.addWeighted(frame, 0.75, lane_image, 1, 0)
        self._add_knot('Historic Filtered Result', detected_lanes_result)

        self._add_lanes(lanes)
""" import os import imp import sys import time APP_PATH = os.path.realpath('%s/..' % os.path.dirname(os.path.abspath(__file__))) sys.path.insert(0, '%s/../lib' % APP_PATH) import settings import initializer import translation from bgcalc.stderr2f import stderr_redirector settings.load(os.path.join(APP_PATH, 'conf', 'config.xml')) if settings.get('global', 'manatee_path', None): sys.path.insert(0, settings.get('global', 'manatee_path')) import manatee os.environ['MANATEE_REGISTRY'] = settings.get('corpora', 'manatee_registry') initializer.init_plugin('db') initializer.init_plugin('sessions') initializer.init_plugin('auth') initializer.init_plugin('conc_cache') initializer.init_plugin('query_storage') initializer.init_plugin('conc_persistence') initializer.init_plugin('sessions') initializer.init_plugin('user_items') initializer.init_plugin('corparch') initializer.init_plugin('token_connect', optional=True)
parser.add_argument('-d', '--data-only', metavar='DATA_ONLY', action='store_const', const=True)
parser.add_argument('-c', '--create-if-none', metavar='CREATE_IF_NONE', action='store_const',
                    const=True)
parser.add_argument('-j', '--json-out', metavar='JSON_OUT', type=str,
                    help='A directory where corpus installation JSON should be stored')
parser.add_argument('-r', '--reg-path', type=str, default='', help='Path to registry files')
parser.add_argument('-a', '--variant', type=str,
                    help='Try to search for alternative registry in a directory with this name')
parser.add_argument('-v', '--verbose', action='store_const', const=True,
                    help='Print some additional (error) information')
parser.add_argument('-y', '--dry-run', action='store_const', const=True,
                    help='No actual database operation will be performed. SQL queries will be printed.')
args = parser.parse_args()
import settings
settings.load(args.conf_path)
if args.dry_run:
    db = DummySQL()
else:
    db = MySQL(MySQLConf(settings))
if args.schema_only:
    prepare_tables(db)
else:
    reg_path = args.reg_path if args.reg_path else settings.get('corpora', 'manatee_registry')
    shared = DummyShared() if args.dry_run else Shared(reg_path=reg_path)
    ijson = InstallJsonDir(args.json_out)
    parse_corplist(path=args.corplist, db=db, shared=shared, json_out=ijson,
                   variant=args.variant, verbose=args.verbose, data_only=args.data_only,
                   create_if_none=args.create_if_none)
    ijson.write()
db.commit()
# Title: main.py
# This is the main file of execution

# imaginary import of modeling code, directly after the perl version
import project
import settings  # imaginary import of settings
import os
from languageDir import *  # import all language parsers
import lang
#import topics
#import builder

if __name__ == "__main__":  # start point of the python interpreter
    args = settings.load()
    # languages.load()
    # topics.load()
    # project.loadConfigFileInfo()
    # print 'starting normal_docs'
    oldTimes = project.getOldFileTimes(args)
    curTimes = project.getCurrentFileTimes(args)
    ParseList = project.cmpFileTimes(curTimes, oldTimes)
    project.updateModTimes(curTimes, args)
    print("now filtering the ParseList")
    project.filterParseList(ParseList)
    print(ParseList)
    for sourceFile in ParseList:
        lang_parser = lang.langRegister().languageOf(sourceFile)
        lang_parser.ParseFile(sourceFile)
import web  # web.py; used below but missing from the excerpt's imports
import model
import settings

urls = (
    r'/artists/', 'controllers.artists.List',
    r'/artists/(.+)/tracks/', 'controllers.artists.Tracks',
    r'/artists/(.+)/albums/', 'controllers.artists.Albums',
    r'/artists/(.+)/albums/(.+)/tracks', 'controllers.artists.AlbumTracks',
    r'/tracks/', 'controllers.tracks.List',
)


def check_modified(handler):
    web.http.expires(300)
    web.http.modified(etag=model.datastore_state_tag())
    return handler()


# Load our settings
settings.load("choonweb.conf")

# Connect to Mongo
model.connect()

app = web.application(urls, globals())

# As we're only a read-only reflection of Mongo, every request can be checked
# against our MongoDB state
app.add_processor(check_modified)

if __name__ == '__main__':
    app.run()
import action
import user
import iptv

#####################################################################
#                           Prerequisites                           #
#####################################################################

settings_file = '.settings.conf'
users_file = '.users.json'
handlers_dir = 'actions/'

# Table for global data storing
public = table.Table()

# Load settings
config = settings.load(settings_file)

# Load users list
users = user.UserStore(users_file)

# Load action handlers
actions = action.ActionStore()
actions.load(handlers_dir)

#####################################################################
#                       XMPP specific methods                       #
#####################################################################


def handleKnownUser(connection, message, params):
    data = params.data
    user = params.self
    handler, match = actions.select(data, user.access or 0)
import os  # needed for os.path/os.environ below; presumably cut from the excerpt
import sys
import time
try:
    import cPickle as pickle
except ImportError:
    import pickle

CURR_PATH = os.path.realpath(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, '%s/lib' % CURR_PATH)

import settings
import initializer
import plugins
import translation
from bgcalc.stderr2f import stderr_redirector

settings.load('%s/conf/config.xml' % CURR_PATH)
if settings.get('global', 'manatee_path', None):
    sys.path.insert(0, settings.get('global', 'manatee_path'))
import manatee

os.environ['MANATEE_REGISTRY'] = settings.get('corpora', 'manatee_registry')

initializer.init_plugin('db')
initializer.init_plugin('sessions')
initializer.init_plugin('auth')
initializer.init_plugin('conc_cache')
initializer.init_plugin('query_storage')
initializer.init_plugin('conc_persistence')
initializer.init_plugin('sessions')
initializer.init_plugin('user_items')
initializer.init_plugin('corparch')
initializer.init_plugin('live_attributes', optional=True)
ERROR = "ERROR"
SUCCESS = "SUCCESS"
SKIP = "SKIP"
REJECT = "REJECT"
UNKNOWN = "UNKNOWN"


def codi_sortida(estat):
    # exit code: 0 for success or skip, 1 otherwise
    return 0 if estat == SUCCESS or estat == SKIP else 1


if __name__ == '__main__':
    a = None
    opts, args = getopt.getopt(sys.argv[1:], 'c:')
    for o, a in opts:
        if o == '-c':
            settings.load(a)
    logging.basicConfig(
        filename=settings.get("log_file"),
        level=settings.get("log_level"),
        format='%(asctime)s [%(process)d] %(name)-12s'
               ' %(levelname)-8s %(message)s'
    )
    buffer_logs = StringIO()
    logger.addHandler(logging.StreamHandler(buffer_logs))
    if a is not None:
        logger.info("Fitxer de configuracio [%s]", a)
    estat = UNKNOWN
              + ' --pipeline_config_path=' + settings['paths']['updated_config']
              )


def export(settings):
    os.system('python ' + settings['paths']['export_script']
              + ' --input_type image_tensor'
              + ' --pipeline_config_path ' + settings['dirs']['output'] + '/pipeline.config'
              + ' --trained_checkpoint_prefix ' + settings['dirs']['output'] + '/model.ckpt-'
              + str(settings['config']['train_config']['num_steps'])
              + ' --output_directory ' + settings['dirs']['tuned_models']
              )


if __name__ == '__main__':
    print 'Loading settings'
    settings = settings.load('settings.yaml')

    print 'Creating directories'
    create_dirs(settings)

    print 'Downloading the base model'
    download_base_model(settings)

    print 'Generating "pipeline.config"'
    populate_config(settings)

    print 'Downloading the dataset'
    download_dataset(settings)

    print 'Generating TFRecords for object_detection framework'
    training_output = settings['config']['train_input_reader'][
parser.add_argument('-s', '--test-setuid', action='store_true', default=False,
                    help='test setuid for files/directories')
parser.add_argument('-u', '--webserver-user', type=str, default='www-data',
                    help='A system user a webserver runs under.')
parser.add_argument('-g', '--webserver-group', type=str, default='www-data',
                    help='A system group a webserver runs under.')
args = parser.parse_args()

settings.load(args.config_file)
finfo = FileInfo(args.test_setuid, webserver_user=args.webserver_user,
                 webserver_group=args.webserver_group, config_path=args.config_file)

print('--------------------------------------')
for test in TESTS:
    print('')
    print('\n'.join(test(finfo)))
print('\n--------------------------------------')
print('Total number of tests: {0}'.format(len(SCORE)))
print('Number of failed tests: {0}'.format(len(filter(lambda x: x is False, SCORE))))
print('')
    controller_class = self.load_controller_class(environ['PATH_INFO'])
    app = controller_class(request=request, ui_lang=ui_lang)
    status, headers, sid_is_valid, body = app.run()
    response = Response(response=body, status=status, headers=headers)
    if not sid_is_valid:
        curr_data = dict(request.session)
        request.session = sessions.new()
        request.session.update(curr_data)
        request.session.modified = True
    if request.session.should_save:
        sessions.save(request.session)
        response.set_cookie(sessions.get_cookie_name(), request.session.sid)
    return response(environ, start_response)


settings.load(path=CONF_PATH)

if settings.get('global', 'manatee_path', None):
    sys.path.insert(0, settings.get('global', 'manatee_path'))

# please note that some environments may provide umask setting themselves
if settings.get('global', 'umask', None):
    os.umask(int(settings.get('global', 'umask'), 8))

if not settings.get_bool('global', 'maintenance'):
    application = KonTextWsgiApp()
else:
    application = MaintenanceWsgiApp()

robots_path = os.path.join(os.path.dirname(__file__), 'files/robots.txt')
if os.path.isfile(robots_path):
from uuid import uuid4
import json
import os
import re

from bottle import Bottle, redirect, request, response, static_file, template
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import rsa
import fakeredis
import jwt
import redis

import settings

DIR = os.path.dirname(os.path.abspath(__file__))
SETTINGS = settings.load()
if SETTINGS['RedisURL']:
    REDIS = redis.StrictRedis.from_url(SETTINGS['RedisURL'])
else:
    REDIS = fakeredis.FakeStrictRedis()  # <-- Only suitable for local testing
app = Bottle()


# HTTP Routes ----------------------------------------------------------------

@app.get('/')
def index():
    """Render the homepage."""
import settings


def get_db(conf):
    return redis.StrictRedis(host=conf['default:host'], port=int(conf['default:port']),
                             db=int(conf['default:id']))


if __name__ == '__main__':
    import argparse
    argparser = argparse.ArgumentParser(description="RedisDB clean-up utility")
    argparser.add_argument('clean_what', metavar="ACTION",
                           help="what item group should be cleaned (session, concordance)")
    args = argparser.parse_args()
    patterns = {
        'session': 'session:*',
        'concordance': 'concordance:*'
    }
    if args.clean_what not in patterns:
        raise ValueError('Unknown action: %s' % args.clean_what)
    settings.load('%s/conf/config.xml' % app_path)
    db = get_db(settings.get('plugins', 'db'))
    keys = db.keys(patterns[args.clean_what])
    i = 0
    for key in keys:
        db.delete(key)
        print('deleted: %s' % key)
        i += 1
    print('Finished deleting %d keys' % i)
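Redis's KEYS command blocks the server while it scans the whole keyspace; for large databases an incremental loop built on redis-py's scan_iter does the same clean-up without that stall. A sketch reusing the names from the script above:

# Sketch: incremental variant of the delete loop above, using SCAN
# (redis-py's scan_iter) instead of KEYS, which blocks on big keyspaces.
def clean_incrementally(db, pattern):
    deleted = 0
    for key in db.scan_iter(match=pattern):
        db.delete(key)
        deleted += 1
    return deleted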
def write_pauses(pauses, folder):
    with open(path.join(folder, "pauses.csv"), "w") as pause_file:
        pause_file.write("\n".join("%i\t%f\t%f" % pause for pause in pauses))


def inject_pauses(settings, output_folder, count):
    pauses = generate_pauses(settings["size"], count, settings["time"])
    write_pauses(pauses, output_folder)
    settings["pauses"] = pauses


def spawn_simulation(infile, pauses, outfolder):
    settings = load(infile)
    write_pauses(pauses, outfolder)
    settings["pauses"] = pauses
    experiment = ExperimentSet(settings, outfolder)
    experiment.run()


if __name__ == "__main__":
    temp = load("pauses_run_1.json")
    pauses = generate_pauses(temp["size"], 120, temp["time"])
    pool = Pool(6)
    pool.apply_async(spawn_simulation, ("pauses_run_1.json", pauses, "pauses_run_1"))
    pool.apply_async(spawn_simulation, ("pauses_run_2.json", pauses, "pauses_run_2"))
    pool.apply_async(spawn_simulation, ("pauses_run_3.json", pauses, "pauses_run_3"))
    pool.apply_async(spawn_simulation, ("pauses_run_4.json", pauses, "pauses_run_4"))
    pool.apply_async(spawn_simulation, ("pauses_run_5.json", pauses, "pauses_run_5"))
    pool.apply_async(spawn_simulation, ("pauses_run_6.json", pauses, "pauses_run_6"))
    pool.close()
    pool.join()
#!/usr/bin/python
from settings import load
from experiment import ExperimentSet
import traceback, sys, os

if __name__ == "__main__":
    # Read arguments
    try:
        settingsfile = sys.argv[1]
        print "Settings file: " + settingsfile
    except IndexError:
        print "Bad parameters, should be 'python engine.py settingsfile'"
        quit()

    # Generate variables
    settings = load(settingsfile)

    # Create and run the experiment
    experiments = ExperimentSet(settings)
    experiments.run()
    print "Experiments concluded :)"
APP_PATH = os.path.realpath('%s/..' % AUTOCONF_PATH)
sys.path.insert(0, '%s/lib' % APP_PATH)

DEFAULT_LOG_FILE_SIZE = 1000000
DEFAULT_NUM_LOG_FILES = 5

# this specifies recommended argument values for setting logging level from command line
LOG_LEVELS = OrderedDict(
    [('debug', logging.DEBUG), ('info', logging.INFO), ('warning', logging.WARNING),
     ('error', logging.ERROR), ('critical', logging.CRITICAL)]
)
DEFAULT_LOG_OUT = sys.stderr

import settings

settings.load('%s/conf/config.xml' % APP_PATH)

logger = logging.getLogger('kontext_script')


def setup_logger(log_path=None, logger_name=None, logging_level=None):
    """
    Configures logging (= module's logger variable).

    arguments:
    log_path -- path to a file where log will be written; if omitted then stdout is used
    logger_name -- a name to be used for logger (by default it is 'kontext_script')
    """
    if logger_name is not None:
        logger.name = logger_name
def build_model(self, pos=None, *posi):
    from modeltest import ModelTest
    self.position_model = model.TreeModel(
        position.build_positions(settings.load('quotes.json')))
-si, --splitinto: number of files the source archive should be split into. If not specified,
                  the number of files is determined based on the archive rows limit config value
-c, --clear: delete all files from the archives directory before splitting
-v, --verbose: verbose mode
"""
import argparse
import os
import sys

sys.path.insert(0, os.path.realpath(os.path.join(os.path.dirname(__file__), '../../../../lib')))
from plugins.ucnk_conc_persistence3 import ArchMan
import settings

conf_path = os.path.realpath(os.path.join(os.path.dirname(__file__),
                                          '../../../../conf/config.xml'))
settings.load(conf_path)
plugin_conf = settings.get('plugins', 'conc_persistence3')
db_path = plugin_conf.get('ucnk:archive_db_path')
rows_limit = int(plugin_conf.get('ucnk:archive_rows_limit'))
arch_man = ArchMan(db_path, rows_limit)

parser = argparse.ArgumentParser()
parser.add_argument("source_archive", help="full path to the source archive db file")
parser.add_argument("-si", "--splitinto",
                    help="number of files the source archive should be split into")
parser.add_argument("-c", "--clear", action="store_true",
                    help="clear the archives directory before splitting")
parser.add_argument("-v", "--verbose", action="store_true", help="verbose mode")
args = parser.parse_args()

source_path = args.source_archive
if not os.path.exists(source_path):
def run(checkupdate=False):
    import sys, os
    if "--help" in sys.argv or "-h" in sys.argv or "-?" in sys.argv or "/?" in sys.argv:
        print "%s -run 'path/to/game' : run a game directly" % (sys.argv[0],)
        print "%s -text : text mode, no graphics created, must use -run" % (sys.argv[0],)
        sys.exit()
    # Check for updates!
    newengine = None
    if checkupdate:
        import libupdate
        eng = libupdate.Engine()
        libupdate.screen.blit(arial14.render("Checking for Updates...", 1, [255, 255, 255]), [0, 0])
        pygame.display.flip()
        libupdate.root.start_index = 0
        try:
            assets.threads = [eng.Update_PyWright(thread=True)]
            pygame.event.clear()
            pygame.event.pump()
            while libupdate.list.status_box.text == "Fetching data from server...":
                libupdate.screen.fill([0, 0, 0])
                libupdate.screen.blit(arial14.render("Checking for Updates... (Click to cancel)", 1, [255, 255, 255]), [0, 0])
                pygame.display.flip()
                for e in pygame.event.get():
                    if e.type == pygame.MOUSEBUTTONDOWN:
                        libupdate.list.status_box.text = "cancelled"
            libupdate.screen.fill([0, 0, 0])
            if libupdate.list.status_box.text == "cancelled":
                libupdate.screen.blit(arial14.render("Cancelled checking for updates", 1, [255, 255, 255]), [0, 0])
            else:
                libupdate.screen.blit(arial14.render("Finished checking for updates", 1, [255, 255, 255]), [0, 0])
            pygame.display.flip()
            for pane in libupdate.list.children[2:]:
                c = pane.children[1].children[0]
                if isinstance(c, gui.checkbox):
                    c.checked = True
                    libupdate.Engine.quit_threads = 0
                    libupdate.screen.blit(arial14.render("Doing update to " + c.text, 1, [255, 255, 255]), [0, 20])
                    pygame.display.flip()
                    eng.do_update(output=True)
                    # modules safe to keep loaded across the in-place engine reload
                    goodkeys = "copy_reg,sre_compile,locale,_sre,__main__,site,__builtin__,\
operator,encodings,os.path,encodings.encodings,encodings.cp437,errno,\
encodings.codecs,sre_constants,re,ntpath,UserDict,nt,stat,zipimport,warnings,\
encodings.types,_codecs,encodings.cp1252,sys,codecs,types,_types,_locale,signal,\
linecache,encodings.aliases,exceptions,sre_parse,os,goodkeys,k,core,libengine".split(",")
                    for k in sys.modules.keys():
                        if k not in goodkeys:
                            del sys.modules[k]
                    import core as core2
                    reload(core2)
                    import libengine as le2
                    reload(le2)
                    newengine = le2.run
                    break
        except SystemExit:
            sys.exit()
        #~ except:
        #~     pass
    if newengine:
        newengine()
        sys.exit()
    assets.init_sound()
    pygame.display.init()
    settings.load(assets)
    game = "menu"
    scene = "intro"
    if sys.argv[1:] and sys.argv[2:]:
        game = sys.argv[1]
        scene = sys.argv[2]
    assets.game = game
    assets.items = []
    running = True
    text_only = False
    if "-text" in sys.argv:
        text_only = True
        assets.Script = debug_script.TextScript
        os.environ["SDL_VIDEODRIVER"] = "dummy"
    assets.make_screen()
    assets.make_start_script()
    if "-run" in sys.argv:
        def ms(*args):
            print "make_start_script to exit"
            sys.exit()
        assets.make_start_script = ms
        assets.stack = []
        assets.start_game(sys.argv[sys.argv.index("-run") + 1])
    import time
    lt = time.time()
    ticks = 0
    fr = 0
    laststack = []
    if android:
        android.map_key(android.KEYCODE_MENU, pygame.K_ESCAPE)
        android.map_key(android.KEYCODE_BACK, pygame.K_SPACE)

    def androidpause():
        if android:
            if android.check_pause():
                assets.save_game("android_pause", True)
                android.wait_for_resume()
                assets.next_screen = assets.screen_refresh

    while running:
        androidpause()
        assets.dt = clock.tick(getattr(assets, "framerate", 60))
        assets.dt = min(assets.dt * .001 * 60, 10.0) * assets.game_speed
        pygame.display.set_caption("PyWright " + VERSION)
        assets.cur_script.update()
        script.interpret_scripts()
        if not assets.cur_script:
            break
        assets.next_screen -= assets.dt
        [o.unadd() for o in assets.cur_script.obs if getattr(o, "kill", 0) and hasattr(o, "unadd")]
        for o in assets.cur_script.world.all[:]:
            if getattr(o, "kill", 0):
                assets.cur_script.world.all.remove(o)
        if assets.next_screen < 0:
            pygame.screen.blit(pygame.blank, [0, 0])
            try:
                assets.cur_script.draw(pygame.screen)
            except (art_error, script_error), e:
                import traceback
                traceback.print_exc()
                assets.cur_script.obs.append(error_msg(e.value, assets.cur_script.lastline_value,
                                                       assets.cur_script.si, assets.cur_script))
            if assets.flash:
                try:
                    fl = flash()
                    assets.cur_script.obs.append(fl)
                    fl.ttl = assets.flash
                    if hasattr(assets, "flashcolor"):
                        fl.color = assets.flashcolor
                except (art_error, script_error), e:
                    import traceback
                    traceback.print_exc()
                    assets.cur_script.obs.append(error_msg(e.value, assets.cur_script.lastline_value,
                                                           assets.cur_script.si, assets.cur_script))
                assets.flash = 0
                assets.flashcolor = [255, 255, 255]
            if assets.shakeargs != 0:
                try:
                    assets.cur_script._shake("shake", *assets.shakeargs)
                except (art_error, script_error), e:
                    import traceback
                    traceback.print_exc()
                    assets.cur_script.obs.append(error_msg(e.value, assets.cur_script.lastline_value,
                                                           assets.cur_script.si, assets.cur_script))
                assets.shakeargs = 0
import os
import sys

APP_PATH = os.path.realpath('%s/../..' % os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, '%s/lib' % APP_PATH)
import settings
from celery import Celery

settings.load(os.path.join(APP_PATH, 'conf', 'config.xml'))
import imp
conf_mod = imp.load_source('beatconfig', settings.get('job_scheduler', 'conf'))
app = Celery('kontext', config_source=conf_mod)
parser.add_option("-v", "--verbose", dest="verbose", action="store_true", default=False,
                  help="print generated SQL")
parser.add_option("-k", "--keys", dest="keys", default="",
                  help="comma-separated list of keys to use in query")
parser.add_option("-p", "--private", dest="private", default=False, action="store_true",
                  help="request only private results")
opts, args = parser.parse_args()
if len(args) != 1:
    parser.print_help()
    sys.exit(1)

s.conf = s.load(args[0])

# set up readline
HISTFILE = os.path.expanduser('~/.smap-query-history')
readline.parse_and_bind('tab: complete')
readline.parse_and_bind('set editing-mode emacs')
if hasattr(readline, "read_history_file"):
    try:
        readline.read_history_file(HISTFILE)
    except IOError:
        pass
atexit.register(readline.write_history_file, HISTFILE)

cp = adbapi.ConnectionPool(s.conf['database']['module'],
                           host=s.conf['database']['host'],
                           database=s.conf['database']['db'],
import os, sys, ConfigParser, logging, signal, string, time
from daemon import runner

working_directory = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(1, working_directory + '/classes')
sys.path.insert(1, working_directory + '/lib')

import settings
from irc import *
from blockupdate import *
from commands import *

# Load our local configuration
config = ConfigParser.RawConfigParser()
if not config.read(working_directory + '/conf/config.cfg'):
    raise RuntimeError('Failed to load configuration: conf/config.cfg')
settings = settings.load(config)

# Ensure some paths exist
logfile = working_directory + '/' + config.get('Logging', 'file')
if not os.path.exists(os.path.dirname(logfile)):
    os.makedirs(os.path.dirname(logfile))

# Setup logging according to configuration
numeric_level = getattr(logging, config.get('Logging', 'level').upper(), None)
formatter = config.get('Logging', 'format')
if not isinstance(numeric_level, int):
    raise ValueError('Invalid log level: %s' % config.get('Logging', 'level'))
logging.basicConfig(format=formatter, level=numeric_level)
logFormatter = logging.Formatter(formatter)
logger = logging.getLogger('bot')
fh = logging.FileHandler(logfile)