def test_stacker_historic_fit_rf(self):
    """Fit a Stacker with RF base models and predict using historical features."""
    n_samples = 5000
    n_features = 15
    num_classes = 10
    models = ['c-rf'] * 5
    params = ['test_params_rf'] * 5
    X, y = make_classification(n_samples=n_samples,
                               n_features=n_features,
                               n_informative=5,
                               n_redundant=3,
                               n_classes=num_classes)
    df = pd.DataFrame(
        {'feature_' + str(i): X[:, i] for i in range(X.shape[1])})
    df['ID'] = np.random.randint(0, 100, n_samples)
    # First half of the frame acts as the "historical" reference data.
    historical_df = df[:2500]

    def create_f1(frame):
        # Derived feature: feature_2 doubled.
        return frame.feature_2.apply(lambda x: x * 2).values

    def create_f2(frame):
        # Derived feature: trig transform of feature_3.
        return frame.feature_3.apply(
            lambda x: np.sin(x) / np.cos(x * 2)).values

    def create_historic_f1(frame, historical=historical_df):
        # Per-ID mean of feature_1 from the historical slice; 0 for unseen IDs.
        lookup = historical[['ID', 'feature_1'
                             ]].groupby('ID').mean().to_dict()
        return frame.ID.apply(lambda x: lookup['feature_1'].get(x, 0))

    def create_historic_f2(frame, historical=historical_df):
        # Per-ID median of feature_5 from the historical slice; 0 for unseen IDs.
        lookup = historical[['ID', 'feature_5'
                             ]].groupby('ID').median().to_dict()
        return frame.ID.apply(lambda x: lookup['feature_5'].get(x, 0))

    fb = FeatureBuilder([create_f1, create_f2],
                        [create_historic_f1, create_historic_f2])
    meta_clf = Stacker('xgb', models, 10, fb,
                       meta_model_params='test_params',
                       base_model_params=params)
    meta_clf.fit(X, y, df)
    predictions = meta_clf.predict(X[-1000:], df=df[-1000:],
                                   historical_df=df)
    self.assertTrue(predictions.shape[0] == 1000)
def test_init_rf(self):
    """Constructing a Stacker wires up the xgb meta model with expected params."""
    base_models = ['c-rf'] * 5
    base_params = ['test_params_rf'] * 5
    feature_builder = FeatureBuilder([], [])
    meta_clf = Stacker('xgb', base_models, 10, feature_builder,
                       meta_model_params='test_params',
                       base_model_params=base_params)
    self.assertEqual(meta_clf.meta_model.name, 'xgb')
    self.assertEqual(meta_clf.meta_model.params['num_class'], 10)
    # num_rounds is expected to land somewhere in [10, 75].
    self.assertTrue(meta_clf.meta_model.num_rounds >= 10)
    self.assertTrue(meta_clf.meta_model.num_rounds <= 75)
def test_base_predictions_xgb(self):
    """Base-model predictions stack to shape (n_samples, num_classes * n_models)."""
    n_samples = 5000
    n_features = 15
    num_classes = 10
    model_names = ['xgb'] * 5
    model_params = ['test_params'] * 5
    feature_builder = FeatureBuilder([], [])
    meta_clf = Stacker('xgb', model_names, 10, feature_builder,
                       meta_model_params='test_params',
                       base_model_params=model_params)
    self.assertEqual(meta_clf.meta_model.name, 'xgb')
    X, y = make_classification(n_samples=n_samples,
                               n_features=n_features,
                               n_informative=5,
                               n_redundant=3,
                               n_classes=num_classes)
    meta_prediction = meta_clf.generate_base_model_predictions(X, y)
    # One row per sample; one probability column per class per base model.
    self.assertTrue(meta_prediction.shape[0] == n_samples)
    self.assertTrue(meta_prediction.shape[1] == num_classes * len(model_names))
# Entry point: read ROS parameters, construct the Stacker, and spin the node.
rospy.init_node('stacker', log_level=rospy.DEBUG)
node_name = rospy.get_name()
rospy.logwarn('Node name: ' + node_name)

opcua_endpoint = rospy.get_param('~opcua_endpoint')

if not rospy.has_param('~opcua_server_namespace'):
    # FIX: the two adjacent literals previously concatenated to
    # "...OPCU-UAModel..." — missing separator and an OPC-UA typo.
    raise rospy.ROSInitException(
        'Parameter "opcua_server_namespace" must be specified in accordance with OPC-UA '
        'Model. Example: /Airalab/Stacker_goods')
opcua_server_namespace = rospy.get_param('~opcua_server_namespace')
if 'ns=' not in opcua_server_namespace:  # use only string type nodeId
    raise rospy.ROSInitException(
        'Parameter "opcua_server_namespace" template: "ns=<int>;s=/<VendorName>/<ObjectName>"')

if not rospy.has_param('opcua_client_node'):
    rospy.logwarn('Using default ROS OPC-UA Client node path: /opcua/opcua_client')
    rospy.logwarn('You can specify it in parameter \'opcua_client_node\'')
opcua_client_node = rospy.get_param('opcua_client_node', '/opcua/opcua_client')

if not rospy.has_param('~direction'):
    raise rospy.ROSInitException(
        'Parameter "direction" must be specified "forward", "fw" or "backward", "bw"')
direction = rospy.get_param('~direction')

# Milliseconds, presumably — TODO confirm against Stacker's timeout handling.
timeout = rospy.get_param('~timeout', 5000)

stacker = Stacker(node_name, opcua_client_node, opcua_endpoint,
                  opcua_server_namespace, timeout, direction)
stacker.enable()
rospy.spin()
http://stackoverflow.com/questions/10840533/most-pythonic-way-to-delete-a-file-which-may-not-exist """ for f in filelist: try: os.remove(f) except OSError as e: if e.errno != errno.ENOENT: # errno.ENOENT = no such file or directory raise # re-raise exception if a different error occured if __name__ == '__main__': def date2datetime(input_date, time_offset=time.min): if not input_date: return None return datetime.combine(input_date, time_offset) stacker = Stacker() # Check for required command line parameters assert stacker.x_index, 'Tile X-index not specified (-x or --x_index)' assert stacker.y_index, 'Tile Y-index not specified (-y or --y_index)' assert stacker.output_dir, 'Output directory not specified (-o or --output)' assert os.path.isdir(stacker.output_dir), 'Invalid output directory specified (-o or --output)' stacker.output_dir = os.path.abspath(stacker.output_dir) log_multiline(logger.debug, stacker.__dict__, 'stacker.__dict__', '\t') # Stacker object already has command line parameters # that disregard_incomplete_data is set to True for command line invokation stack_info_dict = stacker.stack_tile(x_index=stacker.x_index, y_index=stacker.y_index, stack_output_dir=stacker.output_dir,
parser.add_argument("--align", action="store_true", help="run only the aligner, do not compress") parser.add_argument("--transform", action="store_true", help="run only the aligner and transform, do not compress") parser.add_argument("--stitch", action="store_true", help="stitch images for panoramic formats") args = parser.parse_args() # --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- aligner = Aligner() stitcher = Stitcher() stacker = Stacker(aligner) input_images_aligner = [] input_images_stitcher = [] input_images_stacker = [] # transform to absolute paths BASE_DIR = os.path.dirname(os.path.realpath(__file__)) # --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- # init aligner if args.align or args.transform: # expand all paths for directory in config.DIRS_TO_EXPAND_ALIGNER:
import sys

sys.path.append("../../setup")

from stacker import Stacker

# Minimal CloudFormation template: a single S3 bucket that is deleted
# along with the stack.
TEMPLATE = {
    "Resources": {
        "S3Bucket": {
            "Type": "AWS::S3::Bucket",
            "DeletionPolicy": "Delete",
        }
    }
}

stack = Stacker("dpl-samples-hadoop-terasort", TEMPLATE)
stack.run(sys.argv)
import sys

sys.path.append("../../setup")

from stacker import Stacker

# Minimal CloudFormation template: a single S3 bucket that is deleted
# along with the stack.
TEMPLATE = {
    "Resources": {
        "S3Bucket": {
            "Type": "AWS::S3::Bucket",
            "DeletionPolicy": "Delete",
        }
    }
}

stack = Stacker("dpl-samples-hello-world", TEMPLATE)
stack.run(sys.argv)