def __init__(self):
    """Load the drone-delivery problem state and set up bookkeeping arrays.

    Reads scenario 0 via InputReader and writes results through an
    OutputWriter named 'naive'.  All drones start at the origin with a
    turn budget of zero.
    """
    reader = InputReader()
    self.state = reader.read(0)
    self.output_writer = OutputWriter('naive')

    state = self.state
    self.num_rows = state['numRows']
    self.num_cols = state['numCols']
    self.num_drones = state['numDrones']
    self.num_turns = state['numTurns']
    self.max_payload = state['maxPayload']
    self.num_product_types = state['numProductTypes']
    self.product_weights = state['productWeights']
    self.num_warehouses = state['numWarehouses']
    # warehouses: list of dicts with keys x, y, items -> item stock
    self.warehouse_data = state['warehousesData']
    # orders: list of dicts with keys x, y, numOrders, items
    self.order_data = state['orderData']

    # item_lists[o][p]: units of product p still outstanding for order o
    n_orders = len(self.order_data)
    n_products = len(self.product_weights)
    self.item_lists = numpy.zeros((n_orders, n_products), dtype=int)
    for o, order in enumerate(self.order_data):
        for product in order['items']:
            self.item_lists[o][product] += 1

    # per-drone turn counters and positions (everyone starts at (0, 0))
    self.turns_used = numpy.zeros(self.num_drones)
    self.drone_location = numpy.zeros((self.num_drones, 2))
    for d in range(self.num_drones):
        self.drone_location[d] = [0, 0]
def initialize(self, conf, context):
    """Storm bolt initialisation: import and configure the wrapped dispel4py PE.

    Reads module/script names from *conf*, instantiates the PE, applies any
    pickled per-script configuration, attaches an OutputWriter to every
    output connection and runs the PE's preprocess() hook.

    Fixes over the original:
    - self.scriptname is given a fallback value BEFORE the try block; the
      old code referenced self.scriptname in the except handler, which
      raised AttributeError (masking the real error) whenever reading conf
      itself failed.
    - bare ``except:`` narrowed to ``except Exception:`` so SystemExit /
      KeyboardInterrupt propagate untouched (the handler re-raises anyway).
    """
    # Fallback so the error handler below can always log a script name,
    # even if the conf lookups themselves raise.
    self.scriptname = "<unknown>"
    try:
        self.counter = 0
        self.modname = conf["dispel4py.module"]
        self.scriptname = conf["dispel4py.script"]
        # SECURITY NOTE(review): pickle.loads on topology configuration --
        # acceptable only because the config is produced by the same
        # trusted deployment; never feed untrusted data through this path.
        scriptconfig = pickle.loads(str(conf['dispel4py.config'])) if 'dispel4py.config' in conf else {}
        storm.log("Dispel4Py ------> loading script %s" % self.scriptname)
        mod = import_module(self.modname)
        self.script = getattr(mod, self.scriptname)()
        # Python 2 file (iteritems); apply any configured attributes to the PE.
        for key, value in scriptconfig.iteritems():
            storm.log("Dispel4Py ------> %s: setting attribute %s" % (self.scriptname, key))
            setattr(self.script, key, value)
        storm.log("Dispel4Py ------> loaded script %s" % self.scriptname)
        # attach an output writer to each output connection
        for outputname, output in self.script.outputconnections.iteritems():
            output['writer'] = OutputWriter(self.scriptname, outputname)
        # pre-processing if required
        self.script.preprocess()
        storm.log("Dispel4Py ------> %s: preprocess() completed." % (self.scriptname,))
    except Exception:
        # Log the full traceback to Storm, then let the failure propagate.
        storm.log("Dispel4Py ------> %s: %s" % (self.scriptname, traceback.format_exc(),))
        raise
def __init__(self):
    """Load the drone-delivery problem state from input 0 and prepare
    per-drone and per-order tracking arrays.

    NOTE(review): InputReader, OutputWriter and numpy are expected to be
    imported at file level (outside this chunk).
    """
    input_reader = InputReader()
    # Parsed problem description -- presumably the drone-delivery challenge
    # format (grid size, drones, turn limit, payload, products, warehouses).
    self.state = input_reader.read(0)
    self.output_writer = OutputWriter('naive')
    self.num_rows = self.state['numRows']
    self.num_cols = self.state['numCols']
    self.num_drones = self.state['numDrones']
    self.num_turns = self.state['numTurns']
    self.max_payload = self.state['maxPayload']
    self.num_product_types = self.state['numProductTypes']
    self.product_weights = self.state['productWeights']
    self.num_warehouses = self.state['numWarehouses']
    # warehouses: list of dicts: x, y, items -> item stock
    self.warehouse_data = self.state['warehousesData']
    # order data: list of dicts: x, y, numOrders, items
    self.order_data = self.state['orderData']
    # get list of items
    # item_lists[o][p] = units of product p still outstanding for order o
    self.item_lists = numpy.zeros(
        (len(self.order_data), len(self.product_weights)), dtype=int)
    # turns already consumed by each drone
    self.turns_used = numpy.zeros(self.num_drones)
    # current (x, y) position of each drone
    self.drone_location = numpy.zeros((self.num_drones, 2))
    for order_idx in numpy.arange(len(self.order_data)):
        for item in self.order_data[order_idx]['items']:
            self.item_lists[order_idx][item] += 1
    # Redundant with numpy.zeros above (rows are already [0, 0]); kept as-is.
    for n in numpy.arange(self.num_drones):
        self.drone_location[n] = [0, 0]
def execute_orders(orders_path, output_path):
    """Replay orders from a file, one comma-separated order per line.

    Lines with three fields become ReceiveOrder, lines with two fields
    become SendOrder; anything else is reported as an error.
    """
    writer = OutputWriter(output_path)
    reader = FilerReader(orders_path)
    while reader.has_next_line():
        fields = reader.next_line().split(",")
        n_fields = len(fields)
        if n_fields == 3:
            # Receive order
            ReceiveOrder(fields[0], fields[1], fields[2], writer).execute()
        elif n_fields == 2:
            # Send order
            SendOrder(fields[0], fields[1], writer).execute()
        else:
            print("ERROR")
# Number of data-loading workers handed to the batch classifier.
NUM_WORKERS = 1
# Running total across all processed batches.
# NOTE(review): declared global in process_dataframe but never incremented
# in this chunk -- presumably updated elsewhere in the file; confirm.
total_images_count = 0

# COMMAND ----------

batch_classifier = BatchClassifier(
    can_use_cuda=CAN_USE_CUDA,
    classes_file_location=IMAGENET_CLASSES_LOCATION,
    batch_size=BATCH_SIZE,
    max_labels=MAX_LABELS,
    num_workers=NUM_WORKERS)

# COMMAND ----------

# Sink for classified results (Parquet output).
output_writer = OutputWriter(OUTPUT_CLASSIFIED_PARQUET)

# COMMAND ----------


def process_dataframe(dataframe):
    """Classify one batch of images and append the matches to Parquet.

    Not every row necessarily yields a label, hence the valid/possible
    counts in the log output.
    NOTE(review): duration_batch_processing is computed but unused in the
    visible chunk -- possibly consumed by code after this point.
    """
    global total_images_count
    start_time_batch_processing = time.time()
    batch_size = dataframe.size
    print("Classifying a Batch of size: %d" % batch_size)
    matched_labels = batch_classifier.classify_images(dataframe)
    size_matched_labels = len(matched_labels)
    print("Classified a batch of %d valid elements from %d possible elements" % (size_matched_labels, batch_size))
    output_writer.write_to_parquet(matched_labels)
    duration_batch_processing = time.time() - start_time_batch_processing
class NaiveOptimiser:
    """Greedy drone-delivery solver (Python 2 source: print statements).

    Repeatedly picks the order with the fewest items, finds a nearby
    warehouse with stock, and sends the least-busy drone to load and
    deliver until the turn budget is exhausted.
    """

    def __init__(self):
        """Load problem state from input 0 and prepare tracking arrays."""
        input_reader = InputReader()
        self.state = input_reader.read(0)
        self.output_writer = OutputWriter('naive')
        self.num_rows = self.state['numRows']
        self.num_cols = self.state['numCols']
        self.num_drones = self.state['numDrones']
        self.num_turns = self.state['numTurns']
        self.max_payload = self.state['maxPayload']
        self.num_product_types = self.state['numProductTypes']
        self.product_weights = self.state['productWeights']
        self.num_warehouses = self.state['numWarehouses']
        # warehouses: list of dicts: x, y, items -> item stock
        self.warehouse_data = self.state['warehousesData']
        # order data: list of dicts: x, y, numOrders, items
        self.order_data = self.state['orderData']
        # get list of items
        # item_lists[o][p] = units of product p still outstanding for order o
        self.item_lists = numpy.zeros((len(self.order_data), len(self.product_weights)), dtype=int)
        # turns already consumed by each drone
        self.turns_used = numpy.zeros(self.num_drones)
        # current (x, y) position of each drone
        self.drone_location = numpy.zeros((self.num_drones, 2))
        for order_idx in numpy.arange(len(self.order_data)):
            for item in self.order_data[order_idx]['items']:
                self.item_lists[order_idx][item] += 1
        # Redundant with numpy.zeros above; kept as-is.
        for n in numpy.arange(self.num_drones):
            self.drone_location[n] = [0, 0]

    def optimise(self):
        """Main greedy loop; returns after end() once the turn limit is hit.

        NOTE(review): completed orders are never removed from
        self.order_data, so the same smallest order can be selected
        repeatedly -- the loop only terminates via the turn budget.
        """
        while True:
            print self.turns_used[:20]
            # Do smallest orders first
            # list of num orders
            num_orders = [order['numOrders'] for order in self.order_data]
            idx_smallest_orders = num_orders.index(min(num_orders))
            idx_order = idx_smallest_orders
            print self.item_lists[idx_smallest_orders]
            coordinates_smallest = (self.order_data[idx_order]['x'],
                                    self.order_data[idx_order]['y'])
            items_order = self.order_data[idx_order]['items']
            # get warehouses with items closest to destination that have at
            # least one of the items stocked
            smallest_dist = 10000
            smallest_idx = -1
            # get warehouses that stock suitable items
            # NOTE(review): a warehouse is appended once per stocked item,
            # so suitable_warehouses can contain duplicates.
            suitable_warehouses = []
            for warehouse in self.warehouse_data:
                warehouse_stock = warehouse['items']
                for item in items_order:
                    if warehouse_stock[item] > 0:
                        suitable_warehouses.append(warehouse)
            coordinates_warehouse = (0, 0)
            # get nearest one to customer (ceil of Euclidean distance)
            for idx, warehouse in enumerate(suitable_warehouses):
                coordinates_warehouse = (warehouse['x'], warehouse['y'])
                dist = math.ceil(math.sqrt((coordinates_smallest[0] - coordinates_warehouse[0]) ** 2 + (
                    coordinates_smallest[1] - coordinates_warehouse[1]) ** 2))
                if dist < smallest_dist:
                    smallest_dist = dist
                    smallest_idx = idx
            # NOTE(review): smallest_idx was found over suitable_warehouses
            # but is used here to index self.warehouse_data -- these only
            # agree if the lists happen to line up; suspected bug, confirm.
            cur_warehouse = self.warehouse_data[smallest_idx]
            # assign next drone which hasn't done much so far
            next_drone_idx = numpy.argmin(self.turns_used)
            # which stock?
            warehouse_stock = cur_warehouse['items']
            # NOTE(review): below, 'item' is the outstanding COUNT of product
            # 'idx', yet it is used as an index into warehouse_stock,
            # product_weights and the writer calls -- 'idx' was probably
            # intended; suspected bug, confirm against the output format.
            for idx, item in enumerate(self.item_lists[idx_order]):
                if item > 0:
                    print 'here'
                    if warehouse_stock[item] > 0:
                        # next drone fetch that stock!
                        # how many can it fetch?
                        # NOTE(review): indexing max_payload by drone assumes
                        # it is a per-drone sequence -- confirm; challenge
                        # inputs usually give a single scalar payload.
                        num_possible = int(self.max_payload[next_drone_idx] * 1. / self.product_weights[item])
                        num_taken = min([num_possible, warehouse_stock[item]])
                        # calculate number of turns used
                        # add delivery
                        turn_count = smallest_dist + 1
                        # numpy row minus tuple broadcasts element-wise
                        load_dist = math.ceil(
                            numpy.linalg.norm(self.drone_location[next_drone_idx] - coordinates_warehouse))
                        turn_count += load_dist + 1
                        # we are over max turns!
                        if self.turns_used[next_drone_idx] + turn_count > self.num_turns:
                            self.end()
                            return
                        else:
                            self.turns_used[next_drone_idx] += turn_count
                            # fetch that
                            self.output_writer.writeLoad(next_drone_idx, smallest_idx, item, num_taken)
                            self.output_writer.writeDeliver(next_drone_idx, idx_order, item, num_taken)
                            # decrease warehouse stock
                            warehouse_stock[item] -= num_taken
                            # remove item from list
                            self.item_lists[idx_order][item] -= num_taken
                            # update drone location
                            self.drone_location[next_drone_idx] = coordinates_smallest
                            print 'here'
                            break

    def end(self):
        """Finalise and close the output file."""
        self.output_writer.endFile()
# Wiring for the trace-parsing pipeline:
#   InputReader -> TraceParser -> TraceManager -> OutputWriter,
# with errors funnelled through a shared ErrorHandler.
from error_handler import ErrorHandler
from input_reader import InputReader
from output_writer import OutputWriter
from trace_manager import TraceManager
from trace_parser import TraceParser
from threading import Thread

error_handler = ErrorHandler()
writer = OutputWriter(error_handler)
trace_manager = TraceManager(writer)
parser = TraceParser(error_handler, trace_manager)
reader = InputReader(parser)

print("Start parsing")
# Background thread cleaning traces while the main thread blocks in read().
# NOTE(review): the thread is neither joined nor marked daemon -- confirm
# clean_traces terminates on its own, else the process may hang on exit.
clean_thread = Thread(target=trace_manager.clean_traces)
clean_thread.start()
reader.read()
class NaiveOptimiser:
    """Greedy drone-delivery solver (Python 2 source: print statements).

    Strategy: always serve the order with the fewest items next, pull stock
    from a nearby warehouse, and assign the drone with the smallest turn
    count, stopping once the turn budget would be exceeded.
    """

    def __init__(self):
        """Load problem state from input 0 and prepare tracking arrays."""
        input_reader = InputReader()
        self.state = input_reader.read(0)
        self.output_writer = OutputWriter('naive')
        self.num_rows = self.state['numRows']
        self.num_cols = self.state['numCols']
        self.num_drones = self.state['numDrones']
        self.num_turns = self.state['numTurns']
        self.max_payload = self.state['maxPayload']
        self.num_product_types = self.state['numProductTypes']
        self.product_weights = self.state['productWeights']
        self.num_warehouses = self.state['numWarehouses']
        # warehouses: list of dicts: x, y, items -> item stock
        self.warehouse_data = self.state['warehousesData']
        # order data: list of dicts: x, y, numOrders, items
        self.order_data = self.state['orderData']
        # get list of items
        # item_lists[o][p] = units of product p still outstanding for order o
        self.item_lists = numpy.zeros(
            (len(self.order_data), len(self.product_weights)), dtype=int)
        # turns already consumed by each drone
        self.turns_used = numpy.zeros(self.num_drones)
        # current (x, y) position of each drone
        self.drone_location = numpy.zeros((self.num_drones, 2))
        for order_idx in numpy.arange(len(self.order_data)):
            for item in self.order_data[order_idx]['items']:
                self.item_lists[order_idx][item] += 1
        # Redundant with numpy.zeros above; kept as-is.
        for n in numpy.arange(self.num_drones):
            self.drone_location[n] = [0, 0]

    def optimise(self):
        """Main greedy loop; returns after end() once the turn limit is hit.

        NOTE(review): completed orders are never removed from
        self.order_data, so the same smallest order can be selected over
        and over -- termination relies solely on the turn budget.
        """
        while True:
            print self.turns_used[:20]
            # Do smallest orders first
            # list of num orders
            num_orders = [order['numOrders'] for order in self.order_data]
            idx_smallest_orders = num_orders.index(min(num_orders))
            idx_order = idx_smallest_orders
            print self.item_lists[idx_smallest_orders]
            coordinates_smallest = (self.order_data[idx_order]['x'],
                                    self.order_data[idx_order]['y'])
            items_order = self.order_data[idx_order]['items']
            # get warehouses with items closest to destination that have at
            # least one of the items stocked
            smallest_dist = 10000
            smallest_idx = -1
            # get warehouses that stock suitable items
            # NOTE(review): a warehouse is appended once per stocked item,
            # so suitable_warehouses can contain duplicates.
            suitable_warehouses = []
            for warehouse in self.warehouse_data:
                warehouse_stock = warehouse['items']
                for item in items_order:
                    if warehouse_stock[item] > 0:
                        suitable_warehouses.append(warehouse)
            coordinates_warehouse = (0, 0)
            # get nearest one to customer (ceil of Euclidean distance)
            for idx, warehouse in enumerate(suitable_warehouses):
                coordinates_warehouse = (warehouse['x'], warehouse['y'])
                dist = math.ceil(
                    math.sqrt((coordinates_smallest[0] - coordinates_warehouse[0])**2 +
                              (coordinates_smallest[1] - coordinates_warehouse[1])**2))
                if dist < smallest_dist:
                    smallest_dist = dist
                    smallest_idx = idx
            # NOTE(review): smallest_idx was found over suitable_warehouses
            # but is used here to index self.warehouse_data -- these only
            # agree if the lists happen to line up; suspected bug, confirm.
            cur_warehouse = self.warehouse_data[smallest_idx]
            # assign next drone which hasn't done much so far
            next_drone_idx = numpy.argmin(self.turns_used)
            # which stock?
            warehouse_stock = cur_warehouse['items']
            # NOTE(review): below, 'item' is the outstanding COUNT of product
            # 'idx', yet it is used as an index into warehouse_stock,
            # product_weights and the writer calls -- 'idx' was probably
            # intended; suspected bug, confirm against the output format.
            for idx, item in enumerate(self.item_lists[idx_order]):
                if item > 0:
                    print 'here'
                    if warehouse_stock[item] > 0:
                        # next drone fetch that stock!
                        # how many can it fetch?
                        # NOTE(review): indexing max_payload by drone assumes
                        # it is a per-drone sequence -- confirm; challenge
                        # inputs usually give a single scalar payload.
                        num_possible = int(self.max_payload[next_drone_idx] * 1. / self.product_weights[item])
                        num_taken = min([num_possible, warehouse_stock[item]])
                        # calculate number of turns used
                        # add delivery
                        turn_count = smallest_dist + 1
                        # numpy row minus tuple broadcasts element-wise
                        load_dist = math.ceil(
                            numpy.linalg.norm(
                                self.drone_location[next_drone_idx] - coordinates_warehouse))
                        turn_count += load_dist + 1
                        # we are over max turns!
                        if self.turns_used[
                                next_drone_idx] + turn_count > self.num_turns:
                            self.end()
                            return
                        else:
                            self.turns_used[next_drone_idx] += turn_count
                            # fetch that
                            self.output_writer.writeLoad(next_drone_idx,
                                                         smallest_idx, item,
                                                         num_taken)
                            self.output_writer.writeDeliver(
                                next_drone_idx, idx_order, item, num_taken)
                            # decrease warehouse stock
                            warehouse_stock[item] -= num_taken
                            # remove item from list
                            self.item_lists[idx_order][item] -= num_taken
                            # update drone location
                            self.drone_location[
                                next_drone_idx] = coordinates_smallest
                            print 'here'
                            break

    def end(self):
        """Finalise and close the output file."""
        self.output_writer.endFile()
"min_samples_split=0.1, n_jobs=1)" crf2_str = "RandomForestClassifier(n_estimators=n_trees, max_features=1, random_state=2, " \ "min_samples_split=0.1, n_jobs=1)" prf1 = eval(prf1_str) prf2 = eval(prf2_str) crf1 = eval(crf1_str) crf2 = eval(crf2_str) return [prf1, prf2, crf1, crf2], [prf1_str, prf2_str, crf1_str, crf2_str] for file_name in file_list: print(file_name) output_writer = OutputWriter('vlpso_result' + '_' + str(n_trees) + '_trees/' + file_name) # ---------------------- Prepare Data ---------------------- # D_train = np.loadtxt(data_folder + 'train1/' + file_name + '_train1.dat', delimiter=',') D_val = np.loadtxt(data_folder + 'val/' + file_name + '_val.dat', delimiter=',') D_test = np.loadtxt(data_folder + 'test/' + file_name + '_test.dat', delimiter=',') X_train = D_train[:, :-1] Y_train = D_train[:, -1].astype(np.int32) X_val = D_val[:, :-1] Y_val = D_val[:, -1].astype(np.int32) X_test = D_test[:, :-1]
# ------------------------ Parameters ---------------------- #


def init_classifier():
    """Return (classifier, description string) for a fresh random forest.

    The description string is kept for result logging.  Improvement over
    the original: the classifier is constructed directly instead of via
    eval() on the description string -- equivalent for this fixed
    expression, and avoids eval entirely.  Relies on module-level
    n_trees and RandomForestClassifier.
    """
    rf_str = "RandomForestClassifier(n_estimators=n_trees, max_features='sqrt', random_state=1, " \
             "min_samples_split=0.1, n_jobs=1)"
    rf = RandomForestClassifier(n_estimators=n_trees, max_features='sqrt',
                                random_state=1, min_samples_split=0.1,
                                n_jobs=1)
    return rf, rf_str


for i_file in range(from_id, to_id):
    file_name = file_list[i_file]
    print(datetime.datetime.now(), ' File {}: '.format(i_file), file_name)
    # One result writer per dataset file.
    output_writer = OutputWriter('result/{}/'.format(NAME) + file_name)
    # ---------------------- Prepare Data ---------------------- #
    # Each .dat file is comma-separated; the last column is the class label.
    D_train = np.loadtxt(data_folder + 'train1/' + file_name + '_train1.dat', delimiter=',')
    D_val = np.loadtxt(data_folder + 'val/' + file_name + '_val.dat', delimiter=',')
    D_test = np.loadtxt(data_folder + 'test/' + file_name + '_test.dat', delimiter=',')
    X_train = D_train[:, :-1]
    Y_train = D_train[:, -1].astype(np.int32)
    X_val = D_val[:, :-1]
    Y_val = D_val[:, -1].astype(np.int32)
    X_test = D_test[:, :-1]
    Y_test = D_test[:, -1].astype(np.int32)
import torch
import pandas as pd
from torch.utils.data import DataLoader
from torch.utils.data import TensorDataset
from output_writer import OutputWriter

# Dump cached formant data to CSV, then load it back for training.
# NOTE(review): the meaning of the constructor argument 5 is not visible
# here -- presumably the number of formants (matches f1..f5); confirm in
# OutputWriter.
ow = OutputWriter(5)
ow.write_to_csv(verbose=True)

# Create Tensors to hold dependent/independent variable data
train_csv = ow.get_cached_csv("train")
# NOTE(review): the CSV is parsed twice here; a single read_csv with two
# column selections would suffice.
train_ind = pd.read_csv(train_csv)[["f1", "f2", "f3", "f4", "f5"]]
train_dep = pd.read_csv(train_csv)[["phone_class_index"]]
# float features, integer class labels (long, as required by loss functions)
x = torch.from_numpy(train_ind.values).float()
y = torch.from_numpy(train_dep.values).long()
print(x)
print(y)

# Create a TensorDataset and DataLoader to provide the model with batches of data
train_ds = TensorDataset(x, y)
train_dl = DataLoader(train_ds, batch_size=32)

##### Set model layer dimensions
### D_in is the input dimension (5, one for each estimated formant)
D_in = x.shape[1]
### H is the hidden layer dimension
H = 16
### C is the number of final categories (there are 14 monophthongs)
C = 14
crf2_str = "RandomForestClassifier(n_estimators=n_trees, max_features=1, random_state=2, " \ "min_samples_split=0.1, n_jobs=1)" prf1 = eval(prf1_str) prf2 = eval(prf2_str) crf1 = eval(crf1_str) crf2 = eval(crf2_str) return [prf1, prf2, crf1, crf2], [prf1_str, prf2_str, crf1_str, crf2_str] for i_file in range(from_id, to_id): file_name = file_list[i_file] print(datetime.datetime.now(), ' File {}: '.format(i_file), file_name) output_writer = OutputWriter('result/{}/'.format(NAME) + file_name) # ---------------------- Prepare Data ---------------------- # D_train = np.loadtxt(data_folder + 'train1/' + file_name + '_train1.dat', delimiter=',') D_val = np.loadtxt(data_folder + 'val/' + file_name + '_val.dat', delimiter=',') D_test = np.loadtxt(data_folder + 'test/' + file_name + '_test.dat', delimiter=',') X_train = D_train[:, :-1] Y_train = D_train[:, -1].astype(np.int32) X_val = D_val[:, :-1] Y_val = D_val[:, -1].astype(np.int32) X_test = D_test[:, :-1] Y_test = D_test[:, -1].astype(np.int32) classes = np.unique(np.concatenate((Y_train, Y_val, Y_test))) if np.any(classes.astype(np.int32) == 0):