Example #1
class Receive_data( QtCore.QObject ):
    def __init__( self ):
        self.data = DataManager("data.db")
        QtCore.QObject.__init__(self)

    @QtCore.Slot('QString', 'QString')
    def execute(self,email,password):
        global client
        client = Client(email, password)
        response = client.login()
        if response['result'] in ('missingParams', 'invalidLogin'):
            Mbox('Budibox', 'Username or password incorrect. Please try again or register at http://www.budibox.com!')
        elif response['result'] == 'ok':
            startup = client.notify_startup()
            if not startup:
                print_message("Could not startup!")
            else:
                print_message("Notified!")
            self.data.addProperty("email", email)
            # base64 is an encoding, not encryption; stored only for round-tripping
            encoded = base64.b64encode(password.encode()).decode()
            self.data.addProperty("password", encoded)
            Mbox('Budibox', 'Login successful!')
            app.exit()
Example #2
    def add(self, task, projectName=None):
        date = Timings.now()

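        # Only "work" tasks carry a project; all other task types are logged without one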
        if self.taskType(task) != "work":
            projectName = None

        attributes = self.processTask(date, task, projectName)
        DataManager.writeTask(date, task, projectName, firstToday=len(self.tasks) == 1)
        return attributes
Example #3
    def countTasks(self):
        """Count tasks statistics divided by projects"""
        self._data = DataManager.getByRange(self._fromDate, self._toDate)
        res = {}
        for date, task, projectName in self._data:
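            # "__start__" appears to act as a session sentinel: reset the previous timestamp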
            if task == "__start__":
                self.timings.setPrevDate(None)

            spentSeconds = self.timings.count(date, Tasks.taskType(task))

            if Tasks.taskType(task) != "work":
                continue

            if spentSeconds:
                if projectName not in res:
                    res[projectName] = {}

                if task not in res[projectName]:
                    res[projectName][task] = spentSeconds
                else:
                    res[projectName][task] += spentSeconds
        self._countAttrib([v for k in res for v in res[k].values()])
        if res:
            ret = {}
            for k in res.keys():
                ret[k] = sorted(res[k].items(), key=lambda item: item[1], reverse=True)
            return ret
        else:
            return {}
Example #4
    def _countObject(self, objType, targetAction):
        """Generic function for calculating projects data or slacking statistics"""
        self._data = DataManager.getByRange(self._fromDate, self._toDate)
        res = {}

        for date, task, projectName in self._data:
            if task == "__start__":
                self.timings.setPrevDate(None)
            objKey = projectName if objType == "project" else task

            spentSeconds = self.timings.count(date, Tasks.taskType(task))

            if Tasks.taskType(task) != targetAction:
                self.timings.setPrevDate(date)
                continue

            if spentSeconds:
                if objKey not in res:
                    res[objKey] = spentSeconds
                else:
                    res[objKey] += spentSeconds

        self._countAttrib(res.values())
        if res:
            return sorted(res.items(), key=lambda item: item[1], reverse=True)
        else:
            return []
Example #5
    def __init__(self, top_right, bottom_left):
        self.event_instance = None
        self.data_manager = DataManager()
        self.top_right = top_right
        self.bottom_left = bottom_left
        self.result = self.run()
        self.data_manager.close_con()
Example #6
    def init_home_page(self, recommend_status="publish"):

        # Init DBus.
        self.system_bus = dbus.SystemBus()
        bus_object = self.system_bus.get_object(DSC_SERVICE_NAME, DSC_SERVICE_PATH)
        self.bus_interface = dbus.Interface(bus_object, DSC_SERVICE_NAME)
        # Say hello to backend.
        #self.bus_interface.say_hello(self.simulate)
        self.set_software_download_dir()
        self.inhibit_obj = InhibitObject()

        self.loginfo("Init data manager")

        # Init data manager.
        self.data_manager = DataManager(self.bus_interface, debug_flag)

        # Init packages status
        self.packages_status = {}

        # Init home page.
        self.home_page = HomePage(self.data_manager, recommend_status)

        # Init switch page.
        self.switch_page(self.home_page)

        self.in_update_list = False

        self.init_backend()
Example #7
	def __init__(self, dataset_path):
		"""Init the engine with the dataset path
		"""

		print "Initializing DataManager"
		self.dataManager = DataManager(dataset_path)
#        url = 'http://commondatastorage.googleapis.com/books1000/'
		datastore_directory = "datastore/datasets/ml-latest-small"
		self.X_train, self.X_test, self.y_train, self.y_test, self.movies = self.dataManager.load_datasets(datastore_directory)
		#load model if one is saved
		self.model = self.dataManager.load_model("model.pkl")
Example #8
class LoginBox:
    def __init__(self):
        self.data = DataManager("data.db")
        self.email = self.data.getPropertyValue("email")
        self.password = self.data.getPropertyValue("password")
        
    def start(self):
        global client
        if (len(self.email) > 0 and len(self.password) > 0):
            self.email = str(self.email[0])
            self.password = str(base64.b64decode(self.password[0]))
            client = Client(self.email, self.password)
            response = client.login()
        
            if (response['result'] == 'ok'):
                startup = client.notify_startup()
                if (not startup):
                    print_message("Could not startup!")
                else:
                    print_message("Notified!")
        
        else:
            Mbox("Budibox", "Credentials undefined or incorrect. Please login again.") 
            
            # Create the QML user interface.
            view = QDeclarativeView()
            view.setSource(QUrl('qml/main.qml'))
            view.setWindowTitle("Budibox")
            view.setWindowIcon(QIcon("qml/budibox.jpg"))
            
            context = view.rootContext()
            context.setContextProperty("send_data",Receive_data())
            
            # Display the user interface and allow the user to interact with it.
            view.setGeometry(360, 360, 360, 360)
            view.setMaximumSize(360, 360)
            view.show()
            
            app.exec_()
Example #9
    def __init__(self):
        self.interval = 5.0
        self.running = False

        self.configParser = ConfigParser.RawConfigParser()
        self.configParser.read('config/dob.cfg')

        self.data_manager = DataManager(configParser=self.configParser)
        self.drive_watcher = GDriveWatcher(configParser=self.configParser, data_manager=self.data_manager)
        self.slack_bot = SlackBot(configParser=self.configParser)
        self.witmanager = WitAIManager(configParser=self.configParser)

        self.last_drive_scan = None
Example #10
def main(argv):
  manager = DataManager()
  manager.load()

  sess = tf.Session()

  model = VariationalAutoencoder(learning_rate=flags.learning_rate,
                                 beta=flags.beta)

  sess.run(tf.global_variables_initializer())

  saver = load_checkpoints(sess)

  if flags.training:
    # Train
    train(sess, model, manager, saver)
  else:
    reconstruct_check_images = manager.get_random_images(10)
    # Image reconstruction check
    reconstruct_check(sess, model, reconstruct_check_images)
    # Disentangle check
    disentangle_check(sess, model, manager)
Example #11
class MLEngine:
	"""A machine learning engine
	"""

	def __init__(self, dataset_path):
		"""Init the engine with the dataset path
		"""

		print "Initializing DataManager"
		self.dataManager = DataManager(dataset_path)
#        url = 'http://commondatastorage.googleapis.com/books1000/'
		datastore_directory = "datastore/datasets/ml-latest-small"
		self.X_train, self.X_test, self.y_train, self.y_test, self.movies = self.dataManager.load_datasets(datastore_directory)
		#load model if one is saved
		self.model = self.dataManager.load_model("model.pkl")

	def preprocess(self):
		print "Preprocessing data"

	def train(self):
		print "Training model"
		self.model.train(self.X_train, self.y_train, self.movies)
		# score on test data
		print(self.model.score(self.X_test, self.y_test))

	def predict(self, X):
		prediction = self.model.predict(X)
		print(prediction)
		return prediction

	def validate(self):
		print "Validating..."

	def save_model(self, filename):
		print "Saving model to file..."
		self.dataManager.save_model(self.model, filename)
Example #12
    def __init__(self, batch_size, model_path, examples_path, max_image_width,
                 train_test_ratio, restore):
        self.step = 0
        self.__model_path = model_path
        self.__save_path = os.path.join(model_path, 'ckp')

        self.__restore = restore

        self.__training_name = str(int(time.time()))
        self.__session = tf.Session()

        # Building graph
        with self.__session.as_default():
            (self.__inputs, self.__targets, self.__seq_len, self.__logits,
             self.__decoded, self.__optimizer, self.__acc, self.__cost,
             self.__max_char_count,
             self.__init) = self.crnn(max_image_width, batch_size)
            self.__init.run()

        with self.__session.as_default():
            self.__saver = tf.train.Saver(tf.global_variables(),
                                          max_to_keep=10)
            # Loading last save if needed
            if self.__restore:
                print('Restoring')
                ckpt = tf.train.latest_checkpoint(self.__model_path)
                if ckpt:
                    print('Checkpoint is valid')
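                    # checkpoint paths end in '-<global_step>'; recover the step from the name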
                    self.step = int(ckpt.split('-')[1])
                    self.__saver.restore(self.__session, ckpt)

        # Creating data_manager
        self.__data_manager = DataManager(batch_size, model_path,
                                          examples_path, max_image_width,
                                          train_test_ratio,
                                          self.__max_char_count)
Example #13
class DataManagerTest(unittest.TestCase):
    def setUp(self):
        self.manager = DataManager()
        self.manager.load()

    def tearDown(self):
        del self.manager
        gc.collect()

    def test_load(self):
        # sample size should be 3 * 6 * 40 * 32 * 32 = 737280 (shape, scale, orientation, x, y)
        self.assertEqual(self.manager.sample_size, 737280)

    def test_get_image(self):
        # get first image
        image0 = self.manager.get_image(shape=0,
                                        scale=0,
                                        orientation=0,
                                        x=0,
                                        y=0)
        self.assertTrue(image0.shape == (4096, ))

        # boundary check
        image1 = self.manager.get_image(shape=3 - 1,
                                        scale=6 - 1,
                                        orientation=40 - 1,
                                        x=32 - 1,
                                        y=32 - 1)
        self.assertTrue(image1.shape == (4096, ))

    def test_get_images(self):
        indices = [0, 1, 2]
        images = self.manager.get_images(indices)

        self.assertEqual(len(images), 3)
        # image shape should be flattened: (4096,)
        self.assertTrue(images[0].shape == (4096, ))

    def test_get_random_images(self):
        images = self.manager.get_random_images(3)

        self.assertEqual(len(images), 3)
        self.assertTrue(images[0].shape == (4096, ))
Example #14
def main():
    flight_search = FlightSearch()
    data_manager = DataManager()
    notification_manager = NotificationManager()
    data = data_manager.get_data()["prices"]

    # Get IATA Codes for cities in spreadsheet.
    if data[0]["iataCode"] == "":
        city_ids = [(elem["id"], elem["city"]) for elem in data]

        for elem in city_ids:
            iata = flight_search.get_iata_code_by_city(elem[1])
            body = {
                "price": {
                    "iataCode": iata,
                }
            }
            data_manager.put_data(body, elem[0])

    # Get cheap flights from LON (London) to destinations in the spreadsheet.
    for destination in data:
        flight = flight_search.search_a_flight(destination["iataCode"])

        if flight is None:
            continue

        # Notify if cheap flight is found.
        if destination["lowestPrice"] > flight.price:
            message = "Low price alert!\n" \
                     f"Only {flight.price} to fly from {flight.origin_city}-{flight.origin_airport}" \
                     f" to {flight.destination_city}-{flight.destination_airport}" \
                     f" from {flight.out_date}" \
                     f" to {flight.return_date}."

            if flight.stop_overs > 0:
                message += f"\nFlight has {flight.stop_overs} stop over, via {flight.via_city}."

            # Notify me on phone
            notification_manager.send_message(message)

            # Notify customers via email
            customers = data_manager.get_users()["users"]
            notification_manager.send_emails(customers[0]["email"], flight)
Example #15
def get_features(base_path='../features/'):
    n = 115615
    # n = 500

    labels = DataManager.get_labels_2_classes().iloc[0:n, 1]
    # features = pd.read_csv(base_path + 'labels_to_test.csv').iloc[0:n, 2:3]
    embeddings = DataManager.get_embeddings().iloc[0:n, :]
    n_grams = DataManager.get_ngrams().iloc[0:n]
    scores = DataManager.get_scores().iloc[0:n, 2:]
    pos_tags = DataManager.get_pos_ngrams().iloc[0:n]
    linguistic = DataManager.get_linguistic().iloc[0:n]

    print('Merging features...')

    features = embeddings.merge(linguistic, on='0')
    features = n_grams.merge(features, left_on='rev_id',
                             right_on='0').drop('0', axis=1)
    features = features.merge(scores, on='rev_id')
    features = features.merge(pos_tags, on='rev_id')

    return features.values, labels
Example #16
class DataManagerTest(unittest.TestCase):
  def setUp(self):
    self.manager = DataManager()
    self.manager.load()

  def tearDown(self):
    del self.manager
    gc.collect()
  
  def test_load(self):
    # sample size should be 3 * 6 * 40 * 32 * 32 = 737280 (shape, scale, orientation, x, y)
    self.assertEqual(self.manager.sample_size, 737280)

  def test_get_image(self):
    # get first image
    image0 = self.manager.get_image(shape=0, scale=0, orientation=0, x=0, y=0)
    self.assertTrue(image0.shape == (4096,))
    
    # boundary check
    image1 = self.manager.get_image(shape=3-1, scale=6-1, orientation=40-1,
                                    x=32-1, y=32-1)
    self.assertTrue(image1.shape == (4096,))

  def test_get_images(self):
    indices = [0,1,2]
    images = self.manager.get_images(indices)
    
    self.assertEqual(len(images), 3)
    # image shape should be flattened: (4096,)
    self.assertTrue(images[0].shape == (4096,))

  def test_get_random_images(self):
    images = self.manager.get_random_images(3)
    
    self.assertEqual(len(images), 3)
    self.assertTrue(images[0].shape == (4096,))
Example #17
class TeamBuilder:
    def __init__(self):
        self.data_manager = DataManager()

    # Solve squad selection as a knapsack problem with positional restrictions, using LP (PuLP).
    def team_builder(self, budget):
        defender_pos = ['RCB', 'CB', 'LB', 'RB', 'RWB', 'LWB']
        middle_pos = [
            'RCM', 'LCM', 'LDM', 'CDM', 'LCB', 'RM', 'LM', 'RDM', 'CM'
        ]
        forward_pos = [
            'RF', 'ST', 'LW', 'LF', 'RS', 'CAM', 'LS', 'LAM', 'RW', 'RAM', 'CF'
        ]

        data = self.data_manager.get_data_frame_from_db()

        player = [str(i) for i in range(data.shape[0])]
        point = {str(i): data['Overall'][i] for i in range(data.shape[0])}
        value = {str(i): data['Value'][i] for i in range(data.shape[0])}
        gk = {
            str(i): 1 if data['Position'][i] == 'GK' else 0
            for i in range(data.shape[0])
        }
        fullback = {
            str(i): 1 if data['Position'][i] in defender_pos else 0
            for i in range(data.shape[0])
        }
        halfback = {
            str(i): 1 if data['Position'][i] in middle_pos else 0
            for i in range(data.shape[0])
        }
        forward = {
            str(i): 1 if data['Position'][i] in forward_pos else 0
            for i in range(data.shape[0])
        }
        xi = {str(i): 1 for i in range(data.shape[0])}

        prob = LpProblem("Knapsack_Football", LpMaximize)
        player_vars = LpVariable.dicts("Players", player, 0, 1, LpBinary)

        # objective function: maximize total overall rating
        prob += lpSum([point[i] * player_vars[i]
                       for i in player]), "Total_Points"

        # constraint
        prob += lpSum([player_vars[i]
                       for i in player]) == 11, "Total_11_Players"
        prob += lpSum([value[i] * player_vars[i]
                       for i in player]) <= budget, "Total_Cost"
        prob += lpSum([gk[i] * player_vars[i] for i in player]) == 1, "1_GK"
        prob += lpSum([fullback[i] * player_vars[i]
                       for i in player]) == 2, "2_DEF"
        prob += lpSum([halfback[i] * player_vars[i]
                       for i in player]) == 3, "3_MID"
        prob += lpSum([forward[i] * player_vars[i]
                       for i in player]) == 5, "5_STR"

        # solve
        status = prob.solve()
        player_index_list = []
        for v in prob.variables():
            if v.varValue > 0:
                index = str(v.name).replace("Players_", '')
                player_index_list.append(int(index))

        result = data.iloc[player_index_list]
        return result
Example #18
def build_data_manager(frame_folder, feature_folder, caption_file, key_frame_info_folder, save_path):
    data_manager = DataManager()
    data_manager.load_frame_path_info(frame_folder)
    data_manager.load_captions(caption_file)
    data_manager.load_key_frame_information(key_frame_info_folder)
    data_manager.load_features(feature_folder)
    data_manager.save(save_path)
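A hypothetical invocation of the helper above (all paths are placeholders):

build_data_manager(
    frame_folder="frames/",                 # placeholder directories
    feature_folder="features/",
    caption_file="captions.json",
    key_frame_info_folder="key_frames/",
    save_path="data_manager.pkl",
)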
Example #19
class OrderHandler:
    def __init__(self, server=requests):
        self.server = server
        self.data = DataManager(self.server)

    """
    Interface Methods
    """

    def start_interface(self) -> None:
        while True:
            print("\nCommand list:")
            print("submit_order")
            print("update_order")
            print("cancel_order")
            print("print_menu")
            print("quit")
            command = input("\nEnter your command: ")

            if command == "quit":
                break
            elif command == "submit_order":
                self.submit_order()
            elif command == "update_order":
                self.update_order()
            elif command == "cancel_order":
                self.cancel_order()
            elif command == "print_menu":
                self.print_menu()
            else:
                print("Error: Invalid command")

    def submit_order(self) -> None:
        pizza_size = ""
        pizza_type = ""
        toppings = []
        drink = ""
        delivery_type = ""

        while True:
            pizza_size = input("Select a size: ")
            if self.check_pizza_size(pizza_size):
                break
            print("Error: Invalid size")

        while True:
            pizza_type = input("Select a pizza type (or enter 'None'): ")
            if self.check_pizza_type(pizza_type):
                break
            print("Error: Invalid type")

        while True:
            topping = input("Add a pizza topping (or enter 'None'): ")
            if topping.lower() == 'none':
                break
            elif not self.check_topping(topping):
                print("Error: Invalid topping")
            else:
                toppings.append(topping)

        while True:
            drink = input("Select a drink: ")
            if self.check_drink(drink):
                break
            print("Error: Invalid drink")

        while True:
            delivery_type = input("Select a delivery type: ")
            if self.check_delivery_type(delivery_type):
                break
            print("Error: Invalid delivery type")

        address = input("Enter your address (enter anything if pick-up): ")

        order_no = self.submit_order_to_server(pizza_size, pizza_type,
                                               toppings, drink, delivery_type,
                                               address)

        print("Your order number is: " + str(order_no))
        print("Lottory Chance, Pull 777 to win for free food")
        lotto = input("Pull? (y/n): ")
        if lotto == "y" or lotto == "Y":
            pull_num = self.lottery()
            print("You pulled: " + str(pull_num))
            if pull_num == 777:
                print("You won!!!")
                print("Your total is 0 dollars.")
            else:
                print("Your total is {} dollars.".format(
                    self.calculate_total(self.get_order_by_no(order_no))))
        else:
            print("Your total is {} dollars.".format(
                self.calculate_total(self.get_order_by_no(order_no))))

    def update_order(self) -> None:
        order_no = input("Enter order number: ")
        print("Select an item to update")
        item = input("size/type/topping/drink/delivery/address): ")
        add_topping = False
        if item.lower() == "topping":
            ans = input("Add or remove topping? (a/r): ")
            if ans == "a":
                add_topping = True
        new_value = input("Enter the item to update: ")
        n = self.update_order_in_server(order_no, item, new_value, add_topping)
        if n == 0:
            print("Updated successfully")
        elif n == 1:
            print("Nothing updated")
        else:
            print("Failed to update")

    def cancel_order(self) -> None:
        order_no = input("Enter order number: ")
        if self.cancel_order_in_server(order_no) == 0:
            print("Order " + order_no + " cancelled successfully")
        else:
            print("Error: Invalid order number")

    def print_menu(self) -> None:
        print("Pizza Types: ")
        types = self.data.get_pizza_manager().get_pizza_types()
        for i in range(len(types)):
            print(
                str(i + 1) + ". " + types[i]["Type"] + " $" +
                str(types[i]["Price"]))
        print()

        print("Pizza Toppings: ")
        toppings = self.data.get_pizza_manager().get_pizza_toppings()
        for i in range(len(toppings)):
            print(
                str(i + 1) + ". " + toppings[i]["Topping"] + " $" +
                str(toppings[i]["Price"]))
        print()

        print("Pizza Sizes: ")
        sizes = self.data.get_pizza_manager().get_pizza_sizes()
        for i in range(len(sizes)):
            print(
                str(i + 1) + ". " + sizes[i]["Size"] + " $" +
                str(sizes[i]["Price"]))
        print()

        print("Pizza Drinks: ")
        drinks = self.data.get_pizza_manager().get_drinks()
        for i in range(len(drinks)):
            print(
                str(i + 1) + ". " + drinks[i]["Drink"] + " $" +
                str(drinks[i]["Price"]))
        print()

        print("Delivery Types: ")
        delivery_types = self.data.get_delivery_manager().get_delivery_types()
        for delivery_type in delivery_types:
            print(delivery_type)
        print()

    """
    Processing Methods
    """

    def check_pizza_size(self, pizza_size: str) -> bool:
        size_dict = self.data.get_pizza_manager().get_pizza_sizes()
        for obj in size_dict:
            if obj["Size"].lower() == pizza_size.lower():
                return True
        return False

    def check_pizza_type(self, pizza_type: str) -> bool:
        if pizza_type.lower() == "none":
            return True
        type_dict = self.data.get_pizza_manager().get_pizza_types()
        for obj in type_dict:
            if obj["Type"].lower() == pizza_type.lower():
                return True
        return False

    def check_topping(self, topping: str) -> bool:
        topping_dict = self.data.get_pizza_manager().get_pizza_toppings()
        for obj in topping_dict:
            if obj["Topping"].lower() == topping.lower():
                return True
        return False

    def check_drink(self, drink: str) -> bool:
        drink_dict = self.data.get_pizza_manager().get_drinks()
        for obj in drink_dict:
            if obj["Drink"].lower() == drink.lower():
                return True
        return False

    def check_delivery_type(self, delivery_type: str) -> bool:
        type_list = self.data.get_delivery_manager().get_delivery_types()
        for t in type_list:
            if t == delivery_type.lower():
                return True
        return False

    def get_order_by_no(self, order_no: int) -> Order:
        raw_order = self.data.get_order_manager().get_order(order_no)
        if raw_order == 404:
            return None
        order = Order.from_json(json.loads(raw_order))
        return order

    def get_delivery_by_type_no(self, delivery_type: str,
                                delivery_no: int) -> Delivery:
        raw_delivery = self.data.get_delivery_manager().get_each_delivery(
            delivery_type, delivery_no)
        if raw_delivery == 404:
            return None
        delivery = Delivery.from_json(raw_delivery)
        return delivery

    def submit_order_to_server(self, pizza_size: str, pizza_type: str,
                               toppings: [str], drink: str, delivery_type: str,
                               address: str) -> int:
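        # A named pizza type implies predefined toppings; merge them into the order below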
        if not pizza_type.lower() == "none":
            predefined = self.data.get_pizza_manager().get_predefined()
            predefined_toppings = []
            for d in predefined:
                if pizza_type in d:
                    predefined_toppings = d[pizza_type]
            for t in predefined_toppings:
                if t not in toppings:
                    toppings.append(t)

        order = OrderCreator.get_instance().create_order(
            pizza_size, pizza_type, toppings, drink, delivery_type)
        self.data.get_order_manager().add_new_order(order.get_order_no(),
                                                    order)
        delivery = DeliveryCreator.get_instance().create_delivery(
            address, order, order.get_order_no())
        self.data.get_delivery_manager().add_new_delivery(
            order.get_delivery(), delivery, order.get_delivery_no())
        return order.get_order_no()

    def update_order_in_server(self, order_no: int, item: str, new_value: str,
                               add_topping: bool) -> int:
        """
        Return 0 if updated successfully.
        Otherwise return -1.
        """
        order = self.get_order_by_no(order_no)
        delivery = self.get_delivery_by_type_no(order.get_delivery(), order_no)
        if item == "size":
            if not self.check_pizza_size(new_value):
                print("Error: Invalid size")
                return -1
            else:
                order.set_pizza_size(new_value)
                self.data.get_order_manager().update_order(
                    order.get_order_no(), order)
                delivery.set_order(order)
                self.data.get_delivery_manager().update_delivery(
                    order.get_delivery(), delivery, order.get_delivery_no())
                return 0
        elif item == "type":
            if not self.check_pizza_type(new_value):
                print("Error: Invalid type")
                return -1
            else:
                order.set_pizza_type(new_value)
                self.data.get_order_manager().update_order(
                    order.get_order_no(), order)
                delivery.set_order(order)
                self.data.get_delivery_manager().update_delivery(
                    order.get_delivery(), delivery, order.get_delivery_no())
                return 0
        elif item == "topping":
            if not self.check_topping(new_value):
                print("Error: Invalid topping")
                return -1
            else:
                if add_topping and new_value not in order.get_toppings():
                    order.add_topping(new_value)
                elif not add_topping and new_value in order.get_toppings():
                    order.remove_topping(new_value)
                else:
                    return 1
                self.data.get_order_manager().update_order(
                    order.get_order_no(), order)
                delivery.set_order(order)
                self.data.get_delivery_manager().update_delivery(
                    order.get_delivery(), delivery, order.get_delivery_no())
                return 0
        elif item == "drink":
            if not self.check_drink(new_value):
                print("Error: Invalid drink")
                return -1
            else:
                order.set_drink(new_value)
                self.data.get_order_manager().update_order(
                    order.get_order_no(), order)
                delivery.set_order(order)
                self.data.get_delivery_manager().update_delivery(
                    order.get_delivery(), delivery, order.get_delivery_no())
                return 0
        elif item == "delivery":
            if not self.check_delivery_type(new_value):
                print("Error: Invalid delivery type")
                return -1
            else:
                order.set_delivery(new_value)
                self.data.get_order_manager().update_order(
                    order.get_order_no(), order)
                delivery.set_order(order)
                self.data.get_delivery_manager().update_delivery(
                    new_value, delivery, delivery.get_delivery_no())
                return 0
        elif item == "address":
            delivery.set_address(new_value)
            self.data.get_delivery_manager().update_delivery(
                order.get_delivery(), delivery, delivery.get_delivery_no())
            return 0
        else:
            print("Error: Invalid item to update")
            return -1

    def cancel_order_in_server(self, order_no) -> int:
        """
        Return 0 if order_no is valid and the order is deleted successfully.
        Otherwise return -1.
        """
        order = self.get_order_by_no(order_no)
        if order is None:
            return -1
        status1 = self.data.get_delivery_manager().delete_delivery(
            order.get_delivery(), order.get_delivery_no())
        status2 = self.data.get_order_manager().delete_order(order_no)

        if status1 == 404 or status2 == 404:
            return -1
        return 0

    def check_json_type(self) -> bool:
        """
        Return True if server is the requests module,
        False if it is flask.request.
        """
        # 'requests' is a module, so isinstance() would raise TypeError; use identity
        return self.server is requests

    def calculate_total(self, order: Order) -> float:
        if order is None:
            return 0

        size_price = 0
        sizes = self.data.get_pizza_manager().get_pizza_sizes()
        for size in sizes:
            if size["Size"].lower() == order.get_pizza_size().lower():
                size_price = size["Price"]
                break

        type_price = 0
        types = self.data.get_pizza_manager().get_pizza_types()
        for pizza_type in types:
            if pizza_type["Type"].lower() == order.get_pizza_type().lower():
                type_price = pizza_type["Price"]
                break

        topping_price = 0
        toppings = self.data.get_pizza_manager().get_pizza_toppings()
        for order_topping in order.get_toppings():
            for topping in toppings:
                if topping["Topping"].lower() == order_topping.lower():
                    topping_price += topping["Price"]
                    break

        drink_price = 0
        drinks = self.data.get_pizza_manager().get_drinks()
        for drink in drinks:
            if drink["Drink"].lower() == order.get_drink().lower():
                drink_price = drink["Price"]
                break

        return size_price + type_price + topping_price + drink_price

    def lottery(self):
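        # Deterministic pseudo-lottery: reseed the RNG from the stored seed,
        # draw an integer in [0, 1000), and persist the draw as the next seed.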
        n = self.data.get_order_manager().get_seed_for_lotto()
        random.seed(n)
        n = int(random.random() * 1000)
        self.data.get_order_manager().set_seed_for_lotto(n)
        return n
Example #20
    def transform(self, X, y=None):
        return self.transformer.transform(X)


if __name__ == "__main__":
    # We can use this to run this file as a script and test the Preprocessor
    if True:  # Use the default input and output directories if no arguments are provided
        input_dir = "C:\\Users\\isabe\\Downloads\\monet-master\\starting_kit\\c1_input_data"
        output_dir = "./fichiers_preprocesses"
    else:
        input_dir = argv[1]
        output_dir = argv[2]

    basename = 'perso'

    D = DataManager(basename, input_dir)  # Load data
    print("*** Original data ***")
    print(D)

    Prepro = Preprocessor()

    # Preprocess on the data and load it back into D
    D.data['X_train'] = Prepro.fit_transform(D.data['X_train'],
                                             D.data['Y_train'])
    D.data['X_valid'] = Prepro.transform(D.data['X_valid'])
    D.data['X_test'] = Prepro.transform(D.data['X_test'])
    #D.feat_name = np.array(['PC1', 'PC2'])
    # D.feat_type = np.array(['Numeric', 'Numeric'])

    # Here show something that proves that the preprocessing worked fine
    print("*** Transformed data ***")
Example #21
from data_manager import DataManager
from analysis_models.airport_model import AirportModel
from analysis_models.bag_model import BagModel
import matplotlib.pyplot as plt
import pandas as pd

if __name__ == '__main__':

    data_folder = '../data/'
    output_folder = '../output/'
    airport_metadata_filename = 'DimFlyplassProccesed.csv'
    amount_files = 62
    data_file_1 = 0

    dm = DataManager(data_folder, output_folder, amount_files,
                     airport_metadata_filename)

    # Import data
    dm.read_multiple_data_files()
    bags = dm.bag_messages
    bags['sourceTimestamp'] = pd.to_datetime(bags['sourceTimestamp'])
    bags = bags.sort_values(by='sourceTimestamp')  # sort_values returns a new frame
    print(len(bags))
    filtered = bags[bags['bagEventCode'].map(
        lambda x: str(x) == 'BagTagGenerated')]
    print(len(filtered))
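    # count BagTagGenerated events per calendar day by flooring timestamps to day resolution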
    bagsByDay = filtered['sourceTimestamp'].groupby(
        filtered['sourceTimestamp'].dt.floor('d')).size().reset_index(
            name='count')
    print(len(bagsByDay))
    print(bagsByDay)
Example #22
from data_manager import DataManager

data_manager = DataManager()

print("Welcome to Lui's Flight Club.\nWe're dedicated to finding the best flight deals for you and will notify you "
      "through email.")
continue_verification = True
first_name = input("What is your first name?\n").capitalize()
last_name = input("What is your last name?\n").capitalize()
user_email = input("What is your email?\n")
while continue_verification:
    email_confirmation = input("Type your email again, please.\n")
    if email_confirmation == user_email:
        print("Alright! You're in the club.")
        data_manager.add_new_user(
            first_name,
            last_name,
            user_email)
        continue_verification = False
    else:
        print("Emails don't match.")
Example #23
from data_manager import DataManager
from flight_search import FlightSearch
from flight_data import FlightData
from notification_manager import NotificationManager

data_manager = DataManager()
sheet_data = data_manager.get_data()

if sheet_data[0]['iataCode'] == "":
    flight_search = FlightSearch()
    for city in sheet_data:
        city["iataCode"] = flight_search.get_city_code(city["city"])
    print(f"sheet_data:\n {sheet_data}")

    data_manager.destination_data = sheet_data
    data_manager.input_data()

flight_data = FlightData()

for city in sheet_data:
    data = flight_data.flight_info(city["iataCode"])
    if data == "unavailable":
        print(f"{city['city']}: not available, even with stopover")
        continue
    else:
        price = data["price"]
        print(f"{city['city']}: £{price}")
    if price is not None:
        if int(price) < city["lowestPrice"]:
            routes = data['route']
            depart = routes[0]['local_arrival'].split("T")[0]
Example #24
from sys import argv, path

path.append("../ingestion_program")  # Contains libraries you will need
from data_manager import DataManager  # such as DataManager

import prepro

if __name__=="__main__":
    # We can use this to run this file as a script and test the Preprocessor
    if len(argv) == 1:  # Use the default input and output directories if no arguments are provided
        input_dir = "../public_data"
        output_dir = "../results" # Create this directory if it does not exist
    else:
        input_dir = argv[1]
        output_dir = argv[2]
    
    basename = 'air'
    D = DataManager(basename, input_dir) # Load data
    print("*** Original data ***")
    # We test our preprocessing very simply, with two printouts:
    # first, check that the raw data has 14 features containing integer values.
    print("Number of features: {0}.".format(D.data['X_train'].shape[1]))
    print(D.data['X_train'][0,:])
    
    Prepro = prepro.Preprocessor()
 
    # Preprocess the data and load it back into D
    D.data['X_train'] = Prepro.fit_transform(D.data['X_train'], D.data['Y_train'])
    D.data['X_valid'] = Prepro.transform(D.data['X_valid'])  # transform only; never re-fit on validation or test data
    D.data['X_test'] = Prepro.transform(D.data['X_test'])
  
    # Here show something that proves that the preprocessing worked fine
    print("*** Transformed data ***")
def get_id_from_name(commodityName):
    from data_manager import DataManager
    DataManager.init()
    commodityName = commodityName.lower()
    return DataManager.nameToId.get(commodityName)
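A hypothetical lookup (the commodity name is a placeholder; matching is case-insensitive):

print(get_id_from_name("Gold"))  # the commodity's id, or None if the name is unknown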
Example #26
    def __init__(self):

        # Make sure all the directories exist
        if not tf.gfile.Exists(TFLOG_PATH):
            tf.gfile.MakeDirs(TFLOG_PATH)
        if not tf.gfile.Exists(EXPERIENCE_PATH):
            tf.gfile.MakeDirs(EXPERIENCE_PATH)
        if not tf.gfile.Exists(NET_SAVE_PATH):
            tf.gfile.MakeDirs(NET_SAVE_PATH)

        # Initialize our session
        self.session = tf.Session()
        self.graph = self.session.graph

        with self.graph.as_default():

            # View the state batches
            self.visualize_input = VISUALIZE_BUFFER
            if self.visualize_input:
                self.viewer = CostmapVisualizer()

            # Hardcode input size and action size
            self.height = 86
            self.width = self.height
            self.depth = 4
            self.action_dim = 2

            # Initialize the current action and the old action and old state for setting experiences
            self.old_state = np.zeros((self.width, self.height, self.depth), dtype='int8')
            self.old_action = np.ones(2, dtype='float')
            self.network_action = np.zeros(2, dtype='float')
            self.noise_action = np.zeros(2, dtype='float')
            self.action = np.zeros(2, dtype='float')

            # Initialize the grad inverter object to keep the action bounds
            self.grad_inv = GradInverter(A0_BOUNDS, A1_BOUNDS, self.session)

            # Make sure the directory for the data files exists
            if not tf.gfile.Exists(DATA_PATH):
                tf.gfile.MakeDirs(DATA_PATH)

            # Initialize summary writers to plot variables during training
            self.summary_op = tf.merge_all_summaries()
            self.summary_writer = tf.train.SummaryWriter(TFLOG_PATH)

            # Initialize actor and critic networks
            self.actor_network = ActorNetwork(self.height, self.action_dim, self.depth, self.session,
                                              self.summary_writer)
            self.critic_network = CriticNetwork(self.height, self.action_dim, self.depth, self.session,
                                                self.summary_writer)

            # Initialize the saver to save the network params
            self.saver = tf.train.Saver()

            # initialize the experience data manager
            self.data_manager = DataManager(BATCH_SIZE, EXPERIENCE_PATH, self.session)

            # Uncomment if collecting a buffer for the autoencoder
            # self.buffer = deque()

            # Should we load the pre-trained params?
            # If so: Load the full pre-trained net
            # Else:  Initialize all variables, then overwrite the conv layers with the pretrained filters
            if PRE_TRAINED_NETS:
                self.saver.restore(self.session, NET_LOAD_PATH)
            else:
                self.session.run(tf.initialize_all_variables())

            tf.train.start_queue_runners(sess=self.session)
            time.sleep(1)

            # Initialize a random process the Ornstein-Uhlenbeck process for action exploration
            self.exploration_noise = OUNoise(self.action_dim, MU, THETA, SIGMA)
            self.noise_flag = True

            # Initialize time step
            self.training_step = 0

            # Flag: don't learn the first experience
            self.first_experience = True

            # After the graph has been filled add it to the summary writer
            self.summary_writer.add_graph(self.graph)
Example #27
class DDPG:

    def __init__(self):

        # Make sure all the directories exist
        if not tf.gfile.Exists(TFLOG_PATH):
            tf.gfile.MakeDirs(TFLOG_PATH)
        if not tf.gfile.Exists(EXPERIENCE_PATH):
            tf.gfile.MakeDirs(EXPERIENCE_PATH)
        if not tf.gfile.Exists(NET_SAVE_PATH):
            tf.gfile.MakeDirs(NET_SAVE_PATH)

        # Initialize our session
        self.session = tf.Session()
        self.graph = self.session.graph

        with self.graph.as_default():

            # View the state batches
            self.visualize_input = VISUALIZE_BUFFER
            if self.visualize_input:
                self.viewer = CostmapVisualizer()

            # Hardcode input size and action size
            self.height = 86
            self.width = self.height
            self.depth = 4
            self.action_dim = 2

            # Initialize the current action and the old action and old state for setting experiences
            self.old_state = np.zeros((self.width, self.height, self.depth), dtype='int8')
            self.old_action = np.ones(2, dtype='float')
            self.network_action = np.zeros(2, dtype='float')
            self.noise_action = np.zeros(2, dtype='float')
            self.action = np.zeros(2, dtype='float')

            # Initialize the grad inverter object to keep the action bounds
            self.grad_inv = GradInverter(A0_BOUNDS, A1_BOUNDS, self.session)

            # Make sure the directory for the data files exists
            if not tf.gfile.Exists(DATA_PATH):
                tf.gfile.MakeDirs(DATA_PATH)

            # Initialize summary writers to plot variables during training
            self.summary_op = tf.merge_all_summaries()
            self.summary_writer = tf.train.SummaryWriter(TFLOG_PATH)

            # Initialize actor and critic networks
            self.actor_network = ActorNetwork(self.height, self.action_dim, self.depth, self.session,
                                              self.summary_writer)
            self.critic_network = CriticNetwork(self.height, self.action_dim, self.depth, self.session,
                                                self.summary_writer)

            # Initialize the saver to save the network params
            self.saver = tf.train.Saver()

            # initialize the experience data manager
            self.data_manager = DataManager(BATCH_SIZE, EXPERIENCE_PATH, self.session)

            # Uncomment if collecting a buffer for the autoencoder
            # self.buffer = deque()

            # Should we load the pre-trained params?
            # If so: Load the full pre-trained net
            # Else:  Initialize all variables, then overwrite the conv layers with the pretrained filters
            if PRE_TRAINED_NETS:
                self.saver.restore(self.session, NET_LOAD_PATH)
            else:
                self.session.run(tf.initialize_all_variables())

            tf.train.start_queue_runners(sess=self.session)
            time.sleep(1)

            # Initialize a random process the Ornstein-Uhlenbeck process for action exploration
            self.exploration_noise = OUNoise(self.action_dim, MU, THETA, SIGMA)
            self.noise_flag = True

            # Initialize time step
            self.training_step = 0

            # Flag: don't learn the first experience
            self.first_experience = True

            # After the graph has been filled add it to the summary writer
            self.summary_writer.add_graph(self.graph)

    def train(self):

        # Check if the buffer is big enough to start training
        if self.data_manager.enough_data():

            # get the next random batch from the data manager
            state_batch, \
                action_batch, \
                reward_batch, \
                next_state_batch, \
                is_episode_finished_batch = self.data_manager.get_next_batch()

            state_batch = np.divide(state_batch, 100.0)
            next_state_batch = np.divide(next_state_batch, 100.0)

            # Are we visualizing the first state batch for debugging?
            # If so: We have to scale up the values for grey scale before plotting
            if self.visualize_input:
                state_batch_np = np.asarray(state_batch)
                state_batch_np = np.multiply(state_batch_np, -100.0)
                state_batch_np = np.add(state_batch_np, 100.0)
                self.viewer.set_data(state_batch_np)
                self.viewer.run()
                self.visualize_input = False

            # Calculate y for the td_error of the critic
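            # y_i = r_i                                      (terminal transition)
            # y_i = r_i + GAMMA * Q'(s_{i+1}, mu'(s_{i+1}))  (otherwise)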
            y_batch = []
            next_action_batch = self.actor_network.target_evaluate(next_state_batch)
            q_value_batch = self.critic_network.target_evaluate(next_state_batch, next_action_batch)

            for i in range(0, BATCH_SIZE):
                if is_episode_finished_batch[i]:
                    y_batch.append([reward_batch[i]])
                else:
                    y_batch.append(reward_batch[i] + GAMMA * q_value_batch[i])

            # Now that we have the y batch, let's train the critic
            self.critic_network.train(y_batch, state_batch, action_batch)

            # Get the action batch so we can calculate the action gradient with it
            # Then get the action gradient batch and adapt the gradient with the gradient inverting method
            action_batch_for_gradients = self.actor_network.evaluate(state_batch)
            q_gradient_batch = self.critic_network.get_action_gradient(state_batch, action_batch_for_gradients)
            q_gradient_batch = self.grad_inv.invert(q_gradient_batch, action_batch_for_gradients)

            # Now we can train the actor
            self.actor_network.train(q_gradient_batch, state_batch)

            # Save model if necessary
            if self.training_step > 0 and self.training_step % SAVE_STEP == 0:
                self.saver.save(self.session, NET_SAVE_PATH, global_step=self.training_step)

            # Update time step
            self.training_step += 1

        self.data_manager.check_for_enqueue()

    def get_action(self, state):

        # normalize the state
        state = state.astype(float)
        state = np.divide(state, 100.0)

        # Get the action
        self.action = self.actor_network.get_action(state)

        # Are we using noise?
        if self.noise_flag:
            # scale noise down to 0 at training step 3000000
            if self.training_step < MAX_NOISE_STEP:
                self.action += (MAX_NOISE_STEP - self.training_step) / MAX_NOISE_STEP * self.exploration_noise.noise()
            # if action value lies outside of action bounds, rescale the action vector
            if self.action[0] < A0_BOUNDS[0] or self.action[0] > A0_BOUNDS[1]:
                self.action *= np.fabs(A0_BOUNDS[0]/self.action[0])
            if self.action[1] < A1_BOUNDS[0] or self.action[1] > A1_BOUNDS[1]:
                self.action *= np.fabs(A1_BOUNDS[0]/self.action[1])

        # Live Q value output for this action and state
        self.print_q_value(state, self.action)

        return self.action

    def set_experience(self, state, reward, is_episode_finished):

        # Make sure we're saving a new old_state for the first experience of every episode
        if self.first_experience:
            self.first_experience = False
        else:
            self.data_manager.store_experience_to_file(self.old_state, self.old_action, reward, state,
                                                       is_episode_finished)

            # Uncomment if collecting data for the auto_encoder
            # experience = (self.old_state, self.old_action, reward, state, is_episode_finished)
            # self.buffer.append(experience)

        if is_episode_finished:
            self.first_experience = True
            self.exploration_noise.reset()

        # Save the old state and old action for the next experience
        self.old_state = state
        self.old_action = self.action

    def print_q_value(self, state, action):

        string = "-"
        q_value = self.critic_network.evaluate([state], [action])
        stroke_pos = 30 * q_value[0][0] + 30
        if stroke_pos < 0:
            stroke_pos = 0
        elif stroke_pos > 60:
            stroke_pos = 60
        print '[' + stroke_pos * string + '|' + (60-stroke_pos) * string + ']', "Q: ", q_value[0][0], \
            "\tt: ", self.training_step
Example #28
    def __init__(self):
        self.data = DataManager("data.db")
        self.email = self.data.getPropertyValue("email")
        self.password = self.data.getPropertyValue("password")
Example #29
            if existing_entity is None or existing_entity.lastSlackUpdate is None or \
                            existing_entity.lastSlackUpdate <= existing_entity.modifiedDate:
                entity.lastSlackUpdate = datetime.datetime.now()
                new_and_updated_entities.append(entity)

            self.data_manager.put_entity(entity)

            print "adding entity {}".format(entity.id)
            #self.get_file_content(entity.id)

        return new_and_updated_entities

    def process_updated_and_new_entities(self, new_and_updated_entities):
        pass


if __name__ == '__main__':
    from data_manager import DataManager

    configParser = ConfigParser.RawConfigParser()
    configParser.read('config/dob.cfg')

    data_manager = DataManager(configParser=configParser)
    data_manager.connect()

    watcher = GDriveWatcher(configParser=configParser, data_manager=data_manager)
    watcher.connect()
    watcher.update()

Example #30
    def __init__(self):
        self.data = DataManager("data.db")
        QtCore.QObject.__init__(self)
Example #31
from data_manager import DataManager
from flight_search import FlightSearch
from datetime import datetime, timedelta
from notification_manager import NotificationManager

ORIGIN_CITY_CODE = "NYC"
# Step 3
data_manager = DataManager()
sheet_data = data_manager.get_data_destination()
customer_data = data_manager.get_customer_emails()
flight_search = FlightSearch()
notificator = NotificationManager()

# If the city in the spreadsheet doesn't have an IATA Code, this part will get it and update the spreadsheet
for row in range(len(sheet_data)):
    if sheet_data[row]['IATA Code'] == '':
        sheet_data[row]['IATA Code'] = flight_search.get_city_code(
            sheet_data[row]['City'])
        data_manager.data_destination = sheet_data
        data_manager.update_destination_code()

# Obtain dates
tomorrow = datetime.now() + timedelta(days=1)
six_months_from_now = tomorrow + timedelta(days=(6 * 30))

found_flights = True
for destination in sheet_data:
    flight = flight_search.search_for_flight(
        ORIGIN_CITY_CODE,
        destination['IATA Code'],
        from_date=tomorrow,
Example #32
import requests
from flight_search import FlightSearch
from notification_manager import NotificationManager
from datetime import datetime
from data_manager import DataManager
from flight_data import FlightData
# This file uses the DataManager, FlightSearch, FlightData, and NotificationManager classes to meet the program requirements.
SHEET_API_KEY = "3d0f05f887e0580677d4c0878ad2d460"

data_manager = DataManager(SHEET_API_KEY)

sheet_data = data_manager.get_data()['prices']
flight_data = FlightData()
# print(flight_data.api_call("PAR"))

notification = NotificationManager()

for data in sheet_data:
    price = flight_data.api_call(data['iataCode'])
    for lowest in price:
        if data['lowestPrice'] > lowest['price']:
            notification.send_message(
                f"Low price alert! Only ${lowest['price']} to fly from "
                f"{lowest['cityFrom']}-{lowest['flyFrom']}"
                f" to {lowest['cityTo']}-{lowest['flyTo']}"
                f" from {datetime.now().date()} to "
                f"{lowest['utc_departure'].split('T')[0]}")
        break  # only the first result is checked

# for data in sheet_data:
#     if data['iataCode'] == "":
Example #33
    def add_polygon_with_circles_and_label(self, path, linecolor=None, linewidth=None,
                                            vertex_color=None, vertex_radius=None,
                                            label='unknown', label_pos=None,
                                            section=None, index=None, type=None,
                                            edits=None, side=None, side_manually_assigned=None,  # None instead of a mutable [] default
                                            contour_id=None,
                                            position=None,
                                            category='contour', **kwargs):
        """
        Additional keyword arguments are used to set polygon properties.
        - set_name (str): specifies the set this polygon belongs to (handdrawn or aligned_atlas)

        Args:
            type (str): One of the following,
                    - derived_from_atlas
                    - confirmed (hand-drawn or confirming interpolated polygons)
                    - interpolated
        """

        closed = polygon_is_closed(path=path)
        vertices = vertices_from_polygon(path=path, closed=closed)
        if len(vertices) == 2:
            raise Exception("Polygon has only two vertices. Skip adding this polygon.")

        if index is not None:
            assert isinstance(index, int), "add_polygon_with_circles_and_label(): Argument `index` must be integer."

        polygon = self.add_polygon_with_circles(path, linecolor=linecolor, linewidth=linewidth,
                                                vertex_color=vertex_color, vertex_radius=vertex_radius,
                                                section=section, index=index)
        polygon.signal_emitter.property_changed.connect(self.polygon_property_changed)

        # Compute the polygon's coordinate in the depth dimension.
        if hasattr(self.data_feeder, 'sections'):
            if section is None:
                section = self.data_feeder.sections[index]
            position = DataManager.convert_section_to_z(sec=section, resolution=self.data_feeder.resolution, mid=True, stack=self.gui.stack)
            # z0, z1 = self.convert_section_to_z(sec=section, downsample=self.data_feeder.downsample, mid=True)
            # position = (z0 + z1) / 2
        else:
            position = index
        polygon.set_properties('position', position)

        polygon.set_properties('label', label)
        if label_pos is not None:
            polygon.set_properties('label_pos', label_pos)
        polygon.set_properties('type', type)
        polygon.set_properties('side', side)
        polygon.set_properties('side_manually_assigned', side_manually_assigned)
        polygon.set_properties('contour_id', contour_id) # Could be None - will be generated new in convert_drawings_to_entries()

        polygon.set_properties('orientation', self.data_feeder.orientation)

        if edits is None or len(edits) == 0:
            polygon.set_properties('edits',
                                   [{'username': self.gui.get_username(),
                                     'timestamp': datetime.now().strftime("%m%d%Y%H%M%S")}])
        else:
            polygon.set_properties('edits', edits)

        polygon.set_properties('class', category)

        if hasattr(self.data_feeder, 'sections'):
            polygon.set_properties('section', section)
            d_voxel = DataManager.convert_section_to_z(sec=section, resolution=self.data_feeder.resolution, mid=True, stack=self.gui.stack)
            d_um = d_voxel * convert_resolution_string_to_voxel_size(stack=self.gui.stack, resolution=self.data_feeder.resolution)
            polygon.set_properties('position_um', d_um)
            # print 'd_voxel', d_voxel, 'position_um', d_um
        else:
            polygon.set_properties('voxel_position', index)
            d_um = index * convert_resolution_string_to_voxel_size(stack=self.gui.stack, resolution=self.data_feeder.resolution)
            polygon.set_properties('position_um', d_um)
            # print 'index', index, 'position_um', d_um

        for key, value in kwargs.items():
            polygon.set_properties(key, value)

        return polygon
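
Aside: the `edits` parameter above must default to None rather than a mutable [], because a default list is created once and shared across every call. A minimal standalone demonstration of that pitfall (toy functions, invented for illustration):

def add_edit(edit, edits=[]):          # BUG: the default list is created once
    edits.append(edit)
    return edits

print(add_edit("a"))  # ['a']
print(add_edit("b"))  # ['a', 'b'] -- the earlier edit leaks into the new call


def add_edit_fixed(edit, edits=None):  # idiomatic: default to None, create per call
    if edits is None:
        edits = []
    edits.append(edit)
    return edits

print(add_edit_fixed("a"))  # ['a']
print(add_edit_fixed("b"))  # ['b']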
Example #34
0
import os

from flask import Flask, request, render_template, redirect, make_response
from werkzeug.utils import secure_filename

from data_manager import DataManager
from model.puzzle_manager import PuzzleManager
from model.puzzle import Puzzle, PuzzleState
from model.problem import Problem
from model.problem_attempt import ProblemAttempt


########## Initialization ###########


app = Flask(__name__, static_url_path="/content", static_folder="content")

datamgr = DataManager()
datamgr.load()

puzzmgr = PuzzleManager(datamgr) 
puzzmgr.load()

ADMIN_PASSWORD = '******'

# Setup web app configurations
app.config['SOLUTION_FOLDER'] = 'solutions/'
if not os.path.exists(app.config['SOLUTION_FOLDER']):
    os.makedirs(app.config['SOLUTION_FOLDER'])

app.config['MAX_CONTENT_LENGTH'] = 10 * 1024  # 10KB
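
Note: with MAX_CONTENT_LENGTH set, Flask rejects larger request bodies with HTTP 413. A minimal sketch of handling that explicitly, assuming the same 10KB limit (the handler and message below are illustrative, not part of the original app):

from flask import Flask

app = Flask(__name__)
app.config['MAX_CONTENT_LENGTH'] = 10 * 1024  # 10KB


@app.errorhandler(413)
def request_entity_too_large(error):
    # Flask raises 413 (Request Entity Too Large) when a body exceeds MAX_CONTENT_LENGTH
    return "Upload exceeds the 10KB limit", 413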

Example #35
0
from flight_data import FlightData
from data_manager import DataManager
from flight_search import FlightSearch
from notification_manager import NotificationManager

flight_search = FlightSearch()
data_manager = DataManager()
sheet_data = data_manager.get_destination_data()

if sheet_data[0]["iataCode"] == "":
    for row in sheet_data:
        row["iataCode"] = flight_search.get_destination_code(row["city"])
    print(sheet_data)

    data_manager.destination_data = sheet_data
    data_manager.update_destination_codes()

print(sheet_data)
cities = [value["iataCode"] for value in sheet_data]

for x, i in enumerate(cities):
    price = flight_search.get_prices(i)
    notification = NotificationManager(price.price, price.origin_city,
                                       price.origin_airport,
                                       price.destination_city,
                                       price.return_date, price.out_date,
                                       price.destination_airport, price.noinfo)

    if price.noinfo and price.price < sheet_data[x]["lowestPrice"]:
Example #36
0
    def __init__(
        self,
        batch_size,
        model_path,
        examples_path,
        max_image_width,
        train_test_ratio,
        restore,
        char_set_string,
    ):
        self.step = 0
        self.CHAR_VECTOR = char_set_string
        self.NUM_CLASSES = len(self.CHAR_VECTOR) + 1

        print(f"CHAR_VECTOR {self.CHAR_VECTOR}")
        print(f"NUM_CLASSES {self.NUM_CLASSES}")

        self.__model_path = model_path
        self.__save_path = os.path.join(model_path, "ckp")

        self.__restore = restore

        self.__training_name = str(int(time.time()))
        self.__session = tf.Session()

        # Building graph
        with self.__session.as_default():
            (
                self.__inputs,
                self.__targets,
                self.__seq_len,
                self.__logits,
                self.__decoded,
                self.__optimizer,
                self.__acc,
                self.__cost,
                self.__max_char_count,
                self.__init,
            ) = self.crnn(max_image_width)
            self.__init.run()

        with self.__session.as_default():
            self.__saver = tf.train.Saver(tf.global_variables(),
                                          max_to_keep=10)
            # Loading last save if needed
            if self.__restore:
                print("Restoring")
                ckpt = tf.train.latest_checkpoint(self.__model_path)
                if ckpt:
                    print("Checkpoint is valid")
                    self.step = int(ckpt.split("-")[1])
                    self.__saver.restore(self.__session, ckpt)

        # Creating data_manager
        self.__data_manager = DataManager(
            batch_size,
            model_path,
            examples_path,
            max_image_width,
            train_test_ratio,
            self.__max_char_count,
            self.CHAR_VECTOR,
        )
Example #37
0
from data_manager import DataManager
from extractors import Extractors
from classifiers import Classifiers

data_manager = DataManager()
extractor = Extractors()
classifiers = Classifiers()

X, y, encoder = data_manager.loadData()

results = []
encoded_x1 = extractor.glcm(X)
# Pass the extracted features to the classifier (the original passed the still-empty results list)
classifiers.classify(encoded_x1, encoder, y, title='GLCM - ')

encoded_x2 = extractor.lbp(X)
classifiers.classify(encoded_x2, encoder, y, title='LBP - ')

encoded_x3 = extractor.huMoments(X)
classifiers.classify(encoded_x3, encoder, y, title='HuMoments - ')

# Concatenate the three feature vectors of each sample into one mixed vector
for index, result in enumerate(encoded_x1):
    new_result = list(result)  # copy so encoded_x1 is not mutated in place

    for x2 in encoded_x2[index]:
        new_result.append(x2)
    for x3 in encoded_x3[index]:
        new_result.append(x3)

    results.append(new_result)

classifiers.classify(results, encoder, y, title='Mix - ')
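
If the extractors return per-sample numpy arrays, the concatenation loop above collapses to a single hstack; a minimal sketch with toy feature shapes (invented for the demo):

import numpy as np

encoded_x1 = np.array([[1.0, 2.0], [3.0, 4.0]])   # e.g. GLCM features
encoded_x2 = np.array([[5.0], [6.0]])             # e.g. LBP features
encoded_x3 = np.array([[7.0, 8.0], [9.0, 10.0]])  # e.g. Hu moments

# One row per sample, columns are the concatenated feature vectors
mixed = np.hstack([encoded_x1, encoded_x2, encoded_x3])
print(mixed.shape)  # (2, 5)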
Example #38
0
import os
import time
import datetime

import tensorflow as tf

# Module paths below are assumptions based on how these classes are imported elsewhere
from data_manager import DataManager
from neural_network import NeuralNetwork


def main():

    # create variable scope
    with tf.variable_scope("NN", reuse=tf.AUTO_REUSE):
        dm = DataManager(data_dir='data',
                         stopwords_file='',
                         sequence_len=100,
                         test_size=0.2,
                         val_samples=10,
                         n_samples=None,
                         random_state=None)
        nn = NeuralNetwork(hidden_size=[75],
                           vocab_size=dm.vocab_size,
                           embedding_size=300,
                           max_length=dm.sequence_len,
                           learning_rate=0.01,
                           n_classes=2,
                           random_state=None)

    # Prepare summaries
    summaries_dir = '{0}/{1}'.format(
        "logs",
        datetime.datetime.now().strftime('%d_%b_%Y-%H_%M_%S'))
    train_writer = tf.summary.FileWriter(summaries_dir + '/train')
    validation_writer = tf.summary.FileWriter(summaries_dir + '/validation')
    # Prepare model directory
    model_name = str(int(time.time()))
    model_dir = '{0}/{1}'.format("checkpoint", model_name)
    if not os.path.exists(model_dir):
        os.makedirs(model_dir)

    # Train model
    init = tf.global_variables_initializer()
    sess = tf.Session()
    sess.run(init)
    saver = tf.train.Saver()
    x_val, y_val, val_seq_len = dm.get_val_data()
    train_writer.add_graph(nn.input.graph)

    for i in range(10):
        # Perform training step
        x_train, y_train, train_seq_len = dm.next_batch(10)
        #print (x_train.shape, y_train.shape, train_seq_len)
        #(10, 23) (10, 2) [10  6  7  5  7  8  4  5 11  6]
        train_loss, _, summary = sess.run(
            [nn.loss, nn.train_step, nn.merged],
            feed_dict={
                nn.input: x_train,
                nn.target: y_train,
                nn.seq_len: train_seq_len,
                nn.dropout_keep_prob: 0.5
            })
        train_writer.add_summary(
            summary,
            i)  # Write train summary for step i (TensorBoard visualization)
        print('{0}/{1} train loss: {2:.4f}'.format(i + 1, "10 epoch",
                                                   train_loss))

        # Check validation performance
        if (i + 1) % 10 == 0:
            val_loss, accuracy, summary = sess.run(
                [nn.loss, nn.accuracy, nn.merged],
                feed_dict={
                    nn.input: x_val,
                    nn.target: y_val,
                    nn.seq_len: val_seq_len,
                    nn.dropout_keep_prob: 1
                })
            validation_writer.add_summary(
                summary, i
            )  # Write validation summary for step i (TensorBoard visualization)
            print('   validation loss: {0:.4f} (accuracy {1:.4f})'.format(
                val_loss, accuracy))

    # Save model
    checkpoint_file = '{}/model.ckpt'.format(model_dir)
    save_path = saver.save(sess, checkpoint_file)
    print('Model saved in: {0}'.format(model_dir))
Example #39
0
from data_manager import DataManager
from flight_search import FlightSearch
from notification_manager import NotificationManager
from flight_data import FlightData

data = DataManager()
flight = FlightSearch()
notif = NotificationManager(data)

print("Welcome to Sidi Flight Club.")
print("We find the best flight deals and email you.")
first_name = input("What is your first name?\n")
last_name = input("What is your last name?\n")
email = input("What is your email?\n")
if email != input("Type your email again.\n"):
    print("bad email")
else:
    print("You're in the club!")
    data.add_user(first_name, last_name, email)

flight_data = FlightData(data_manager=data, flight_search=flight, notif_manager=notif)
Example #40
0
from datetime import datetime, timedelta
from data_manager import DataManager
from flight_search import FlightSearch
from notification_manager import NotificationManager

data_manager = DataManager()
sheet_data = data_manager.get_destination_data()
print(sheet_data)
flight_search = FlightSearch()
notification_manager = NotificationManager()

ORIGIN_CITY_IATA = "OAK"

if sheet_data[0]["iataCode"] == "":
    for row in sheet_data:
        row["iataCode"] = flight_search.get_destination_code(row["city"])
    data_manager.destination_data = sheet_data
    data_manager.update_destination_codes()

tomorrow = datetime.now() + timedelta(days=1)
six_month_from_today = datetime.now() + timedelta(days=(6 * 30))

for destination in sheet_data:
    flight = flight_search.check_flights(ORIGIN_CITY_IATA,
                                         destination["iataCode"],
                                         from_time=tomorrow,
                                         to_time=six_month_from_today)
    try:
        if int(flight.price) < int(destination["lowestPrice"]):
            print(int(flight.price) < int(destination["lowestPrice"]))
            id = destination["id"]
Example #41
0
class DOBService(object):

    def __init__(self):
        self.interval = 5.0
        self.running = False

        self.configParser = ConfigParser.RawConfigParser()
        self.configParser.read('config/dob.cfg')

        self.data_manager = DataManager(configParser=self.configParser)
        self.drive_watcher = GDriveWatcher(configParser=self.configParser, data_manager=self.data_manager)
        self.slack_bot = SlackBot(configParser=self.configParser)
        self.witmanager = WitAIManager(configParser=self.configParser)

        self.last_drive_scan = None


    def start(self):
        if self.running:
            return

        logging.info('Service starting')

        self.setup()

        self.running = True

        # self.thread = threading.Thread(target=self.run, args=())
        # self.thread.daemon = True
        # self.thread.start()

        self.run()


    def setup(self):
        self.data_manager.connect()
        self.drive_watcher.connect()
        self.slack_bot.connect()

    def run(self):
        while self.running:
            now = datetime.datetime.now()

            # get updated files
            # rescan every 30 minutes; total_seconds() avoids the day-wrap bug of .seconds
            if self.last_drive_scan is None or (now - self.last_drive_scan).total_seconds() >= 30 * 60:
                self.last_drive_scan = now

                updated_files = self.drive_watcher.update()
                self.broadcast_updated_files(updated_files)

            self.process_slack_messages()

            time.sleep(self.interval)

    def process_slack_messages(self):

        packets = self.slack_bot.update()
        for packet in packets:
            event_type = packet['type']

            #if event_type == 'hello':

            #elif event_type == 'message':
            #    self.process_slack_message(packet['message'])
            #elif event_type == 'user_typing':
            #
            #elif event_type == 'team_join':
            #
            #elif event_type == 'presence_change':
            #
            #else:
            #

            if event_type == 'message':
                self.process_slack_message(packet)

    def process_slack_message(self, packet):
        if packet is None or len(packet) == 0:
            return

        logging.info('Processing message {}'.format(packet))

        if not self.is_valid_slack_message(packet):
            return

        user = self.slack_bot.users[packet['user']]
        channel = self.slack_bot.channels[self.slack_bot.get_channel_key_for_channel_with_id(packet['channel'])]
        text = packet['text']

        if text is None or len(text) == 0:
            return

        import re
        matches = re.findall('<@[a-zA-Z0-9]*>:', text, re.DOTALL)
        idx = 0
        for match in matches:
            #if ":" in match:
            if idx == 0:
                text = text.replace(match, "")
            else:
                user_id = match.replace("<", "").replace(">", "").replace("@", "").replace(":", "").strip()
                user = None
                if user_id in self.slack_bot.users:
                    user = self.slack_bot.users[user_id]
                if user is not None:
                    #text = text.replace(match, user['emailAddress'])
                    text = text.replace(match, user['name'])
                else:
                    text = text.replace(match, "")

            idx += 1

        status, outcomes = self.witmanager.query(text)

        if outcomes is None or len(outcomes) == 0:
            return

        outcome = outcomes[0]  # TODO: check that the first always has the highest confidence value

        logging.info('Message has intent {}'.format(outcome.intent))

        if outcome.intent == WitAIOutcome.INTENT_RETRIEVE_FILES:
            self.process_intent_to_retrieve_files(user, channel, outcome)

    def process_intent_to_retrieve_files(self, user, channel, outcome):
        min_date = None
        max_date = None
        contacts = []

        for entity in outcome.entities:
            if entity.type == WitAIEntity.TYPE_CONTACT:
                if entity.value is None or len(entity.value) == 0:
                    continue

                contacts.append(entity.value.lower())

            elif entity.type == WitAIEntity.TYPE_DATE:
                cur_date = entity.value_to_date()

                if cur_date is not None:
                    if min_date is None and max_date is None:
                        min_date = cur_date
                        max_date = cur_date
                    else:
                        if min_date > cur_date:
                            min_date = cur_date

                        if max_date < cur_date:
                            max_date = cur_date

        # query db
        entities = self.data_manager.search_files(contacts=contacts, min_date=min_date, max_date=max_date)

        if entities is None or len(entities) == 0:
            msg = "@{} cannot find any files based on your search terms".format(user['name'])
            self.slack_bot.send_message(msg, channel=channel['id'])
        else:
            #msgs = []
            msg = "here are some files matching your query:"
            for entity in entities:
                if entity.mimeType == GMimeType.Folder:
                    continue

                #msgs.append("{} ({})".format(entity.title, entity.get_link()))
                msg += "\n"

                link = entity.get_link()
                if link is None or len(link) == 0:
                    msg += "{}".format(entity.title)
                else:
                    msg += "{} ({})".format(entity.title, entity.get_link())

            #for msg in msgs:
            #    self.slack_bot.send_message(msg, channel=channel['id'])

            self.slack_bot.send_message(msg, channel=channel['id'])

    def is_valid_slack_message(self, message):
        if 'channel' not in message:
            return False

        if 'text' not in message:
            return False

        channel = message['channel']
        text = message['text']
        mentions = DOBService.extract_entities_from_text(text)

        return self.slack_bot.me['user_id'] in mentions and channel == self.slack_bot.channels[self.slack_bot.channel]['id']

    @staticmethod
    def extract_entities_from_text(text):
        import re
        return re.findall('<@(.*?)>', text, re.DOTALL)

    def broadcast_updated_files(self, updated_files):
        print "broadcast_updated_files; updated/added count {}".format(len(updated_files))

        for entity in updated_files:
            msgs = self.construct_slack_message_for_entity(entity)
            for msg in msgs:
                self.slack_bot.send_message(msg)

    def construct_slack_message_for_entities(self, updated_files):
        now = datetime.datetime.now()

        updates_dict = {}

        msgs = []

        for entity in updated_files:
            if (now - entity.modifiedDate).days >= 2:
                print "ignoring {}".format(entity.title)
                continue

            title = entity.title
            link = entity.get_link()

            if entity.createdDate == entity.modifiedDate:
                key = entity.get_owners()

            else:
                key = entity.get_modifier()

            if key not in updates_dict:
                updates_dict[key] = []

            updates_dict[key].append((title, link))

        # create messages
        for key, value in updates_dict.iteritems():
            # value is a list of (title, link) tuples
            titles = [title for title, link in value]
            links = [link for title, link in value]

            if len(titles) == 1:
                msg = "{} updated {} ({})".format(key, titles[0], links[0])
            else:
                msg = "{} has updated ".format(key)
                for i in range(len(titles)):
                    if i > 0:
                        msg += ", "
                    msg += titles[i]
                    msg += " ({})".format(links[i])

            msgs.append(msg)

        return msgs


    def construct_slack_message_for_entity(self, entity):
        now = datetime.datetime.now()

        if (now - entity.modifiedDate).days >= 2:
            print "ignoring {}".format(entity.title)
            return []

        if entity.createdDate == entity.modifiedDate:
            return ["{} has created a new file {}\n{}".format(
                entity.get_owners(),
                entity.title,
                entity.get_link())]
        else:
            return ["{} has modified {}\n{}".format(
                entity.get_modifier(),
                entity.title,
                entity.get_link())]

    def stop(self):
        self.running = False
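
A quick standalone check of the mention regexes used above; note the character class must be a-zA-Z (the original a-zA-z also matches punctuation such as [ and ^):

import re

text = "<@U12345>: please find last week's files from <@U67890>:"
print(re.findall('<@[a-zA-Z0-9]*>:', text, re.DOTALL))  # ['<@U12345>:', '<@U67890>:']
print(re.findall('<@(.*?)>', text, re.DOTALL))          # ['U12345', 'U67890']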
Example #42
0
 def __init__(self, server=requests):
     self.server = server
     self.data = DataManager(self.server)
Example #43
0
 def setUp(self):
     self.data_manager = DataManager()
Example #44
0
import os
from data_manager import DataManager

manager = DataManager()
a = manager.get_contests()
b = manager.get_tasks()

print a[0].key
print a[0].short_name
print a[0].remaining
print a[0].organizer
print a[0].full_name
print a[0].year
print a[0].round
print a[0].num_tasks
print a[0].url
print ''

print a[-1].key
print a[-1].short_name
print a[-1].remaining
print a[-1].organizer
print a[-1].full_name
print a[-1].year
print a[-1].round
print a[-1].num_tasks
print a[-1].url
print ''

print b[0].contests_key
print b[0].name
Example #45
0
def get_pos_tags():
    words = DataManager.get_sentences_clean()
    get_pos_tag_comments(words, '../features/pos_tags.csv')
    get_n_grams(3, '../features/pos_tags.csv', '../features/pos_ngrams.csv')
    get_ids('../features/pos_ngrams.csv')
Example #46
0
 def __init__(self):
     self.data_manager = DataManager()
Example #47
0
class Main(object):

    def __init__(self, top_right, bottom_left):
        self.event_instance = None
        self.data_manager = DataManager()
        self.top_right = top_right
        self.bottom_left = bottom_left
        self.result = self.run()
        self.data_manager.close_con()

    def get_result(self):
        return self.result

    def run(self):
        self.event_instance = self.data_manager.get_events_for_view(self.top_right, self.bottom_left)
        train_and_test = self.divide_events_into_train_test()
        training_data_array = np.array(self.event_instance.get_train_list(train_and_test["training_labels"]))
        training_target_array = np.array(self.event_instance.get_target_list(train_and_test["training_labels"]))
        testing_array = np.array(self.event_instance.get_test_list(train_and_test["testing_labels"]))
        testing_target = np.array(self.event_instance.get_target_list(train_and_test["testing_labels"]))
        #knn = neighbors.KNeighborsClassifier(10)
        #knn.fit(training_data_array, training_target_array)
        #testing_target = knn.predict(testing_array)
        data_points = np.concatenate((training_data_array, testing_array))
        targets = np.concatenate((training_target_array, testing_target))
        knn = neighbors.KNeighborsClassifier(12)
        knn.fit(data_points, targets)
        targets = knn.predict(data_points)
        targets_map = list()
        for target_index, target in enumerate(np.unique(targets)):
            targets_map.append((target, target_index, Event.get_category_color(target)))
        result = []
        result.append([[t, c] for (t, i, c) in targets_map])
        dp = list()
        #df = file("datapoints.csv", "w")

        for i in range(len(data_points)):
        #    df.write(str(data_points[i][0])+","+str(data_points[i][1])+","+str(targets_map[targets[i]][0])+"\n")
            index = [index for (t, index, c) in targets_map if t == targets[i]]
            dp.append([data_points[i][0], data_points[i][1], index[0]])
        result.append(dp)
        return result
        #df.close()
        #tf = file("targets.csv", "w")
        #for i in range(len(targets)):
        #    tf.write(str(targets_map[targets[i]])+","+targets[i]+"\n")
        #tf.close()

    def divide_events_into_train_test(self):
        frequencies = Counter(self.event_instance.get_categories())
        total = sum(frequencies.values())
        for key in frequencies:
            frequencies[key] /= float(total)

        values = sorted(frequencies.values(), reverse=True)
        if len(values) > 4:
            values = values[0:5]
            train_labels = [k for (k, v) in frequencies.items() if frequencies[k] in values and frequencies[k] > .05]
            other_labels = [k for (k, v) in frequencies.items() if k not in train_labels]
        else:
            train_labels = [k for (k, v) in frequencies.items() if frequencies[k] > .05]
            other_labels = [k for (k, v) in frequencies.items() if k not in train_labels]
        return {"training_labels": train_labels, "testing_labels": other_labels}
Example #48
0
def validation(project_name, sliding_window_size, check_days, max_k,
               random_val):
    """
    Perform validation with given parameters.

    Parameters
    ----------
    project_name (str):
        Name of the project to read change sets.

    sliding_window_size (int):
        Number of days to include in the graph.

    check_days (list):
        List of integers to check whether recommendations are true or false.

    max_k (int):
        Maximum k for topk and MRR calculations. When max_k is 3, top1, top2 and top3
        will be calculated, and the ranks in MRR calculations can be 1, 2 and 3.

    random_val (bool):
        If True, `max_k` replacements will be selected randomly.

    Returns
    -------
    list:
        First item of the list is the name of the experiment. Second and the following
        items will include accuracy and MRR for each check day. For example, returns
        [pig_sws365, (7, {top1:.5, top2:.7, mrr:.6}), (30, {top1:.6, top2:.9, mrr:.7})].
    """
    dataset_path = get_dataset_path(project_name)
    exp_name = get_exp_name(project_name, sws=sliding_window_size)

    dm = DataManager(dataset_path, None)  # No need for sliding window size
    G = HistoryGraph(dataset_path, sliding_window_size)

    check_day_to_ranks = {check_day: [] for check_day in check_days}
    date_to_results = load_results(exp_name)
    for date, results in date_to_results.items():
        if not results["replacements"]:  # No leaving developer
            continue

        G.forward_until(date)  # Update graph

        for leaving_dev, recommended_devs in results["replacements"].items():
            if not recommended_devs:  # No recommended developers
                continue

            if random_val:  # Randomly select "max_k" developers
                other_devs = results["developers"]
                other_devs.remove(leaving_dev)
                recommended_devs = random.sample(other_devs, max_k)
            else:  # Convert dictionary keys to list and get first "max_k" items
                recommended_devs = list(recommended_devs)[:max_k]

            leaving_dev_files = set(G.find_reachable_files(leaving_dev))

            for check_day in check_days:
                # Get the change sets in the next days
                # For example, get the change sets in the next 7 days if check day is 7
                change_sets = dm.get_specific_window(
                    date + timedelta(days=1), date + timedelta(days=check_day))
                rank = float("inf")  # Not found yet
                for i, recommended_dev in enumerate(recommended_devs):
                    recommended_dev_files = set(
                        G.find_reachable_files(recommended_dev))

                    # Find the files that the leaving developer can reach but the
                    # recommended developer cannot reach
                    target_files = leaving_dev_files - recommended_dev_files

                    if check_modification(change_sets, recommended_dev,
                                          target_files):
                        rank = i + 1
                        break  # No need to check other developers

                check_day_to_ranks[check_day].append(rank)

    ret_items = [exp_name]

    for check_day in check_days:
        res = {}
        for k in range(1, max_k + 1):
            res["top{}".format(k)] = cal_accuracy(
                check_day_to_ranks[check_day], k)

        res["mrr"] = cal_mrr(check_day_to_ranks[check_day])

        ret_items.append((check_day, res))
    return ret_items
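
The helpers cal_accuracy and cal_mrr are not shown; a minimal sketch consistent with how validation() uses them, assuming 1-based ranks with float('inf') for misses (their real implementations may differ):

def cal_accuracy(ranks, k):
    # Fraction of cases where a correct recommendation appeared within the top k
    if not ranks:
        return 0.0
    return sum(1 for rank in ranks if rank <= k) / float(len(ranks))


def cal_mrr(ranks):
    # Mean reciprocal rank; 1.0 / float('inf') evaluates to 0.0, so misses count as 0
    if not ranks:
        return 0.0
    return sum(1.0 / rank for rank in ranks) / len(ranks)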
Example #49
0
# stack = args.stack
elastix_output_dir = args.elastix_output_dir
custom_output_dir = args.custom_output_dir
image_name_list = load_txt(args.image_name_list)
toanchor_transforms_fp = args.toanchor_transforms_fp

#################################################

anchor_idx = image_name_list.index(args.anchor_img_name)

transformation_to_previous_sec = {}

for i in range(1, len(image_name_list)):
    
    transformation_to_previous_sec[i] = DataManager.load_consecutive_section_transform(
        moving_fn=image_name_list[i], fixed_fn=image_name_list[i-1],
        elastix_output_dir=elastix_output_dir, custom_output_dir=custom_output_dir)

transformation_to_anchor_sec = {}

for moving_idx in range(len(image_name_list)):

    if moving_idx == anchor_idx:
        # transformation_to_anchor_sec[moving_idx] = np.eye(3)
        transformation_to_anchor_sec[image_name_list[moving_idx]] = np.eye(3)

    elif moving_idx < anchor_idx:
        T_composed = np.eye(3)
        for i in range(anchor_idx, moving_idx, -1):
            T_composed = np.dot(np.linalg.inv(transformation_to_previous_sec[i]), T_composed)
        # transformation_to_anchor_sec[moving_idx] = T_composed
        transformation_to_anchor_sec[image_name_list[moving_idx]] = T_composed
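
For sections before the anchor, the loop above chains inverses of the consecutive-section transforms. A self-contained numpy check of that composition, with toy 3x3 affine matrices standing in for transformation_to_previous_sec:

import numpy as np

# Toy stand-ins for transformation_to_previous_sec[1] and [2]
T = {1: np.array([[1., 0., 5.], [0., 1., -2.], [0., 0., 1.]]),
     2: np.array([[1., 0., 3.], [0., 1., 4.], [0., 0., 1.]])}

anchor_idx, moving_idx = 2, 0
T_composed = np.eye(3)
for i in range(anchor_idx, moving_idx, -1):
    T_composed = np.dot(np.linalg.inv(T[i]), T_composed)

# Apply the composed transform to a homogeneous 2D point
print(np.dot(T_composed, np.array([10., 10., 1.])))  # [2. 8. 1.]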
Example #50
0
"""
@author: isabelleguyon

This is an example of a program that tests the Iris challenge Preprocessor class.
Another style is to incorporate the test as a main function in the DataManager class itself.
"""
from sys import path
path.append("../ingestion_program")  # Contains libraries you will need
from data_manager import DataManager  # such as DataManager

from prepro import Preprocessor
input_dir = "../sample_data"
output_dir = "../resuts"

basename = 'credit'
D = DataManager(basename, input_dir)  # Load data
print("*** Original data ***")
print(D)

Prepro = Preprocessor()

# Preprocess on the data and load it back into D
D.data['X_train'] = Prepro.fit_transform(D.data['X_train'], D.data['Y_train'])
D.data['X_valid'] = Prepro.transform(D.data['X_valid'])
D.data['X_test'] = Prepro.transform(D.data['X_test'])

# Here show something that proves that the preprocessing worked fine
print("*** Transformed data ***")
print(D)

# Preprocessing gives you opportunities of visualization:
Example #51
0
    # Put here your OWN test code
    
    # To make sure this runs on Codalab, put here things that will not be executed on Codalab
    from sys import argv, path
    path.append("../starting_kit/ingestion_program")  # Contains libraries you will need
    from data_manager import DataManager  # such as DataManager
    
    if len(argv) == 1:  # Use the default input and output directories if no arguments are provided
        input_dir = "../public_data"  # Replace by correct path
        output_dir = "../results"  # Create this directory if it does not exist
    else:
        input_dir = argv[1]
        output_dir = argv[2]
    
    basename = 'cifar10'
    D = DataManager(basename, input_dir) # Load data
    print("*** Original data ***")
    print(D)
    
    Prepro = PCAPreprocessor()
 
    # Preprocess on the data and load it back into D
    D.data['X_train'] = Prepro.fit_transform(D.data['X_train'], D.data['Y_train'])  # the data is now 192-dimensional
    D.data['X_valid'] = Prepro.transform(D.data['X_valid'])
    D.data['X_test'] = Prepro.transform(D.data['X_test'])
  
    # Here show something that proves that the preprocessing worked fine
    print("*** Transformed data ***")
    print(D)
    
Example #52
0
        if row != row_size - 1:
            row_images_with_spacers.append(spacer_h)

    ret = np.vstack(row_images_with_spacers)
    return ret


def convert_to_colomap(im, cmap):
    im = cmap(im)
    im = np.uint8(im * 255)
    return im


np.random.seed(1)

data_manager = DataManager()

place_cells = PlaceCells()
hd_cells = HDCells()

data_manager.prepare(place_cells, hd_cells)

model = Model(place_cell_size=place_cells.cell_size,
              hd_cell_size=hd_cells.cell_size,
              sequence_length=100)

sess = tf.Session()
sess.run(tf.global_variables_initializer())

# Load checkpoints
load_checkpoints(sess)
Example #53
0
 def setUp(self):
   self.manager = DataManager()
   self.manager.load()
Example #54
0
 # ================ @CODE SUBMISSION  ================= 
 overall_time_budget = 0
 time_left_over = 0
 for basename in datanames: # Loop over datasets
     
     vprint( verbose,  "************************************************")
     vprint( verbose,  "******** Processing dataset " + basename.capitalize() + " ********")
     vprint( verbose,  "************************************************")
     
     # ======== Learning on a time budget:
     # Keep track of time not to exceed your time budget. Time spent to inventory data neglected.
     start = time.time()
     
     # ======== Creating a data object with data, informations about it
     vprint( verbose,  "========= Reading and converting data ==========")
     D = DataManager(basename, input_dir, replace_missing=True, filter_features=True, max_samples=max_samples, verbose=verbose)
     print (D)
     vprint( verbose,  "[+] Size of uploaded data  %5.2f bytes" % data_io.total_size(D))
     
     # ======== Keeping track of time
     if debug_mode<1:
         time_budget = D.info['time_budget']        # <== HERE IS THE TIME BUDGET!
     else:
         time_budget = max_time
     overall_time_budget = overall_time_budget + time_budget
     vprint( verbose,  "[+] Cumulated time budget (all tasks so far)  %5.2f sec" % (overall_time_budget))
     # We do not add the time left over from the previous dataset: time_budget += time_left_over
     vprint( verbose,  "[+] Time budget for this task %5.2f sec" % time_budget)
     time_spent = time.time() - start
     vprint( verbose,  "[+] Remaining time after reading data %5.2f sec" % (time_budget-time_spent))
     if time_spent >= time_budget:
Example #55
0
import os
from data_manager import DataManager

manager = DataManager()
a = manager.get_contests()
b = manager.get_tasks()

print a[0].key
print a[0].short_name
print a[0].remaining
print a[0].organizer
print a[0].full_name
print a[0].year
print a[0].round
print a[0].num_tasks
print a[0].url
print ''

print a[-1].key
print a[-1].short_name
print a[-1].remaining
print a[-1].organizer
print a[-1].full_name
print a[-1].year
print a[-1].round
print a[-1].num_tasks
print a[-1].url
print ''

print b[0].contest_short_name
print b[0].name
Example #56
0
        plt.plot()


if __name__ == "__main__":
    # We can use this to run this file as a script and test the Preprocessor
    if len(argv) == 1:  # Use the default input and output directories if no arguments are provided
        input_dir = "public_data"
        output_dir = "results"
    else:
        input_dir = argv[1]
        output_dir = argv[2]

    basename = 'plankton'
    D = DataManager(basename, input_dir)  # Load data
    print("*** Original data ***")
    print(D)

    Prepro = Preprocessor()

    # Preprocess on the data and load it back into D
    D.data['X_train'] = Prepro.fit_transform(D.data['X_train'],
                                             D.data['Y_train'])
    D.data['X_train'], D.data['Y_train'] = Prepro.removeOutliers(
        D.data['X_train'], D.data['Y_train'])
    D.data['X_valid'] = Prepro.transform(D.data['X_valid'])
    D.data['X_test'] = Prepro.transform(D.data['X_test'])
    D.feat_name = np.array(['PC1', 'PC2'])
    D.feat_type = np.array(['Numeric', 'Numeric'])
Example #57
0
class Evaluation:
    def __init__(self, path):
        print("Evaluation")
        self.path = path
        self.path_logs = os.path.join(path, "logs")
        self.data_manager = DataManager(path + 'data/mnist/', batch_size=1)

    def create_graph(self):
        # TODO: try get_Variable?

        x = tf.placeholder(tf.float32, shape=[None, 28, 28, 1])

        conv1 = conv_layer(x, shape=[5, 5, 1, 32])
        conv1_pool = max_pool_2x2(conv1)

        conv2 = conv_layer(conv1_pool, shape=[5, 5, 32, 64])
        conv2_pool = max_pool_2x2(conv2)

        conv2_flat = tf.reshape(conv2_pool, [-1, 7 * 7 * 64])
        full_1 = tf.nn.relu(full_layer(conv2_flat, 1024))

        keep_prob = tf.placeholder(tf.float32)
        full1_drop = tf.nn.dropout(full_1, keep_prob=keep_prob)

        y_conv = full_layer(full1_drop, 10)

        return x, keep_prob, y_conv

    def create_optimizer(self, y_conv):
        y_ = tf.placeholder(tf.float32, shape=[None, 10])

        cross_entropy = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=y_conv, labels=y_))
        train_step = tf.train.AdamOptimizer(1e-4).minimize(cross_entropy)
        correct_prediction = tf.equal(tf.argmax(y_conv, 1), tf.argmax(y_, 1))
        accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))

        return y_, train_step, accuracy

    def run_evaluation(self):

        # ------------ identical to run_training() in train.py ------------------
        # Build the graph
        x, keep_prob, y_conv = self.create_graph()

        # Build the optimization graph (loss function, optimizer, accuracy evaluator)
        y_, train_step, accuracy = self.create_optimizer(y_conv)
        # --------------------------------------------------------------


        # Create a Saver
        saver = tf.train.Saver()

        # Open a session
        with tf.Session() as sess:

            # Load the model from the saved model-XXXX checkpoint
            saver.restore(sess, os.path.join(self.path_logs, "model-1000"))

            # Load data for testing
            X, Y = self.data_manager.get_test_batch_data()  # self.data_manager.get_test_data()

            # Run the test
            test_accuracy = sess.run(accuracy, feed_dict={x: X, y_: Y, keep_prob: 1.0})

        print("Accuracy: {}".format(test_accuracy))
Example #58
0
 def setUp(self):
     self.manager = DataManager()
Example #59
0
 def __init__(self, path):
     print("Evaluation")
     self.path = path
     self.path_logs = os.path.join(path, "logs")
     self.data_manager = DataManager(path + 'data/mnist/', batch_size=1)