def get_all_products_details(self):
    # Collect the stored attributes of every product in the database as a
    # list of dictionaries.
    product_ids = database.get_product_ids()
    results = []
    for product_id in product_ids:
        product_obj = database.get_product(product_id)
        result_obj = {
            "ID": product_obj.get_id(),
            "NAME": product_obj.get_name(),
            "PRICE": product_obj.get_price(),
            "GROUP": product_obj.get_group(),
            "SUBGROUP": product_obj.get_subgroup(),
        }
        results.append(result_obj)
    return results
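# A minimal usage sketch, assuming the method above lives on a catalogue-style
# class; the "catalogue" instance name is illustrative only. Each entry in the
# returned list is a dict with ID, NAME, PRICE, GROUP and SUBGROUP keys.
for details in catalogue.get_all_products_details():
    print(str(details["ID"]) + ": " + details["NAME"] + " ($" + str(details["PRICE"]) + ")")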
def __init__(self, name, price, group, sub_group):
    self.name = name
    self.price = price
    self.group = group
    self.sub_group = sub_group

    # Assign the next sequential product ID and persist the updated ID list.
    id_list = database.get_product_ids()
    if not id_list:
        self.id = 0
    else:
        self.id = id_list[-1] + 1
    id_list.append(self.id)
    database.save_product_ids(id_list)
    database.save_product(self)
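# A minimal usage sketch of the constructor above, assuming it belongs to a
# class named Product (the class name is not shown in the excerpt; the product
# values are illustrative only). On an empty database the first instance
# receives id 0; each later instance receives the last stored id plus one, and
# both the id list and the product itself are persisted via the database module.
hammer = Product("Hammer", 9.99, "Tools", "Hand Tools")           # id 0 on an empty database
drill = Product("Cordless Drill", 49.99, "Tools", "Power Tools")  # id 1
print(hammer.id, drill.id)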
import os

import cv2
import requests

# Project-local dependencies (database, google_search, image_util, config,
# shared_data and ImageMatcher) are assumed to be importable from this package.


def build_index(filepath):
    # Read the list of product names to index, one name per line.
    product_names = []
    try:
        with open(filepath, 'r') as infile:
            for row in infile:
                product_names.append(row.strip())
    except IOError as ex:
        print(ex)
        return

    added_ids_count = 0
    existing_ids = database.get_product_ids(product_names)
    for product_name, existing_id in zip(product_names, existing_ids):
        if existing_id is not None:
            print("Skipped \"" + product_name + "\" as it is already in the database.")
            continue

        # Fetch product information first
        product_infos = None
        try:
            product_infos = google_search.search_retailers(product_name)
        except requests.exceptions.RequestException as ex:
            print(ex)
        if product_infos is None:
            print("Skipped \"" + product_name + "\" as retailer search results could not be retrieved!")
            continue

        # Ensure we have entries for all retailers
        missing_retailers = [retailer for retailer, product_info in product_infos.items()
                             if product_info is None]
        if missing_retailers:
            print("Skipped \"" + product_name + "\" as product information could not be obtained from: ["
                  + ", ".join(missing_retailers) + "]!")
            continue

        # Ensure a product image can be downloaded
        walmart_img_url = product_infos["walmart"][2]
        image_data = None
        try:
            image_data = image_util.download_image_bytes(walmart_img_url)
        except (requests.exceptions.RequestException, RuntimeError) as ex:
            print(ex)
        if image_data is None:
            print("Skipped \"" + product_name + "\" as product image could not be retrieved!")
            continue

        # Add entry to database
        walmart_sku = product_infos["walmart"][1]
        amazon_asin = product_infos["amazon"][1]
        dbid = database.insert_product(product_name, walmart_sku, amazon_asin)

        # Save the image named after its database identifier
        image_util.save_image_bytes(config.IMAGES_DIR + '/' + str(dbid), image_data)
        added_ids_count += 1
        print("Successfully added \"" + product_name + "\" to the database.")

    if added_ids_count == 0:
        print("Nothing to build. Database is current!")
        return

    # Load every saved product image back from disk for index training.
    image_dbids = []
    train_images = []
    for fn in os.listdir(config.IMAGES_DIR):
        if not os.path.isfile(config.IMAGES_DIR + '/' + fn):
            continue
        if '.' not in fn:
            # Not an image file
            continue
        try:
            name_dbid = int(fn[0:fn.index('.')])
        except ValueError:
            continue
        try:
            img_data = image_util.load_image_bytes(config.IMAGES_DIR + '/' + fn)
            cv_image = image_util.load_cv_image_from_image_bytes(img_data)
            if cv_image is None:
                print("Could not decode image: \"" + fn + "\"!")
                continue
            image_dbids.append(name_dbid)
            train_images.append(cv_image)
        except cv2.error as ex:
            print(ex)
            print("Could not decode image: \"" + fn + "\"!")

    # Build FLANN Index
    new_matcher_index = ImageMatcher()
    new_matcher_index.build(image_dbids, train_images)

    shared_data.lock.acquire()  # Enter Critical Section
    shared_data.rebuild_in_progress = True
    # Set shared variable and save to disk
    shared_data.matcher_index = new_matcher_index
    new_matcher_index.save(os.getcwd())
    shared_data.index_needs_updating = True
    shared_data.rebuild_in_progress = False
    shared_data.lock.release()  # Exit Critical Section
    print("FLANN Index updated.")
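# A minimal driver sketch for build_index. The input is a plain-text file with
# one product name per line, as read by the loop at the top of the function;
# the file name and product names here are illustrative only.
with open("product_names.txt", "w") as outfile:
    outfile.write("LEGO Classic Medium Creative Brick Box\n")
    outfile.write("Stanley 25 ft Tape Measure\n")

build_index("product_names.txt")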
def view_all_products(self):
    product_ids = database.get_product_ids()
    for product_id in product_ids:
        product_obj = database.get_product(product_id)
        print(product_obj.get_string())