def test_ListingsModel(): """Test ListingsModel""" from clair.coredata import make_listing_frame from clair.qtgui import ListingsModel, DataStore listings = make_listing_frame(4) data_store = DataStore() data_store.merge_listings(listings) model = ListingsModel() model.setDataStore(data_store) #Get table dimensions assert model.rowCount() == 4 assert model.columnCount() == len(listings.columns) #Get data from model - Currently it contains only None and nan index17 = model.createIndex(1, 7) data = model.data(index17, Qt.DisplayRole) assert data == "None" #Change data in model model.setData(index17, "foo", Qt.EditRole) #Test if data was really changed data = model.data(index17, Qt.EditRole) assert data == "foo" #Try to get data in edit role data = model.data(index17, Qt.EditRole) assert data == "foo" print listings print listings.icol(7)
def get_listings(ids):
    """
    Download detailed listings from Ebay.

    Needs a ``list``, ``pandas.Series``, or any iterable of Ebay item IDs.
    """
    eget = EbayGetListings

    #Remove duplicate IDs
    unique_ids = list(set(ids))

    #Fetch the information in chunks of 20 listings and
    #concatenate the partial results.
    chunk_size = 20
    listings = make_listing_frame(0)
    for start in range(0, len(unique_ids), chunk_size):
        chunk = unique_ids[start:start + chunk_size]
        xml = eget.download_xml(chunk)
        part = eget.parse_xml(xml)
        listings = listings.append(part, ignore_index=True,
                                   verify_integrity=False)

    #Put our IDs into index
    listings.set_index("id", drop=False, inplace=True,
                       verify_integrity=True)

#    #TODO: compute listings["final_price"] from ``Item.ListingStatus``
#    #    http://developer.ebay.com/Devzone/shopping/docs/CallRef/GetMultipleItems.html#Response.Item.ListingStatus
#    #Ebay shows final price some minutes after auction ends, in worst case.
#    fp = listings["time"] < datetime.utcnow() + timedelta(minutes=15)
#    listings["final_price"] = fp

    return listings
def test_LearnDataProxyModel():
    """
    Test class LearnDataProxyModel.

    Checks the conversion between the listings frame's three product
    columns (expected/present/absent) and the proxy model's check-box
    representation, in both directions.
    """
    from clair.qtgui import LearnDataProxyModel, ListingsModel
    from clair.coredata import make_listing_frame, DataStore

    csr = Qt.CheckStateRole

    #Create test data
    listings = make_listing_frame(1)
    #Product "foo" is present, "bar" absent, nothing is known about "baz"
    listings["expected_products"][0] = ["foo", "bar", "baz"]
    listings["products"][0] = ["foo"]
    listings["products_absent"][0] = ["bar"]
    data_store = DataStore()
    data_store.merge_listings(listings)

    #Create listings model that we can adapt
    lsmod = ListingsModel()
    lsmod.setDataStore(data_store)

    #Proxy shows columns 3, 4, 5 of the listings model for one row
    #(assumed to be the three product columns - confirm in qtgui).
    mo = LearnDataProxyModel()
    mo.setListingsModel(lsmod, 3, 4, 5)
    mo.setRow(lsmod.index(0, 0))
    print mo.values

    #Test size
    assert mo.columnCount() == 4
    assert mo.rowCount() == 4 #includes additional empty row

    #Test data conversion
    #1st row: [True, False, "foo", ...]
    assert bool(mo.data(mo.index(0, 0), csr)) == True
    assert bool(mo.data(mo.index(0, 1), csr)) == False
    assert mo.data(mo.index(0, 2)) == "foo"
    #2nd row: [False, True, "bar", ...]
    assert bool(mo.data(mo.index(1, 0), csr)) == False
    assert bool(mo.data(mo.index(1, 1), csr)) == True
    assert mo.data(mo.index(1, 2)) == "bar"
    #3rd row: [False, False, "baz", ...]
    assert bool(mo.data(mo.index(2, 0), csr)) == False
    assert bool(mo.data(mo.index(2, 1), csr)) == False
    assert mo.data(mo.index(2, 2)) == "baz"

    #Change the data: "foo" unknown, "bar" present, "baz" absent
    mo.setData(mo.index(0, 0), False, csr)
    mo.setData(mo.index(1, 0), True, csr)
    mo.setData(mo.index(2, 1), True, csr)

    #Test conversion back to internal format
    assert data_store.listings["expected_products"][0] == ["foo", "bar", "baz"]
    assert data_store.listings["products"][0] == ["bar"]
    assert data_store.listings["products_absent"][0] == ["baz"]
    print "finished successfully."
def find(keywords, n_listings=10, min_price=None, max_price=None,
         currency="EUR", time_from=None, time_to=None):
    """
    Find listings on Ebay by keyword.
    Finds only active listings, not finished listings.

    Parameters
    ----------
    keywords : str
        Search terms for Ebay's keyword search.
    n_listings : int
        Desired number of listings (upper limit).
    min_price, max_price : float or None
        Restrict results to this price range.
    currency : str
        Currency unit for ``min_price`` and ``max_price``, e.g. "EUR".
    time_from, time_to : datetime in UTC, or None
        Restrict results to this time span.
    """
    efind = EbayFindListings

    #Ebay returns a maximum of 100 listings per call (pagination).
    #Compute necessary number of calls to Ebay and number of
    #listings per call.
    max_per_page = 100 #max number of listings per call - Ebay limit
    #Force float division: with Python 2 integer division
    #``n_listings / max_per_page`` is 0 for ``n_listings < 100``, which
    #made the next line raise ``ZeroDivisionError``. Under true
    #division (``from __future__ import division``) this is a no-op.
    n_pages = math.ceil(n_listings / float(max_per_page))
    n_per_page = math.ceil(n_listings / n_pages)

    #Call Ebay repeatedly and concatenate results
    listings = make_listing_frame(0)
    for i_page in range(1, int(n_pages + 1)):
        xml = efind.download_xml(keywords=keywords,
                                 entries_per_page=n_per_page,
                                 page_number=i_page,
                                 min_price=min_price,
                                 max_price=max_price,
                                 currency=currency,
                                 time_from=time_from,
                                 time_to=time_to)
        listings_part = efind.parse_xml(xml)
        #Stop searching when Ebay returns an empty result.
        if len(listings_part) == 0:
            break
        listings = listings.append(listings_part, ignore_index=True,
                                   verify_integrity=False)

    #Remove duplicate rows: Ebay uses the same ID for variants of the
    #same product.
    listings = listings.drop_duplicates(cols="id")
    #Put internal IDs into index
    listings.set_index("id", drop=False, inplace=True,
                       verify_integrity=True)

    #Only interested in auctions, assume that no prices are final.
    listings["final_price"] = False

    return listings
def test_LearnDataProxyModel_GUI():
    """
    Interactive GUI test for class LearnDataProxyModel.

    Shows the proxy model in a ``QTreeView`` and runs the Qt event
    loop; the converted data is printed after the window is closed.
    Requires a human to interact with the window.
    """
    from clair.qtgui import LearnDataProxyModel, ListingsModel
    from clair.coredata import make_listing_frame, DataStore

    print "Start"
    app = QApplication(sys.argv)

    #Create test data
    listings = make_listing_frame(1)
    #Product "foo" is present, "bar" absent, nothing is known about "baz"
    listings["expected_products"][0] = ["foo", "bar", "baz"]
    listings["products"][0] = ["foo"]
    listings["products_absent"][0] = ["bar"]
    data_store = DataStore()
    data_store.merge_listings(listings)

    #Create listings model that we can adapt
    lsmod = ListingsModel()
    lsmod.setDataStore(data_store)

    #Proxy shows columns 3, 4, 5 of the listings model for one row
    mo = LearnDataProxyModel()
    mo.setListingsModel(lsmod, 3, 4, 5)
    mo.setRow(lsmod.index(0, 0))

    #Display the proxy model and block until the window is closed
    view = QTreeView()
    view.setModel(mo)
    view.show()
    app.exec_()

    #Show what the user's edits did to the underlying frame
    print mo.values
    print "expectedProducts:", data_store.listings["expected_products"][0]
    print "products: ", data_store.listings["products"][0]
    print "productsAbsent: ", data_store.listings["products_absent"][0]
    print "End"
def make_test_listings():
    """
    Create a DataFrame with some data.

    Three listings with unique IDs; row 0 is filled in as a complete
    training sample.
    """
    from clair.coredata import make_listing_frame

    frame = make_listing_frame(3)
    #All listings need unique ids
    frame["id"] = ["eb-123", "eb-456", "eb-457"]
    #Titles include unicode text and a missing value
    frame["title"] = [u"Nikon D90 super duper!", u"<>müäh", None]

    #Row 0 carries the detailed data, including the learning columns
    frame["training_sample"][0] = True
    frame["expected_products"][0] = ["nikon-d90", "nikon-sb-24"]
    frame["products"][0] = ["nikon-d90"]
    frame["products_absent"][0] = ["nikon-sb-24"]
    frame["description"][0] = "Buy my old Nikon D90 camera <b>now</b>!"
    frame["prod_spec"][0] = {"Marke":"Nikon", "Modell":"D90"}

    #The unique IDs double as the index
    frame.set_index("id", drop=False, inplace=True, verify_integrity=True)
    return frame
def create_models(): """ Create a Qt-model-view models for listings, products and tasks. Additionally returns the related ``DataStore``. Returns ------- listings_model, product_model, task_model, price_model, data_store """ from clair.qtgui import TaskModel, ProductModel, ListingsModel, PriceModel from clair.coredata import Product, SearchTask, DataStore, \ make_listing_frame, make_price_frame fr = make_listing_frame(3) #All listings need unique ids fr["id"] = ["eb-110685959294", "eb-111014122908", "eb-457"] fr["training_sample"] = [1.0, 0.0, nan] fr["search_tasks"] = ["s-nikon-d90", "s-nikon-d70", "s-nikon-d90"] fr["expected_products"][0] = ["nikon-d90", "nikon-sb-24"] fr["expected_products"][1] = ["nikon-d70"] fr["expected_products"][2] = ["nikon-d90", "nikon-sb-24"] fr["products"] = [["nikon-d90"], ["nikon-d70"], ["nikon-d90"]] fr["products_absent"][0] = ["nikon-sb-24"] fr["thumbnail"][0] = "www.some.site/dir/to/thumb.pg" fr["image"][0] = "www.some.site/dir/to/img.pg" fr["title"] = [u"Nikon D90 super duper!", u"Süper Camera", None] fr["description"][0] = "Buy my old Nikon D90 camera <b>now</b>!" fr["prod_spec"][0] = {"Marke":"Nikon", "Modell":"D90"} fr["active"][0] = False fr["sold"] = [1., 1., 0.] fr["currency"][0] = "EUR" fr["price"] = [400., 150, 300] fr["shipping"][0] = 12. 
fr["type"][0] = "auction" fr["time"] = [datetime(2013,1,10), datetime(2013,2,2), datetime(2013,2,3)] fr["location"][0] = u"Köln" fr["postcode"][0] = u"50667" fr["country"][0] = "DE" fr["condition"][0] = 0.7 fr["server"][0] = "Ebay-Germany" fr["server_id"] = ["110685959294", "111014122908", "457"] #ID of listing on server fr["final_price"][0] = True # fr["data_directory"] = "" fr["url_webui"][0] = "www.some.site/dir/to/web-page.html" # fr["server_repr"][0] = nan #Put our IDs into index fr.set_index("id", drop=False, inplace=True, verify_integrity=True) tasks = [SearchTask("s-nikon-d90", datetime(2000, 1, 1), "ebay-de", "Nikon D90", "daily", 100, 150, 300, "EUR", ["nikon-d90", "nikon-18-105-f/3.5-5.6--1"]), SearchTask("s-nikon-d70", datetime(2000, 1, 1), "ebay-de", "Nikon D70", "daily", 100, 75, 150, "EUR", ["nikon-d70", "nikon-18-105-f/3.5-5.6--1"]),] products = [Product("nikon-d90", "Nikon D90", "Nikon D90 DSLR camera.", ["Nikon", "D 90"], ["photo.system.nikon.camera", "photo.camera.system.nikon"]), Product("nikon-d70", "Nikon D70", "Nikon D70 DSLR camera.", ["Nikon", "D 70"], ["photo.system.nikon.camera", "photo.camera.system.nikon"])] pri = make_price_frame(3) pri["id"] = ["pri-123", "pri-456", "pri-457"] pri["price"] = [310., 150., 300.] pri["currency"] = ["EUR", "EUR", "EUR"] pri["condition"] = [0.7, 0.7, 0.7] pri["time"] = [datetime(2013,1,10), datetime(2013,2,2), datetime(2013,2,3)] pri["product"] = ["nikon-d90", "nikon-d70", "nikon-d90"] pri["listing"] = ["eb-123", "eb-456", "eb-457"] pri["type"] = ["observed", "observed", "observed"] pri["avg_period"] = None pri["avg_num_listings"] = None conf_dir = relative("../../example-data") data_store = DataStore(conf_dir, None) data_store.merge_listings(fr) data_store.set_products(products) data_store.add_tasks(tasks) data_store.merge_prices(pri) #The models are tested here, creating them may fail. #Don't break all test, because a single model is broken. 
try: listings_model = ListingsModel() listings_model.setDataStore(data_store) except: #IGNORE:W0702 print "Error! ``listings_model`` could not be initialized!" listings_model = None try: task_model = TaskModel() task_model.setDataStore(data_store) except: #IGNORE:W0702 print "Error! ``task_model`` could not be initialized!" task_model = None try: product_model = ProductModel() product_model.setDataStore(data_store) except: #IGNORE:W0702 print "Error! ``product_model`` could not be initialized!" product_model = None try: price_model = PriceModel() price_model.setDataStore(data_store) except: #IGNORE:W0702 print "Error! ``price_model`` could not be initialized!" price_model = None return listings_model, product_model, task_model, price_model, data_store
def parse_xml(xml):
    """
    Parse the XML response from Ebay's shopping API.

    Converts the response into a listings DataFrame (one row per
    item). Returns an empty listings frame when Ebay reports an
    error; warnings and partial failures are logged but the
    response is still parsed.

    http://developer.ebay.com/Devzone/shopping/docs/CallRef/GetMultipleItems.html
    """
    root = objectify.fromstring(xml)
#    print etree.tostring(root, pretty_print=True)

    #Inspect Ebay's acknowledgement field before parsing any items.
    if root.Ack.text == "Success":
        pass
    elif root.Ack.text in ["Warning", "PartialFailure"]:
        #Log the individual error elements, but continue parsing.
        error_list = [etree.tostring(err, pretty_print=True)
                      for err in root.Errors]
        error_str = "\n".join(error_list)
        logging.warning("Ebay warning in EbayGetListings.parse_xml: " +
                        root.Ack.text + "\n" + error_str)
    else:
#        raise EbayError(etree.tostring(root, pretty_print=True))
        logging.error("Ebay error in EbayGetListings.parse_xml: \n" +
                      etree.tostring(root, pretty_print=True))
        return make_listing_frame(0)

    item = root.Item
    nrows = len(item)
    listings = make_listing_frame(nrows)
    #Fill the frame row by row; optional response elements are
    #skipped via ``except AttributeError: pass``, leaving the
    #frame's default value in place.
    for i, itemi in enumerate(item):
        try:
            listings["thumbnail"][i] = itemi.GalleryURL.text
        except AttributeError:
            pass
        try:
            listings["image"][i] = itemi.PictureURL.text
        except AttributeError:
            pass
        listings["title"][i] = itemi.Title.text
        #Escaping and un-escaping XML. Necessary for the HTML description.
        #http://wiki.python.org/moin/EscapingXml
        #Cleaning up html
        #http://lxml.de/lxmlhtml.html#cleaning-up-html
        listings["description"][i] = itemi.Description.text
        #ItemSpecifics: name/value pairs, stored as a dict
        try:
            xml_prod_specs = itemi.ItemSpecifics.NameValueList
            prod_specs = {}
            for xml_spec in xml_prod_specs:
                name = xml_spec.Name.text
                value = xml_spec.Value.text
                prod_specs[name] = value
            listings["prod_spec"][i] = prod_specs
        except AttributeError:
            pass
        #Listing status
        #http://developer.ebay.com/Devzone/shopping/docs/CallRef/GetMultipleItems.html#Response.Item.ListingStatus
        listings["active"][i] = itemi.ListingStatus.text == "Active"
        listings["final_price"][i] = (itemi.ListingStatus.text in
                                      ["Ended", "Completed"])
        listings["sold"][i] = int(itemi.QuantitySold.text) > 0

        #Price and shipping cost
        listings["currency"][i] = itemi.ConvertedCurrentPrice.get( #EUR, USD, ...
                                                            "currencyID")
        listings["price"][i] = itemi.ConvertedCurrentPrice.text
        try:
            listings["shipping"][i] = itemi.ShippingCostSummary \
                                           .ListedShippingServiceCost.text
        except AttributeError:
            pass

        #Type of listing: auction, fixed-price, unknown
        l_type = defaultdict(lambda: "unknown",
                             {"Chinese" : "auction",
                              "FixedPriceItem" : "fixed-price",
                              "StoresFixedPrice": "fixed-price"})
        listings["type"][i] = l_type[itemi.ListingType.text]

        #Approximate time when price is/was valid, end time in case of auctions
        #Stored as naive datetime (tzinfo stripped) - presumably UTC,
        #confirm against the other parsers.
        time = dprs.parse(itemi.EndTime.text)
        listings["time"][i] = time.replace(tzinfo=None)

        listings["location"][i] = itemi.Location.text
        try:
            listings["postcode"][i] = itemi.PostalCode.text
        except AttributeError:
            pass
        listings["country"][i] = itemi.Country.text
        try:
            listings["condition"][i] = convert_ebay_condition( #1.: new, 0.: worthless
                itemi.ConditionID.text)
        except AttributeError:
            pass
        listings["seller"][i] = itemi.Seller.UserID.text
        try:
            listings["buyer"][i] = itemi.HighBidder.UserID.text
        except AttributeError:
            pass
#        listings["server"][i] = "Ebay-" + itemi.Site.text #string to identify the server
        listings["server_id"][i] = itemi.ItemID.text #ID of item on server
#        listings["data_directory"] = ""
        listings["url_webui"][i] = itemi.ViewItemURLForNaturalSearch.text
#        listings["server_repr"][i] = nan #representation of listing on server (XML)

    #Create internal IDs - Ebay IDs are unique (except for variants)
    listings["id"] = "eb-" + listings["server_id"]
    return listings
def parse_xml(xml):
    """
    Parse the XML response from Ebay's finding API,
    and convert it into a table of listings.

    Returns an empty listings frame when Ebay reports an error or
    when the response contains no search results; warnings and
    partial failures are logged but the response is still parsed.

    http://developer.ebay.com/DevZone/finding/CallRef/findItemsByKeywords.html
    """
    root = objectify.fromstring(xml)
#    print etree.tostring(root, pretty_print=True)

    #The finding API uses a lower case ``ack`` element (unlike the
    #shopping API's ``Ack``); objectify attribute access is case
    #sensitive, so ``root.Ack`` raised ``AttributeError`` here.
    if root.ack.text == "Success":
        pass
    elif root.ack.text in ["Warning", "PartialFailure"]:
        logging.warning(
            "Ebay warning in EbayFindListings.parse_xml: " +
            root.ack.text + "\n" +
            etree.tostring(root.errorMessage, pretty_print=True))
    else:
#        raise EbayError(etree.tostring(root, pretty_print=True))
        logging.error("Ebay error in EbayFindListings.parse_xml: \n" +
                      etree.tostring(root, pretty_print=True))
        return make_listing_frame(0)

    #An empty search result has no ``searchResult.item`` element.
    try:
        item = root.searchResult.item
    except AttributeError:
        return make_listing_frame(0)

    nrows = len(item)
    listings = make_listing_frame(nrows)
    #Fill the frame row by row; optional response elements are
    #skipped via ``except AttributeError: pass``.
    for i, itemi in enumerate(item):
#        listings["training_sample"][i] = False #This is training sample if True
        try:
            listings["thumbnail"][i] = itemi.galleryURL.text
        except AttributeError:
            pass
        listings["title"][i] = itemi.title.text
        listings["active"][i] = True #findItemsByKeywords only returns active listings
        listings["currency"][i] = itemi.sellingStatus.currentPrice \
                                       .get("currencyId")
        listings["price"][i] = itemi.sellingStatus.currentPrice.text
        try:
            listings["shipping"][i] = itemi.shippingInfo \
                                           .shippingServiceCost.text
        except AttributeError:
            pass

        #Type of listing: auction, fixed-price, unknown
        l_type = defaultdict(lambda: "unknown",
                             {"Auction" : "auction",
                              "AuctionWithBIN" : "auction",
                              "FixedPrice" : "fixed-price",
                              "StoreInventory" : "fixed-price"})
        listings["type"][i] = l_type[itemi.listingInfo.listingType.text]

        #End time of the listing, stored as naive datetime
        time = dprs.parse(itemi.listingInfo.endTime.text)
        listings["time"][i] = time.replace(tzinfo=None)

        listings["location"][i] = itemi.location.text
        try:
            listings["postcode"][i] = itemi.postalCode.text
        except AttributeError:
            pass
        listings["country"][i] = itemi.country.text
        try:
            listings["condition"][i] = \
                convert_ebay_condition(itemi.condition.conditionId.text)
        except AttributeError:
            pass
        listings["server"][i] = "Ebay-" + itemi.globalId.text
        listings["server_id"][i] = itemi.itemId.text
        listings["url_webui"][i] = itemi.viewItemURL.text

    #Create internal IDs - Ebay IDs are unique (except for variants)
    listings["id"] = "eb-" + listings["server_id"]

#    listings.to_csv("listings0.csv")
#    print listings
    return listings