def confirmSendIfApplicable(self, host, mobile, accountMobile, headers):
    """Trigger baixing.com's delegated-publishing (daifa) SMS authorization
    for `mobile` when it has not already been requested.

    Flow (three GETs against `host`):
      1. /c/ev/fabu_daifa_show  -- registers the mobile/dfmobile pairing.
      2. /df/auth               -- checks the current authorization state.
      3. /df/auth ... &apply=1  -- only when step 2's page does NOT already
         show the "SMS sent" notice, actually sends the authorization SMS.

    :param host: site host to connect to.
    :param mobile: contact mobile that must authorize the posting.
    :param accountMobile: mobile bound to the posting account (dfmobile).
    :param headers: HTTP headers (incl. session cookies) used on every request.
    :returns: None; side effects only.
    """

    def fetch(path):
        # One GET round-trip. try/finally guarantees the connection is
        # released even when request/getresponse/decodeBody raises --
        # the original leaked the socket on any exception.
        conn = httplib.HTTPConnection(host, timeout=10)
        try:
            conn.request("GET", path, headers=headers)
            res = conn.getresponse()
            return self.decodeBody(res.getheaders(), res.read())
        finally:
            conn.close()

    path = '/c/ev/fabu_daifa_show?mobile=' + mobile + '&dfmobile=' + accountMobile + '&category=ershouqiche'
    html = fetch(path)
    logger.debug("path " + path + ", response =" + html)

    html = fetch('/df/auth?mobile=' + mobile + '&category=ershouqiche')
    # The auth page contains this notice once the SMS has already been sent;
    # only apply when it is absent, so we never re-send the authorization SMS.
    if html.count(u'我们已用短信通知对方进行授权') == 0:
        fetch('/df/auth?mobile=' + mobile + '&category=ershouqiche&apply=1')
        logger.debug("send confirmed ")
# NOTE(review): whitespace-collapsed scraper script, kept byte-identical.
# Iterates funkalab.com collection ids in `c`, counting marker substrings per
# page and saving one row per collection. This chunk is TRUNCATED mid-way
# through the sqlite.save() dict literal -- the remainder of the statement is
# outside the visible chunk; do not treat this line as runnable on its own.
# `tipos = str` / `nombre = str` bind the builtin type object as a placeholder
# default -- presumably unintended (shadows-free but odd); verify in full file.
import scraperwiki import lxml.html c = [ 135, 144, 87, 110, 151, 147, 138, 131, 127, 122, 117, 108, 102, 90, 57, 41, 13, 150, 143, 137, 130, 126, 121, 116, 107, 100, 85, 54, 34, 9, 149, 141, 134, 129, 124, 120, 111, 106, 99, 61, 49, 33, 7, 148, 140, 133, 128, 123, 118, 109, 103, 98, 58, 48, 14 ] b = 0 tipos = str nombre = str while b != 55: html = scraperwiki.scrape("http://funkalab.com/collections/" + str(c[b])) root = lxml.html.fromstring(html) tipos = html.count('data-placement') for el in root.cssselect("h1"): nombre = el.text video = html.count('class="label">Video') arte = html.count('class="label">Arte') musica = html.count('class="label">M') tec = html.count('class="label">Tec') diseno = html.count('class="label">Dis') scraperwiki.sqlite.save(unique_keys=['id'], data={ 'id': c[b], 'nombre': nombre, 'video': video, 'arte': arte, 'musica': musica, 'tec': tec,
# NOTE(review): whitespace-collapsed Python 2 script fragment, kept
# byte-identical. Main routine of a Kobe harbor cruise-schedule scraper:
# drops/recreates the `portcall` and `port` tables, fetches the schedule
# page, replaces the Shift-JIS roman-numeral-II glyph ("\x87\x55") with
# "II" before parsing, then walks table rows. The chunk is TRUNCATED inside
# the row loop (the DBG/skip logic is cut off); the Japanese inline comment
# means "skip merged columns". `DBG` is defined elsewhere in the file.
# Main routine # ########################################################### scraperwiki.sqlite.execute("drop table if exists portcall") scraperwiki.sqlite.execute("drop table if exists port") scraperwiki.sqlite.execute("drop table if exists berth") scraperwiki.sqlite.execute("CREATE TABLE `portcall` (`id` integer PRIMARY KEY AUTOINCREMENT, `portin` datetime, `portout` datetime, `ship_name` text, `prev_port` text, `next_port` text, `berth_name` text, `tour_name` text)") scraperwiki.sqlite.execute("CREATE TABLE `port` (`port_name` text PRIMARY KEY, `longitude` float, `latitude` float)") portcall_id = 0; html = scraperwiki.scrape("http://www.city.kobe.lg.jp/culture/leisure/harbor/passenger/schedule/index.html") sjis_char = "\x87\x55" print "Roman numeral 2 appears " + str(html.count(sjis_char)) + " time(s)." root = lxml.html.fromstring(html.replace(sjis_char, "II")) for elmTRow in root.cssselect("table tbody tr"): elmTData = elmTRow.cssselect("td") DBG("====> elmTRow: " + lxml.html.tostring(elmTRow)) # 結合された列をスキップ if len(elmTData) <= 1: print len(elmTData) DBG("elmTRow : " + lxml.html.tostring(elmTRow)) DBG(elmTData[0].text_content().strip())
# NOTE(review): whitespace-collapsed script, kept byte-identical. This is the
# same funkalab collections scraper as above: one complete scrape-and-save
# loop over the 55 ids in `c`, followed by a pasted DUPLICATE copy of the
# script header and loop start that is TRUNCATED mid-loop at the end of this
# chunk. The duplicate re-runs identical work; since save() uses
# unique_keys=['id'] it would only overwrite the same rows -- presumably
# copy-paste noise; confirm before removing in the full file.
import scraperwiki import lxml.html c=[135,144,87,110,151,147,138,131,127,122,117,108,102,90,57,41,13,150,143,137,130,126,121,116,107,100,85,54,34,9,149,141,134,129,124,120,111,106,99,61,49,33,7,148,140,133,128,123,118,109,103,98,58,48,14] b=0 tipos=str nombre=str while b != 55: html=scraperwiki.scrape("http://funkalab.com/collections/"+str(c[b])) root=lxml.html.fromstring(html) tipos=html.count('data-placement') for el in root.cssselect("h1"): nombre=el.text video=html.count('class="label">Video') arte=html.count('class="label">Arte') musica=html.count('class="label">M') tec=html.count('class="label">Tec') diseno=html.count('class="label">Dis') scraperwiki.sqlite.save(unique_keys=['id'],data={'id':c[b],'nombre':nombre,'video':video,'arte':arte,'musica':musica,'tec':tec,'diseno':diseno,'tipos':tipos}) b=b+1 import scraperwiki import lxml.html c=[135,144,87,110,151,147,138,131,127,122,117,108,102,90,57,41,13,150,143,137,130,126,121,116,107,100,85,54,34,9,149,141,134,129,124,120,111,106,99,61,49,33,7,148,140,133,128,123,118,109,103,98,58,48,14] b=0 tipos=str nombre=str while b != 55: html=scraperwiki.scrape("http://funkalab.com/collections/"+str(c[b])) root=lxml.html.fromstring(html)
# NOTE(review): whitespace-collapsed script fragment, kept byte-identical.
# Scrapes amerpages.com per (category-id in `a`, country in `c`) pairs:
# `categoria` is parsed from the results-count <strong>, `f` counts featured
# listings, and a long if-chain copies them into per-country variables
# (brasilc/brasilf, usac/usaf, ...). Chunk is TRUNCATED inside that if-chain
# (cut at d==5 / colombia). Note `b` starts at 3, so only categories a[3]
# and a[4] are processed -- presumably a deliberate resume point; verify.
# `categoria` keeps its previous value when the selector finds no count.
import scraperwiki import lxml.html a=[2369,2337,377,2364,1764] b=3 while b!=5: c=['brasil','usa','mexico','venezuela','argentina','colombia','chile','peru','puerto_rico','panama','elsalvador','uruguay','guatemala','ecuador','nicaragua','bolivia'] d=0 while d!=16: html = scraperwiki.scrape("http://amerpages.com/spa/"+str(c[d])+"/items/search/category:"+str(a[b])) root = lxml.html.fromstring(html) for el in root.cssselect("div.resultsInfo strong"): if el.text != 'Ordenar por:': categoria=int(el.text) f=html.count('listing featured') if d==0: brasilc=categoria brasilf=f if d==1: usac=categoria usaf=f if d==2: mexicoc=categoria mexicof=f if d==3: venezuelac=categoria venezuelaf=f if d==4: argentinac=categoria argentinaf=f if d==5: colombiac=categoria
# NOTE(review): whitespace-collapsed method spanning three source lines; code
# kept byte-identical, comments only (line boundaries fall mid-statement, so
# inline commentary cannot be interleaved).
# postVehicle(shareJob, parseResult, queryId) -> (errorcode, url-or-message).
# Publishes a used-car listing on baixing.com: loads /fabu/<queryId>, bails
# out if the account is temporarily banned, refreshes session cookies, scrapes
# the CSRF token and account mobile from the form page, assembles the publish
# form (Chinese-keyed fields: brand/series/model, dates, insurance, transfer
# fee, etc.), uploads gallery photos when queryId is non-numeric, triggers the
# SMS authorization flow when the contact mobile differs from the account
# mobile, then POSTs the form; a 302 response is success.
# Review findings (confirm before fixing):
#  - `formData += '&images[]=' + imgs` injects an EMPTY images[] entry, since
#    `imgs` already prefixes every URL with '&images[]=' -- looks like a bug.
#  - Content-Length is set to an int; a str value is conventional.
#  - both branches of the account_type == 'public' check call the same
#    getContact() -- dead conditional, presumably a stubbed public path.
#  - `user`/`address` are fetched but unused in the visible code.
def postVehicle(self, shareJob, parseResult, queryId): shareAccount = shareJob.get("share_account", None) vehicle = shareJob.get("vehicle", None) user = vehicle.get('user', None) address = user.get('address', None) spec = vehicle.get("spec", None) specDetail = shareJob.get("vehicle_spec_detail", None) externalVehicleSpec = shareJob.get('external_vehicle_spec') iQueryId = queryId.encode('utf8') if type( queryId) == unicode else queryId uri = '/fabu/' + iQueryId if (2 == len(parseResult)): return errorcode.LOGIC_ERROR, parseResult[1] html, heads = self.getPageHtml(parseResult['host'], uri) if html.count(u'您近期多次发布错类信息,目前账号被临时禁发两天'): return errorcode.SITE_ERROR, u'账号被临时禁发' self.setCookies(heads) ks = self.cookies.keys() cookie_list = [] for k in ks: cookie_list.append(k + '=' + self.cookies[k]) self.headers['Cookie'] = string.join(cookie_list, '; ') hxs = lxml.html.fromstring(html) token = hxs.xpath('//input[@name="token"]/@value')[0] accountMobile = hxs.xpath('//*[@id="id_contact"]/div/input/@value')[0] if shareAccount.get('account_type', None) == 'public': (contactName, mobile) = self.getContact(shareJob) else: (contactName, mobile) = self.getContact(shareJob) brand = str(externalVehicleSpec.get('brand').get('id')) includeTransferFee = vehicle.get("price").get( "quoted_price_include_transfer_fee", None) transferFee = '不包含' if includeTransferFee is not None and includeTransferFee: transferFee = '包含' registrationDate = vehicle.get("vehicle_date").get("registration_date") inspectionDate = vehicle.get("vehicle_date").get("inspection_date") commercialInsuranceExpireDate = vehicle.get("vehicle_date").get( "commercial_insurance_expire_date") if commercialInsuranceExpireDate is not None: commercialInsuranceExpireDate = commercialInsuranceExpireDate commercialInsuranceExpireDateYear = commercialInsuranceExpireDate.year commercialInsuranceExpireDateMonth = commercialInsuranceExpireDate.month else: commercialInsuranceExpireDate = inspectionDate commercialInsuranceExpireDateYear 
= inspectionDate.year commercialInsuranceExpireDateMonth = inspectionDate.month compulsoryInsuranceExpireDate = vehicle.get("vehicle_date").get( "compulsory_insurance_expire_date") if compulsoryInsuranceExpireDate is not None: compulsoryInsuranceExpireDate = compulsoryInsuranceExpireDate compulsoryInsuranceExpireDateYear = compulsoryInsuranceExpireDate.year compulsoryInsuranceExpireDateMonth = compulsoryInsuranceExpireDate.month else: compulsoryInsuranceExpireDate = inspectionDate compulsoryInsuranceExpireDateYear = inspectionDate.year compulsoryInsuranceExpireDateMonth = inspectionDate.month form = { 'token': str(token), 'wanted': '0', 'title': str(spec['brand']) + str(spec['series']) + str(spec['sale_name']), '车品牌': brand, '车系列': str(spec['brand']) + str(spec['series']), '车型': str(spec['series']) + str(spec['sale_name']), '类型': 'm177927', '年份[0]': str(registrationDate.year), '年份[1]': str(registrationDate.month), '行驶里程': str(Decimal(vehicle['summary']['mileage']) / Decimal(10000)), '价格': str(Decimal(self.getPrice(shareJob)) / Decimal(10000)), 'content': str(self.getContentVal_baixing(shareJob)), '地区[]': parseResult['cityCode'], 'contact': mobile, '车辆颜色': str(self.getColor(vehicle)), '排量': str(specDetail['details'][23]), '变速箱': str(specDetail['details'][42]), '燃油类型': str('汽油'), '排放标准': str(specDetail['details'][10]), '车辆用途': str('家用'), '年检[0]': str(inspectionDate.year), '年检[1]': str(inspectionDate.month), '交强险[0]': str(compulsoryInsuranceExpireDateYear), '交强险[1]': str(compulsoryInsuranceExpireDateMonth), '商业险[0]': str(commercialInsuranceExpireDateYear), '商业险[1]': str(commercialInsuranceExpireDateMonth), '登记证': str('齐全'), '能否过户': str('能'), '能否按揭': str('能'), '购置税': str(self.getBooleanText(vehicle, "document", "purchase_tax")), '行驶证': str( self.getBooleanText(vehicle, "document", "registration_certificate")), '购车发票': str('齐全'), # document.property '维修记录': str(self.getBooleanText(vehicle, "document", "maintenance_manual")), '重大事故': str(self.getBooleanText(vehicle, 
"summary", "accident")), '承担过户费': transferFee, 'skipKeyword': 1 } #photoList if iQueryId.decode('utf8').isnumeric(): photoList = hxs.xpath('//input[@name="images[]"]/@value') else: gallery = vehicle.get("gallery", None) if gallery is None: logger.error("gallery missing") return errorcode.LOGIC_ERROR, errormsg.PHOTO_NOT_ENOUGH imageConfig = hxs.xpath( '//*[@id="id_images"]/div/script/text()')[0] logger.debug("Baixing image config from xpath = " + imageConfig) imageConfig = imageConfig.replace('imageConfig = ', '') imageJson = json.loads(imageConfig) logger.debug("Baixing image config json = " + json.dumps(imageJson)) photoList = self.uploadPics(gallery.get("photos", []), imageJson) formData = urllib.urlencode(form) imgs = '' for img in photoList: imgs += "&images[]=" + urllib.quote(img) formData += '&images[]=' + imgs headers = copy.copy(self.headers) if mobile != accountMobile: self.confirmSendIfApplicable(parseResult['host'], mobile, accountMobile, headers) headers['Content-Type'] = 'application/x-www-form-urlencoded' headers['Content-Length'] = len(formData) conn = httplib.HTTPConnection(parseResult['host'], timeout=10) conn.request("POST", uri, formData, headers=headers) postResponse = conn.getresponse() postHeaders = postResponse.getheaders() status = postResponse.status postHtml = self.decodeBody(postHeaders, postResponse.read()) conn.close() logger.debug(postHtml) if status == 302: url = self.getViewUrl(parseResult['host'], postHeaders) return errorcode.SUCCESS, url else: if postHtml.count(u'分期贷款买车'): logger.debug(postHtml) return errorcode.SITE_ERROR, errormsg.VEHICLE_DUPLICATED if postHtml.count(u'重复信息'): logger.debug(postHtml) return errorcode.SITE_ERROR, errormsg.VEHICLE_DUPLICATED return errorcode.LOGIC_ERROR, errormsg.SITE_OTHER_ERROR
# NOTE(review): whitespace-collapsed script fragment, kept byte-identical.
# funkalab user-profile scraper: for each id in `c` it scrapes city (h1
# small), description (h3), up to two span.label-info labels (alternating
# via `i`), counts 'board-thumb' occurrences, and saves one row per user.
# This chunk STARTS mid-script -- `c`, `b`, `label` and `i` are defined in an
# unseen earlier chunk -- and ends with a pasted duplicate of the script
# header that is TRUNCATED mid-loop. Caveats to verify in the full file:
# `ciudad`/`descrip`/`label1`/`label2` keep stale values from the previous
# iteration when a selector matches nothing, and are unbound on a first-
# iteration miss; `creados=int` binds the builtin type as a placeholder.
creados=int while b != 50: html=scraperwiki.scrape("http://funkalab.com/users/"+str(c[b])) root=lxml.html.fromstring(html) for el in root.cssselect("h1 small"): ciudad=el.text for el in root.cssselect("h3"): descrip=el.text for el in root.cssselect("span.label-info"): if i==1: label1=el.text i=i+1 else: label2=el.text i=1 creados=html.count('board-thumb') scraperwiki.sqlite.save(unique_keys=['id'],data={'id':c[b],'ciudad':ciudad,'descrip':descrip,'label1':label1,'label2':label2,'creados':creados}) b=b+1 import scraperwiki import lxml.html c=[5,6,14,20,27,29,37,43,50,92,96,99,101,102,103,108,113,114,115,116,118,119,149,153,159,165,170,171,172,177,178,190,204,207,230,249,253,256,270,279,302,308,316,4,15,18,26,31,32,162] b=0 label=str i=1 creados=int while b != 50: html=scraperwiki.scrape("http://funkalab.com/users/"+str(c[b])) root=lxml.html.fromstring(html) for el in root.cssselect("h1 small"): ciudad=el.text
# NOTE(review): whitespace-collapsed fragment, kept byte-identical. This is a
# reformatted (spaced) duplicate of the funkalab users scraper above: same
# loop scraping city/description/labels/board-thumb count per user id and
# saving one row keyed by 'id'. It STARTS mid-script (`c`, `b`, `i` come from
# an unseen earlier chunk) and is TRUNCATED at the end inside the restated
# id-list literal of a pasted second copy. Same stale-variable caveats as the
# sibling copy apply; `creados = int` is a placeholder binding of the builtin.
creados = int while b != 50: html = scraperwiki.scrape("http://funkalab.com/users/" + str(c[b])) root = lxml.html.fromstring(html) for el in root.cssselect("h1 small"): ciudad = el.text for el in root.cssselect("h3"): descrip = el.text for el in root.cssselect("span.label-info"): if i == 1: label1 = el.text i = i + 1 else: label2 = el.text i = 1 creados = html.count('board-thumb') scraperwiki.sqlite.save(unique_keys=['id'], data={ 'id': c[b], 'ciudad': ciudad, 'descrip': descrip, 'label1': label1, 'label2': label2, 'creados': creados }) b = b + 1 import scraperwiki import lxml.html c = [ 5, 6, 14, 20, 27, 29, 37, 43, 50, 92, 96, 99, 101, 102, 103, 108, 113, 114,
# NOTE(review): whitespace-collapsed method spanning three source lines; code
# kept byte-identical, comments only.
# updateVehicle(shareJob) -> (errorcode, message). Updates a listing's price
# on dealer.che168.com: logs in (substituting the shared `publicAccount`
# credentials when account_type == 'public'), builds the Cookie header,
# extracts the 7-digit listing id from shareJob['url'], GETs the
# getSaleDealerPrice endpoint, optionally solves a captcha, then GETs the
# setprice endpoint with the new price (quoted_price / 10000, i.e. 万元).
# Review findings (confirm before fixing):
#  - if urlForApp is empty, `id` (and `dealerid` usage) proceed with `id`
#    possibly unbound/"" -- the error path only logs and falls through.
#  - `id` shadows the builtin.
def updateVehicle(self, shareJob): logger.debug("che168 update vehicle") #登陆模块: shareAccount = shareJob.get("share_account", None) if shareAccount is None: logger.error("get shareAccount failed") return errorcode.AUTH_ERROR, errormsg.LOGIN_FAIL if shareAccount.get('account_type', None) == 'public': shareAccount['username'] = publicAccount['username'] shareAccount['password'] = publicAccount['password'] #cookies = self.sessionServer.getSession('che168', shareAccount['username']) cookies = None if cookies is None: res = self.doLogin(shareAccount['username'], shareAccount['password']) if not res: logger.debug("login error") return errorcode.AUTH_ERROR, errormsg.LOGIN_FAIL self.sessionServer.setSession('che168', shareAccount['username'], self.cookies) else: self.cookies = cookies ks = self.cookies.keys() cookie_list = [] for k in ks: cookie_list.append(k + '=' + self.cookies[k]) self.headers['Cookie'] = string.join(cookie_list, '; ') #获取修改的车源url中id,以及post urlForApp = shareJob.get("url", None) if (urlForApp is not None) and (len(urlForApp) > 0): idInfo = re.compile("[0-9]{7}").findall(str(urlForApp)) if len(idInfo): id = idInfo[0] else: id = "" logger.debug("get dealerid failed in che168 update vehicle") #1.0 get请求修改 #http://dealer.che168.com/Handler/CarManager/CarOperate.ashx?action=getSaleDealerPrice&infoId=6402915&dealerId=216816 dealerid = str(self.cookies['2scDealerId']) editUrl = '/Handler/CarManager/CarOperate.ashx?action=getSaleDealerPrice&infoId=%s&dealerId=%s' % ( id, dealerid) logger.debug("get url:" + str(editUrl)) conn = httplib.HTTPConnection('dealer.che168.com', timeout=timeout_che168) headers = copy.copy(self.headers) conn.request("GET", editUrl, headers=headers) res = conn.getresponse() logger.debug("res:" + str(res)) carRes1 = res.read() #'{"success":0,"price":"0.0000","issale":0}' if res.status != 200: return errorcode.SITE_ERROR, 'che168 update failed' #获取验证码: validcode = '' self.setCookies(res.getheaders()) carRes = res.read() #.decode("GB18030") 
# NOTE(review): `carRes = res.read()` above is the SECOND read of the same
# response -- `carRes1` already consumed the body, so `carRes` (and the
# decoded html below) is always empty; the captcha branch can therefore never
# trigger and validcode is always 'undefined'. Likely a bug -- confirm.
# Also: the 'che68 update failed' message below has a typo (che168).
html = self.decodeBody(res.getheaders(), carRes) html = html.decode('GB18030') #html = res.read().decode('GB18030') if html.count(u'验证码') == 0: validcode = 'undefined' conn.close() else: dom = lxml.html.fromstring(html) checkCodeImageUrls = dom.xpath('.//span/img[@src]/@src') if len(checkCodeImageUrls) == 0: return False checkCodeImageUrl = checkCodeImageUrls[0] conn.close() headers = {} #self.headers headers['Host'] = 'dealer.che168.com' conn = httplib.HTTPConnection("dealer.che168.com", timeout=timeout_che168) conn.request("GET", checkCodeImageUrl, headers=headers) res = conn.getresponse() resHeader = res.getheaders() logger.debug("resHeader === " + str(resHeader)) self.setCookies(resHeader) imageData = res.read() conn.close() image = StringIO(imageData) captcha = self.getCaptcha(image, imageData) if captcha is None: return False validcode = captcha["text"] #2.0 post 发车 #http://dealer.che168.com/Handler/CarManager/CarOperate.ashx?dealerid=216816&action=setprice&status=1&infoid=6402915&price=3.30&buyname=&buyMobile=&sourcePic=undefined&vinPic=undefined&vinCode=undefined vehicle = shareJob.get('vehicle', None) if vehicle is None or vehicle == '': return errorcode.DATA_ERROR, u'缺少车辆信息' price = vehicle.get('price', None) if price is None or '' == price: return errorcode.DATA_ERROR, u'车辆价格为空' newPrice = price.get('quoted_price', None) if newPrice is None or '' == newPrice: return errorcode.DATA_ERROR, u'车辆价格为空' newPrice = Decimal(newPrice) / Decimal(10000) updateUrl = '/Handler/CarManager/CarOperate.ashx?dealerid=%s&action=setprice&status=1&infoid=%s&price=%s&buyname=&buyMobile=&sourcePic=undefined&vinPic=undefined&vinCode=%s' % ( dealerid, id, newPrice, validcode) conn = httplib.HTTPConnection('dealer.che168.com', timeout=timeout_che168) headers = copy.copy(self.headers) conn.request("GET", updateUrl, headers=headers) res = conn.getresponse() logger.debug("res:" + str(res)) if res.status != 200: return errorcode.SITE_ERROR, 'che68 update failed' return 
errorcode.SUCCESS, 'che168 update success'
# NOTE(review): whitespace-collapsed method spanning four source lines; code
# kept byte-identical, comments only.
# postVehicle(shareJob, externalSpec, title, part, photoList, salesid,
# username) -> (bool, url-or-message). Publishes a listing on
# dealers.che168.com: loads /car/publish, solves the captcha if the page
# shows one, assembles a url-encoded form (spec ids from externalSpec,
# price/mileage in 万 units, dates, contact, photos) and POSTs it to
# /Handler/CarManager/SaleCar.ashx; a '0|<infoid>' response yields the
# public listing URL.
def postVehicle(self, shareJob, externalSpec, title, part, photoList, salesid, username): publishUrl = '/car/publish' conn = httplib.HTTPConnection('dealers.che168.com', timeout=timeout_che168) headers = copy.copy(self.headers) conn.request("GET", publishUrl, headers=headers) res = conn.getresponse() logger.debug("res:" + str(res)) if res.status != 200: return errorcode.SITE_ERROR, errormsg.VEHICLE_REMOVE_FAIL validcode = '' self.setCookies(res.getheaders()) carRes = res.read() html = self.decodeBody(res.getheaders(), carRes) html = html.decode('GB18030') if html.count(u'验证码') == 0: validcode = 'undefined' conn.close() else: dom = lxml.html.fromstring(html) checkCodeImageUrls = dom.xpath( './/span[@class="num_pic m110"]/img/@src') if len(checkCodeImageUrls) == 0: return False checkCodeImageUrl = checkCodeImageUrls[0] conn.close() headers = {} #self.headers headers['Host'] = 'dealers.che168.com' conn = httplib.HTTPConnection("dealers.che168.com", timeout=timeout_che168) conn.request("GET", checkCodeImageUrl, headers=headers) res = conn.getresponse() resHeader = res.getheaders() logger.debug("resHeader === " + str(resHeader)) self.setCookies(resHeader) imageData = res.read() conn.close() image = StringIO(imageData) captcha = self.getCaptcha(image, imageData) if captcha is None: return False validcode = captcha["text"] vehicle = shareJob['vehicle'] price = vehicle.get('price', None) if price is None: logger.error('price missing') return errorcode.DATA_ERROR, errormsg.PRICE_EMPTY spec_details = shareJob['vehicle_spec_detail']['details'] #brandid = specDetail['brandId'] brandid = externalSpec['brand']['id'] #seriesid = specDetail['seriesId'] seriesid = externalSpec['series']['id'] specid = externalSpec['model']['id'] form = "infoid=0" form += "&carname=" + urllib.quote(urllib.quote(title.encode('utf-8'))) form += "&brandid=" + str(brandid) form += "&seriesid=" + str(seriesid) form += "&specid=" + str(specid) #TODO:options 是什么? 
# NOTE(review): form assembly continues below. spec_details indexes 23/42
# presumably map to displacement/gearbox columns (consistent with the baixing
# poster elsewhere in this file) -- verify against the spec-detail schema.
# `if len(vin_picture_url):` below guards `picList[0]` with the URL's length
# rather than picList's -- if uploadLicensePic returns [], this raises; and
# when vin_picture_url is None the later branch references picList safely
# only because it was pre-initialized to []. Confirm intended behavior.
form += "&options=" + self.getOptions(specid) form += "&displa=" + spec_details[23] #part['displa'] form += "&gearbos=" + self.getGearbox(spec_details).encode( "unicode_escape").upper().replace("\\U", "%u") #是否包含过户费: quoted_price_include_transfer_fee = price.get( 'quoted_price_include_transfer_fee', True) if quoted_price_include_transfer_fee: form += "&iscontainfee=1" else: form += "&iscontainfee=0" #form += "&iscontainfee=0" #vincode:没有vin码就不需要字段 #vin码和行驶证同时有或者同时无 summary = vehicle.get('summary', None) # drivingLicenseUrl = summary.get('driving_license_picture', None) vin_picture_url = summary.get("vin_picture", None) vincode = vehicle.get("vin", None) if (vincode is None) or (vin_picture_url is None) or ('' == vin_picture_url): pass else: form += "&vincode=" + vincode #行驶证换为vin照片 picList = [] if vin_picture_url is not None: picList = self.uploadLicensePic(vin_picture_url) if len(vin_picture_url): photo = picList[0]['msg'] else: photo = "" form += "&xs_certify=" + photo MerchantSubstituteConfig = shareJob.get('merchant_substitute_config', None) if MerchantSubstituteConfig is not None: merchant_summary = MerchantSubstituteConfig.get('summary', None) if merchant_summary is not None: #质保时间 QualityAssDate = merchant_summary.get('quality_assurance_time', None) if QualityAssDate is not None: form += "&QualityAssDate=" + str(int(QualityAssDate)) #质保公里 QualityAssMile = merchant_summary.get('quality_assurance_mile', None) if QualityAssMile is not None: form += "&QualityAssMile=" + str(QualityAssMile / 10000.0) form += "&price=" + str( Decimal(self.getPrice(shareJob)) / Decimal(10000)) form += "&mileage=" + str( Decimal(vehicle['summary']['mileage']) / Decimal(10000)) form += "&pid=" + str( shareJob['vehicle']['merchant']['address']['province_code']) form += "&cid=" + str( shareJob['vehicle']['merchant']['address']['city_code']) '''share 1: "registration_date", 2: "inspection_date", 3: "commercial_insurance_expire_date", 4: "compulsory_insurance_expire_date" ''' registedate 
= self.getDate(shareJob, 1) if registedate is None: form += "®istedate=" + "2016-6" else: form += "®istedate=" + str(registedate.year) + "-" + str( registedate.month) Examine = self.getDate(shareJob, 2) if Examine is None: form += "&Examine=" + "2016-6" else: form += "&Examine=" + str(Examine.year) + '-' + str(Examine.month) Insurance = self.getDate(shareJob, 3) if Insurance is None: form += "&Insurance=" + str(Examine.year) + "-" + str( Examine.month) else: form += "&Insurance=" + str(Insurance.year) + "-" + str( Insurance.month) #FIXME:Taxtime只有年??? Taxtime = self.getDate(shareJob, 4) if Taxtime is None: form += "&Taxtime=" + str(Examine.year) else: form += "&Taxtime=" + str(Taxtime.year) form += "&TransferTimes=" + str(self.getTradeTimes(shareJob)) form += "&CarUse=1" form += "&colorcode=" + self.getColorCode(shareJob) form += "&linkman=" + urllib.quote(username.encode('utf-8')) form += "&linkmanid=" + str(salesid) #TODO fix bug # a = '刘先生(020-62644497)' # form += "&linkman="+urllib.quote(a) # form += "&linkmanid=118468" Symbol = "\r\n" lateral = "——" * 23 form += "&remark=" + urllib.quote( self.getContentVal(shareJob, Symbol, lateral).encode('utf-8')) #TODO:certificateType什么意思? 
# NOTE(review): the "®istedate" fragments above are "&registedate" mangled by
# HTML-entity decoding ("&reg" -> "®") -- the posted field name is corrupted;
# confirm and restore the ASCII form.
# Below: `result = self.decodeBody(res.getheaders(), res.read())` RE-READS an
# already-consumed response (carRes holds the body), so `result` is always
# empty -- and it is never used. And `return False, "carRes"` returns the
# literal string "carRes" instead of the variable. Both look like bugs.
form += "&CertificateType=0" form += "&pictures=" + self.makePhotos(photoList) #form += "&examinepics=" form += "&fromType=0" form += "&fueltype=1" form += "&isFreeCheckCar=0" form += "&validcode=" + validcode # form += "&isSelectedPromise=0" # form += "&isvalidvincode=0" # form += "&saleDealerPrice=" # form += "&vincode=LGBG22E047Y106663" # form += "&xs_certify=/escimg/g8/M01/32/66/autohomecar__wKjBz1aTloyAb5OfAAGPKOvQxEs632.jpg" logger.debug(form) conn = httplib.HTTPConnection("dealers.che168.com", timeout=timeout_che168) headers = copy.copy(self.headers) headers['Content-Length'] = len(form) headers[ 'Content-Type'] = 'application/x-www-form-urlencoded; charset=UTF-8' conn.request("POST", "/Handler/CarManager/SaleCar.ashx", form, headers=headers) res = conn.getresponse() carRes = res.read() #.decode("GB18030") result = self.decodeBody(res.getheaders(), res.read()) logger.debug(carRes) carRes = carRes.split('|') if (len(carRes) == 2) and (carRes[0] == '0'): url = "http://www.che168.com/dealer/" + str( self.cookies['2scDealerId']) + "/" + carRes[1] + ".html" return True, url return False, "carRes"