def test_fromcsv_file():
    """fromcsv limited to one column should yield an empty set for this fixture."""
    fixture_path = os.path.join(
        'special_snowflake', 'test', 'fixtures',
        'carte-des-licencies-sportifs-dans-les-hauts-de-seine.csv')
    with open(fixture_path, 'r') as csv_file:
        observed = fromcsv(csv_file, delimiter=';', n_columns=1)
        n.assert_set_equal(observed, set())
def test_sites():
    """sites() must be served entirely from the fixture cache.

    The injected getter raises, so any attempt to hit the network fails
    the test; the observed site list must match the known fixture set.
    """
    def fake_get():
        # Any network access is a test failure.
        raise AssertionError('This should not be run.')

    cache_dir = os.path.join('craigsgenerator', 'test', 'fixtures', 'sites')
    observed = sites(get=fake_get, cachedir=cache_dir)
    expected = {
        'cosprings.craigslist.org', 'virgin.craigslist.org', 'albanyga.craigslist.org', 'cebu.craigslist.com.ph', 'zamboanga.craigslist.com.ph', 'xian.craigslist.com.cn', 'hiltonhead.craigslist.org', 'mazatlan.craigslist.com.mx', 'greensboro.craigslist.org', 'cairo.craigslist.org', 'marseilles.craigslist.org', 'florence.craigslist.it', 'ntl.craigslist.com.au', 'kenai.craigslist.org', 'detroit.craigslist.org', 'alicante.craigslist.es', 'ocala.craigslist.org', 'dunedin.craigslist.co.nz', 'tijuana.craigslist.com.mx', 'morgantown.craigslist.org', 'texoma.craigslist.org', 'bulgaria.craigslist.org', 'visalia.craigslist.org', 'soo.craigslist.ca', 'bajasur.craigslist.com.mx', 'nanjing.craigslist.com.cn', 'terrehaute.craigslist.org', 'york.craigslist.org', 'adelaide.craigslist.com.au', 'lancaster.craigslist.org', 'mexicocity.craigslist.com.mx', 'tucson.craigslist.org', 'colombia.craigslist.org', 'delrio.craigslist.org', 'ames.craigslist.org', 'hampshire.craigslist.co.uk', 'johannesburg.craigslist.co.za', 'quincy.craigslist.org', 'lubbock.craigslist.org', 'frankfurt.craigslist.de', 'valdosta.craigslist.org', 'surat.craigslist.co.in', 'ukraine.craigslist.org', 'kokomo.craigslist.org', 'bloomington.craigslist.org', 'yakima.craigslist.org', 'swks.craigslist.org', 'glensfalls.craigslist.org', 'yellowknife.craigslist.ca', 'delaware.craigslist.org', 'melbourne.craigslist.com.au', 'cornwall.craigslist.ca', 'malaga.craigslist.es', 'iowacity.craigslist.org', 'amarillo.craigslist.org', 'siouxfalls.craigslist.org', 'moseslake.craigslist.org', 'okaloosa.craigslist.org', 'kpr.craigslist.org', 'salem.craigslist.org', 'semo.craigslist.org', 'worcester.craigslist.org', 'loz.craigslist.org', 'chengdu.craigslist.com.cn', 'nwga.craigslist.org', 'modesto.craigslist.org', 'hiroshima.craigslist.jp',
        'dublin.craigslist.org', 'kamloops.craigslist.ca', 'stuttgart.craigslist.de', 'christchurch.craigslist.org', 'mankato.craigslist.org', 'forums.craigslist.org', 'fortcollins.craigslist.org', 'greatfalls.craigslist.org', 'guatemala.craigslist.org', 'buenosaires.craigslist.org', 'ahmedabad.craigslist.co.in', 'rochester.craigslist.org', 'greenbay.craigslist.org', 'addisababa.craigslist.org', 'decatur.craigslist.org', 'albany.craigslist.org', 'venice.craigslist.it', 'raleigh.craigslist.org', 'redding.craigslist.org', 'pv.craigslist.com.mx', 'slo.craigslist.org', 'liverpool.craigslist.co.uk', 'showlow.craigslist.org', 'stgeorge.craigslist.org', 'up.craigslist.org', 'belohorizonte.craigslist.org', 'dubai.craigslist.org', 'fingerlakes.craigslist.org', 'tricities.craigslist.org', 'rockies.craigslist.org', 'windsor.craigslist.ca', 'bologna.craigslist.it', 'vancouver.craigslist.ca', 'eugene.craigslist.org', 'pullman.craigslist.org', 'sevilla.craigslist.es', 'centralmich.craigslist.org', 'topeka.craigslist.org', 'belleville.craigslist.ca', 'sudbury.craigslist.ca', 'anchorage.craigslist.org', 'boston.craigslist.org', 'spacecoast.craigslist.org', 'brownsville.craigslist.org', 'westmd.craigslist.org', 'maine.craigslist.org', 'sapporo.craigslist.jp', 'dusseldorf.craigslist.de', 'naga.craigslist.com.ph', 'ithaca.craigslist.org', 'meadville.craigslist.org', 'caribbean.craigslist.org', 'tokyo.craigslist.jp', 'myrtlebeach.craigslist.org', 'roswell.craigslist.org', 'stlouis.craigslist.org', 'baltimore.craigslist.org', 'orangecounty.craigslist.org', 'galveston.craigslist.org', 'reddeer.craigslist.ca', 'wollongong.craigslist.com.au', 'staugustine.craigslist.org', 'seoul.craigslist.co.kr', 'genoa.craigslist.it', 'bangkok.craigslist.co.th', 'tippecanoe.craigslist.org', 'portoalegre.craigslist.org', 'porthuron.craigslist.org', 'newjersey.craigslist.org', 'philadelphia.craigslist.org', 'quadcities.craigslist.org', 'iloilo.craigslist.com.ph', 'lasalle.craigslist.org',
        'fayetteville.craigslist.org', 'seks.craigslist.org', 'kaiserslautern.craigslist.de', 'owensound.craigslist.ca', 'faro.craigslist.pt', 'hattiesburg.craigslist.org', 'beirut.craigslist.org', 'fortaleza.craigslist.org', 'harrisburg.craigslist.org', 'sierravista.craigslist.org', 'abilene.craigslist.org', 'huntsville.craigslist.org', 'sheffield.craigslist.co.uk', 'chihuahua.craigslist.com.mx', 'lynchburg.craigslist.org', 'columbus.craigslist.org', 'winchester.craigslist.org', 'southbend.craigslist.org', 'princegeorge.craigslist.ca', 'atlanta.craigslist.org', 'tehran.craigslist.org', 'cookeville.craigslist.org', 'wichitafalls.craigslist.org', 'nanaimo.craigslist.ca', 'norfolk.craigslist.org', 'ogden.craigslist.org', 'lethbridge.craigslist.ca', 'wv.craigslist.org', 'lincoln.craigslist.org', 'columbusga.craigslist.org', 'duluth.craigslist.org', 'smd.craigslist.org', 'charleston.craigslist.org', 'prescott.craigslist.org', 'pakistan.craigslist.org', 'comoxvalley.craigslist.ca', 'pei.craigslist.ca', 'victoriatx.craigslist.org', 'elpaso.craigslist.org', 'jacksontn.craigslist.org', 'okinawa.craigslist.jp', 'richmond.craigslist.org', 'edmonton.craigslist.ca', 'lyon.craigslist.org', 'newbrunswick.craigslist.ca', 'clovis.craigslist.org', 'louisville.craigslist.org', 'bordeaux.craigslist.org', 'minneapolis.craigslist.org', 'sanangelo.craigslist.org', 'jackson.craigslist.org', 'aberdeen.craigslist.co.uk', 'puebla.craigslist.com.mx', 'stockholm.craigslist.se', 'newlondon.craigslist.org', 'eastoregon.craigslist.org', 'monterrey.craigslist.com.mx', 'jerusalem.craigslist.org', 'eastnc.craigslist.org', 'cardiff.craigslist.co.uk', 'klamath.craigslist.org', 'casablanca.craigslist.org', 'montpellier.craigslist.org', 'canarias.craigslist.es', 'olympic.craigslist.org', 'humboldt.craigslist.org', 'dundee.craigslist.co.uk', 'santafe.craigslist.org', 'milwaukee.craigslist.org', 'burlington.craigslist.org', 'norwich.craigslist.co.uk', 'cairns.craigslist.com.au', 'lakeland.craigslist.org',
        'istanbul.craigslist.com.tr', 'kenya.craigslist.org', 'bath.craigslist.co.uk', 'madrid.craigslist.es', 'madison.craigslist.org', 'fredericksburg.craigslist.org', 'williamsport.craigslist.org', 'abbotsford.craigslist.ca', 'scottsbluff.craigslist.org', 'moscow.craigslist.org', 'wilmington.craigslist.org', 'eastco.craigslist.org', 'athensohio.craigslist.org', 'southcoast.craigslist.org', 'bgky.craigslist.org', 'panamacity.craigslist.org', 'nh.craigslist.org', 'bern.craigslist.ch', 'phoenix.craigslist.org', 'skagit.craigslist.org', 'essen.craigslist.de', 'boulder.craigslist.org', 'hamilton.craigslist.ca', 'bham.craigslist.org', 'toulouse.craigslist.org', 'helena.craigslist.org', 'newhaven.craigslist.org', 'appleton.craigslist.org', 'chennai.craigslist.co.in', 'holland.craigslist.org', 'rio.craigslist.org', 'kalispell.craigslist.org', 'bhubaneswar.craigslist.co.in', 'strasbourg.craigslist.org', 'prague.craigslist.cz', 'athens.craigslist.gr', 'grandrapids.craigslist.org', 'bacolod.craigslist.com.ph', 'onslow.craigslist.org', 'helsinki.craigslist.fi', 'orlando.craigslist.org', 'davaocity.craigslist.com.ph', 'shreveport.craigslist.org', 'canberra.craigslist.com.au', 'newyork.craigslist.org', 'guanajuato.craigslist.com.mx', 'odessa.craigslist.org', 'dothan.craigslist.org', 'chautauqua.craigslist.org', 'amsterdam.craigslist.org', 'wheeling.craigslist.org', 'northmiss.craigslist.org', 'porto.craigslist.pt', 'baghdad.craigslist.org', 'thumb.craigslist.org', 'annapolis.craigslist.org', 'mobile.craigslist.org', 'springfield.craigslist.org', 'santiago.craigslist.org', 'chattanooga.craigslist.org', 'kalamazoo.craigslist.org', 'portland.craigslist.org', 'accra.craigslist.org', 'capecod.craigslist.org', 'danville.craigslist.org', 'durban.craigslist.co.za', 'charlotte.craigslist.org', 'dalian.craigslist.com.cn', 'cologne.craigslist.de', 'oaxaca.craigslist.com.mx', 'jacksonville.craigslist.org', 'wuhan.craigslist.com.cn', 'cariboo.craigslist.ca', 'mansfield.craigslist.org',
        'brussels.craigslist.org', 'eauclaire.craigslist.org', 'hobart.craigslist.com.au', 'buffalo.craigslist.org', 'waco.craigslist.org', 'yuma.craigslist.org', 'allentown.craigslist.org', 'geneva.craigslist.ch', 'desmoines.craigslist.org', 'fairbanks.craigslist.org', 'sherbrooke.craigslist.ca', 'memphis.craigslist.org', 'bakersfield.craigslist.org', 'jakarta.craigslist.org', 'mumbai.craigslist.co.in', 'sendai.craigslist.jp', 'southjersey.craigslist.org', 'merced.craigslist.org', 'quebec.craigslist.ca', 'peterborough.craigslist.ca', 'brasilia.craigslist.org', 'medford.craigslist.org', 'bend.craigslist.org', 'longisland.craigslist.org', 'tuscaloosa.craigslist.org', 'nwks.craigslist.org', 'montevideo.craigslist.org', 'vienna.craigslist.at', 'sicily.craigslist.it', 'london.craigslist.co.uk', 'pensacola.craigslist.org', 'newcastle.craigslist.co.uk', 'nottingham.craigslist.co.uk', 'bremen.craigslist.de', 'copenhagen.craigslist.org', 'oregoncoast.craigslist.org', 'chongqing.craigslist.com.cn', 'chicago.craigslist.org', 'springfieldil.craigslist.org', 'veracruz.craigslist.com.mx', 'roseburg.craigslist.org', 'chambana.craigslist.org', 'florencesc.craigslist.org', 'washingtondc.craigslist.org', 'granada.craigslist.es', 'poconos.craigslist.org', 'winnipeg.craigslist.ca', 'berlin.craigslist.de', 'peace.craigslist.ca', 'cincinnati.craigslist.org', 'savannah.craigslist.org', 'reading.craigslist.org', 'stcloud.craigslist.org', 'kirksville.craigslist.org', 'gulfport.craigslist.org', 'meridian.craigslist.org', 'hyderabad.craigslist.co.in', 'bristol.craigslist.co.uk', 'lawrence.craigslist.org', 'tunis.craigslist.org', 'flagstaff.craigslist.org', 'recife.craigslist.org', 'missoula.craigslist.org', 'stpetersburg.craigslist.org', 'sheboygan.craigslist.org', 'chatham.craigslist.ca', 'rennes.craigslist.org', 'masoncity.craigslist.org', 'evansville.craigslist.org', 'monterey.craigslist.org', 'sanmarcos.craigslist.org', 'regina.craigslist.ca', 'elsalvador.craigslist.org', 'enid.craigslist.org',
        'imperial.craigslist.org', 'quito.craigslist.org', 'troisrivieres.craigslist.ca', 'fortdodge.craigslist.org', 'edinburgh.craigslist.co.uk', 'sarnia.craigslist.ca', 'annarbor.craigslist.org', 'logan.craigslist.org', 'lakecharles.craigslist.org', 'lewiston.craigslist.org', 'warsaw.craigslist.pl', 'sfbay.craigslist.org', 'milan.craigslist.it', 'barcelona.craigslist.es', 'sydney.craigslist.com.au', 'janesville.craigslist.org', 'lisbon.craigslist.pt', 'cotedazur.craigslist.org', 'zurich.craigslist.ch', 'dubuque.craigslist.org', 'pune.craigslist.co.in', 'glasgow.craigslist.co.uk', 'zagreb.craigslist.org', 'shanghai.craigslist.com.cn', 'ottumwa.craigslist.org', 'indore.craigslist.co.in', 'potsdam.craigslist.org', 'hongkong.craigslist.hk', 'auckland.craigslist.org', 'westpalmbeach.craigslist.org', 'cdo.craigslist.com.ph', 'ottawa.craigslist.ca', 'telaviv.craigslist.org', 'pretoria.craigslist.co.za', 'rapidcity.craigslist.org', 'carbondale.craigslist.org', 'yubasutter.craigslist.org', 'cenla.craigslist.org', 'basel.craigslist.ch', 'fortmyers.craigslist.org', 'susanville.craigslist.org', 'omaha.craigslist.org', 'guangzhou.craigslist.com.cn', 'jerseyshore.craigslist.org', 'cranbrook.craigslist.ca', 'bilbao.craigslist.es', 'lapaz.craigslist.org', 'yucatan.craigslist.com.mx', 'ashtabula.craigslist.org', 'youngstown.craigslist.org', 'managua.craigslist.org', 'sacramento.craigslist.org', 'londonon.craigslist.ca', 'eastidaho.craigslist.org', 'altoona.craigslist.org', 'hudsonvalley.craigslist.org', 'twinfalls.craigslist.org', 'rouen.craigslist.org', 'saginaw.craigslist.org', 'dayton.craigslist.org', 'scranton.craigslist.org', 'rome.craigslist.it', 'valencia.craigslist.es', 'nagoya.craigslist.jp', 'jaipur.craigslist.co.in', 'elko.craigslist.org', 'billings.craigslist.org', 'boise.craigslist.org', 'mohave.craigslist.org', 'mcallen.craigslist.org', 'tallahassee.craigslist.org', 'butte.craigslist.org', 'wausau.craigslist.org', 'pennstate.craigslist.org', 'heidelberg.craigslist.de',
        'manchester.craigslist.co.uk', 'bigbend.craigslist.org', 'collegestation.craigslist.org', 'peoria.craigslist.org', 'cedarrapids.craigslist.org', 'daytona.craigslist.org', 'montreal.craigslist.ca', 'spokane.craigslist.org', 'acapulco.craigslist.com.mx', 'calgary.craigslist.ca', 'kolkata.craigslist.co.in', 'goldcoast.craigslist.com.au', 'outerbanks.craigslist.org', 'lafayette.craigslist.org', 'saopaulo.craigslist.org', 'bangladesh.craigslist.org', 'juneau.craigslist.org', 'easttexas.craigslist.org', 'victoria.craigslist.ca', 'swmi.craigslist.org', 'stillwater.craigslist.org', 'fukuoka.craigslist.jp', 'mattoon.craigslist.org', 'utica.craigslist.org', 'vietnam.craigslist.org', 'tuscarawas.craigslist.org', 'clarksville.craigslist.org', 'bn.craigslist.org', 'hickory.craigslist.org', 'fresno.craigslist.org', 'hanford.craigslist.org', 'jxn.craigslist.org', 'malaysia.craigslist.org', 'whitehorse.craigslist.ca', 'hartford.craigslist.org', 'wenatchee.craigslist.org', 'huntington.craigslist.org', 'westky.craigslist.org', 'brantford.craigslist.ca', 'ventura.craigslist.org', 'toronto.craigslist.ca', 'delhi.craigslist.co.in', 'brisbane.craigslist.com.au', 'luxembourg.craigslist.org', 'inlandempire.craigslist.org', 'stjoseph.craigslist.org', 'manila.craigslist.com.ph', 'capetown.craigslist.co.za', 'sd.craigslist.org', 'frederick.craigslist.org', 'darwin.craigslist.com.au', 'lansing.craigslist.org', 'perth.craigslist.com.au', 'caracas.craigslist.org', 'derby.craigslist.co.uk', 'lexington.craigslist.org', 'grandforks.craigslist.org', 'reykjavik.craigslist.org', 'oslo.craigslist.org', 'houston.craigslist.org', 'santodomingo.craigslist.org', 'grenoble.craigslist.org', 'essex.craigslist.co.uk', 'munich.craigslist.de', 'chandigarh.craigslist.co.in', 'grandisland.craigslist.org', 'brainerd.craigslist.org', 'charlestonwv.craigslist.org', 'taipei.craigslist.com.tw', 'athensga.craigslist.org', 'northplatte.craigslist.org', 'westernmass.craigslist.org', 'battlecreek.craigslist.org',
        'nesd.craigslist.org', 'auburn.craigslist.org', 'costarica.craigslist.org', 'www.craigslist.org', 'cadiz.craigslist.es', 'tulsa.craigslist.org', 'richmondin.craigslist.org', 'salina.craigslist.org', 'sunshine.craigslist.ca', 'nd.craigslist.org', 'bangalore.craigslist.co.in', 'honolulu.craigslist.org', 'skeena.craigslist.ca', 'chambersburg.craigslist.org', 'providence.craigslist.org', 'catskills.craigslist.org', 'hermosillo.craigslist.com.mx', 'charlottesville.craigslist.org', 'indianapolis.craigslist.org', 'farmington.craigslist.org', 'bozeman.craigslist.org', 'guelph.craigslist.ca', 'oxford.craigslist.co.uk', 'baleares.craigslist.es', 'juarez.craigslist.com.mx', 'martinsburg.craigslist.org', 'lascruces.craigslist.org', 'kent.craigslist.co.uk', 'belfast.craigslist.co.uk', 'barrie.craigslist.ca', 'beaumont.craigslist.org', 'texarkana.craigslist.org', 'waterloo.craigslist.org', 'nmi.craigslist.org', 'sarasota.craigslist.org', 'kansascity.craigslist.org', 'northernwi.craigslist.org', 'kelowna.craigslist.ca', 'thunderbay.craigslist.ca', 'columbia.craigslist.org', 'kuwait.craigslist.org', 'torino.craigslist.it', 'cambridge.craigslist.co.uk', 'osaka.craigslist.jp', 'devon.craigslist.co.uk', 'monroe.craigslist.org', 'santabarbara.craigslist.org', 'montana.craigslist.org', 'oneonta.craigslist.org', 'panama.craigslist.org', 'montgomery.craigslist.org', 'hangzhou.craigslist.com.cn', 'leeds.craigslist.co.uk', 'miami.craigslist.org', 'fortlauderdale.craigslist.org', 'macon.craigslist.org', 'goa.craigslist.co.in', 'guadalajara.craigslist.com.mx', 'micronesia.craigslist.org', 'batonrouge.craigslist.org', 'niagara.craigslist.ca', 'blacksburg.craigslist.org', 'elmira.craigslist.org', 'fortwayne.craigslist.org', 'eastky.craigslist.org', 'kerala.craigslist.co.in', 'saguenay.craigslist.ca', 'twintiers.craigslist.org', 'lausanne.craigslist.ch', 'rmn.craigslist.org', 'palmsprings.craigslist.org', 'gadsden.craigslist.org', 'bismarck.craigslist.org', 'monroemi.craigslist.org',
        'halifax.craigslist.ca', 'lasvegas.craigslist.org', 'columbiamo.craigslist.org', 'greenville.craigslist.org', 'akroncanton.craigslist.org', 'provo.craigslist.org', 'owensboro.craigslist.org', 'treasure.craigslist.org', 'siskiyou.craigslist.org', 'killeen.craigslist.org', 'tampa.craigslist.org', 'easternshore.craigslist.org', 'wyoming.craigslist.org', 'cleveland.craigslist.org', 'fayar.craigslist.org', 'corvallis.craigslist.org', 'saskatoon.craigslist.ca', 'sandusky.craigslist.org', 'zanesville.craigslist.org', 'singapore.craigslist.com.sg', 'nashville.craigslist.org', 'hannover.craigslist.de', 'lucknow.craigslist.co.in', 'littlerock.craigslist.org', 'brunswick.craigslist.org', 'saltlakecity.craigslist.org', 'wichita.craigslist.org', 'fortsmith.craigslist.org', 'swva.craigslist.org', 'leipzig.craigslist.de', 'loire.craigslist.org', 'ftmcmurray.craigslist.ca', 'chico.craigslist.org', 'bellingham.craigslist.org', 'fargo.craigslist.org', 'cnj.craigslist.org', 'cfl.craigslist.org', 'watertown.craigslist.org', 'laredo.craigslist.org', 'newfoundland.craigslist.ca', 'augusta.craigslist.org', 'mendocino.craigslist.org', 'coventry.craigslist.co.uk', 'pueblo.craigslist.org', 'lacrosse.craigslist.org', 'binghamton.craigslist.org', 'boone.craigslist.org', 'csd.craigslist.org', 'lakecity.craigslist.org', 'albuquerque.craigslist.org', 'budapest.craigslist.org', 'shenzhen.craigslist.com.cn', 'naples.craigslist.it', 'asheville.craigslist.org', 'ksu.craigslist.org', 'dallas.craigslist.org', 'wellington.craigslist.org', 'beijing.craigslist.com.cn', 'losangeles.craigslist.org', 'harrisonburg.craigslist.org', 'sardinia.craigslist.it', 'muskegon.craigslist.org', 'pampanga.craigslist.com.ph', 'gainesville.craigslist.org', 'sanantonio.craigslist.org', 'ramallah.craigslist.org', 'chillicothe.craigslist.org', 'goldcountry.craigslist.org', 'territories.craigslist.ca', 'hat.craigslist.ca', 'toledo.craigslist.org', 'flint.craigslist.org', 'lawton.craigslist.org', 'muncie.craigslist.org',
        'statesboro.craigslist.org', 'erie.craigslist.org', 'limaohio.craigslist.org', 'corpuschristi.craigslist.org', 'parkersburg.craigslist.org', 'pittsburgh.craigslist.org', 'racine.craigslist.org', 'oklahomacity.craigslist.org', 'bucharest.craigslist.org', 'plattsburgh.craigslist.org', 'shenyang.craigslist.com.cn', 'dresden.craigslist.de', 'sandiego.craigslist.org', 'bemidji.craigslist.org', 'stockton.craigslist.org', 'perugia.craigslist.it', 'seattle.craigslist.org', 'curitiba.craigslist.org', 'santamaria.craigslist.org', 'keys.craigslist.org', 'kitchener.craigslist.ca', 'siouxcity.craigslist.org', 'hamburg.craigslist.de', 'denver.craigslist.org', 'houma.craigslist.org', 'rockford.craigslist.org', 'marshall.craigslist.org', 'swv.craigslist.org', 'eastmids.craigslist.co.uk', 'nacogdoches.craigslist.org', 'neworleans.craigslist.org', 'brighton.craigslist.co.uk', 'salvador.craigslist.org', 'natchez.craigslist.org', 'lima.craigslist.org', 'syracuse.craigslist.org', 'nwct.craigslist.org', 'paris.craigslist.org', 'joplin.craigslist.org', 'whistler.craigslist.ca', 'kingston.craigslist.ca', 'nuremberg.craigslist.de', 'roanoke.craigslist.org', 'reno.craigslist.org', 'birmingham.craigslist.co.uk', 'westslope.craigslist.org', 'lille.craigslist.org', 'haifa.craigslist.org', 'winstonsalem.craigslist.org', 'knoxville.craigslist.org', 'puertorico.craigslist.org', 'austin.craigslist.org', 'jonesboro.craigslist.org', 'shoals.craigslist.org',
    }
    assert_set_equal(observed, expected)
def check_files_applying_sort(ref_file, test_file, infmt, outfmt):
    """Assert that transcripts parsed from *test_file* match those in *ref_file*.

    Both files are parsed with the reader registered for *outfmt* in
    ``_reader_funcs``, sorted, and compared pairwise on name, position set,
    score (against ``_default_scores``), and shared attributes.

    Parameters
    ----------
    ref_file : str
        Path to the reference annotation file.
    test_file : str
        Path to the file under test.
    infmt, outfmt : str
        Input/output format names; used to look up the reader, select the
        expected default score, and build error messages.
    """
    opener, args = _reader_funcs[outfmt]
    # Fix: open the files in `with` blocks so the handles are closed instead
    # of leaking; `sorted()` consumes the reader eagerly, so this is safe
    # even if the opener is lazy. Also dropped the redundant list() wrapper.
    with open(ref_file) as ref_fh:
        ref_transcripts = sorted(opener(ref_fh, **args))
    with open(test_file) as test_fh:
        test_transcripts = sorted(opener(test_fh, **args))
    assert_equal(len(ref_transcripts), len(test_transcripts),
                 "%s to %s: Length mismatch in discovered transcripts. Expected '%s'. Found '%s'"
                 % (infmt, outfmt, len(ref_transcripts), len(test_transcripts)))
    for tx1, tx2 in zip(ref_transcripts, test_transcripts):
        # Fix: the original message literal was missing its closing quote
        # ("Found '%s" followed by an empty string literal).
        assert_equal(tx1.get_name(), tx2.get_name(),
                     "%s to %s: Found unordered transcripts. Expected '%s'. Found '%s'"
                     % (infmt, outfmt, tx1.get_name(), tx2.get_name()))
        set1 = tx1.get_position_set()
        set2 = tx2.get_position_set()
        assert_set_equal(set1, set2,
                         "%s to %s: Difference in position sets. Expected '%s'. Found '%s'"
                         % (infmt, outfmt, set1, set2))
        ref_score = str(_default_scores[(infmt, outfmt)])
        found_score = str(tx2.attr["score"])
        assert_equal(found_score, ref_score,
                     "%s to %s: Did not find expected score. Expected: '%s'. Found '%s'"
                     % (infmt, outfmt, ref_score, found_score))
        # BED preserves fewer attributes than GTF, so we can only test on
        # common keys.
        # We exclude "gene_id" for BED and BigBed input because this will not
        # match, and "score" because we already tested it.
        #
        # By testing attr we are also implicitly testing cds_genome_end and
        # cds_genome_start.
        attr1 = tx1.attr
        attr2 = tx2.attr
        # Parenthesized for clarity; (A & B) - S == A & (B - S) for sets,
        # so this matches the original expression's value.
        keyset = (set(attr1.keys()) & set(attr2.keys())) - {"score"}
        if infmt in ("BED", "BigBed") and outfmt != "BED":
            keyset -= {"gene_id"}
        for k in keyset:
            assert_equal(attr1[k], attr2[k],
                         "%s to %s: Difference in attribute %s. Expected '%s'. Found '%s'"
                         % (infmt, outfmt, k, attr1[k], attr2[k]))
def test_resume_load_incomplete():
    """backends.json._resume: loads incomplete results.

    Because resume, aggregate, and summary all use the function called
    _resume we can't remove incomplete tests here. It's probably worth
    doing a refactor to split some code out and allow this to be done in
    the resume path.
    """
    with utils.tempdir() as tmpdir:
        backend = backends.json.JSONBackend(tmpdir)
        backend.initialize(BACKEND_INITIAL_META)
        # Write four results, including one 'incomplete'; _resume must
        # surface all of them.
        outcomes = [
            ('group1/test1', 'fail'),
            ('group1/test2', 'pass'),
            ('group2/test3', 'crash'),
            ('group2/test4', 'incomplete'),
        ]
        for name, status in outcomes:
            with backend.write_test(name) as writer:
                writer(results.TestResult(status))
        resumed = backends.json._resume(tmpdir)
        nt.assert_set_equal(
            set(resumed.tests.keys()),
            {name for name, _ in outcomes},
        )
def test_read_refs_into_cache_set_associative_lru(self):
    """read_refs_into_cache should work for set associative LRU cache"""
    refs = sim.get_addr_refs(
        word_addrs=TestReadRefs.WORD_ADDRS,
        num_addr_bits=8,
        num_tag_bits=5,
        num_index_bits=2,
        num_offset_bits=1)
    cache, ref_statuses = sim.read_refs_into_cache(
        refs=refs,
        num_sets=4,
        num_blocks_per_set=3,
        num_words_per_block=2,
        num_index_bits=2,
        replacement_policy='lru')
    expected_cache = {
        '00': [
            {'tag': '01011', 'data': [88, 89]},
        ],
        '01': [
            {'tag': '00000', 'data': [2, 3]},
            {'tag': '00101', 'data': [42, 43]},
            {'tag': '10111', 'data': [186, 187]},
        ],
        '10': [
            {'tag': '10110', 'data': [180, 181]},
            {'tag': '00101', 'data': [44, 45]},
            {'tag': '11111', 'data': [252, 253]},
        ],
        '11': [
            {'tag': '10111', 'data': [190, 191]},
            {'tag': '00001', 'data': [14, 15]},
        ],
    }
    nose.assert_dict_equal(cache, expected_cache)
    nose.assert_set_equal(self.get_hits(ref_statuses), {3, 6, 8})
def test_listdir_normalmode(self):
    """listdir must report exactly the expected entry names for each test dir."""
    log.info('check listdir in normal mode')
    for dirname, expected_names in testTree.items():
        observed = {entry['name'] for entry in self.swiftfs.listdir(dirname)}
        assert_set_equal(observed, expected_names)
def _do_test_sparql(self, datasetname, sparql, expected):
    """Run *sparql* against the named dataset attribute and check the result.

    A list expectation is compared in order; any other expectation is
    compared as an (unordered) set.
    """
    query_result = getattr(self, datasetname).query(sparql)
    if isinstance(expected, list):
        assert_list_equal(list(query_result), expected)
    else:
        assert_set_equal(set(query_result), expected)
def test_add_multiple_groups():
    """Registering several field locations should expose them all via `groups`."""
    fields = ModelDataFields()
    for group, size in (("node", 12), ("cell", 2), ("face", 7), ("link", 7)):
        fields.new_field_location(group, size)
    assert_set_equal({"node", "cell", "face", "link"}, fields.groups)
def test_child_critical(self):
    # D is the path 0 -> 1 -> ... -> 4; only vertex 4 is critical and the
    # non-critical root 0 reaches it, so the source cover must be {0}.
    D: nx.DiGraph = nx.path_graph(5, nx.DiGraph())
    # Kept from the original: re-adding an edge the path already contains
    # is a no-op in networkx.
    D.add_edge(0, 1)
    assert_set_equal(source_cover(D, {4}), {0})
def test_slot_call():
    """
    Test the __call__ method passes the arguments given.
    """
    recorded = []
    slot = Slot(lambda **kwa: recorded.append(kwa),
                constkeyword1=1, constkeyword2=2, keyword3=3)
    slot(callkeyword1=1, callkeyword2=2, keyword3=30)
    # Exactly one invocation is expected.
    eq_(len(recorded), 1)
    kwargs = recorded.pop(0)
    # kwargs should contain the merged keywords.
    assert_set_equal(
        set(kwargs.keys()),
        {'constkeyword1', 'constkeyword2', 'callkeyword1',
         'callkeyword2', 'keyword3'})
    for key, value in (('constkeyword1', 1), ('constkeyword2', 2),
                       ('callkeyword1', 1), ('callkeyword2', 2)):
        eq_(kwargs[key], value)
    # keyword3 is given by both; the call should override the constructor.
    eq_(kwargs['keyword3'], 30)
def test_union():
    """(IJ + JI) should behave as the set union of the two relations."""
    left = set(zip(*IJ.fetch('i', 'j')))
    right = set(zip(*JI.fetch('i', 'j')))
    # Ensure IJ and JI are non-trivial before testing the union.
    assert_true(len(left) > 0 and len(right) > 0 and len(IJ() * JI()) < len(left))
    combined = set(zip(*(IJ + JI).fetch('i', 'j')))  # union
    assert_set_equal(left.union(right), combined)
    assert_equal(len(IJ + JI), len(combined))
def test_msghandler_enter_state_reject():
    """
    Test the message considers 'REJECT' an exit state.
    """
    done_calls = []
    aprshandler = DummyAPRSHandler()
    msghandler = APRSMessageHandler(
        aprshandler=aprshandler,
        addressee='CQ',
        path=['WIDE1-1', 'WIDE2-1'],
        message='testing',
        replyack=False,
        log=logging.getLogger('messagehandler'))
    msghandler.done.connect(lambda **k: done_calls.append(k))
    # The handler starts out in the INIT state.
    eq_(msghandler.state, msghandler.HandlerState.INIT)
    # Tell it to go to the reject state.
    msghandler._enter_state(msghandler.HandlerState.REJECT)
    # The 'done' signal should have fired exactly once.
    eq_(len(done_calls), 1)
    call = done_calls.pop(0)
    assert_set_equal(set(call.keys()), {'handler', 'state'})
    assert_is(call['handler'], msghandler)
    eq_(call['state'], msghandler.HandlerState.REJECT)
def test_single_critical(self):
    # D is a single vertex which is itself critical;
    # the source cover must contain it.
    D: nx.DiGraph = nx.DiGraph()
    D.add_node(0)
    assert_set_equal(source_cover(D, {0}), {0})
def test_get_time_series_names():
    """Only names carrying an '@t=' suffix are reported, deduplicated."""
    observed = tsn.get_time_series_names([
        'ice_surface__temperature@t=0',
        'ice_surface__temperature@t=1',
        'earth_surface__temperature',
    ])
    assert_set_equal(observed, {'ice_surface__temperature'})
def test_parameters(self):
    """Metadata parsed from the fixture spectrum should match known values."""
    elements = self.spc.metadata.as_dictionary()['Sample']['elements']
    sem_dict = (self.spc.metadata.as_dictionary()
                ['Acquisition_instrument']['SEM'])
    eds_dict = sem_dict['Detector']['EDS']
    signal_dict = self.spc.metadata.as_dictionary()['Signal']
    # Testing SEM parameters
    nt.assert_almost_equal(22, sem_dict['beam_energy'])
    nt.assert_almost_equal(0, sem_dict['tilt_stage'])
    # Testing EDS parameters
    nt.assert_almost_equal(0, eds_dict['azimuth_angle'])
    nt.assert_almost_equal(34, eds_dict['elevation_angle'])
    nt.assert_almost_equal(129.31299, eds_dict['energy_resolution_MnKa'],
                           places=5)
    nt.assert_almost_equal(50.000004, eds_dict['live_time'], places=6)
    # Testing elements
    nt.assert_set_equal({'Al', 'C', 'Ce', 'Cu', 'F', 'Ho', 'Mg', 'O'},
                        set(elements))
    # Testing HyperSpy parameters
    nt.assert_equal(True, signal_dict['binned'])
    nt.assert_equal('EDS_SEM', signal_dict['signal_type'])
    nt.assert_is_instance(self.spc, signals.EDSSEMSpectrum)
def test_outer_union_fail():
    """Union of two tables with different primary keys raises an error."""
    # NOTE(review): despite the docstring, no assert_raises wrapper is
    # present here — presumably the raise is expected from len(t) below
    # under the surrounding test harness; confirm against the caller.
    union = Trial + Ephys
    union.fetch()
    combined_names = set(Trial.heading.names) | set(Ephys.heading.names)
    assert_set_equal(set(union.heading.names), combined_names)
    len(union)
def test_thread_pooled_downloader(self):
    """ThreadedDownloader should fetch every page of the local sitemap."""
    req_resp_middleware = RequestResponseMiddleware(
        prefix_url=self.httpd.location,
        request_factory=lambda x: x,
    )
    collect_middleware = CollectRequestResponseMiddleware()
    downloader = ThreadedDownloader(
        middlewares=[UrllibAdapterMiddleware(), collect_middleware])
    # The prefixing middleware must run before the adapter/collector.
    downloader.middlewares.insert(0, req_resp_middleware)
    pomp = Pomp(
        downloader=downloader,
        pipelines=[],
    )

    class Crawler(DummyCrawler):
        ENTRY_REQUESTS = '/root'

    pomp.pump(Crawler())
    fetched_paths = {
        request.url.replace(self.httpd.location, '')
        for request in collect_middleware.requests
    }
    assert_set_equal(fetched_paths, set(self.httpd.sitemap.keys()))
def test_no_critical(self):
    # D is a single isolated vertex and the critical set is empty,
    # so the source cover is expected to be empty as well.
    # NOTE(review): the original comment said "single directed edge",
    # but the graph built here has one node and no edges.
    D: nx.DiGraph = nx.DiGraph()
    D.add_node(0)
    assert_set_equal(source_cover(D, set()), set())
def test_tree_leafs_critical(self):
    # D is a balanced binary tree of depth 5 whose leaves (out-degree 0)
    # are all critical; the root alone reaches every leaf, so the
    # source cover is expected to be {0}.
    D: nx.DiGraph = nx.balanced_tree(2, 5, nx.DiGraph())
    leaves: Set = {node for node in D.nodes if D.out_degree(node) == 0}
    cover: Set = source_cover(D, leaves)
    assert_set_equal(cover, {0})
def test_from_image_2_subpalettes(self):
    """Round-trip an image through a 2-subpalette arrangement and verify
    the resulting palettes, tile/subpalette assignments, and image."""
    palette = EbPalette(2, 4)
    tileset = EbGraphicTileset(num_tiles=4, tile_width=8, tile_height=8)
    arrangement = EbTileArrangement(width=4, height=1)
    arrangement.from_image(image=self.tile_8x8_2bpp_3_img,
                           tileset=tileset,
                           palette=palette)
    # Mask off the low 3 bits of each channel before comparing, matching
    # the quantization the arrangement applies — presumably 5-bit-per-channel
    # SNES color; confirm against EbPalette.
    img_palette = self.tile_8x8_2bpp_3_img.getpalette()
    self.tile_8x8_2bpp_3_img.putpalette([x & 0xF8 for x in img_palette])
    before_image_rgb = self.tile_8x8_2bpp_3_img.convert("RGB")
    after_image_rgb = arrangement.image(tileset, palette).convert("RGB")
    assert_images_equal(before_image_rgb, after_image_rgb)
    assert_set_equal(
        {palette[0, i] for i in range(4)},
        {EbColor(24, 0, 248), EbColor(0, 248, 24),
         EbColor(152, 0, 248), EbColor(248, 144, 0)},
    )
    assert_set_equal(
        {palette[1, i] for i in range(4)},
        {EbColor(24, 0, 248), EbColor(0, 248, 24),
         EbColor(216, 248, 0), EbColor(152, 0, 248)},
    )
    assert_equal(arrangement[0, 0].tile, 0)
    assert_equal(arrangement[0, 0].subpalette, 1)
    assert_equal({tileset[0][0][i] for i in [-1, -2, -3, -4]}, {0, 1, 2, 3})
    assert_equal(arrangement[1, 0].tile, 1)
    assert_equal(arrangement[1, 0].subpalette, 0)
    assert_equal({tileset[1][0][i] for i in [-1, -2, -3, -4]}, {0, 1, 2, 3})
    assert_equal(arrangement[2, 0].tile, 2)
    assert_equal(arrangement[2, 0].subpalette, 1)
    assert_equal(arrangement[3, 0].tile, 3)
    assert_equal(arrangement[3, 0].subpalette, 0)
def test_dedupe():
    """_dedupe on this input keeps only ('a',) and ('b', 'c') — tuples
    containing an already-kept tuple's elements are dropped (presumed
    superset elimination; confirm against _dedupe's contract)."""
    observed = _dedupe({
        ('a',),
        ('a', 'b'),
        ('b', 'c'),
        ('a', 'c'),
        ('a', 'b', 'c'),
    })
    n.assert_set_equal(observed, {('a',), ('b', 'c')})
def test_urllib_downloader(self):
    """UrllibDownloader should fetch every page of the local sitemap."""
    req_resp_middleware = RequestResponseMiddleware(
        prefix_url=self.httpd.location,
        request_factory=lambda x: x,
    )
    collect_middleware = CollectRequestResponseMiddleware()
    downloader = UrllibDownloader(
        middlewares=[UrllibAdapterMiddleware(), collect_middleware]
    )
    # The prefixing middleware must run before the adapter/collector.
    downloader.middlewares.insert(0, req_resp_middleware)
    pomp = Pomp(
        downloader=downloader,
        pipelines=[],
    )

    class Crawler(DummyCrawler):
        ENTRY_REQUESTS = '/root'

    pomp.pump(Crawler())
    fetched_paths = {
        request.url.replace(self.httpd.location, '')
        for request in collect_middleware.requests
    }
    assert_set_equal(fetched_paths, set(self.httpd.sitemap.keys()))
def test_Cases_prop_crossselect3():
    """Check (difference) output of select method; single nplies and ps."""
    actual = cases2a.select(nplies=4, ps=3, how="difference")
    # Two independent formulations of the expected result.
    by_set_algebra = cases2a.select(ps=3) - cases2a.select(nplies=4)
    by_known_lms = set(cases2b3.LMs[:-1])
    nt.assert_set_equal(actual, by_set_algebra)
    nt.assert_set_equal(actual, by_known_lms)
def test_Cases_prop_crossselect6():
    """Check (intersection) output of select method; multiple nplies and ps."""
    actual = cases2a.select(nplies=[2, 4], ps=[3, 4], how="intersection")
    candidates = it.chain(cases2b2.LMs, cases2b3.LMs, cases2b4.LMs)
    # Non-short-circuit `&` kept from the original: both predicates are
    # always evaluated.
    expected = {LM for LM in candidates
                if (LM.nplies in (2, 4)) & (LM.p in (3, 4))}
    nt.assert_set_equal(actual, expected)
def test_Cases_prop_crossselect2():
    """Check (intersection) output of select method; single nplies and ps."""
    actual = cases2a.select(nplies=4, ps=3, how="intersection")
    candidates = it.chain(cases2b2.LMs, cases2b3.LMs, cases2b4.LMs)
    # Non-short-circuit `&` kept from the original: both predicates are
    # always evaluated.
    by_filter = {LM for LM in candidates
                 if (LM.nplies == 4) & (LM.p == 3)}
    by_known_lms = {cases2b3.LMs[-1]}
    nt.assert_set_equal(actual, by_filter)
    nt.assert_set_equal(actual, by_known_lms)
def test_apply_central_symmetry():
    """Folding zones about the center keeps half the keys and merges counts."""
    x_grid = np.linspace(0, 10, num=3)
    y_grid = np.linspace(0, 10, num=6)
    # Start with every zone at zero, then mark a handful of cells.
    zones = {(zone_x, zone_y): 0
             for zone_x in range(len(x_grid) + 1)
             for zone_y in range(len(y_grid) + 1)}
    zones[(1, 5)] = 7
    zones[(2, 1)] = 1
    zones[(1, 4)] = 3
    zones[(2, 2)] = 4
    zones[(1, 3)] = 10
    zones[(2, 3)] = 8
    zones = apply_central_symmetry(zones, x_grid, y_grid)
    half_court_keys = [(zone_x, zone_y)
                       for zone_x in range(len(x_grid) + 1)
                       for zone_y in range(int(np.ceil((len(y_grid) + 1) / 2)))]
    # Test that the keys are correct (the right zones have been removed).
    assert_set_equal(set(zones.keys()), set(half_court_keys))
    # Test that the numbers are right.
    assert_equal(zones[(2, 1)], 8)
    assert_equal(zones[(2, 2)], 7)
    assert_equal(zones[(1, 3)], 10)
    assert_equal(zones[(2, 3)], 8)
def test_add_multiple_groups():
    """Every new_field_location call should register its group name."""
    fields = ModelDataFields()
    for group, size in (('node', 12), ('cell', 2), ('face', 7), ('link', 7)):
        fields.new_field_location(group, size)
    assert_set_equal({'node', 'cell', 'face', 'link'}, fields.groups)
def test_kdtree_raw_comp_nodes_searcher():
    """KDTreeNodeSearcher and RawNodeSearcher must agree on every query.

    Random nodes are scattered over roughly [-2, 7] x [-4, 5]; for each
    random query point and radius, both the index search and the node
    search of the two searchers must return identical result sets.
    """
    nodes_size = 100
    coords = np.random.rand(nodes_size, 2)
    coords[:, 0] = -2 + coords[:, 0] * 9
    # BUG FIX: the y column was previously scaled from coords[:, 0],
    # which made x and y perfectly correlated (a degenerate diagonal
    # layout); scale it from its own column, as done for `xs` below.
    coords[:, 1] = -4 + coords[:, 1] * 9
    nodes = [MockNode(coord) for coord in coords]
    kd_searcher = KDTreeNodeSearcher().setup(nodes=nodes)
    raw_searcher = RawNodeSearcher().setup(nodes=nodes)

    # Random query points over the same domain, with radii in [0, 5).
    xs_size = 20
    xs = np.random.rand(xs_size, 2)
    xs[:, 0] = -2 + xs[:, 0] * 9
    xs[:, 1] = -4 + xs[:, 1] * 9
    rads = np.random.rand(xs_size) * 5

    for x, rad in zip(xs, rads):
        kd_indes = kd_searcher.search_indes(x, rad)
        raw_indes = raw_searcher.search_indes(x, rad)
        assert_set_equal(set(kd_indes), set(raw_indes))
        kd_nodes = kd_searcher.search(x, rad)
        raw_nodes = raw_searcher.search(x, rad)
        eq_(len(kd_nodes), len(raw_nodes))
        assert_set_equal(set(kd_nodes), set(raw_nodes))
def test_raw_kdtree_segment_searcher_cmp():
    """RawSegmentSearcher and KDTreeSegmentSearcher must return identical
    results for random segment queries."""
    start_range = np.array([[-10, -10], [90, 90]], dtype=float)
    segments = gen_rand_segments(start_range, 10, 20, 100)

    raw_searcher = RawSegmentSearcher().setup(segments=segments)
    kd_searcher = KDTreeSegmentSearcher().setup(segments=segments)

    pts_num = 200
    pts = rand_coords(start_range, pts_num)
    rads = rand_coords(np.array([[5], [60]], dtype=float), pts_num)
    eps = 1e-6

    for pt, rad in zip(pts, rads):
        # Index search and segment search must both agree between the
        # brute-force and kd-tree implementations.
        raw_indes = raw_searcher.search_indes(pt, rad, eps)
        kd_indes = kd_searcher.search_indes(pt, rad, eps)
        eq_(len(raw_indes), len(kd_indes))
        assert_set_equal(set(raw_indes), set(kd_indes))

        raw_segs = raw_searcher.search(pt, rad, eps)
        kd_segs = kd_searcher.search(pt, rad, eps)
        eq_(len(raw_segs), len(kd_segs))
        assert_set_equal(set(raw_segs), set(kd_segs))
def test_directed_path_joins_ends(self):
    """A directed path on n vertices should be augmented by the single
    edge that closes it into a cycle: (n - 1, 0)."""
    for size in range(2, 11):
        path = nx.path_graph(size, nx.DiGraph())
        assert_set_equal(
            EswaranTarjan.eswaran_tarjan(path), {(size - 1, 0)})
def test_Cases_prop_crossselect1():
    """Check (union) output of select method; single nplies and ps."""
    expected = {
        LM
        for LM in it.chain(cases2b2.LMs, cases2b3.LMs, cases2b4.LMs)
        if (LM.nplies == 4) | (LM.p == 3)
    }
    # Omitting `how` presumably defaults to "union": both calls must match.
    nt.assert_set_equal(cases2a.select(nplies=4, ps=3), expected)
    nt.assert_set_equal(cases2a.select(nplies=4, ps=3, how="union"), expected)
def test_child_non_critical(self):
    """A digraph that is a single non-critical path, with no critical
    vertices, should produce an empty source cover."""
    D = nx.DiGraph()
    D.add_edge(0, 1)
    assert_set_equal(source_cover(D, set()), set())
def test_snow():
    'We should somehow find out about the status code issue.'
    fixture = os.path.join(
        'blizzard', 'test', 'fixture', 'budget-credits_de_paiement.p')
    with open(fixture, 'rb') as fp:
        download = pickle.load(fp)
    dataset = {
        'datasetid': 8,
        'catalog': 'foo',
        'fields': [],
        'download': download,
    }
    # No unique indices are expected for this fixture.
    n.assert_set_equal(_snow(dataset)['unique_indices'], set())
def test_resume_load_incomplete():
    """backends.json._resume: loads incomplete results.

    Because resume, aggregate, and summary all use the function called
    _resume we can't remove incomplete tests here. It's probably worth
    doing a refactor to split some code out and allow this to be done in
    the resume path.
    """
    written = [
        ("group1/test1", 'fail'),
        ("group1/test2", 'pass'),
        ("group2/test3", 'crash'),
        ("group2/test4", 'incomplete'),
    ]
    with utils.nose.tempdir() as f:
        backend = backends.json.JSONBackend(f)
        backend.initialize(BACKEND_INITIAL_META)
        for name, status in written:
            with backend.write_test(name) as t:
                t(results.TestResult(status))

        test = backends.json._resume(f)

        # All four results, including the incomplete one, must load.
        nt.assert_set_equal(
            set(test.tests.keys()),
            {name for name, _ in written},
        )
def test_asyncio_engine(self):
    """Pump DummyCrawler through AiohttpDownloader on an asyncio loop and
    check the set of requested paths against the server's sitemap."""
    req_resp_middleware = RequestResponseMiddleware(
        prefix_url=self.httpd.location,
        request_factory=lambda x: x,
    )
    collector = CollectRequestResponseMiddleware()
    downloader = AiohttpDownloader(
        middlewares=[AiohttpAdapterMiddleware(), collector]
    )
    downloader.middlewares.insert(0, req_resp_middleware)

    pomp = AioPomp(downloader=downloader, pipelines=[])

    class Crawler(DummyCrawler):
        ENTRY_REQUESTS = '/root'

    # NOTE(review): asyncio.get_event_loop() is deprecated for implicit
    # loop creation on modern Python; kept as-is to preserve behavior.
    loop = asyncio.get_event_loop()
    loop.run_until_complete(ensure_future(pomp.pump(Crawler())))
    loop.close()

    requested_paths = {
        r.url.replace(self.httpd.location, '')
        for r in collector.requests
    }
    assert_set_equal(requested_paths, set(self.httpd.sitemap.keys()))
def test_user_info(self):
    """fetch_user_token should populate the full user token from the
    user-info headers."""
    client = self.get_client()
    subject = "Joe USER"
    headers = {
        HTTP_HEADER_USER_INFO: subject,
        HTTP_HEADER_SIGNATURE: "",
    }

    t = client.fetch_user_token(headers)

    # Validity and principal fields.
    nt.assert_equal(self.appId, t.validity.issuedTo)
    nt.assert_equal(self.appId, t.validity.issuedFor)
    nt.assert_equal(subject, t.tokenPrincipal.principal)
    nt.assert_equal("Joe User", t.tokenPrincipal.name)

    # Authorizations and basic user attributes.
    nt.assert_set_equal({'A', 'B', 'C'},
                        t.authorizations.formalAuthorizations)
    nt.assert_equal("EzBake", t.organization)
    nt.assert_equal("USA", t.citizenship)
    nt.assert_equal("low", t.authorizationLevel)

    nt.assert_dict_equal(
        {
            'EzBake': ['Core'],
            '42six': ['Dev', 'Emp'],
            'Nothing': ['groups', 'group2'],
        },
        t.externalProjectGroups)

    # Community membership details for the 'EzBake' community.
    community_membership = t.externalCommunities['EzBake']
    nt.assert_equal("office", community_membership.type)
    nt.assert_equal("EzBake", community_membership.organization)
    nt.assert_true(community_membership.flags['ACIP'])
    nt.assert_list_equal(['topic1', 'topic2'], community_membership.topics)
    nt.assert_list_equal(['region1', 'region2', 'region3'],
                         community_membership.regions)
    nt.assert_list_equal([], community_membership.groups)
def _do_test_quads(self, datasetname, triple_or_quad, expected):
    """Assert that the named dataset's quads(triple_or_quad) yields
    exactly the expected (s, p, o, context-identifier) tuples."""
    dataset = getattr(self, datasetname)
    observed = set()
    for s, p, o, ctx in dataset.quads(triple_or_quad):
        observed.add((s, p, o, ctx.identifier))
    assert_set_equal(observed, set(expected))
def test_concurrent_urllib_downloader(self):
    """ConcurrentUrllibDownloader should fetch every sitemap URL."""
    req_resp_middleware = RequestResponseMiddleware(
        prefix_url=self.httpd.location,
        request_factory=UrllibHttpRequest,
    )
    collector = CollectRequestResponseMiddleware()
    downloader = ConcurrentUrllibDownloader(middlewares=[collector])
    downloader.middlewares.insert(0, req_resp_middleware)

    pomp = Pomp(downloader=downloader, pipelines=[])

    class Crawler(DummyCrawler):
        ENTRY_REQUESTS = '/root'

    pomp.pump(Crawler())

    requested_paths = {
        r.url.replace(self.httpd.location, '')
        for r in collector.requests
    }
    assert_set_equal(requested_paths, set(self.httpd.sitemap.keys()))
def test_visible_support_domain_searcher():
    """search_indes results should match the fixture's expected indices."""
    data = _visible_support_domain_data()
    searcher = data['searcher']
    for x, bnd, expected in data['x_bnd_exp_s']:
        actual = searcher.search_indes(x, bnd)
        eq_(len(expected), len(actual))
        assert_set_equal(set(expected), set(actual))
def test_duplicate_floor_requests():
    """should gracefully ignore duplicate floor requests"""
    elevator = Elevator(num_floors=5, starting_floor=1)
    # Request the same floor twice; it must collapse to one pending floor.
    for _ in range(2):
        elevator.request_floor(3)
    nose.assert_set_equal(elevator.requested_floors, {3})
    elevator.travel()
    nose.assert_list_equal(elevator.visited_floors, [3])
def test_parallelize_spawns_processes_and_gets_correct_ansswer():
    """parallelize should yield the same values as a serial map while
    doing the work in different processes."""
    xs = range(500)
    parallel_func = parallelize(util_func)
    serial_vals, serial_pids = zip(*[util_func(item) for item in xs])
    parallel_vals, parallel_pids = zip(*parallel_func(xs))
    # Same answers...
    assert_set_equal(set(serial_vals), set(parallel_vals))
    # ...but produced by a different set of process ids.
    assert_not_equal(set(serial_pids), set(parallel_pids))
def test_Cases_prop_crossselect7():
    """Check (difference) output of select method; multiple nplies and
    single ps.
    """
    # Subtracts nplies from ps.
    actual = cases2a.select(nplies=[2, 4], ps=3, how="difference")
    nt.assert_set_equal(
        actual, cases2a.select(ps=3) - cases2a.select(nplies=[2, 4]))
    # Equivalent to every other LM of cases2b3.
    nt.assert_set_equal(actual, set(cases2b3.LMs[::2]))
def test_init(self):
    """_init_cache should recount file numbers from the disk table."""
    n.assert_set_equal(set(self.db.file_numbers.values()), {0})
    # Insert three rows with one number and a single row with another...
    for file_number in (442, 442, 442, 8):
        self.db.disk['file_numbers'].insert({'file_number': file_number})
    self.db._init_cache()
    # ...so the cached counts now contain 3, 1 and the pre-existing 0.
    n.assert_set_equal(set(self.db.file_numbers.values()), {3, 1, 0})
def test_parallelize_spawns_processes_and_gets_correct_ansswer():
    """parallelize must match the serial results but use other processes.

    NOTE(review): an identically-named test appears earlier in this
    source; presumably they live in different modules — confirm.
    """
    inputs = range(500)
    wrapped = parallelize(util_func)
    expected_pairs = [util_func(value) for value in inputs]
    serial_vals, serial_pids = zip(*expected_pairs)
    parallel_vals, parallel_pids = zip(*wrapped(inputs))
    assert_set_equal(set(serial_vals), set(parallel_vals))
    assert_not_equal(set(serial_pids), set(parallel_pids))
def test_main(out, get_matches, transform_data):
    """should accept input and produce correct output when run from CLI"""
    player.main()
    transform_data.assert_called_once_with({'cards': []})
    output = out.getvalue()
    # Output must carry no surrounding whitespace...
    nose.assert_equal(output, output.strip())
    # ...and decode to exactly the expected match set.
    nose.assert_set_equal(set(json.loads(output)), {'hbu', 'kca', 'pto'})
def test_simple_needs_to_be_augmented(self):
    """A graph with a perfect matching {(0, 1), (2, 3)} still needs
    augmenting edges to cover {0, 2}."""
    G = nx.Graph()
    G.add_edges_from({(0, 1), (2, 3)})
    assert_set_equal(
        bipartite_matching_augmentation(G, {0, 2}), {(0, 3), (2, 1)})
    # After adding (0, 3) only one augmenting edge remains necessary.
    G.add_edge(0, 3)
    assert_set_equal(bipartite_matching_augmentation(G, {0, 2}), {(2, 1)})
def test_init_params(self):
    """should store correct initial parameters when elevator is initialized"""
    # NOTE(review): `noset` is presumably this module's alias for
    # nose.tools — confirm against the file's imports.
    controller = self.controller
    for attr, value in (('num_floors', 5),
                        ('current_floor', 2),
                        ('num_floors_traveled', 0)):
        noset.assert_equal(getattr(controller, attr), value)
    noset.assert_set_equal(controller.requested_floors, set())
    noset.assert_list_equal(controller.visited_floors, [])
def compare_sets(label, found, expected, crossmap, ref, slide):
    """Assert set equality with a detailed diagnostic message.

    The failure message reports the test label, the crossmap/ref/slide
    context, and both directions of the set difference.
    """
    message = (
        "Failed test %s.\n"
        "\tCrossmap: %s\n"
        "\tRef: %s\n"
        "\tslide: %s\n"
        "Items expected but not found: %s\n"
        "Vice versa: %s\n"
        % (label, crossmap, ref, slide,
           expected - found, found - expected)
    )
    assert_set_equal(expected, found, message)
def test_init_params():
    """should store correct initial parameters when elevator is initialized"""
    elevator = Elevator(num_floors=5, starting_floor=2)
    # Scalar attributes reflect the constructor arguments.
    nose.assert_equal(elevator.num_floors, 5)
    nose.assert_equal(elevator.current_floor, 2)
    # No requests, visits, or travel have happened yet.
    nose.assert_set_equal(elevator.requested_floors, set())
    nose.assert_list_equal(elevator.visited_floors, [])
    nose.assert_equal(elevator.num_floors_traveled, 0)
def test_shell_files_correspond():
    '.sh files in ``code`` should correspond with test directories in ``test``'
    # Skip silently when the expected directories are absent.
    if not (os.path.isdir('code') and os.path.isdir('test')):
        return
    code_files = {name for name in os.listdir('code')
                  if name.endswith('.sh')}
    test_files = {name for name in os.listdir('test')
                  if not name.startswith('.') and name.endswith('.sh')}
    n.assert_set_equal(test_files, code_files)
def test_tree_all_critical(self):
    """With every vertex of a balanced tree critical, the root alone
    covers all sources — exercising deletion of vertices reachable
    from a critical vertex."""
    D = nx.balanced_tree(2, 5, nx.DiGraph())
    assert_set_equal(source_cover(D, set(D.nodes)), {0})
def test_pack():
    """Round-trip a broad range of value types through pack/unpack."""
    # Numeric scalars of every width, including non-finite floats and
    # negative values cast to unsigned types (relies on numpy's
    # wrap-around casting — TODO confirm on the targeted numpy version).
    for x in (32, -3.7e-2, np.float64(3e31), -np.inf, np.int8(-3),
              np.uint8(-1), np.int16(-33), np.uint16(-33), np.int32(-3),
              np.uint32(-1), np.int64(373), np.uint64(-3)):
        assert_equal(x, unpack(pack(x)), "Scalars don't match!")
    # NaN never compares equal to itself, so check with isnan instead.
    x = np.nan
    assert_true(np.isnan(unpack(pack(x))), "nan scalar did not match!")
    # Arrays of several shapes and dtypes.
    x = np.random.randn(8, 10)
    assert_array_equal(x, unpack(pack(x)), "Arrays do not match!")
    x = np.random.randn(10)
    assert_array_equal(x, unpack(pack(x)), "Arrays do not match!")
    x = np.float32(np.random.randn(3, 4, 5))
    assert_array_equal(x, unpack(pack(x)), "Arrays do not match!")
    x = np.int16(np.random.randn(1, 2, 3))
    assert_array_equal(x, unpack(pack(x)), "Arrays do not match!")
    # None, both bare and inside a container.
    x = None
    assert_true(x is None, "None did not match")
    x = [None]
    assert_list_equal(x, unpack(pack(x)))
    # Dict with mixed key types (str, int, tuple) and mixed value types.
    x = {'name': 'Anonymous', 'age': 15, 99: datetime.now(),
         'range': [110, 190], (11,12): None}
    assert_dict_equal(x, unpack(pack(x)), "Dict do not match!")
    x = uuid.uuid4()
    assert_equal(x, unpack(pack(x)), 'UUID did not match')
    x = Decimal("-112122121.000003000")
    assert_equal(x, unpack(pack(x)), "Decimal did not pack/unpack correctly")
    # Nested heterogeneous containers.
    x = [1, datetime.now(), {1: "one", "two": 2}, (1, 2)]
    assert_list_equal(x, unpack(pack(x)), "List did not pack/unpack correctly")
    x = (1, datetime.now(), {1: "one", "two": 2}, (uuid.uuid4(), 2))
    assert_tuple_equal(x, unpack(pack(x)),
                       "Tuple did not pack/unpack correctly")
    # Date keys/values and a numpy array buried inside a nested dict;
    # compare the array element separately with assert_array_equal.
    x = (1, {datetime.now().date(): "today", "now": datetime.now().date()},
         {"yes!": [1, 2, np.array((3, 4))]})
    y = unpack(pack(x))
    assert_dict_equal(x[1], y[1])
    assert_array_equal(x[2]['yes!'][2], y[2]['yes!'][2])
    x = {'elephant'}
    assert_set_equal(x, unpack(pack(x)), "Set did not pack/unpack correctly")
    # Iterators come back materialized as a tuple.
    x = tuple(range(10))
    assert_tuple_equal(x, unpack(pack(range(10))),
                       "Iterator did not pack/unpack correctly")
    x = Decimal('1.24')
    assert_true(x == unpack(pack(x)),
                "Decimal object did not pack/unpack correctly")
    x = datetime.now()
    assert_true(x == unpack(pack(x)),
                "Datetime object did not pack/unpack correctly")
def test_complex():
    """parse_fields should expand a comma-separated filter into the
    full set of LDAP attributes, including Location's query fields."""
    location_attrs = set(Location.q_fields().values())
    t = TestCase(
        q_filter="codice_ipa,codice_fiscale,location",
        ldap_attributes=(
            {"o", "codiceFiscaleAmm", "objectClass", "description"}
            | location_attrs
        ),
    )
    parsed = set(parse_fields(t.q_filter))
    assert_set_equal(parsed, t.ldap_attributes)
def test_init_without_args():
    """A default Timeline starts at time zero with no events queued."""
    timeline = Timeline()
    assert_equal(0., timeline.time)
    assert_set_equal(set(), timeline.events)
    # With no events, peeking at either "next event" property raises.
    with assert_raises(IndexError):
        timeline.time_of_next_event
    with assert_raises(IndexError):
        timeline.next_event
def test_app_info_with_target(self):
    """fetch_app_token with a target app should issue a token for it."""
    client = self.get_client()
    app_token = client.fetch_app_token('testapp')
    nt.assert_is_not_none(app_token)
    # The token is issued for the target app, on behalf of this app.
    nt.assert_equal(self.appId, app_token.tokenPrincipal.principal)
    nt.assert_equal('testapp', app_token.validity.issuedFor)
    nt.assert_set_equal({'A', 'B', 'C'},
                        app_token.authorizations.formalAuthorizations)
    nt.assert_equal("low", app_token.authorizationLevel)