def test_installer_simple():
    """End-to-end installer check against the SIMPLE config.

    Runs the installer, then verifies three artifacts it produces:
    the CSV metadata file, the Solr index, and the ``documents`` /
    ``filters`` database tables.
    """
    silentremove("/tmp/simple.meta")
    assert main([SIMPLE, "/tmp/simple.meta"]) == 0

    # --- CSV metadata: 8 tables total, exactly one 'Products' entry
    # whose column list includes both a system and a business column.
    table_count = 0
    products_hits = 0
    with open("/tmp/simple.meta") as metadata:
        for record in csv.reader(metadata):
            table_count += 1
            if record[1] == 'Products':
                products_hits += 1
                assert "xmin" in record[2]
                assert "productDescription" in record[2]
    assert table_count == 8
    assert products_hits == 1

    # --- Solr: all 8 documents indexed and returned.
    solr_result = Solr().list()
    assert solr_result["hits"] == 8
    assert len(solr_result["docs"]) == 8

    # --- Database: documents table holds one row per table, in id order.
    db = Database()
    db.connect()
    cursor = db.cursor()
    cursor.execute("SELECT * FROM documents ORDER BY ID")
    doc_rows = cursor.fetchall()
    assert len(doc_rows) == 8
    expected_ids = [
        "ClassicModels.public.OrderDetails",
        "ClassicModels.public.Offices",
        "ClassicModels.public.Payments",
        "ClassicModels.public.ProductLines",
        "ClassicModels.public.Customers",
        "ClassicModels.public.Orders",
        "ClassicModels.public.Employees",
        "ClassicModels.public.Products",
    ]
    for actual, expected in zip(doc_rows, expected_ids):
        assert actual["universal_id"] == expected

    # --- Filters: datasource + schema, with the schema parented
    # to the datasource.
    cursor.execute("SELECT * FROM filters ORDER BY ID")
    filter_rows = cursor.fetchall()
    assert len(filter_rows) == 2
    assert filter_rows[0]["name"] == "ClassicModels"
    assert filter_rows[1]["name"] == "public"
    assert filter_rows[0]["id"] == filter_rows[1]["parent_id"]
def main(args):
    """Install a datasource: extract raw metadata, load it into the
    database, then mirror the database documents into Solr.

    Args:
        args: two-element list ``[ds_config, meta_data_file]`` — the
            datasource configuration path and the output metadata CSV path.

    Returns:
        0 on success, 1 on any argument/precondition error.
    """
    # Guard clauses: validate arguments up front and fail loudly.
    # (The original silently `pass`ed on a bad argument count and fell
    # through returning None; now we report usage and return 1.)
    if len(args) != 2:
        print("usage: main(<ds_config> <meta_data_file>)", file=sys.stderr)
        return 1
    ds_config, meta_data_file = args
    if not os.path.exists(ds_config):
        print("%s does not exists" % ds_config, file=sys.stderr)
        return 1
    if os.path.exists(meta_data_file):
        print("%s already exists, please remove it first" % meta_data_file,
              file=sys.stderr)
        return 1
    if not os.path.exists(os.path.dirname(meta_data_file)):
        print("dir %s does not exists, please create it first"
              % os.path.dirname(meta_data_file), file=sys.stderr)
        return 1

    # Phase 1: dump raw metadata from every configured datasource handle
    # into the CSV file (appending so all handles share one file).
    sc = SearchConfiguration(ds_config)
    sc.get_search_config()
    sc.parse()
    for handle in sc.handles:
        handle.connect()
        handle.copy_raw_meta_data(meta_data_file, append=True)
        handle.close()

    # Phase 2: load the CSV metadata into the documents table.
    # Rows may carry an optional 4th column (keywords/extra data).
    doc = Document()
    doc.clear()
    with open(meta_data_file) as metadata:
        for row in csv.reader(metadata):
            if len(row) > 3:
                doc.create(row[0], row[1], row[2], row[3])
            else:
                doc.create(row[0], row[1], row[2])
    doc.update_filters()
    doc.close()

    # Phase 3: rebuild the Solr index from the documents table
    # (full wipe then bulk add).
    s = Solr()
    db = Database()
    db.connect()
    cursor = db.cursor()
    try:
        cursor.execute("SELECT * FROM documents")
        datalist = [
            {
                "universal_id_s": row["universal_id"],
                "title_s": row["title"],
                "all_txt_ng": row["keywords"],
                "path_s": row["path"],
            }
            for row in cursor.fetchall()
        ]
    finally:
        cursor.close()
        db.close()
    s.solr().delete(q="*:*")
    s.solr().add(datalist)
    return 0