def storeFacts(results, table_name):
    """Persist a table of values as Fact entities grouped row-by-row.

    Args:
        results: Two-item sequence ``[rows, header]`` where ``rows`` is an
            iterable of row value sequences and ``header`` holds one column
            name per value position.
        table_name: Unused here; kept for interface compatibility with
            existing callers.
    """
    rows, header = results
    for row in rows:
        # Save the group entity first so its datastore key exists before
        # the Fact entities that reference it are created.
        corresponding_facts = CorrespondingFacts()
        corresponding_facts.put()
        fact_keys = []
        # Pair each cell with its column name directly instead of
        # maintaining a manual index counter.
        for col_name, value in zip(header, row):
            fact = Fact(value=value,
                        name=col_name,
                        corresponding_facts=corresponding_facts.key)
            fact.put()
            fact_keys.append(fact.key)
        # Second put records the now-known child keys on the group.
        corresponding_facts.facts = fact_keys
        corresponding_facts.names = header
        corresponding_facts.put()
def process_csv(blob_info):
    """Load an uploaded CSV blob into Fact / CorrespondingFacts entities.

    Column names are namespaced with the file's base name (e.g. column
    "Amount" of "Sales.csv" becomes "Sales.Amount").  For "Sales.csv"
    the fifth column (index 4) of each row supplies the row's date;
    every other file falls back to a fixed default date of 1/1/14.

    Args:
        blob_info: A blobstore BlobInfo whose content is a CSV file with
            a header row.
    """
    blob_reader = blobstore.BlobReader(blob_info.key())
    reader = csv.reader(blob_reader, delimiter=',')
    csv_file_name = blob_info.filename
    # Take everything before the first dot; the original two-way unpack
    # raised ValueError on filenames with more (or fewer) than one dot.
    prefix = csv_file_name.split(".")[0]
    # next(reader) works on both Python 2 and 3 (reader.next() is 2-only).
    header = [prefix + "." + col for col in next(reader)]
    # Loop-invariant: the fallback date never changes, so parse it once
    # instead of once per cell.
    default_date = datetime.datetime.strptime("1/1/14", '%m/%d/%y').date()
    is_sales = csv_file_name == "Sales.csv"
    for row in reader:
        # Save the group entity first so its key exists for the Fact
        # references below.
        corresponding_facts = CorrespondingFacts()
        corresponding_facts.put()
        # The date is constant across the whole row — parse it once per
        # row, not once per column as the original did.
        if is_sales:
            date = datetime.datetime.strptime(row[4], '%m/%d/%y').date()
        else:
            date = default_date
        fact_keys = []
        for col_name, value in zip(header, row):
            # Fixed: original appended a literal "/n" (typo for "\n");
            # logging supplies its own line separator.
            logging.info(value)
            fact = Fact(value=value,
                        name=col_name,
                        corresponding_facts=corresponding_facts.key,
                        date=date)
            fact.put()
            fact_keys.append(fact.key)
        # Second put records the child keys and column names on the group.
        corresponding_facts.facts = fact_keys
        corresponding_facts.names = header
        corresponding_facts.put()