# NOTE(review): whitespace-collapsed fragment — the enclosing function
# (apparently get_balance_sheet) starts before this chunk, so the call whose
# argument list opens this line is cut off mid-expression and the loop nesting
# cannot be recovered from here.
# Visible logic: fetch per-division "balance-sheet" data for a company; when
# data comes back, write it to <path>/balance_sheet/<sector>/<company>_<division>.csv
# via csv_writer, otherwise break out of the division loop; the function then
# returns True.
# The trailing __main__ block loads mappings via validate_and_get_mappings(),
# sets up a logger at the mapping path, runs get_balance_sheet(sectorwise_com,
# company_code_mapping, path, logger), and logs success.
each_com.replace(" ", ""), company_code_mapping.get(each_com), "balance-sheet", each_division) if each_division_table_data: csv_writer( os.path.join( path, "balance_sheet", each_sector, each_com + "_" + str(each_division) + ".csv"), each_division_table_data) else: break return True if __name__ == '__main__': mapping_path, mapping_obj = validate_and_get_mappings() if mapping_path: logger_obj = setup_logger(mapping_path) logger_obj.debug("Got the sector mapping and company code mapping") is_success = get_balance_sheet(mapping_obj["sectorwise_com"], mapping_obj["company_code_mapping"], mapping_path, logger_obj) if is_success: logger_obj.debug("Completed the job successfully")
# NOTE(review): whitespace-collapsed fragment — the try/while prompting the
# user for metric_num begins before this chunk. Visible logic: reject a
# non-numeric entry, accept 1..7 (anything else exits), map the choice to a
# metric name, ensure <mapping_path>/Economic_times/<metric> exists, then fetch
# Economic Times data per company and write one CSV each.
# BUG(review): each_company.rstrip(" Ltd.") strips any trailing run of the
# CHARACTERS {' ', 'L', 't', 'd', '.'}, not the literal suffix " Ltd." — e.g.
# "SpiceJet Ltd." becomes "SpiceJe". Use str.removesuffix(" Ltd.") (3.9+) or an
# endswith() check with slicing instead.
except ValueError: print("Please provide a valid number between 1 and 7" ) # TODO : Fix not to print when entered NO else: if 1 <= metric_num <= 7: break else: print("Exiting") sys.exit(0) metric = list(metrics)[metric_num - 1] # Just to make sure that complete data resides in a single directory and for combined logs mapping_path, _ = validate_and_get_mappings() logger = setup_logger(mapping_path) mappings = get_economic_times_mappings() if not os.path.exists(os.path.join(mapping_path, "Economic_times", metric)): os.makedirs(os.path.join(mapping_path, "Economic_times", metric)) for each_company, each_company_code in mappings.items(): logger.debug("Getting the Economic Times data for " + each_company) data = get_data_economic_times(each_company_code, metrics[metric]) if data: csv_writer( os.path.join(mapping_path, "Economic_times", metric, each_company.rstrip(" Ltd.") + ".csv"), data)
# NOTE(review): whitespace-collapsed tail of a mapping builder (apparently
# get_mappings); the sector loop and the source of company_code_parser start
# before this chunk. Visible logic: for each parsed anchor element, record
# company-name -> code (last path segment of its href) and append the company
# to its sector's list; then dump sector_key_map, company_code_mapping and
# sectorwise_com as pretty-printed JSON files under `path` and return True.
# The __main__ block wires get_path(), setup_logger() and get_mappings().
# NOTE(review): simplejson.dumps(..., indent=4 * ' ') uses a string indent —
# equivalent to indent=4 here; stdlib json would work the same way.
sectorwise_com[eachSector] = [] for eachElement in company_code_parser: company_code_mapping[eachElement.text] = eachElement.get('href').split("/")[-1] sectorwise_com[eachSector].append(eachElement.text) logger.debug("Writing the meta to json file") with open(os.path.join(path, "sector_key_map.json"), 'w') as fl: fl.write(simplejson.dumps(sector_key_map, sort_keys=True, indent=4 * ' ')) with open(os.path.join(path, "company_code_mapping.json"), 'w') as fl: fl.write(simplejson.dumps(company_code_mapping, sort_keys=True, indent=4 * ' ')) with open(os.path.join(path, "sectorwise_com.json"), 'w') as fl: fl.write(simplejson.dumps(sectorwise_com, sort_keys=True, indent=4 * ' ')) return True if __name__ == '__main__': obj_path = get_path() if obj_path: logger_obj = setup_logger(obj_path) is_success = get_mappings(obj_path, logger_obj) if is_success: logger_obj.debug("*********************** Completed ******************************")
# NOTE(review): whitespace-collapsed tail of an index downloader; the function
# enclosing the readline loop (and the origin of data_file / eachValue /
# download_path) starts before this chunk. Visible logic: stream data_file line
# by line, appending each line (binary mode 'ab') to index/<eachValue>.csv,
# then return True. The __main__ block creates <path>/index if missing and
# dispatches get_international_index / get_indian_index per index type.
# PERF(review): the output file is reopened in append mode once PER LINE —
# hoist the `with open(...)` outside the while loop (or use
# shutil.copyfileobj(data_file, file)) once it is confirmed no other writer
# interleaves with this loop.
line = data_file.readline() while line: with open(os.path.join(download_path, "index", eachValue + '.csv'), 'ab') as file: file.write(line) line = data_file.readline() return True if __name__ == "__main__": path, _ = validate_and_get_mappings() if path: logger = setup_logger(path) if not os.path.exists(os.path.join(path, "index")): os.makedirs(os.path.join(path, "index")) for each_type in indices.keys(): if each_type == "International": logger.debug( "****************** Getting the international indices *********************" ) _ = get_international_index(indices["International"], path) elif each_type == "Indian": logger.debug( "****************** Getting the Indian indices ****************************" ) _ = get_indian_index(indices["Indian"], path)
# NOTE(review): whitespace-collapsed tail of a per-company stock-data fetch
# loop (apparently get_stock_data); the try/if whose branches appear here start
# before this chunk, so the exact nesting is unrecoverable from this view.
# BUG(review): `logger.debug()` is called with no message — logging.Logger.debug
# requires a msg argument, so that branch raises TypeError, which the bare
# `except:` below then silently swallows as a generic "Could not get the data".
# SMELL(review): the bare `except:` catches everything (including SystemExit /
# KeyboardInterrupt) and only logs a one-liner — narrow the exception type and
# use logger.exception(...) so the traceback is preserved.
# NOTE(review): write_file is closed manually on this path — a `with` block in
# the (unseen) opening code would guarantee closure on the exception path too.
write_file.close() continue else: logger.debug() except: logger.debug( "!!!!!!!!!!!!!!!!!! Could not get the data for " + each_com) return True if __name__ == '__main__': mapping_path, mapping_obj = validate_and_get_mappings() if mapping_path: logger_obj = setup_logger(os.path.join(mapping_path, 'historical_data')) is_success = get_stock_data(mapping_obj["sectorwise_com"], mapping_obj["company_code_mapping"], mapping_path, logger_obj) if is_success: logger_obj.debug("Completed the job successfully") else: print("Everything doesn't seems to be fine")