def get_todays_high_data(output_json_folder):
    # Fetch today's 52-week-high list from BSE and index it by security code.
    data = bse.data_52_week_high()
    security_codes_vs_data = get_security_code_vs_data_dict(data)

    # Look up market cap and company name for every security in parallel,
    # one worker thread per security code.
    security_code_vs_market_cap_and_name = {}
    p = ThreadPool(len(security_codes_vs_data))
    results = p.map(bse.get_market_cap_and_name, security_codes_vs_data.keys())
    p.close()
    p.join()

    security_id_and_codes = []
    for tup in results:
        # Each result starts with the security code, followed by market cap and
        # name; append the security id recorded in the 52-week-high data.
        security_code_vs_market_cap_and_name[tup[0]] = list(tup[1:]) + [security_codes_vs_data[tup[0]][0]]
        security_id_and_codes.append((security_codes_vs_data[tup[0]][0], tup[0]))

    # Sort by market cap (the first element of each value) before writing out.
    sorted_security_code_vs_market_cap_and_name = sorted(
        security_code_vs_market_cap_and_name.items(), key=lambda x: float(x[1][0]))
    json_file = write_output(sorted_security_code_vs_market_cap_and_name, output_json_folder)
    mc.read_json(json_file)  # creates csv
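
# The function above relies on multiprocessing.pool.ThreadPool without showing the
# import. A minimal, self-contained sketch of the same fan-out/collect pattern,
# with a hypothetical fetch_quote() worker standing in for bse.get_market_cap_and_name:
from multiprocessing.pool import ThreadPool


def fetch_quote(security_code):
    # Stand-in for the real lookup; returns (security_code, market_cap, name).
    return (security_code, 100.0 * len(security_code), "COMPANY-" + security_code)


def fetch_all(security_codes):
    pool = ThreadPool(min(len(security_codes), 8))  # cap the thread count
    try:
        return pool.map(fetch_quote, security_codes)
    finally:
        pool.close()
        pool.join()
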
#standard
import glob
import os
import subprocess
import sys
#from multiprocessing import Pool as ProcessPool
#locals
import mc

def get_data(file):
    # Run mc.py on the given JSON file in a separate Python 2.7 process;
    # the interpreter, script and argument must be separate list items.
    subprocess.call(["D:/Python27/python.exe", "./mc.py", file])
    print(file)

if __name__ == "__main__":
    if len(sys.argv) != 2:
        print("Usage: bulk_json_processing.py <dir>")
        sys.exit(-1)
    folder_path = sys.argv[1].replace('\\', '/')
    # Expand the *.json pattern in-process; shelling out to "ls" with a list
    # argument would pass the pattern through unexpanded.
    for file in glob.glob(os.path.join(folder_path, "*.json")):
        mc.read_json(file)
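
# The commented-out "Pool as ProcessPool" import and the otherwise unused
# get_data() helper above suggest the JSON files were meant to be handled in
# parallel. A minimal sketch of that variant, assuming get_data() stays the
# per-file worker (the pool size of 4 is an arbitrary choice):
def process_folder_in_parallel(folder_path):
    from multiprocessing import Pool as ProcessPool
    pool = ProcessPool(4)
    try:
        pool.map(get_data, glob.glob(os.path.join(folder_path, "*.json")))
    finally:
        pool.close()
        pool.join()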
def combine_and_process_json(json_daily_folder, json_consolidated_folder):
    # Merge the per-day company JSON dumps into one consolidated file,
    # then hand it to mc.read_json to produce the CSV.
    combined_company = combine_company_json(json_daily_folder)
    json_file = process_combine(combined_company, json_consolidated_folder)
    mc.read_json(json_file)  # creates csv
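
# A hypothetical driver tying the entry points from these examples together;
# the folder paths below are made up for illustration:
if __name__ == "__main__":
    daily_folder = "data/json_daily"
    consolidated_folder = "data/json_consolidated"
    get_todays_high_data(daily_folder)                            # today's 52-week highs
    combine_and_process_json(daily_folder, consolidated_folder)   # merged history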