def bloomberg_data_query(response_key, prepared_entries, use_terminal):
    """Fetch reference data from Bloomberg for the prepared tickers, deduplicate
    the resulting securities against existing ones, then run history queries.

    Progress (0.0 -> 0.5 -> 1.0) and intermediate results are published to the
    cache under keys derived from ``response_key`` so a polling view can track
    this background job.

    :param response_key: unique key used to publish progress/results in the cache
    :param prepared_entries: tickers already formatted via ``to_bloomberg_code``
    :param use_terminal: True to query the Bloomberg Terminal, False otherwise
    """
    cache.set(response_key, 0.0)
    all_fields = BloombergDataContainerMapping.objects.all().values_list('short_name__code', flat=True)
    response = BloombergTasks.send_bloomberg_get_data(prepared_entries, ticker_type='TICKER', use_terminal=use_terminal, fields=all_fields)
    cache.set('data_' + response_key, response)
    cache.set('type_' + response_key, 'securities')
    cache.set(response_key, 0.5)
    securities, final_tickers, errors = populate_security_from_bloomberg_protobuf(response)
    result = []
    new_securities_count = 0
    for security in securities.keys():
        candidate = securities[security]
        with_isin = []
        with_bloomberg = []
        # Existing securities sharing the candidate's ISIN alias, oldest first.
        isin_field = candidate.aliases.filter(alias_type__name='ISIN')
        isin_code = None
        if isin_field.exists():
            isin_code = isin_field[0].alias_value
            with_isin = sorted(candidate.__class__.objects.filter(aliases__alias_type__name='ISIN', aliases__alias_value=isin_code), key=lambda x: x.id)
        # Existing securities sharing the candidate's BLOOMBERG alias, oldest first.
        bloomberg_field = candidate.aliases.filter(alias_type__name='BLOOMBERG')
        bloomberg_code = None
        if bloomberg_field.exists():
            bloomberg_code = bloomberg_field[0].alias_value
            with_bloomberg = sorted(candidate.__class__.objects.filter(aliases__alias_type__name='BLOOMBERG', aliases__alias_value=bloomberg_code), key=lambda x: x.id)
        if len(with_bloomberg) > 1:
            # Duplicate by Bloomberg code: drop the freshly created security
            # and keep the oldest existing one (lowest id).
            candidate.delete()
            result.append(with_bloomberg[0])
        elif len(with_isin) > 1 and (bloomberg_code is None or bloomberg_code == isin_code):
            # Duplicate by ISIN with no distinct Bloomberg code: same policy.
            candidate.delete()
            result.append(with_isin[0])
        else:
            new_securities_count += 1
            result.append(candidate)
    cache.set('securities_' + response_key, result)
    cache.set('errors_' + response_key, errors)
    cache.set(response_key, 1.0)
    # Getting all kind of containers: group retained securities by container
    # type, keyed by BLOOMBERG alias with a fallback on the ISIN alias.
    all_containers = {}
    for security in result:
        tickers = all_containers.setdefault(security.type.identifier, [])
        try:
            tickers.append(security.aliases.get(alias_type__name='BLOOMBERG').alias_value)
        except Exception:  # no BLOOMBERG alias available: fall back to ISIN
            tickers.append(security.aliases.get(alias_type__name='ISIN').alias_value)
    for key in all_containers.keys():
        fields = BloombergTrackContainerMapping.objects.filter(Q(container__identifier='CONT_SECURITY') | Q(container__identifier=key), Q(active=True)).values_list('short_name__code', flat=True)
        all_containers[key] = [to_bloomberg_code(ticker, use_terminal) for ticker in all_containers[key]]
        history_key = uuid.uuid4().hex
        # NOTE(review): the thread is joined immediately, so history queries
        # run sequentially; the Thread presumably only serves to carry
        # ``history_key`` as the thread name — confirm before simplifying.
        bb_thread = threading.Thread(None, bloomberg_history_query, history_key, (history_key, all_containers[key], fields, True))
        bb_thread.start()
        bb_thread.join()
def bloomberg_wizard_execute(request, entity):
    """Entry point of the Bloomberg import wizard: parse the posted ticker
    list, launch the data query in the background and render a waiting page.

    :param request: Django HTTP request; expects POST field 'bloombergList'
        ('\\r'-separated tickers) and optionally 'bloombergSource'
    :param entity: entity namespace used to pick the template; currently only
        'financials' actually triggers a background query
    """
    # TODO: Check user
    # TODO: Check Bloomberg method
    entries = [str(entry).strip() for entry in request.POST['bloombergList'].split('\r')]
    try:
        # 'True' selects the Bloomberg Terminal; anything else (or the field
        # being absent) selects the non-terminal source.
        use_terminal = request.POST['bloombergSource'] == 'True'
    except KeyError:  # Django's MultiValueDictKeyError subclasses KeyError
        use_terminal = False
    prepared_entries = [to_bloomberg_code(entry, use_terminal) for entry in entries]
    response_key = uuid.uuid4().hex
    # TODO: Implement CONSTANTS and dynamic choice
    if entity == 'financials':
        # Fire-and-forget: the waiting page polls progress via response_key.
        bb_thread = threading.Thread(None, bloomberg_data_query, response_key, (response_key, prepared_entries, use_terminal))
        bb_thread.start()
    context = {'response_key': response_key}
    return render(request, entity + '/bloomberg/wizard_waiting.html', context)
def bloomberg_update(request):
    """Collect every Bloomberg-sourced track that is out of date and launch a
    background bulk update query.

    When a ``universe_id`` GET parameter is supplied (and the universe is
    public or owned by the requesting user), only tracks of that universe's
    members are considered; otherwise all Bloomberg tracks are checked.
    """
    # TODO: Check user
    # TODO: Check Bloomberg method (Terminal/DL)
    bloomberg_company = CompanyContainer.objects.get(name='Bloomberg LP')
    # Maps a track type name to the Bloomberg field mnemonic used to update it.
    bloomberg_fields = {entry[0]: entry[1] for entry in BloombergTrackContainerMapping.objects.values_list('name__name', 'short_name__name')}
    try:
        universe_id = request.GET['universe_id']
    except KeyError:
        universe_id = None
    if universe_id is not None:
        universe = Universe.objects.filter(Q(id=universe_id), Q(public=True) | Q(owner__id=request.user.id))
        if universe.exists():
            universe = universe[0]
            all_tracks = TrackContainer.objects.filter(effective_container_id__in=universe.members.all().values_list('id', flat=True), source__id=bloomberg_company.id).order_by('end_date')
        else:
            # BUGFIX: all_tracks was previously left unbound on this path,
            # raising NameError below. An unknown or forbidden universe now
            # simply yields no work.
            all_tracks = TrackContainer.objects.none()
    else:
        all_tracks = TrackContainer.objects.filter(source__id=bloomberg_company.id).order_by('end_date')
    # bulk_information[field][first_missing_day] -> Bloomberg codes to refresh
    bulk_information = {}
    count = 0
    # Loop-invariant: midnight today, the cutoff for "up to date" tracks.
    today_start = datetime.datetime.combine(datetime.date.today(), datetime.datetime.min.time())
    for track in all_tracks:
        if track.end_date is None or dates.AddDay(track.end_date, 1) < today_start:
            # Group by the first missing day ('None' when the track is empty).
            if track.end_date is not None:
                key = dates.AddDay(track.end_date, 1)
            else:
                key = 'None'
            if track.type.name in bloomberg_fields:
                track_field = bloomberg_fields[track.type.name]
                codes = bulk_information.setdefault(track_field, {}).setdefault(key, [])
                try:
                    bb_code = track.effective_container.aliases.get(alias_type__name='BLOOMBERG').alias_value
                    codes.append(to_bloomberg_code(bb_code, True))
                    count += 1
                except Exception:  # container has no BLOOMBERG alias
                    #traceback.print_exc()
                    LOGGER.error("No associated BLOOMBERG information for " + str(track.effective_container.name))
    if count > 0:
        history_key = uuid.uuid4().hex
        update_thread = threading.Thread(None, bloomberg_update_query, history_key, (history_key, bulk_information, True))
        update_thread.start()
    # TODO: Return error message
    return redirect('universes.html')
def import_external_data(data_source, data_type):
    """Import entities of ``data_type`` (e.g. 'securities') from the external
    ``data_source`` SQL database into MongoDB and, for securities, reconcile
    them with the Django ``SecurityContainer`` instances, then launch
    Bloomberg history queries for the whole universe.

    Source rows are read with the query configured in
    ``QUERIES[data_source][data_type]``. Each row is mirrored into the Mongo
    collection ``<data_source>.<data_type>`` (the collection is dropped
    first), and for securities each row is matched to an existing container
    via its external alias, then via its BLOOMBERG alias, before a new
    container is created as a last resort.
    """
    LOGGER.info('Loading working data')
    query = QUERIES[data_source][data_type]['query']
    group_by = QUERIES[data_source][data_type]['group_by']
    external_alias = Attributes.objects.get(identifier='ALIAS_' + data_source.upper())
    if data_type=='securities':
        # Preload the attributes needed to classify each imported security.
        bloomberg_alias = Attributes.objects.get(identifier='ALIAS_BLOOMBERG')
        isin_alias = Attributes.objects.get(identifier='ALIAS_ISIN')
        sec_container_type = Attributes.objects.get(identifier='CONT_SECURITY')
        fund_container_type = Attributes.objects.get(identifier='CONT_FUND')
        bond_container_type = Attributes.objects.get(identifier='CONT_BOND')
        # NOTE(review): stocks share the generic CONT_SECURITY container even
        # though a distinct SECTYP_STOCK type exists — confirm intentional.
        stock_container_type = Attributes.objects.get(identifier='CONT_SECURITY')
        security_type = Attributes.objects.get(identifier='SECTYP_SECURITY')
        fund_type = Attributes.objects.get(identifier='SECTYP_FUND')
        bond_type = Attributes.objects.get(identifier='SECTYP_BOND')
        stock_type = Attributes.objects.get(identifier='SECTYP_STOCK')
        daily = Attributes.objects.get(identifier='FREQ_DAILY', active=True)
        bloomberg_provider = get_bloomberg_provider()
    universe = get_universe_from_datasource(data_source)
    # Drop previously imported aliases for this source so they get re-created
    # from the fresh data below.
    LOGGER.info('Cleaning already imported aliases for ' + data_source)
    securities = SecurityContainer.objects.filter(aliases__alias_type__short_name=data_source.upper()).distinct()
    for security in securities:
        for alias in security.aliases.all():
            if alias.alias_type.short_name==data_source.upper():
                security.aliases.remove(alias)
        security.save()
    LOGGER.info('\tCleaning done')
    LOGGER.info('Importing ' + str(data_type) + ' from ' + str(data_source))
    LOGGER.info('Using query:' + str(query))
    results = dbutils.query_to_dicts(query, data_source)
    database = getattr(client, data_source)
    database[data_type].drop()
    all_tickers = []
    for result in results:
        # Clean the data
        new_entry = convert_to_mongo(result)
        for group_id in group_by:
            if new_entry[group_id] is not None and new_entry[group_id]!='':
                # LOADING INTO MONGO
                LOGGER.info('Adding entry [' + group_id + '=' + str(new_entry[group_id]) + "]")
                new_entry['_id'] = new_entry[group_id]
                if 'joins' in QUERIES[data_source][data_type]:
                    # Each join pulls detail rows from a parameterized
                    # secondary query and embeds them in the Mongo document.
                    for join_info in QUERIES[data_source][data_type]['joins']:
                        values = [new_entry[identifier] for identifier in join_info['on']]
                        underlying_query = join_info['query']
                        for value in values:
                            underlying_query = underlying_query%value
                        new_entry[join_info['name']] = []
                        under_results = dbutils.query_to_dicts(underlying_query, data_source)
                        for under_result in under_results:
                            LOGGER.info('\tAdding underlying [' + join_info['name'] + ', ' + group_id + '=' + str(new_entry[group_id]) + "]")
                            # Clean the data
                            under_entry = convert_to_mongo(under_result)
                            new_entry[join_info['name']].append(under_entry)
                try:
                    database[data_type].save(new_entry)
                except DuplicateKeyError:
                    LOGGER.error("The following entry already exists:")
                # CONVERTING TO FINALE
                if group_id==QUERIES[data_source][data_type]['EXTERNAL'] and data_type=='securities':
                    name_field = QUERIES[data_source][data_type]['name']
                    short_name_field = QUERIES[data_source][data_type]['short_name']
                    bloomberg_ticker = result[QUERIES[data_source][data_type]['BLOOMBERG']]
                    isin_code = result[QUERIES[data_source][data_type]['ISIN']]
                    currency = result[QUERIES[data_source][data_type]['currency']]
                    currency = Attributes.objects.filter(type='currency', short_name=currency)
                    if currency.exists():
                        currency = currency[0]
                    else:
                        currency = None
                    # Match first on the external alias, then on the Bloomberg
                    # ticker (in which case the external alias is appended).
                    security = SecurityContainer.objects.filter(aliases__alias_type__short_name=data_source.upper(), aliases__alias_value=result[group_id])
                    external_append = False
                    additional = ''
                    if not security.exists():
                        security = SecurityContainer.objects.filter(aliases__alias_type__id=bloomberg_alias.id, aliases__alias_value=bloomberg_ticker)
                        additional = result[name_field]
                        external_append = True
                    # Classify: fund, bond, stock, or generic security.
                    if is_fund(data_source, result):
                        container = fund_container_type
                        stype = fund_type
                    elif is_bond(data_source, result):
                        container = bond_container_type
                        stype = bond_type
                    elif is_stock(data_source, result):
                        container = stock_container_type
                        stype = stock_type
                    else:
                        container = sec_container_type
                        stype = security_type
                    if not security.exists():
                        LOGGER.info("Creating security with " + data_source + " id [" + str(result[group_id]) + "]")
                        security = SecurityContainer()
                        security.name = result[name_field]
                        security.short_name = result[short_name_field] if result[short_name_field] is not None and result[short_name_field]!='' else result[name_field]
                        security.save()
                        security.associated_companies.add(bloomberg_provider)
                        security.save()
                    else:
                        LOGGER.info("Security with " + data_source + " id [" + str(result[group_id]) + "] already exists.")
                        security = security[0]
                        if not security.associated_companies.filter(role__identifier='SCR_DP').exists():
                            security.associated_companies.add(bloomberg_provider)
                            security.save()
                    security.update_alias(external_alias, result[group_id], additional, external_append)
                    security.currency = currency
                    security.frequency = daily
                    security.type = container
                    security.security_type = stype
                    security.save()
                    if bloomberg_ticker is not None and bloomberg_ticker!='':
                        all_tickers.append(bloomberg_ticker)
                    elif isin_code is not None and isin_code!='':
                        all_tickers.append(isin_code)
                    if bloomberg_ticker is not None and bloomberg_ticker!='':
                        security.update_alias(bloomberg_alias, bloomberg_ticker)
                    if isin_code is not None and isin_code!='':
                        security.update_alias(isin_alias, isin_code)
                    universe.members.add(security)
    if data_type=='securities':
        universe.save()
        # Group universe members by container type, keyed by BLOOMBERG alias
        # with a fallback on the ISIN alias, then query history per group.
        all_containers = {}
        for member in universe.members.all():
            member = get_effective_instance(member)
            tickers = all_containers.setdefault(member.type.identifier, [])
            try:
                tickers.append(member.aliases.get(alias_type__name='BLOOMBERG').alias_value)
            except Exception:  # no BLOOMBERG alias: try the ISIN alias
                try:
                    tickers.append(member.aliases.get(alias_type__name='ISIN').alias_value)
                except Exception:
                    LOGGER.info("There is no Bloomberg nor ISIN code available for this security [" + member.name + "]")
        for key in all_containers.keys():
            fields = BloombergTrackContainerMapping.objects.filter(Q(container__identifier='CONT_SECURITY') | Q(container__identifier=key), Q(active=True)).values_list('short_name__code', flat=True)
            all_containers[key] = [to_bloomberg_code(ticker,True) for ticker in all_containers[key]]
            history_key = uuid.uuid4().hex
            # NOTE(review): started then joined immediately — sequential by
            # design? The Thread seems only to name the worker history_key.
            bb_thread = threading.Thread(None, bloomberg_history_query, history_key, (history_key, all_containers[key], fields, True))
            bb_thread.start()
            bb_thread.join()