def import_external_data(data_source, data_type):
    """Import entities of *data_type* from the external *data_source*.

    Runs the configured SQL query (from QUERIES[data_source][data_type]),
    stages each row into the MongoDB collection ``client.<data_source>.<data_type>``
    (dropping it first), and — when ``data_type == 'securities'`` — creates or
    updates Django ``SecurityContainer`` rows, attaches aliases (external id,
    Bloomberg ticker, ISIN), adds them to the data source's universe, and
    finally spawns one Bloomberg history request per container type.

    :param data_source: key into QUERIES; also names the Mongo database and
                        the alias type (``'ALIAS_' + data_source.upper()``).
    :param data_type:   key into QUERIES[data_source], e.g. ``'securities'``.
    """
    LOGGER.info('Loading working data')
    query = QUERIES[data_source][data_type]['query']
    group_by = QUERIES[data_source][data_type]['group_by']
    external_alias = Attributes.objects.get(identifier='ALIAS_' + data_source.upper())
    if data_type=='securities':
        # Lookup attributes needed only for the securities flow.
        # NOTE(review): these names (daily, bloomberg_alias, container/type
        # attributes) are referenced later only inside data_type=='securities'
        # branches — they are unbound for other data types, which is fragile
        # but consistent as long as those branches stay guarded.
        bloomberg_alias = Attributes.objects.get(identifier='ALIAS_BLOOMBERG')
        isin_alias = Attributes.objects.get(identifier='ALIAS_ISIN')
        sec_container_type = Attributes.objects.get(identifier='CONT_SECURITY')
        fund_container_type = Attributes.objects.get(identifier='CONT_FUND')
        bond_container_type = Attributes.objects.get(identifier='CONT_BOND')
        # NOTE(review): stock container deliberately(?) reuses CONT_SECURITY,
        # not a CONT_STOCK identifier — confirm this is intended.
        stock_container_type = Attributes.objects.get(identifier='CONT_SECURITY')
        security_type = Attributes.objects.get(identifier='SECTYP_SECURITY')
        fund_type = Attributes.objects.get(identifier='SECTYP_FUND')
        bond_type = Attributes.objects.get(identifier='SECTYP_BOND')
        stock_type = Attributes.objects.get(identifier='SECTYP_STOCK')
        daily = Attributes.objects.get(identifier='FREQ_DAILY', active=True)
        bloomberg_provider = get_bloomberg_provider()
    universe = get_universe_from_datasource(data_source)
    # Remove any alias of this data source from existing securities so the
    # import below can re-attach them from scratch (idempotent re-import).
    LOGGER.info('Cleaning already imported aliases for ' + data_source)
    securities = SecurityContainer.objects.filter(aliases__alias_type__short_name=data_source.upper()).distinct()
    for security in securities:
        for alias in security.aliases.all():
            if alias.alias_type.short_name==data_source.upper():
                security.aliases.remove(alias)
        security.save()
    LOGGER.info('\tCleaning done')
    LOGGER.info('Importing ' + str(data_type) + ' from ' + str(data_source))
    LOGGER.info('Using query:' + str(query))
    results = dbutils.query_to_dicts(query, data_source)
    # Staging area: one Mongo collection per data type, rebuilt on each run.
    database = getattr(client, data_source)
    database[data_type].drop()
    all_tickers = []
    for result in results:
        # Clean the data
        new_entry = convert_to_mongo(result)
        # LOGGER.info('Removing entries with name ' + new_entry[QUERIES[data_source][data_type]['name']])
        # SecurityContainer.objects.filter(name=new_entry[QUERIES[data_source][data_type]['name']]).delete()
        for group_id in group_by:
            if new_entry[group_id]!=None and new_entry[group_id]!='':
                # LOADING INTO MONGO
                LOGGER.info('Adding entry [' + group_id + '=' + str(new_entry[group_id]) + "]")
                # Use the grouping value as the Mongo primary key.
                new_entry['_id'] = new_entry[group_id]
                if QUERIES[data_source][data_type].has_key('joins'):
                    # Resolve configured 1-N joins: run the sub-query with the
                    # parent's key values substituted in (one %-substitution
                    # per 'on' identifier) and embed the rows in the document.
                    for join_info in QUERIES[data_source][data_type]['joins']:
                        values = [new_entry[identifier] for identifier in join_info['on']]
                        underlying_query = join_info['query']
                        for value in values:
                            underlying_query = underlying_query%value
                        new_entry[join_info['name']] = []
                        under_results = dbutils.query_to_dicts(underlying_query, data_source)
                        for under_result in under_results:
                            LOGGER.info('\tAdding underlying [' + join_info['name'] + ', ' + group_id + '=' + str(new_entry[group_id]) + "]")
                            # Clean the data
                            under_entry = convert_to_mongo(under_result)
                            new_entry[join_info['name']].append(under_entry)
                try:
                    database[data_type].save(new_entry)
                except DuplicateKeyError:
                    # NOTE(review): the offending entry is never logged — the
                    # message is truncated; consider logging new_entry['_id'].
                    LOGGER.error("The following entry already exists:")
                # CONVERTING TO FINALE
                # Only the row keyed by the EXTERNAL identifier drives the
                # Django-side security creation/update.
                if group_id==QUERIES[data_source][data_type]['EXTERNAL'] and data_type=='securities':
                    name_field = QUERIES[data_source][data_type]['name']
                    short_name_field = QUERIES[data_source][data_type]['short_name']
                    bloomberg_ticker = result[QUERIES[data_source][data_type]['BLOOMBERG']]
                    isin_code = result[QUERIES[data_source][data_type]['ISIN']]
                    currency = result[QUERIES[data_source][data_type]['currency']]
                    # Resolve the currency code to an Attributes row; None if unknown.
                    currency = Attributes.objects.filter(type='currency', short_name=currency)
                    if currency.exists():
                        currency = currency[0]
                    else:
                        currency = None
                    # First try to find the security by its external alias,
                    # then fall back to the Bloomberg ticker alias.
                    security = SecurityContainer.objects.filter(aliases__alias_type__short_name=data_source.upper(), aliases__alias_value=result[group_id])
                    external_append = False
                    additional = ''
                    if not security.exists():
                        security = SecurityContainer.objects.filter(aliases__alias_type__id=bloomberg_alias.id, aliases__alias_value=bloomberg_ticker)
                        additional = result[name_field]
                        external_append = True
                    # Classify the row into container + security type;
                    # generic security is the fallback.
                    if is_fund(data_source, result):
                        container = fund_container_type
                        stype = fund_type
                    elif is_bond(data_source, result):
                        container = bond_container_type
                        stype = bond_type
                    elif is_stock(data_source, result):
                        container = stock_container_type
                        stype = stock_type
                    else:
                        container = sec_container_type
                        stype = security_type
                    if not security.exists():
                        LOGGER.info("Creating security with " + data_source + " id [" + str(result[group_id]) + "]")
                        security = SecurityContainer()
                        security.name = result[name_field]
                        security.short_name = result[short_name_field] if result[short_name_field]!=None and result[short_name_field]!='' else result[name_field]
                        # First save assigns a PK so the M2M add below is valid.
                        security.save()
                        security.associated_companies.add(bloomberg_provider)
                        security.save()
                    else:
                        LOGGER.info("Security with " + data_source + " id [" + str(result[group_id]) + "] already exists.")
                        security = security[0]
                        # Ensure a data-provider company is attached (role SCR_DP).
                        if not security.associated_companies.filter(role__identifier='SCR_DP').exists():
                            security.associated_companies.add(bloomberg_provider)
                            security.save()
                    security.update_alias(external_alias, result[group_id], additional, external_append)
                    security.currency = currency
                    security.frequency = daily
                    security.type = container
                    security.security_type = stype
                    security.save()
                    # Prefer the Bloomberg ticker as tracking key, else ISIN.
                    if bloomberg_ticker!=None and bloomberg_ticker!='':
                        all_tickers.append(bloomberg_ticker)
                    elif isin_code!=None and isin_code!='':
                        all_tickers.append(isin_code)
                    if bloomberg_ticker!=None and bloomberg_ticker!='':
                        security.update_alias(bloomberg_alias, bloomberg_ticker)
                    if isin_code!=None and isin_code!='':
                        security.update_alias(isin_alias, isin_code)
                    universe.members.add(security)
    if data_type=='securities':
        universe.save()
        # Group universe members' tracking codes by container type:
        # Bloomberg ticker preferred, ISIN as fallback, else skipped.
        all_containers = {}
        for member in universe.members.all():
            member = get_effective_instance(member)
            if not all_containers.has_key(member.type.identifier):
                all_containers[member.type.identifier] = []
            try:
                all_containers[member.type.identifier].append(member.aliases.get(alias_type__name='BLOOMBERG').alias_value)
            # NOTE(review): bare except — presumably catching Alias DoesNotExist;
            # narrow this to the actual exception type.
            except:
                try:
                    all_containers[member.type.identifier].append(member.aliases.get(alias_type__name='ISIN').alias_value)
                except:
                    LOGGER.info("There is no Bloomberg nor ISIN code available for this security [" + member.name + "]")
        # One Bloomberg history request per container type; requests run in a
        # thread but are joined immediately, so execution is sequential.
        for key in all_containers.keys():
            fields = BloombergTrackContainerMapping.objects.filter(Q(container__identifier='CONT_SECURITY') | Q(container__identifier=key), Q(active=True)).values_list('short_name__code', flat=True)
            all_containers[key] = [to_bloomberg_code(ticker,True) for ticker in all_containers[key]]
            history_key = uuid.uuid4().get_hex()
            bb_thread = threading.Thread(None, bloomberg_history_query, history_key, (history_key, all_containers[key], fields, True))
            bb_thread.start()
            bb_thread.join()
def populate_security_from_bloomberg_protobuf(data):
    """Build/update SecurityContainer objects from a Bloomberg protobuf reply.

    First pass over ``data.rows`` creates one security per ticker from its
    ``SECURITY_TYP`` row; second pass applies every other field through the
    ``BloombergDataContainerMapping`` configuration. Securities are then
    finalized, given the Bloomberg provider and daily frequency, their
    Bloomberg alias/ticker is normalized with the market sector, and bond
    names are rebuilt from coupon and maturity.

    :param data: protobuf message with ``rows`` (ticker, field, valueString,
                 errorCode) — assumed shape, confirm against the caller.
    :return: tuple ``(securities, final_tickers, with_errors)`` where
             ``securities`` maps ticker -> SecurityContainer, ``final_tickers``
             lists the normalized Bloomberg tickers, and ``with_errors`` lists
             tickers whose rows carried a non-zero errorCode.
    """
    bloomberg_alias = Attributes.objects.get(identifier='ALIAS_BLOOMBERG')
    daily = Attributes.objects.get(identifier='FREQ_DAILY', active=True)
    bloomberg_provider = get_bloomberg_provider()
    securities = {}
    with_errors = []
    # Pass 1: create the container skeletons from SECURITY_TYP rows.
    for row in data.rows:
        if row.errorCode==0:
            if row.field=='SECURITY_TYP':
                try:
                    LOGGER.debug('Entity identified by [' + row.ticker + ',' + row.valueString + '] will be created')
                    sec_type_name = Attributes.objects.get(type='bloomberg_security_type', name=row.valueString).short_name
                    cont_type_name = Attributes.objects.get(type='bloomberg_container_type', name=row.valueString).short_name
                    container_type = Attributes.objects.get(identifier=cont_type_name)
                    security_type = Attributes.objects.get(type='security_type', identifier=sec_type_name)
                    if not securities.has_key(row.ticker):
                        LOGGER.info('Creating new security for ticker ' + str(row.ticker))
                        securities[row.ticker] = SecurityContainer.create()
                        securities[row.ticker].type = container_type
                        securities[row.ticker].security_type = security_type
                # NOTE(review): bare except; also sec_type_name is unbound here
                # when the *first* Attributes lookup raised, so the warn line
                # itself can raise NameError — fix by narrowing and guarding.
                except:
                    traceback.print_exc()
                    LOGGER.warn('Entity identified by [' + row.ticker + ',' + row.valueString + ',' + sec_type_name + '] will be treated as a simple security')
                    securities[row.ticker] = SecurityContainer.create()
        else:
            with_errors.append(row.ticker)
    # Pass 2: map every non-error row onto the security via the configured
    # BloombergDataContainerMapping fields.
    for row in data.rows:
        if row.errorCode==0:
            # NOTE(review): container_type here is whatever value the FIRST
            # loop last assigned (or unbound if no SECURITY_TYP row succeeded),
            # not this row's container — looks like a latent bug; confirm.
            field_info = BloombergDataContainerMapping.objects.filter(Q(short_name__code=row.field), Q(container__short_name=container_type.short_name) | Q(container__short_name='Security') , Q(active=True))
            if field_info.exists():
                field_info = BloombergDataContainerMapping.objects.get(short_name__code=row.field, active=True)
                set_security_information(securities[row.ticker], field_info.name , row.valueString, 'bloomberg')
                securities[row.ticker].save()
                # Optional link to an Attributes entry (model_link drives it).
                if field_info.model_link!=None and field_info.model_link!='':
                    info = Attributes()
                    info.name = row.field
                    info.short_name = field_info.model_link
                    securities[row.ticker].set_attribute('bloomberg', info, row.valueString)
            else:
                LOGGER.debug("Cannot find matching field for " + row.field)
    for security in securities.values():
        security.finalize()
    #[security.finalize() for security in securities.values()]
    # Attach the Bloomberg provider where no company is associated yet,
    # default every security to daily frequency, then persist.
    [security.associated_companies.add(bloomberg_provider) for security in securities.values() if len(security.associated_companies.all())==0]
    [setattr(security,'frequency',daily) for security in securities.values()]
    [security.save() for security in securities.values()]
    final_tickers = []
    # Normalize the Bloomberg ticker: append the market sector when Bloomberg
    # supplied one, otherwise fall back to the user-provided ticker key.
    for ticker in securities:
        securities[ticker].status = Attributes.objects.get(identifier='STATUS_ACTIVE')
        ticker_value = securities[ticker].aliases.filter(alias_type__name='BLOOMBERG')
        if ticker_value.exists() and securities[ticker].market_sector!=None:
            LOGGER.info("Using Bloomberg information for ticker and exchange")
            ticker_value = ticker_value[0]
            if not ticker_value.alias_value.endswith(securities[ticker].market_sector):
                new_full_ticker = ticker_value.alias_value + ' ' + securities[ticker].market_sector
                ticker_value.alias_value = new_full_ticker
                final_tickers.append(new_full_ticker)
                ticker_value.save()
        else:
            LOGGER.info("Using user information for ticker and exchange")
            ticker_value = Alias()
            ticker_value.alias_type = bloomberg_alias
            ticker_value.alias_value = ticker
            ticker_value.save()
            final_tickers.append(ticker)
            securities[ticker].aliases.add(ticker_value)
    [security.save() for security in securities.values()]
    # Rebuild bond display names as "<short_name> <coupon>% <maturity>".
    # NOTE(review): the loop variable 'data' shadows the function parameter —
    # harmless here (parameter is no longer read) but worth renaming.
    for security in securities.values():
        if security.type!=None:
            if security.type.identifier=='CONT_BOND':
                data = get_security_provider_information(security, 'bloomberg')
                if data.has_key('coupon_rate') and data.has_key('maturity_date'):
                    security.name = data['short_name'] + ' ' + data['coupon_rate'] + '% ' + data['maturity_date']
                    security.save()
                else:
                    LOGGER.error(u"The following security has incomplete data [" + unicode(security.name) + u"," + unicode(security.id) + u"]")
        else:
            LOGGER.error(u"The following security is wrongly categorized [" + unicode(security.name) + u"," + unicode(security.id) + u"]")
    return securities, final_tickers, with_errors