def __get_output_configuration(
        config: dict,
        attribute: str,
        option: str,
        context: BaseContext = Provide[ApplicationContainer.context_factory]):
    """
    Resolve an output setting: prefer the attribute from the request
    configuration, fall back to the stored sDAS configuration option,
    and default to an empty string.
    """
    if attribute in config:
        return config[attribute]
    stored = context.get_configuration(option)
    return '' if stored is None else stored
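
# A minimal sketch of the lookup order above: an explicit attribute in the
# request configuration wins, then the stored sDAS option, then ''. The stub
# context and the 'outputDelimiter' option below are hypothetical and exist
# only for illustration.
class _StubContext:
    """Hypothetical stand-in for BaseContext, used only in the demo below."""

    def __init__(self, values: dict):
        self._values = values

    def get_configuration(self, option: str):
        return self._values.get(option)


def _demo_output_configuration():
    context = _StubContext({'outputDelimiter': ','})
    # The attribute supplied with the request takes precedence.
    assert __get_output_configuration(
        {'delimiter': '|'}, 'delimiter', 'outputDelimiter', context) == '|'
    # Otherwise the stored configuration option is used.
    assert __get_output_configuration(
        {}, 'delimiter', 'outputDelimiter', context) == ','
    # Unknown options fall back to an empty string.
    assert __get_output_configuration(
        {}, 'delimiter', 'missingOption', context) == ''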
def index(metadata_context: BaseContext = Provide[ApplicationContainer.context_factory]):
    """
    Provides the archive metadata API: GET returns all archive metadata,
    POST either uploads a dataset file to IPFS or registers existing
    dataset metadata, rejecting duplicate archive addresses.
    """
    if request.method == constants.HTTP_GET:
        logging.info("Fetching archive metadata from sDAS database")
        with metadata_context.get_session() as session:
            archives = session.query(Archive).all()
            return jsonify([json(archive) for archive in archives])
    if len(request.files) > 0:
        # Handle file upload into IPFS
        logging.info("Handling archive file upload request")
        file: FileStorage = request.files['dataset']
        connection_string = metadata_context.get_configuration(
            'archiveIpfsConnectionString')
        if connection_string is None:
            logging.error(
                "No connection string was found, unable to upload to IPFS!")
            abort(500)
        symbols = request.form['company_symbols']
        logging.info("Uploading archive")
        archive = upload_archive(file.stream.read(),
                                 connection_string.value,
                                 metadata_context,
                                 company_symbols=symbols)
    else:
        # Support registering existing dataset metadata within sDAS
        logging.info("Registering archive in sDAS metadata database")
        archive = Archive.from_meta(request.get_json())
    with metadata_context.get_session() as session:
        archive_count = session.query(Archive).filter(
            Archive.address == archive.address).count()
        if archive_count > 0:
            message = f'Found possible dataset duplicate at address "{archive.address}".'
            logging.warning(message)
            return make_response(message, 400)
        session.add(archive)
    return jsonify(json(archive))
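
# Hedged usage sketch for the handler above: the 'dataset' file part and
# 'company_symbols' form field mirror the code, but the '/archive' URL and the
# metadata fields are assumptions, since the route decorator and Archive schema
# are defined elsewhere.
def _demo_register_and_upload(base_url='http://localhost:5000'):
    import requests  # illustration only

    # Register existing dataset metadata (JSON body, no file part).
    registered = requests.post(f'{base_url}/archive', json={
        'name': 'example-dataset',      # hypothetical metadata fields
        'address': 'QmExampleIpfsHash'
    })

    # Upload a dataset file to IPFS alongside its company symbols.
    with open('dataset.csv', 'rb') as dataset:
        uploaded = requests.post(
            f'{base_url}/archive',
            files={'dataset': dataset},
            data={'company_symbols': 'AAPL,MSFT'})
    return registered.json(), uploaded.json()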
def acquire(company_symbol,
            metadata_context: BaseContext = Provide[
                ApplicationContainer.context_factory]):
    """
    Provides API function for dataset generation.

    Parameters
    ----------
    company_symbol: str
        Symbol for the company to acquire data for.

    Returns
    -------
    flask.Response:
        HTTP response with dataset as a JSON payload or delimited file.
        This is determined by whether a query parameter is provided that
        specifies a file output formatter.

    Raises
    ------
    sqlalchemy.exc.OperationalError:
        Thrown if there is an issue communicating with the metadata database.
    """
    should_handle_events = metadata_context.get_feature_toggle(
        FeatureToggles.event_handlers)
    if should_handle_events:
        logging.info("Signalling pre-acquisition event handlers")
        SignalFactory.pre_acquisition.send(
            company_symbol=company_symbol,
            start_date=datetime.now().isoformat())
    results = dict()
    api_key = metadata_context.get_configuration('apiKey')
    logging.debug("Creating IEX client with API Key: %s", api_key)
    client = IexClient(api_key)
    try:
        if should_handle_events:
            logging.info("Signalling pre-company event handlers")
            SignalFactory.pre_company.send(
                company_symbol=company_symbol,
                start_date=datetime.now().isoformat())
        with metadata_context.get_session() as session:
            entity = session.query(Entity).filter(
                Entity.identifier == company_symbol).one()
            for feature in entity.features:
                if should_handle_events:
                    logging.info("Signalling pre-feature event handlers")
                    SignalFactory.pre_feature.send(
                        company_symbol=company_symbol,
                        feature_name=feature.name,
                        start_date=datetime.now().isoformat())
                feature_options = session.query(Option).filter(
                    Option.company_symbol == company_symbol,
                    Option.feature_name == feature.name).all()
                options = [json(option) for option in feature_options]
                logging.info('Retrieved mapped options: [%s]', " ".join(
                    [json(option, True) for option in feature_options]))
                # TODO: Determine if this could/should be moved into source-aware code
                if feature.handler_metadata.name == "tech_indicators_handler" and not options:
                    logging.info('Adding missing option on technical indicator')
                    options.append({
                        "feature_name": feature.name,
                        "name": "range",
                        "value": "1m"
                    })
                logging.info('Acquiring feature data')
                data = client.get_feature_data(feature, entity, options)
                if isinstance(data, list):
                    results[feature.name] = feature.get_values(data)
                elif isinstance(data, dict):
                    results[feature.name] = [feature.get_value(data)]
                logging.info("Acquired %d rows", len(results[feature.name]))
                if should_handle_events:
                    logging.info("Signalling post-feature event handlers")
                    SignalFactory.post_feature.send(
                        company_symbol=company_symbol,
                        feature_name=feature.name,
                        feature_rows=len(results[feature.name]),
                        end_date=datetime.now().isoformat())
        results = __format_output(results, 'json')
        if should_handle_events:
            logging.info("Signalling post-company event handlers")
            count = reduce(lambda total, rows: total + len(rows),
                           results['values'], 0)
            SignalFactory.post_company.send(
                company_symbol=company_symbol,
                data=results,
                total_rows=count,
                end_date=datetime.now().isoformat())
        if 'format' in request.args:
            try:
                format_result = __format_output(results,
                                                request.args['format'])
            except Exception as exc:
                return make_response(str(exc), 400)
            if request.args['format'].lower() == 'file':
                if should_handle_events:
                    logging.info("Signalling post-acquisition event handlers")
                    SignalFactory.post_acquisition.send(
                        company_symbol=company_symbol,
                        end_date=datetime.now().isoformat(),
                        message='Completed data acquisition!',
                        uri=request.path,
                        type='INFO')
                return send_file(format_result,
                                 as_attachment=True,
                                 cache_timeout=0)
        if should_handle_events:
            logging.info("Signalling post-acquisition event handlers")
            SignalFactory.post_acquisition.send(
                company_symbol=company_symbol,
                end_date=datetime.now().isoformat(),
                message='Completed data acquisition!',
                uri=request.path,
                type='INFO')
        return jsonify(results)
    except NoResultFound:
        return make_response('Cannot find company', 404)
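
# Sketch of exercising the acquisition handler above, assuming it is routed at
# '/acquire/<company_symbol>' (the decorator is not part of this section). A
# 'format' query parameter selects a formatter; 'file' yields an attachment.
def _demo_acquire(base_url='http://localhost:5000'):
    import requests  # illustration only

    # Default: JSON payload of acquired feature values.
    payload = requests.get(f'{base_url}/acquire/AAPL').json()

    # format=file: download the delimited file produced by the formatter.
    response = requests.get(f'{base_url}/acquire/AAPL',
                            params={'format': 'file'})
    with open('AAPL_dataset.csv', 'wb') as output:
        output.write(response.content)
    return payload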