def export_result_table(request, result_id, api_key):
    """
    Kick off an asynchronous export of the database table behind a Result.

    :param request: the Django request (unused beyond routing)
    :param result_id: primary key of the Result whose table is exported
    :param api_key: API key identifying the requesting user for task tracking
    :return: HttpResponse whose body is the tracking job's hashid
    """
    result = Result.objects.get(id=result_id)
    # Reuse the Result fetched above instead of issuing a second identical
    # DB query for the same id (the original re-ran Result.objects.get here).
    result_table_name = str(result.db_entity_interest.db_entity.full_table_name)
    # NOTE(review): table name comes from our own DbEntity metadata, not user
    # input, so string interpolation is acceptable here.
    result_query = 'select * from {0}'.format(result_table_name)
    # Lazy %-style logger args avoid formatting when DEBUG is disabled
    logger.debug("Exporting results for db entity key: %s", result.db_entity_key)
    logger.debug("Exporting results using query: %s", result_query)
    job = start_and_track_task(_export_query_results, api_key, class_name_for_client(Result), result.id, result.db_entity_key, result_query)
    return HttpResponse(job.hashid)
# Example #2
def export_result_table(request, result_id, api_key):
    """
    Start an async export of a Result's backing table and return the job id.

    :param request: the Django request (unused beyond routing)
    :param result_id: primary key of the Result whose table is exported
    :param api_key: API key identifying the requesting user for task tracking
    :return: HttpResponse containing the tracking job's hashid
    """
    result = Result.objects.get(id=result_id)
    # Avoid the redundant second Result.objects.get(id=result_id) the
    # original performed — the instance is already in hand.
    result_table_name = str(result.db_entity_interest.db_entity.full_table_name)
    result_query = 'select * from {0}'.format(result_table_name)
    # Pass values as lazy logger arguments so formatting is skipped
    # unless DEBUG logging is actually enabled.
    logger.debug("Exporting results for db entity key: %s", result.db_entity_key)
    logger.debug("Exporting results using query: %s", result_query)
    job = start_and_track_task(_export_query_results, api_key, class_name_for_client(Result), result.id, result.db_entity_key, result_query)
    return HttpResponse(job.hashid)
def export_query_results(request, layer_selection_unique_id, api_key):
    """
    Queue an asynchronous export of the SQL results behind a layer selection.

    :param request: the Django request (unused beyond routing)
    :param layer_selection_unique_id: unique id resolving the LayerSelection
    :param api_key: API key identifying the requesting user for task tracking
    :return: HttpResponse whose body is the tracking job's hashid
    """
    selection = LayerSelection.from_unique_id(layer_selection_unique_id)
    sql = selection.query_sql
    logger.debug("Exporting query results for layer_selection: %s, query: %s" %
                 (selection.unique_id, sql))
    export_job = start_and_track_task(
        _export_query_results,
        api_key,
        class_name_for_client(LayerSelection),
        selection.unique_id,
        selection.layer.db_entity_key,
        sql)
    return HttpResponse(export_job.hashid)
# Example #4
def export_query_summary(request, layer_selection_unique_id, api_key):
    """
    Queue an asynchronous export of a layer selection's summary query results.

    :param request: the Django request (unused beyond routing)
    :param layer_selection_unique_id: unique id resolving the LayerSelection
    :param api_key: API key identifying the requesting user for task tracking
    :return: HttpResponse whose body is the tracking job's hashid
    """
    selection = LayerSelection.from_unique_id(layer_selection_unique_id)
    summary_sql = selection.summary_query_sql
    logger.debug("Exporting summary query results for layer_selection: %s, summary query: %s" % (selection.unique_id, summary_sql))
    # The export key is suffixed so summary downloads are distinguishable
    # from full-result downloads of the same db_entity.
    export_key = '%s_Summary' % selection.layer.db_entity_key
    export_job = start_and_track_task(
        _export_query_results,
        api_key,
        class_name_for_client(LayerSelection),
        selection.unique_id,
        export_key,
        summary_sql)
    return HttpResponse(export_job.hashid)
# Example #5
def export_layer(request, layer_id, api_key):
    """
    Queue an asynchronous export of the given layer and return the job id.

    :param request: the Django request (unused beyond routing)
    :param layer_id: primary key of the layer to export
    :param api_key: API key identifying the requesting user for task tracking
    :return: HttpResponse whose body is the tracking job's hashid
    """
    export_job = start_and_track_task(_export_layer, api_key, layer_id)
    return HttpResponse(export_job.hashid)
# Example #6
def post_save_publishing(signal_path, config_entity, user, **kwargs):
    """
        The initial entry point and recursive entry point for all post save publishing methods
        :signal_path - the full module path of the signal that called this
        :param kwargs:
            signal_proportion_lookup - A dictionary of signal names to the proportion complete of the overall post save.
            The signal matching signal_path will be sought in the dictionary
            config_entity - The scope of whatever being post-saved, whether a config_entity or something within it
            dependent_signal_paths - Full module signal paths called in sequentially by this publisher
            crud_type - CrudKey.CREATE|CLONE|UPDATE|SYNC|DELETE
            instance_class - Optional. Overrides the class of the instance for use in communicating with the client.
            This is used when the client only cares about a base class, such as Feature or to for DbEntityInterest
            to be a DbEntity
            client_instance_path - Optional. Property path to resolve the instance to another instance for the client.
             (this is only used to convert DbEntityInterest to DbEntity)
        Returns an HttpResponse whose body is the tracking job's hashid.
    """
    # The user's API key authenticates the celery task on the user's behalf
    api_key = ApiKey.objects.get(user=user).key

    # Gather instance ids, class, and optional instance keys
    bundle = InstanceBundle(**merge(kwargs, dict(user_id=user.id)))

    # Pass the arguments to the task and run via celery. Note that kwargs is being treated
    # as a dict here and passed along
    logger.info("Django post save: %s" % unicode(bundle))

    # Send the start event to the client if we aren't recursing.
    # 'recurse' is set when this is re-entered from within a publisher chain,
    # in which case the client was already notified by the outermost call.
    if not kwargs.get('recurse', False):
        event = 'postSavePublisherStarted'
        logger.info("Sending start message %s to user %s with %s" % (event, user.username, unicode(bundle)))

        # Push a websocket-style message describing what is being published.
        # config_entity may be None, hence the `config_entity and ...` guards.
        send_message_to_client(
            user.id,
            dict(
                event=event,
                config_entity_id=config_entity and config_entity.id,
                config_entity_class_name=config_entity and config_entity.__class__.__name__,
                class_name=bundle.class_name_for_client,
                # Always send 0 for initial
                proportion=0,
                ids=bundle.client_instance_ids,
                keys=bundle.keys,
                class_key=bundle.class_key
            )
        )

    # Start Celery
    logger.info("Starting post save publishing with signal path %s" % signal_path)
    # 'instance' is stripped because model instances shouldn't be serialized
    # into the celery task; the bundle carries what the task needs instead.
    job = start_and_track_task(_post_save_publishing,
                               api_key,
                               config_entity,
                               user,
                               **merge(
                                     remove_keys(kwargs, ['instance']),
                                     dict(
                                         # If we are recursing (already in a celery worker, don't start a new celery task
                                         # When we get dependency order figured out, we can do this, but there's probably
                                         # a better way via the Task object or something
                                         current_job=kwargs.get('job', None),
                                         signal_path=signal_path,
                                         crud_type=kwargs.get('crud_type'),
                                         bundle=bundle
                               )))

    return HttpResponse(job.hashid)
def export_layer(request, layer_id, api_key):
    """
    Start a tracked background task that exports the layer identified by
    layer_id, responding with the hashid of the tracking job.

    :param request: the Django request (unused beyond routing)
    :param layer_id: primary key of the layer to export
    :param api_key: API key identifying the requesting user for task tracking
    :return: HttpResponse whose body is the tracking job's hashid
    """
    tracked = start_and_track_task(_export_layer, api_key, layer_id)
    return HttpResponse(tracked.hashid)
# Example #8
def post_save_publishing(signal_path, config_entity, user, **kwargs):
    """
        The initial entry point and recursive entry point for all post save publishing methods
        :signal_path - the full module path of the signal that called this
        :param kwargs:
            signal_proportion_lookup - A dictionary of signal names to the proportion complete of the overall post save.
            The signal matching signal_path will be sought in the dictionary
            config_entity - The scope of whatever being post-saved, whether a config_entity or something within it
            dependent_signal_paths - Full module signal paths called in sequentially by this publisher
            crud_type - CrudKey.CREATE|CLONE|UPDATE|SYNC|DELETE
            instance_class - Optional. Overrides the class of the instance for use in communicating with the client.
            This is used when the client only cares about a base class, such as Feature or to for DbEntityInterest
            to be a DbEntity
            client_instance_path - Optional. Property path to resolve the instance to another instance for the client.
             (this is only used to convert DbEntityInterest to DbEntity)
        Returns an HttpResponse whose body is the tracking job's hashid.
    """
    # API key used to run the celery task on behalf of this user
    api_key = ApiKey.objects.get(user=user).key

    # Gather instance ids, class, and optional instance keys
    bundle = InstanceBundle(**merge(kwargs, dict(user_id=user.id)))

    # Pass the arguments to the task and run via celery. Note that kwargs is being treated
    # as a dict here and passed along
    logger.info("Django post save: %s" % unicode(bundle))

    # Send the start event to the client if we aren't recursing.
    # When 'recurse' is set we are re-entering from a publisher chain and the
    # outermost call already notified the client.
    if not kwargs.get('recurse', False):
        event = 'postSavePublisherStarted'
        logger.info("Sending start message %s to user %s with %s" %
                    (event, user.username, unicode(bundle)))

        # config_entity may be None, hence the `config_entity and ...` guards
        send_message_to_client(
            user.id,
            dict(
                event=event,
                config_entity_id=config_entity and config_entity.id,
                config_entity_class_name=config_entity
                and config_entity.__class__.__name__,
                class_name=bundle.class_name_for_client,
                # Always send 0 for initial
                proportion=0,
                ids=bundle.client_instance_ids,
                keys=bundle.keys,
                class_key=bundle.class_key))

    # Start Celery
    logger.info("Starting post save publishing with signal path %s" %
                signal_path)
    # 'instance' is stripped from kwargs so the model instance itself is not
    # serialized into the celery task; the bundle carries the needed state.
    job = start_and_track_task(
        _post_save_publishing,
        api_key,
        config_entity,
        user,
        **merge(
            remove_keys(kwargs, ['instance']),
            dict(
                # If we are recursing (already in a celery worker, don't start a new celery task
                # When we get dependency order figured out, we can do this, but there's probably
                # a better way via the Task object or something
                current_job=kwargs.get('job', None),
                signal_path=signal_path,
                crud_type=kwargs.get('crud_type'),
                bundle=bundle)))

    return HttpResponse(job.hashid)