def get_communities(dsname):
    """Use datasource get_communities() facility."""
    c.log( 'Starting' )

    # Build the datasource configuration
    dsconfig = ds.build_conf( dsname, DATASOURCES_CONF )

    # Get the cache directory
    config = ConfigParser.RawConfigParser()
    config.read(GENERAL_CONF)
    dsconfig['cache_dir'] = config.get('paths','cache_dir')

    communities = ds.get_communities( dsconfig )
    fields = ds.fields('communities')
    for community in communities:
        cid = community[fields['id']]
        title = community[fields['title']] \
                if community[fields['title']] is not None else '(no title)'
        start_date = community[fields['start_date']] \
                if community[fields['start_date']] is not None \
                else '(no start date)'
        end_date = community[fields['end_date']] \
                if community[fields['end_date']] is not None \
                else '(no end date)'
        print '{0}\t{1}\t{2}\t{3}'.format( cid, title.encode('UTF-8'),
                                           start_date, end_date )
    c.log( 'Finished' )
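
# Example invocation (hypothetical datasource name; it must be defined in
# config/datasources.conf):
#
#     get_communities('sioc')
#
# prints one tab-separated line per community, e.g.:
#
#     42    Example community    2006-01-01    2006-12-31
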
def compute_feature( parms ):
    """Do the actual feature computation."""
    c.log( 'Starting' )

    # Try to load the requested feature
    try:
        feature = imp_module( 'features.'+parms['feature_name'] )
    except ImportError:
        c.log( "The feature '{0}' could not be loaded." \
               .format(parms['feature_name']) )
        return False

    # Try to load the requested aggregation
    try:
        aggregation = imp_module( 'aggregations.' +
                                  parms['aggregation_name'] )
    except ImportError:
        c.log( "The aggregation '{0}' could not be loaded." \
               .format(parms['aggregation_name']) )
        return False

    # Try to load the requested consolidation
    try:
        consolidation = imp_module( 'consolidations.' +
                                     parms['consolidation_name'] )
    except ImportError:
        c.log( "The consolidation '{0}' could not be loaded." \
               .format(parms['consolidation_name']) )
        return False

    # Build the datasource configuration
    dsconfig = ds.build_conf( parms['data_source'],
                             DATASOURCES_CONF )

    # Get the cache directory
    config = ConfigParser.RawConfigParser()
    config.read(GENERAL_CONF)
    dsconfig['cache_dir'] = config.get('paths','cache_dir')

    # Compute the requested feature
    c.log("Computing '{0}' in {1}, from {2} to {3} ..." \
          .format( parms['feature_name'],
          ('community id {0}'.format(parms['community_id']),
           'all communities')[parms['community_id'] is None],
          parms['start_date'], parms['end_date'] ))
    start_time = t.time()
    values = feature.compute( dsconfig,
                             parms['community_id'],
                             parms['start_date'], parms['end_date'])
    c.log('Time elapsed: {0}s'.format(t.time() - start_time) )

    # Compute aggregation/consolidation
    values = consolidation.compute( aggregation.compute(values) )

    # Output values
    print values
    c.log('Finished')
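
# Hypothetical invocation of compute_feature() (the module names below are
# placeholders; matching modules must exist under the features/,
# aggregations/ and consolidations/ packages):
#
#     compute_feature({
#         'feature_name': 'in_degree',
#         'aggregation_name': 'mean',
#         'consolidation_name': 'identity',
#         'data_source': 'sioc',
#         'community_id': 42,            # or None to cover all communities
#         'start_date': dt.date(2006, 1, 1),
#         'end_date': dt.date(2006, 1, 31),
#     })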

    # ZSI SOAP handler; presumably defined on a service class that subclasses
    # the generated RobustCommunityAnalysis skeleton (the class statement is
    # not shown in this excerpt).
    def soap_getCommunities(self, ps, **kw):
        request, response = RobustCommunityAnalysis \
                            .soap_getCommunities(self, ps, **kw)

        # Get arguments
        accessKey = request.get_element_accessKey()
        if not checkKey(accessKey):
            raise Exception("Your accessKey is not valid.")
        dataSourceId = request.get_element_dataSourceId()
        c.log( 'Starting for dataSourceId {0}'.format(dataSourceId) )

        # Build the datasource configuration
        dsconfig = ds.build_conf( DATASOURCES[dataSourceId]['name'],
                                  DATASOURCES_CONF )

        # Get the cache directory
        config = ConfigParser.RawConfigParser()
        config.read(GENERAL_CONF)
        dsconfig['cache_dir'] = config.get('paths','cache_dir')

        # Fetch the available communities
        all_communities = ds.get_communities(dsconfig)
        response = getCommunitiesResponse()
        communities = []
        for cid, title, startDate, endDate in all_communities:
            # Skip communities without a complete date range
            if startDate is None or endDate is None:
                continue
            community = response.new_communities().new_community()
            community.set_element_id(cid)
            community.set_element_title(title)
            community.set_element_startDate( startDate.timetuple() )
            community.set_element_endDate( endDate.timetuple() )
            communities += [ community ]
        # Wrap the community elements and attach them to the response
        _communities = response.new_communities()
        _communities.set_element_community( communities )
        response.set_element_communities( _communities )

        # Return response
        c.log( 'Finished' )
        return request, response
def getIndicatorScore( indicatorId, dataSourceId, communityId,
                       startDate, endDate ):
    """Computes the indicator health score."""
    c.log( 'Starting' )

    # Build the datasource configuration
    dsconfig = ds.build_conf( DATASOURCES[dataSourceId]['name'],
                              DATASOURCES_CONF )

    # Get the cache directory
    config = ConfigParser.RawConfigParser()
    config.read(GENERAL_CONF)
    dsconfig['cache_dir'] = config.get('paths','cache_dir')

    # Look up the indicator's feature/aggregation/consolidation pipeline
    feature = INDICATORS[indicatorId]['feature']
    aggregation = INDICATORS[indicatorId]['aggregation']
    consolidation = INDICATORS[indicatorId]['consolidation'][dataSourceId]

    # score = consolidate(aggregate(feature values))
    score = consolidation.compute(aggregation.compute(feature.compute(
                dsconfig, communityId, startDate, endDate
            )))
    c.log( 'Finished' )
    return score
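
# Hypothetical invocation (the ids must exist in the INDICATORS and
# DATASOURCES registries defined elsewhere in this module):
#
#     score = getIndicatorScore( 1, 1, 42,
#                                dt.date(2006, 1, 1), dt.date(2006, 1, 31) )

# A helper like the following (a sketch, not part of the original code) could
# factor out the datasource/cache-directory boilerplate repeated in each
# function above:
def build_dsconfig( ds_name ):
    """Build a datasource configuration with the cache directory set."""
    dsconfig = ds.build_conf( ds_name, DATASOURCES_CONF )
    config = ConfigParser.RawConfigParser()
    config.read(GENERAL_CONF)
    dsconfig['cache_dir'] = config.get('paths','cache_dir')
    return dsconfig
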
import sys
import random
import datetime as dt
import ConfigParser

import datasources as ds

GENERAL_CONF = "config/general.conf"
DATASOURCES_CONF = "config/datasources.conf"

# Get the feature name to test
if len(sys.argv) != 3:
    print 'Usage: %s <datasource_name> <feat_name>' % sys.argv[0]
    sys.exit(1)

ds_name = sys.argv[1]
feat_name = sys.argv[2]

# Build the datasource configuration
dsconfig = ds.build_conf( ds_name, DATASOURCES_CONF )

# Get the cache directory
config = ConfigParser.RawConfigParser()
config.read(GENERAL_CONF)
dsconfig['cache_dir'] = config.get('paths','cache_dir')

# Pick a random community for testing (the id is the first field)
communities = ds.get_communities( dsconfig )
communityId = random.choice(communities)[0]

# Pick a fixed one-month date range for testing
t1 = dt.date( 2006, 1, 1 )
t2 = dt.date( 2006, 1, 31 )

# Compute the feature
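# A minimal sketch of the missing step (an assumption: the feature module
# lives under the features/ package and exposes the same compute() signature
# used by compute_feature above):
import importlib
feature = importlib.import_module( 'features.' + feat_name )
values = feature.compute( dsconfig, communityId, t1, t2 )
print values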