def check_dataset(dataset):
    """Validate a CKAN-of-Worms dataset and submit the resulting alerts to the server.

    Runs the dataset through the error-level then warning-level converters, adds an
    extra warning when a non-real-time dataset lacks a temporal coverage start, and
    POSTs the collected alerts to the ``api/1/datasets/{id}/alert`` endpoint when
    they differ from the alerts already stored on the dataset.
    """
    log.debug(u'Checking dataset "{}".'.format(dataset['name']))
    error_verified_dataset, errors = cow_json_to_error_verified_dataset(dataset, state = default_state)
    if errors is None:
        errors = {}
    warning_verified_dataset, warnings = cow_json_to_warning_verified_dataset(error_verified_dataset,
        state = default_state)
    # Only datasets with a valid frequency can be checked for temporal coverage.
    if warnings is None or 'frequency' not in warnings:
        # "temps réel" (real-time) datasets are exempt from the temporal coverage requirement.
        if warning_verified_dataset[u'frequency'] != u'temps réel':
            warning_verified_dataset[u'temporal_coverage_from'], error = not_none(
                warning_verified_dataset[u'temporal_coverage_from'], state = default_state)
            if error is not None:
                if warnings is None:
                    warnings = {}
                warnings[u'temporal_coverage_from'] = error
    alerts = {}
    if errors:
        alerts['error'] = json.loads(json.dumps(errors))  # Convert numeric keys to strings.
    if warnings:
        alerts['warning'] = json.loads(json.dumps(warnings))  # Convert numeric keys to strings.
    # Compare against the alerts this application previously stored on the dataset.
    # NOTE(review): the stored structure is read as {level: {app_name: {'error': ...}}} —
    # the literal 'error' sub-key is used for every level; confirm against the server's
    # alert storage schema.
    if alerts != dict(
            (level, level_alerts[app_name]['error'])
            for level, level_alerts in (dataset.get('alerts') or {}).iteritems()
            if level_alerts.get(app_name)
            ):
        log.info(u'Updating dataset "{}" alerts.'.format(dataset['name']))
        request_headers = headers.copy()
        request_headers['Content-Type'] = 'application/json'
        request = urllib2.Request(urlparse.urljoin(conf['ckan_of_worms.site_url'],
            'api/1/datasets/{}/alert'.format(dataset['id'])), headers = request_headers)
        request_data = dict(
            api_key = conf['ckan_of_worms.api_key'],
            author = app_name,
            # draft_id lets the server detect concurrent modifications (see the 409 case below).
            draft_id = dataset['draft_id'],
            )
        request_data.update(alerts)
        try:
            response = urllib2.urlopen(request, json.dumps(request_data))
        except urllib2.HTTPError as response:
            if response.code == 409:
                # The dataset has been modified. Don't submit alerts because we will be notified of the new dataset
                # version.
                log.info(u'Dataset "{}" has been modified. Alerts are ignored.'.format(dataset['name']))
                return
            log.error(u'An error occured while setting dataset "{}" alerts: {}'.format(dataset['name'], alerts))
            response_text = response.read()
            try:
                response_dict = json.loads(response_text)
            except ValueError:
                # Response body isn't JSON; log it raw before re-raising the HTTP error.
                log.error(response_text)
                raise
            for key, value in response_dict.iteritems():
                print '{} = {}'.format(key, value)
            raise
        else:
            assert response.code == 200
            check(cow_response_to_value)(response.read(), state = default_state)
def load_configuration(global_conf, app_conf):
    """Build the application configuration dict.

    Merges ``global_conf`` and ``app_conf`` (decoded to unicode), validates the
    result against the Harmony Jobs defaults, then completes it with the CDN
    asset URLs derived from ``cdn.url``.
    """
    app_dir = os.path.dirname(os.path.abspath(__file__))
    conf = {}
    conf.update(strings.deep_decode(global_conf))
    conf.update(strings.deep_decode(app_conf))
    conf.update(check(struct(
        {
            'app_conf': default(app_conf),
            'app_dir': default(app_dir),
            'app_name': default('Harmony Jobs'),
            'cache_dir': default(os.path.join(os.path.dirname(app_dir), 'cache')),
            'cdn.url': default('http://localhost:7000'),
            'custom_templates_dir': default(None),
            'database.host_name': default('localhost'),
            'database.name': default('harmony'),
            'database.port': pipe(input_to_int, default(27017)),
            'debug': pipe(guess_bool, default(False)),
            'global_conf': default(global_conf),
            'log_level': pipe(
                default('WARNING'),
                function(lambda log_level: getattr(logging, log_level.upper())),
                ),
            'package_name': default('harmony-jobs'),
            'static_files': pipe(guess_bool, default(False)),
            'static_files_dir': default(os.path.join(app_dir, 'static')),
            'webrokeit.database.collections.states': default('states'),
            'webrokeit.database.collections.subscriptions': default('subscriptions'),
            'webrokeit.database.collections.tasks': default('tasks'),
            'webrokeit.database.host_name': default('localhost'),
            'webrokeit.database.name': default('webrokeit'),
            'webrokeit.database.port': pipe(input_to_int, default(27017)),
            # Fixed: was 'http;//localhost:8765' (semicolon instead of colon), an
            # unusable URL; every other URL default here — including
            # webrokeit.url.emit just below — uses the 'http://' scheme.
            'webrokeit.url.base': default('http://localhost:8765'),
            'webrokeit.url.emit': default('http://localhost:8765/emit'),
            },
        default = 'drop',
        drop_none_values = False,
        ))(conf))

    # Assets
    conf.update(check(struct(
        {
            'cdn.bootstrap.css': default(
                urlparse.urljoin(conf['cdn.url'], '/bootstrap/2.2.2/css/bootstrap.min.css')
                ),
            'cdn.bootstrap.js': default(urlparse.urljoin(conf['cdn.url'], '/bootstrap/2.2.2/js/bootstrap.js')),
            'cdn.html5shiv.js': default(urlparse.urljoin(conf['cdn.url'], '/html5shiv/html5shiv.js')),
            'cdn.jquery.js': default(urlparse.urljoin(conf['cdn.url'], '/jquery/jquery-1.9.1.min.js')),
            'cdn.plupload.js': default(urlparse.urljoin(conf['cdn.url'], '/plupload/plupload.full.js')),
            'cdn.underscore.js': default(urlparse.urljoin(conf['cdn.url'], '/underscore/underscore.js')),
            },
        default = noop,
        ))(conf))

    return conf
def load_configuration(global_conf, app_conf):
    """Build the application configuration dict."""
    application_dir = os.path.dirname(os.path.abspath(__file__))
    configuration = {}
    configuration.update(strings.deep_decode(global_conf))
    configuration.update(strings.deep_decode(app_conf))
    # Core options: validate the merged configuration, drop unknown keys,
    # keep explicit None values.
    core_validators = {
        'app_conf': default(app_conf),
        'app_dir': default(application_dir),
        'app_name': default('Harmony Viewer'),
        'cache_dir': default(os.path.join(os.path.dirname(application_dir), 'cache')),
        'cdn.url': pipe(cleanup_line, not_none),
        'custom_templates_dir': default(None),
        'debug': pipe(guess_bool, default(False)),
        'global_conf': default(global_conf),
        'log_level': pipe(
            default('WARNING'),
            function(lambda log_level: getattr(logging, log_level.upper())),
            ),
        'package_name': default('harmony-viewer'),
        'projects_base_dir': pipe(cleanup_line, not_none),
        'static_files': pipe(guess_bool, default(False)),
        'static_files_dir': default(os.path.join(application_dir, 'static')),
        'tile_server_url_scheme': pipe(cleanup_line, not_none),
        }
    configuration.update(check(struct(
        core_validators,
        default = 'drop',
        drop_none_values = False,
        ))(configuration))

    # Assets: complete the configuration with CDN asset URLs built from cdn.url.
    asset_validators = {
        'cdn.bootstrap.css': default(
            urlparse.urljoin(configuration['cdn.url'], '/bootstrap/2.2.2/css/bootstrap.min.css')
            ),
        'cdn.bootstrap.js': default(urlparse.urljoin(configuration['cdn.url'], '/bootstrap/2.2.2/js/bootstrap.js')),
        'cdn.html5shiv.js': default(urlparse.urljoin(configuration['cdn.url'], '/html5shiv/html5shiv.js')),
        'cdn.jquery.js': default(urlparse.urljoin(configuration['cdn.url'], '/jquery/jquery-1.9.1.min.js')),
        'cdn.leaflet.css': default(urlparse.urljoin(configuration['cdn.url'], '/leaflet/0.5.1/leaflet.css')),
        'cdn.leaflet.ie.css': default(urlparse.urljoin(configuration['cdn.url'], '/leaflet/0.5.1/leaflet.ie.css')),
        'cdn.leaflet.js': default(urlparse.urljoin(configuration['cdn.url'], '/leaflet/0.5.1/leaflet.js')),
        'cdn.plupload.js': default(urlparse.urljoin(configuration['cdn.url'], '/plupload/plupload.full.js')),
        'cdn.underscore.js': default(urlparse.urljoin(configuration['cdn.url'], '/underscore/underscore.js')),
        }
    configuration.update(check(struct(
        asset_validators,
        default = noop,
        ))(configuration))

    return configuration
def load_configuration(global_conf, app_conf):
    """Build the application configuration dict."""
    application_dir = os.path.dirname(os.path.abspath(__file__))
    configuration = {}
    configuration.update(strings.deep_decode(global_conf))
    configuration.update(strings.deep_decode(app_conf))
    # Validate the merged configuration: drop unknown keys, keep explicit None values.
    validators = {
        'app_conf': default(app_conf),
        'app_dir': default(application_dir),
        'cache_dir': default(os.path.join(os.path.dirname(application_dir), 'cache')),
        'database.collections.states': default('states'),
        'database.collections.subscriptions': default('subscriptions'),
        'database.collections.tasks': default('tasks'),
        'database.host_name': default('localhost'),
        'database.name': default('webrokeit'),
        'database.port': pipe(input_to_int, default(27017)),
        'debug': pipe(guess_bool, default(False)),
        'global_conf': default(global_conf),
        'log_level': pipe(
            default('WARNING'),
            function(lambda log_level: getattr(logging, log_level.upper())),
            ),
        'package_name': default('webrokeit'),
        }
    configuration.update(check(struct(
        validators,
        default = 'drop',
        drop_none_values = False,
        ))(configuration))
    return configuration
def load_configuration(global_conf, app_conf):
    """Build the application configuration dict."""
    application_dir = os.path.dirname(os.path.abspath(__file__))
    configuration = {}
    configuration.update(strings.deep_decode(global_conf))
    configuration.update(strings.deep_decode(app_conf))
    # Validate the merged configuration: drop unknown keys, keep explicit None values.
    validators = {
        'app_conf': default(app_conf),
        'app_dir': default(application_dir),
        'app_name': default('Harmony Tiles'),
        'debug': pipe(guess_bool, default(False)),
        'global_conf': default(global_conf),
        'log_level': pipe(
            default('WARNING'),
            function(lambda log_level: getattr(logging, log_level.upper())),
            ),
        'package_name': default('harmony-tiles'),
        # projects_base_dir is mandatory and normalized to an absolute path.
        'projects_base_dir': pipe(
            function(lambda path: os.path.abspath(path)),
            not_none,
            ),
        }
    configuration.update(check(struct(
        validators,
        default = 'drop',
        drop_none_values = False,
        ))(configuration))
    return configuration
def load_configuration(global_conf, app_conf):
    """Build the application configuration dict."""
    application_dir = os.path.dirname(os.path.abspath(__file__))
    configuration = {}
    configuration.update(strings.deep_decode(global_conf))
    configuration.update(strings.deep_decode(app_conf))
    # Validate the merged configuration: drop unknown keys, keep explicit None values.
    validators = {
        'app_conf': default(app_conf),
        'app_dir': default(application_dir),
        'cache_dir': default(os.path.join(os.path.dirname(application_dir), 'cache')),
        'debug': pipe(guess_bool, default(False)),
        'global_conf': default(global_conf),
        }
    configuration.update(check(struct(
        validators,
        default = 'drop',
        drop_none_values = False,
        ))(configuration))
    return configuration
def state(req):
    """Handle a POST on the states endpoint: optionally save a state, then return
    every state document sharing the request's key, ordered by creation time."""
    assert req.method == 'POST'
    data, errors = conv.params_to_state_data(req.params)
    if errors is not None:
        return wsgi_helpers.respond_json(req.ctx, {'errors': errors}, code = 400)
    collection = req.ctx.db[req.ctx.conf['database.collections.states']]
    if data['action'] == 'save':
        collection.save({
            'created_at': datetime.datetime.now(),
            'data': data['data'],
            'key': data['key'],
            })
    json_documents = []
    for document in collection.find({'key': data['key']}).sort('created_at'):
        json_documents.append(check(conv.state_document_to_json(document)))
    return wsgi_helpers.respond_json(req.ctx, json_documents)
def test_jsons():
    """Yield one per-variable check for every JSON fixture found in json_dir_path."""
    for json_file_name in os.listdir(json_dir_path):
        with open(os.path.join(json_dir_path, json_file_name)) as json_file:
            content = json.load(json_file)
        scenario_json = content['scenario']
        make_scenario = tax_benefit_system.Scenario.make_json_to_instance(
            tax_benefit_system = tax_benefit_system)
        scenario = check(make_scenario)(scenario_json)
        # The fixture gives either an explicit year or a full ISO date.
        if 'year' in scenario_json:
            year = scenario_json['year']
        else:
            year = datetime.datetime.strptime(scenario_json['date'], "%Y-%m-%d").year
        first_foyer_fiscal = scenario.test_case['foyers_fiscaux'].values()[0]
        totpac = first_foyer_fiscal.get('personnes_a_charge')
        for code, field in content['resultat_officiel'].iteritems():
            yield check_variable, {
                'code': code,
                'field': field,
                'json_file_name': json_file_name,
                'scenario': scenario,
                'totpac': totpac,
                'year': year,
                }
def configure(self, config):
    """Read the ``fedmsg.*`` options from the application config and initialize fedmsg.

    Validated options are stored in the module-level ``fedmsg_config`` dict, then
    fedmsg is initialized as an active ``relay_inbound`` publisher.
    """
    hostname = socket.gethostname().split('.')[0]
    global fedmsg_config
    fedmsg_config = conv.check(conv.struct(dict(
        environment = conv.pipe(
            conv.empty_to_none,
            conv.test_in(['dev', 'prod', 'stg']),
            conv.default('dev'),
            ),
        modname = conv.pipe(
            conv.empty_to_none,
            conv.test(lambda value: value == value.strip('.'), error = 'Value must not begin or end with a "."'),
            conv.default('ckan'),
            ),
        name = conv.pipe(
            conv.empty_to_none,
            conv.default('ckan.{}'.format(hostname)),
            ),
        topic_prefix = conv.pipe(
            conv.empty_to_none,
            conv.test(lambda value: value == value.strip('.'), error = 'Value must not begin or end with a "."'),
            conv.not_none,
            ),
        )))(dict(
            # Keep only the "fedmsg."-prefixed options, with the prefix stripped.
            (key[len('fedmsg.'):], value)
            for key, value in config.iteritems()
            if key.startswith('fedmsg.')
            ))
#    fedmsg.init(**fedmsg_config)
    # Initialize fedmsg with the hard-coded 'relay_inbound' endpoint name: the
    # computed 'name' option is deliberately excluded, and None values are left to
    # fedmsg's own defaults.
    fedmsg.init(active = True, name = 'relay_inbound', **dict(
        (key, value)
        for key, value in fedmsg_config.iteritems()
        if key != 'name' and value is not None
        ))
def to_bson(self):
    """Return this object's attributes as a clean dict, suitable for BSON storage."""
    couple = object_to_clean_dict(self)
    return check(couple)
def main():
    """Entry point: load and validate configuration, then check datasets.

    With ``--fedmsg``, tails the fedmsg bus and checks each created/updated
    dataset as its message arrives; otherwise, fetches every dataset from the
    CKAN-of-Worms API and checks them all once.

    Returns 0 on completion.
    """
    parser = argparse.ArgumentParser(description = __doc__)
    parser.add_argument('config', help = 'path of configuration file')
    parser.add_argument('-f', '--fedmsg', action = 'store_true', help = 'poll fedmsg events')
    parser.add_argument('-v', '--verbose', action = 'store_true', help = 'increase output verbosity')
    global args
    args = parser.parse_args()
    logging.basicConfig(level = logging.DEBUG if args.verbose else logging.WARNING, stream = sys.stdout)
    config_parser = ConfigParser.SafeConfigParser(dict(
        # "here" lets the config file reference paths relative to its own directory.
        here = os.path.dirname(os.path.abspath(os.path.normpath(args.config))),
        ))
    config_parser.read(args.config)
    # Validate the [CowBots-Check-Datasets] section into the module-level conf dict.
    global conf
    conf = check(pipe(
        test_isinstance(dict),
        struct(
            {
                'ckan_of_worms.api_key': pipe(
                    cleanup_line,
                    not_none,
                    ),
                'ckan_of_worms.site_url': pipe(
                    make_input_to_url(error_if_fragment = True, error_if_path = True, error_if_query = True,
                        full = True),
                    not_none,
                    ),
                'user_agent': pipe(
                    cleanup_line,
                    not_none,
                    ),
                },
            default = 'drop',
            ),
        not_none,
        ))(dict(config_parser.items('CowBots-Check-Datasets')), default_state)
    global headers
    headers = {
        'User-Agent': conf['user_agent'],
        }

    if args.fedmsg:
        import fedmsg
        # Validate the [fedmsg] section; only non-None values override fedmsg's own config.
        fedmsg_conf = check(struct(
            dict(
                environment = pipe(
                    empty_to_none,
                    test_in(['dev', 'prod', 'stg']),
                    ),
                modname = pipe(
                    empty_to_none,
                    test(lambda value: value == value.strip('.'), error = 'Value must not begin or end with a "."'),
                    default('ckan_of_worms'),
                    ),
#                name = pipe(
#                    empty_to_none,
#                    default('ckan_of_worms.{}'.format(hostname)),
#                    ),
                topic_prefix = pipe(
                    empty_to_none,
                    test(lambda value: value == value.strip('.'), error = 'Value must not begin or end with a "."'),
                    ),
                ),
            default = 'drop',
            ))(dict(config_parser.items('fedmsg')))

        # Read in the config from /etc/fedmsg.d/.
        fedmsg_config = fedmsg.config.load_config([], None)
        # Disable a warning about not sending. We know. We only want to tail.
        fedmsg_config['mute'] = True
        # Disable timing out so that we can tail forever. This is deprecated
        # and will disappear in future versions.
        fedmsg_config['timeout'] = 0
        # For the time being, don't require message to be signed.
        fedmsg_config['validate_signatures'] = False
        for key, value in fedmsg_conf.iteritems():
            if value is not None:
                fedmsg_config[key] = value

        # Only react to CKAN-of-Worms messages of this environment; ignore everything else.
        expected_topic_prefix = '{}.{}.ckan_of_worms.'.format(fedmsg_config['topic_prefix'],
            fedmsg_config['environment'])
        for name, endpoint, topic, message in fedmsg.tail_messages(**fedmsg_config):
            if not topic.startswith(expected_topic_prefix):
                log.debug(u'Ignoring message: {}, {}'.format(topic, name))
                continue
            kind, action = topic[len(expected_topic_prefix):].split('.')
            if kind == 'dataset':
                if action in ('create', 'update'):
                    dataset = check(pipe(
                        cow_json_to_dataset,
                        not_none,
                        ))(message['msg'], state = default_state)
                    check_dataset(dataset)
                else:
                    log.debug(u'TODO: Handle {}, {} for {}'.format(kind, action, message))
            else:
                log.debug(u'TODO: Handle {}, {} for {}'.format(kind, action, message))
    else:
        # One-shot mode: list every dataset id, then fetch and check each dataset.
        request = urllib2.Request(urlparse.urljoin(conf['ckan_of_worms.site_url'], 'api/1/datasets'),
            headers = headers)
        response = urllib2.urlopen(request)
        datasets_id = check(pipe(
            cow_response_to_value,
            cow_json_to_ids,
            not_none,
            ))(response.read(), state = default_state)
        for dataset_id in datasets_id:
            request = urllib2.Request(urlparse.urljoin(conf['ckan_of_worms.site_url'],
                'api/1/datasets/{}'.format(dataset_id)), headers = headers)
            response = urllib2.urlopen(request)
            dataset = check(pipe(
                cow_response_to_value,
                cow_json_to_dataset,
                not_none,
                ))(response.read(), state = default_state)
            check_dataset(dataset)
    return 0