def status(ctx):
    """Display some informations and statistics.

    Prints the configured home directory, the registered levels, the
    presence of each downloadable dataset in DL_DIR, and per-level zone
    counts with population/area/wikipedia coverage.
    """
    title("Current status")
    section("Settings")
    click.echo("GEOZONES_HOME: {0}".format(ctx.obj["home"]))
    section("Levels")
    for level in ctx.obj["levels"]:
        click.echo("{id}: {label}".format(**level.__dict__))
    section("downloads")
    # Flatten the per-level URL lists and deduplicate them.
    urls = (level.urls for level in ctx.obj["levels"] if level.urls)
    urls = set([url for lst in urls for url in lst])
    for url in urls:
        filename = basename(url).strip()
        click.echo("{0} ... ".format(filename), nl=False)
        if os.path.exists(os.path.join(DL_DIR, filename)):
            success("present")
        else:
            error("absent")
    section("coverage")
    zones = DB()
    total = 0
    properties = ("population", "area", "wikipedia")
    totals = dict((prop, 0) for prop in properties)

    def countprop(name):
        # Count, per level, how many zones define the given property.
        results = zones.aggregate(
            [
                {"$match": {name: {"$exists": True}, "level": {"$in": [l.id for l in ctx.obj["levels"]]}}},
                {"$group": {"_id": "$level", "value": {"$sum": 1}}},
            ]
        )
        return dict((r["_id"], r["value"]) for r in results)

    def display_prop(name, count, total):
        # Color the ratio: red when empty, green when complete, yellow otherwise.
        click.echo("\t{0}: ".format(name), nl=False)
        if count == 0:
            func = error
        elif count == total:
            func = success
        else:
            func = warning
        func("{0}/{1}".format(count, total))

    counts = dict((p, countprop(p)) for p in properties)
    for level in ctx.obj["levels"]:
        count = zones.count({"level": level.id})
        total += count
        click.echo("{0}: {1}".format(level.id, count))
        for prop in properties:
            prop_count = counts[prop].get(level.id, 0)
            totals[prop] += prop_count
            display_prop(prop, prop_count, count)
    click.secho("TOTAL: {0}".format(total), bold=True)
    for prop in properties:
        prop_total = totals[prop]
        display_prop(prop, prop_total, total)
def load(self, workdir, db):
    '''Extract territories from a given file for a given level with a given extractor function'''
    # Each registered (url, extractor) pair contributes its own zone count.
    total = sum(
        self.process_dataset(workdir, db, url, extractor)
        for url, extractor in self.extractors
    )
    success('Loaded {0} zones for level {1}'.format(total, self.id))
    return total
def attach_and_clean_iris(db, filename):
    '''Attach each French IRIS zone to its town's parents and normalize its name.'''
    info('Attaching French IRIS to their region')
    attached = 0
    for zone in db.find({'level': iris.id}):
        town_ids = [p for p in zone['parents'] if p.startswith(town.id)]
        if not town_ids:
            warning('No parent candidate found for: {0}'.format(zone['_id']))
            continue
        town_id = town_ids[0]
        town_zone = db.find_one({'_id': town_id})
        if not town_zone:
            warning('Town {0} not found'.format(town_id))
            continue
        # '_type' == 'Z' presumably marks a whole-town IRIS: it reuses the
        # town name instead of a "Town (IRIS)" composite — TODO confirm.
        if zone.get('_type') == 'Z':
            name = town_zone['name']
        else:
            name = '{0} ({1})'.format(town_zone['name'], zone['name'])
        update = {
            '$addToSet': {'parents': {'$each': town_zone['parents']}},
            '$set': {'name': name},
            '$unset': {'_town': 1, '_type': 1},
        }
        if db.find_one_and_update({'_id': zone['_id']}, update):
            attached += 1
    success('Attached {0} french IRIS to their parents'.format(attached))
def postprocess(ctx):
    '''Perform some postprocessing'''
    title('Performing post-processing')
    collection = DB()
    # Delegate to each level's own post-processing hook.
    for lvl in ctx.obj['levels']:
        lvl.postprocess(DL_DIR, collection)
    success('Post-processing done')
def postprocess(ctx, only):
    '''Perform some postprocessing'''
    title('Performing post-processing')
    collection = DB()
    levels = ctx.obj['levels']
    # Each level decides what `only` restricts — it is passed through as-is.
    for lvl in levels:
        lvl.postprocess(DL_DIR, collection, only)
    success('Post-processing done')
def postprocess(ctx, only):
    """Run every configured level's post-processing step."""
    title("Performing post-processing")
    db = DB()
    for current_level in ctx.obj["levels"]:
        current_level.postprocess(DL_DIR, db, only)
    success("Post-processing done")
def aggregate(ctx):
    """Perform zones aggregations"""
    title("Performing zones aggregations")
    db = DB()
    # Levels are processed in reverse declaration order so aggregates can
    # build on the levels beneath them.
    total = sum(level.build_aggregates(db) for level in reversed(ctx.obj["levels"]))
    success("Done: Built {0} zones by aggregation".format(total))
def aggregate(ctx):
    '''Perform zones aggregations'''
    title('Performing zones aggregations')
    collection = DB()
    built = 0
    # Reverse order: deeper levels first, so parents can aggregate children.
    for lvl in reversed(ctx.obj['levels']):
        built += lvl.build_aggregates(collection)
    success('Done: Built {0} zones by aggregation'.format(built))
def add_ue_to_parents(db, filename):
    '''Add the EU country-group as a parent of every EU member country.'''
    info('Adding European Union to countries parents')
    criteria = {'level': country.id, 'code': {'$in': UE_COUNTRIES}}
    update = {'$addToSet': {'parents': 'country-group/ue'}}
    result = db.update_many(criteria, update)
    success('Added European Union as parent to {0} countries'.format(
        result.modified_count))
def process_postal_codes(db, filename):
    '''
    Extract postal codes from
    https://www.data.gouv.fr/fr/datasets/base-officielle-des-codes-postaux/
    '''
    info('Processing french postal codes')
    count = 0
    with open(filename, encoding='cp1252') as csvfile:
        rows = csv.reader(csvfile, delimiter=';')
        next(rows, None)  # drop the header line
        for insee, _, postal, _, _ in rows:
            update = {'$addToSet': {'keys.postal': postal}}
            if db.find_one_and_update({'level': town.id, 'code': insee}, update):
                count += 1
    success('Processed {0} french postal codes'.format(count))
def compute_district_population(db, filename):
    '''Sum town populations into their parent districts.'''
    info('Computing french district population by aggregation')
    updated = 0
    stages = [
        {'$match': {'level': town.id}},
        {'$unwind': '$parents'},
        {'$match': {'parents': {'$regex': district.id}}},
        {'$group': {'_id': '$parents', 'population': {'$sum': '$population'}}},
    ]
    for doc in db.aggregate(stages):
        # Skip groups whose summed population is missing or zero.
        if not doc.get('population'):
            continue
        update = {'$set': {'population': doc['population']}}
        if db.find_one_and_update({'_id': doc['_id']}, update):
            updated += 1
    success('Computed population for {0} french districts'.format(updated))
def add_job(request):
    """Queue a single job from a POSTed SingleJobForm.

    Validates the form, checks protocol ownership and the user's disk
    quota, then saves the job. Returns a success/error response wrapper.
    """
    if request.method == 'POST':
        job_form = SingleJobForm(request.POST)
        if job_form.is_valid():
            cd = job_form.cleaned_data
            try:
                # The scheduler expects a ';'-terminated parameter string.
                if cd['parameter'].find(';') == -1:
                    cd['parameter'] += ';'
                protocol = ProtocolList.objects.get(id=cd['protocol'])
                if protocol.check_owner(
                        request.user.id) or request.user.is_superuser:
                    job = Queue(
                        protocol_id=cd['protocol'],
                        parameter=cd['parameter'],
                        run_dir=get_config('env', 'workspace'),
                        user_id=request.user.id,
                        input_file=cd['input_files'],
                    )
                    if check_disk_quota_lock(request.user.id):
                        job.save()
                        return success('Successfully added job into queue.')
                    else:
                        return error(
                            'You have exceed the disk quota limit! Please delete some files!'
                        )
                else:
                    return error('You are not owner of the protocol.')
            except Exception as e:  # fix: Py2-only "except Exception, e" syntax
                return error(e)
        return error(str(job_form.errors))
def fetch_learning(request):
    """Query the ML sharing API for a step's resource model and cache it in session.

    Builds a comma-separated query from the step hash/type and the host's
    cpu/memory/OS, stores the a/b/r coefficients in the session, and
    renders the fetch_learning template. On any failure, returns the API
    URL wrapped in an error response.
    """
    import urllib2, json
    query_string = ','.join((
        request.GET['hash'],
        request.GET['type'],
        str(get_config('env', 'cpu')),
        str(get_config('env', 'memory')),
        str(os_to_int()),
    ))
    api_bus = get_config('ml', 'api') + '/Index/share/q/' + query_string
    try:
        req = urllib2.Request(api_bus)
        res_data = urllib2.urlopen(req)
        res = json.loads(res_data.read())
        session_dict = {
            'hash': request.GET['hash'],
            'type': request.GET['type'],
            'a': res['a'],
            'b': res['b'],
            'r': res['r'],
        }
        request.session['learning'] = session_dict
        template = loader.get_template('ui/fetch_learning.html')
        context = RequestContext(request, {
            'step': res,
        })
        return success(template.render(context))
    except Exception:
        # fix: replaced Py2-only "except Exception, e"; the bound exception
        # object was never used — the error response deliberately shows the
        # API endpoint instead.
        return error(api_bus)
def heist_optimal_high_level(casing_needed, challenge_level, suspicion_gain,
                             casing_loss, casing=0, actions=0) -> int:
    """Simulate the cheapest high-level heist strategy and return the action count.

    Builds casing with the gang (+18 for 5 actions) until within 9 of the
    target, tops up with the decoy (+9 for 3 actions), then attempts the
    challenge; each failure costs `casing_loss` casing plus 0.25 action per
    suspicion point (friends lower suspicion at 1 action / 4 CP).

    fix: rewritten from tail recursion to a loop — same arithmetic, but no
    RecursionError for large `casing_needed` or long unlucky streaks.
    """
    while True:
        if casing >= casing_needed:
            if success(calculate_chance_narrow(challenge_level, casing)):
                return actions
            casing -= casing_loss
            # Suspicion can be lowered with the help of friends at 1 Action / 4 CP.
            actions += .25 * suspicion_gain
        elif casing >= casing_needed - 9:
            # The decoy.
            actions += 3
            casing += 9
        else:
            # Set your gang of hoodlums to business.
            actions += 5
            casing += 18
def query_job_parameter(request):
    """Collect the user-defined {wildcard} names used by a protocol's steps.

    Scans every step's parameter string for "{name}" placeholders, drops the
    framework-reserved keys and the user's reference names, and returns the
    remainder as a "name=;name=;" string for the front-end form builder.
    """
    import re
    parent = request.GET.get('parent')
    user_defined_wildcards = []
    try:
        protocol = ProtocolList.objects.get(id=parent)
        if protocol.check_owner(request.user.id) or request.user.is_superuser:
            # fix: removed duplicate 'LastOutput' entry.
            pre_defined_keys = [
                'InputFile', 'LastOutput', 'Job', 'ThreadN', 'Output',
                'Uploaded', 'Suffix', 'Workspace'
            ]
            reference_list = References.objects.filter(
                user_id=request.user.id).all()
            pre_defined_keys.extend(
                [reference.name for reference in reference_list])
            steps = Protocol.objects.filter(parent=protocol.id)
            # fix: raw string for the regex pattern.
            wildcard_pattern = re.compile(r"\{(.*?)\}",
                                          re.IGNORECASE | re.DOTALL)
            for step in steps:
                for wildcard in re.findall(wildcard_pattern, step.parameter):
                    # "{name:modifier}" — only the name part matters here.
                    wildcard = wildcard.split(':')[0]
                    if wildcard not in pre_defined_keys:
                        user_defined_wildcards.append(wildcard)
    except Exception:
        # fix: bare "except:" narrowed; lookup failures still mean "no
        # wildcards" (deliberate best-effort behavior preserved).
        pass
    user_defined_wildcards = list(set(user_defined_wildcards))
    # fix: `result` was undefined (NameError) when no wildcards were found.
    result = ''
    if user_defined_wildcards:
        result = '=;'.join(user_defined_wildcards) + '=;'
    return success(result)
def delete_job_file(request, f):
    '''Remove a file (identified by a base64-encoded relative path) from the user workspace.'''
    import base64
    target = os.path.join(
        get_config('env', 'workspace'),
        str(request.user.id),
        base64.b64decode(f),
    )
    delete_file(target)
    return success('Deleted')
def create_protocol(request):
    """Create a protocol and its steps from POSTed software/parameter lists.

    Rejects duplicate protocol names per user, then bulk-creates one step
    per non-empty parameter, hashing "software parameter" with md5 for
    later prediction lookups.
    """
    import hashlib
    if request.method == 'POST':
        protocol_form = CreateProtocolForm(request.POST)
        if protocol_form.is_valid():
            try:
                cd = protocol_form.cleaned_data
                if ProtocolList.objects.filter(
                        name=cd['name'], user_id=request.user.id).exists():
                    return error('Duplicate record!')
                protocol = ProtocolList(name=cd['name'],
                                        user_id=request.user.id)
                protocol.save()
                softwares = request.POST.getlist('software', '')
                parameters = request.POST.getlist('parameter', '')
                steps = []
                for index, software in enumerate(softwares):
                    if parameters[index]:
                        m = hashlib.md5()
                        m.update(software + ' ' + parameters[index].strip())
                        steps.append(
                            Protocol(software=software,
                                     parameter=parameters[index],
                                     parent=protocol.id,
                                     hash=m.hexdigest(),
                                     user_id=request.user.id))
                Protocol.objects.bulk_create(steps)
                return success('Your protocol have been created!')
            except Exception as e:  # fix: Py2-only "except Exception, e" syntax
                return error(e)
        else:
            return error(str(protocol_form.errors))
def manage_reference(request):
    '''POST: create a reference record; otherwise render the reference list.'''
    if request.method == 'POST':
        reference_form = CreateReferenceForm(request.POST)
        if not reference_form.is_valid():
            return error(str(reference_form.errors))
        cd = reference_form.cleaned_data
        duplicated = References.objects.filter(
            user_id=request.user.id, name=cd['name']).exists()
        if duplicated:
            return error('Duplicate record!')
        ref = References(
            name=cd['name'],
            path=cd['path'],
            user_id=request.user.id,
        )
        ref.save()
        return success(ref.id)
    # Superusers see everyone's references, plain users only their own.
    if request.user.is_superuser:
        reference_list = References.objects.all()
    else:
        reference_list = References.objects.filter(
            user_id=request.user.id).all()
    return render(request, 'ui/manage_reference.html',
                  {'references': reference_list})
def attach_canton_parents(db, filename):
    """Attach each French canton to its county's parents.

    Finds the canton's county among its parents, copies the county's own
    parents onto the canton, and drops the temporary '_dep' marker.
    """
    info('Attaching French Canton to their parents')
    canton_processed = 0
    for zone in db.find({'level': canton.id}):
        candidates_ids = [p for p in zone['parents']
                          if p.startswith(county.id)]
        if len(candidates_ids) < 1:
            warning('No parent candidate found for: {0}'.format(zone['_id']))
            continue
        county_id = candidates_ids[0]
        county_zone = db.find_one({'_id': county_id})
        # fix: guard the lookup — the sibling IRIS pass guards the same
        # pattern; without it a missing county crashed on the '$each' below.
        if not county_zone:
            warning('County {0} not found'.format(county_id))
            continue
        ops = {
            '$addToSet': {'parents': {'$each': county_zone['parents']}},
            '$unset': {'_dep': 1}
        }
        if db.find_one_and_update({'_id': zone['_id']}, ops):
            canton_processed += 1
    success('Attached {0} french cantons to their parents'.format(
        canton_processed))
def attach_epci(db, filename):
    '''
    Attach EPCI towns to their EPCI
    from http://www.collectivites-locales.gouv.fr/liste-et-composition-2015
    '''
    info('Processing EPCI town list')
    attached = 0
    with open(filename, encoding='cp1252') as csvfile:
        for row in csv.DictReader(csvfile, delimiter=';'):
            epci_id = 'fr/epci/{0}'.format(row['siren_epci'])
            # INSEE codes are lower-cased to match the stored town codes.
            matched = db.find_one_and_update(
                {'level': town.id, 'code': row['insee'].lower()},
                {'$addToSet': {'parents': epci_id}})
            if matched:
                attached += 1
    success('Attached {0} french town to their EPCI'.format(attached))
def town_with_districts(db, filename):
    """Attach the districts of Paris, Marseille and Lyon to their town.

    Each district inherits the town itself plus all of the town's parents.
    (Refactored: the three identical per-city passes now share one helper.)
    """
    def _attach_districts(name, town_id, district_ids):
        # Add the town and all of its parents to every district's parents.
        info('Attaching {0} town districts'.format(name))
        town_zone = db.find_one({'_id': town_id})
        parents = town_zone['parents']
        parents.append(town_zone['_id'])
        result = db.update_many(
            {'_id': {'$in': district_ids}},
            {'$addToSet': {'parents': {'$each': parents}}})
        success('Attached {0} districts to {1}'.format(
            result.modified_count, name))

    _attach_districts('Paris', 'fr/town/75056', PARIS_DISTRICTS)
    _attach_districts('Marseille', 'fr/town/13055', MARSEILLE_DISTRICTS)
    _attach_districts('Lyon', 'fr/town/69123', LYON_DISTRICTS)
def delete_reference(request):
    '''Delete a reference owned by the requesting user (GET with ?ref=id).'''
    if request.method != 'GET':
        return error('Error Method.')
    if 'ref' not in request.GET:
        return error('Missing parameter.')
    ref = References.objects.get(id=request.GET['ref'])
    if not (ref.check_owner(request.user.id) or request.user.is_superuser):
        return error('You are not owner of the reference.')
    ref.delete()
    return success('Your reference has been deleted.')
def compute_county_area_and_population(db, filename):
    '''Aggregate town areas and populations up to their parent counties.'''
    info('Computing french counties areas and population by aggregation')
    updated = 0
    stages = [
        {'$match': {'level': town.id}},
        {'$unwind': '$parents'},
        {'$match': {'parents': {'$regex': county.id}}},
        {'$group': {
            '_id': '$parents',
            'area': {'$sum': '$area'},
            'population': {'$sum': '$population'},
        }},
    ]
    for doc in db.aggregate(stages):
        update = {'$set': {
            'area': doc['area'],
            'population': doc['population'],
        }}
        if db.find_one_and_update({'_id': doc['_id']}, update):
            updated += 1
    success('Computed area and population for {0} french counties'.format(
        updated))
def process_insee_cog(db, filename):
    '''Use informations from INSEE COG to attach parents.
    http://www.insee.fr/fr/methodes/nomenclatures/cog/telechargement.asp

    Three passes: towns get region/county(/district) parents from the COG
    rows, then the districts and counties collected along the way get
    their own parents attached.
    '''
    info('Processing INSEE COG')
    processed = 0
    counties = {}   # county_id -> its region_id
    districts = {}  # district_id -> [region_id, county_id]
    with ZipFile(filename) as cogzip:
        with cogzip.open('comsimp2016.txt') as tsvfile:
            tsvio = io.TextIOWrapper(tsvfile, encoding='cp1252')
            reader = csv.DictReader(tsvio, delimiter='\t')
            for row in reader:
                # Lower everything, from 2B to 2b for instance.
                region_code = row['REG'].lower()
                county_code = row['DEP'].lower()
                district_code = row['AR'].lower()
                town_code = row['COM'].lower()
                # A town's INSEE code is county code + town code.
                insee_code = ''.join((county_code, town_code))
                region_id = 'fr/region/{0}'.format(region_code)
                county_id = 'fr/county/{0}'.format(county_code)
                parents = [region_id, county_id]
                if county_id not in counties:
                    counties[county_id] = region_id
                if district_code:
                    # District codes are also prefixed by the county code.
                    district_code = ''.join((county_code, district_code))
                    district_id = 'fr/district/{0}'.format(district_code)
                    parents.append(district_id)
                    if district_id not in districts:
                        districts[district_id] = [region_id, county_id]
                if db.find_one_and_update(
                        {'level': town.id, 'code': insee_code},
                        {'$addToSet': {'parents': {'$each': parents}}}):
                    processed += 1
    success('Attached {0} french towns to their parents'.format(processed))
    # Second pass: districts collected above.
    processed = 0
    for district_id, parents in districts.items():
        if db.find_one_and_update(
                {'_id': district_id},
                {'$addToSet': {
                    'parents': {'$each': parents},
                }}):
            processed += 1
    success('Attached {0} french districts to their parents'.format(processed))
    # Third pass: counties collected above (single region parent each).
    processed = 0
    for county_id, parent in counties.items():
        if db.find_one_and_update(
                {'_id': county_id},
                {'$addToSet': {'parents': parent}}):
            processed += 1
    success('Attached {0} french counties to their parents'.format(processed))
def load(ctx, drop):
    """Load zones from a folder of zip files containing shapefiles"""
    title("Extracting zones from datasets")
    collection = DB()
    if drop:
        info("Drop existing collection")
        collection.drop()
    # NOTE(review): only the first index creation is wrapped in ok(); the
    # other two use plain info() — presumably intentional, confirm.
    with ok("Creating index (level,code)"):
        collection.create_index([("level", ASCENDING), ("code", ASCENDING)])
    info("Creating index (level,keys)")
    collection.create_index([("level", ASCENDING), ("keys", ASCENDING)])
    info("Creating index (parents)")
    collection.create_index("parents")
    loaded = 0
    for lvl in ctx.obj["levels"]:
        info('Processing level "{0}"'.format(lvl.id))
        loaded += lvl.load(DL_DIR, collection)
    success("Done: Loaded {0} zones".format(loaded))
def load(ctx, drop):
    '''Load zones from a folder of zip files containing shapefiles'''
    title('Extracting zones from datasets')
    db = DB()
    if drop:
        info('Drop existing collection')
        db.drop()
    with ok('Creating index (level,code)'):
        db.create_index([('level', ASCENDING), ('code', ASCENDING)])
    info('Creating index (level,keys)')
    db.create_index([('level', ASCENDING), ('keys', ASCENDING)])
    info('Creating index (parents)')
    db.create_index('parents')
    count = 0
    for current in ctx.obj['levels']:
        info('Processing level "{0}"'.format(current.id))
        count += current.load(DL_DIR, db)
    success('Done: Loaded {0} zones'.format(count))
def fetch_missing_data_from_dbpedia(db, filename):
    '''Fill missing population/area (and flag/blazon) metadata from DBPedia.

    Only zones with a wikipedia key and a missing/None population or area
    are considered.
    '''
    info('Fetching DBPedia data')
    processed = 0
    cursor = db.find({
        'wikipedia': {'$exists': True, '$ne': None},
        '$or': [
            {'population': None},
            {'population': {'$exists': False}},
            {'area': None},
            {'area': {'$exists': False}},
        ]
    }, no_cursor_timeout=True)
    try:
        for zone in cursor:
            dbpedia = DBPedia(zone['wikipedia'])
            metadata = {
                'dbpedia': dbpedia.resource_url,
            }
            metadata.update(dbpedia.fetch_population_or_area())
            metadata.update(dbpedia.fetch_flag_or_blazon())
            if db.find_one_and_update({'_id': zone['_id']},
                                      {'$set': metadata}):
                processed += 1
    finally:
        # fix: no_cursor_timeout=True disables server-side reaping, so the
        # cursor must be closed explicitly or it leaks on the server.
        cursor.close()
    success('Fetched DBPedia data for {0} zones'.format(processed))
def batch_operation(request):
    """Apply delete/terminate/rerun to a comma-separated list of job ids.

    POST fields: 'jobs' (comma-separated ids, empties ignored) and
    'operation' ('d' delete, 't' terminate, 'r' rerun). Stops at the first
    job the user does not own; jobs already handled are NOT rolled back
    (preserved from the original behavior).
    """
    if request.method != 'POST':
        return success('abc')
    job_ids = [jid for jid in request.POST['jobs'].split(',') if jid != '']
    # Refactored: the three duplicated branches now share one loop via a
    # small operation -> Queue-method dispatch table.
    dispatch = {'d': 'delete', 't': 'terminate_job', 'r': 'rerun_job'}
    method_name = dispatch.get(request.POST['operation'])
    if method_name is None:
        return error('Please choose an operation.')
    for job_id in job_ids:
        job = Queue.objects.get(id=job_id)
        if job.check_owner(request.user.id) or request.user.is_superuser:
            getattr(job, method_name)()
        else:
            return error('Your are not the owner of the job.')
    return success('Ok')
def delete_step(request):
    """Delete a protocol step (GET with ?id=)."""
    if request.method == 'GET':
        if 'id' in request.GET:
            try:
                step = Protocol.objects.get(id=int(request.GET['id']))
                if step.check_owner(
                        request.user.id) or request.user.is_superuser:
                    step.delete()
                    return success('Your step has been deleted.')
                else:
                    return error('You are not owner of the step.')
            except Exception as e:  # fix: Py2-only "except Exception, e" syntax
                return error(e)
        else:
            return error('Unknown parameter.')
def terminate_job(request):
    """Ask the scheduler to terminate a job (POST with JobManipulateForm).

    fix: the old guard was `is_valid() or is_superuser`, which let a
    superuser skip validation and then crash reading cleaned_data of an
    invalid form. Validation is now mandatory; the superuser bypass applies
    to the ownership check only, matching the sibling views.
    """
    if request.method == 'POST':
        terminate_form = JobManipulateForm(request.POST)
        if terminate_form.is_valid():
            cd = terminate_form.cleaned_data
            try:
                job = Queue.objects.get(id=cd['job'])
                if job.check_owner(request.user.id) or request.user.is_superuser:
                    job.terminate_job()
                    return success('Your job will be terminated soon.')
                else:
                    return error('Your are not the owner of the job.')
            except Exception as e:  # fix: Py2-only "except Exception, e" syntax
                return error(e)
        else:
            return error(str(terminate_form.errors))
def import_learning(request):
    """Persist the learning coefficients previously cached in the session.

    fix: `request.session['learning']` raised KeyError when the key was
    absent (e.g. direct call without fetch_learning first); `.get` makes
    that path return the 'Error' response instead of a 500.
    """
    learning = request.session.get('learning')
    if learning:
        if learning['a'] != 'no records':
            learn = Prediction(
                step_hash=learning['hash'],
                type=learning['type'],
                a=learning['a'],
                b=learning['b'],
                r=learning['r'],
            )
            learn.save()
            return success('Imported.')
        else:
            return error('Can not import records!')
    else:
        return error('Error')
def delete_protocol(request):
    '''Delete a protocol and all of its steps (GET with ?id=).'''
    if request.method != 'GET':
        return error('Method error.')
    if 'id' not in request.GET:
        return error('Unknown parameter.')
    protocol_id = int(request.GET['id'])
    protocol_parent = ProtocolList.objects.get(id=protocol_id)
    if not (protocol_parent.check_owner(request.user.id)
            or request.user.is_superuser):
        return error('You are not owner of the protocol.')
    protocol_parent.delete()
    # Cascade by hand: steps reference the protocol through `parent`.
    Protocol.objects.filter(parent=protocol_id).delete()
    return success('Your protocol has been deleted.')
def get_learning_result(request):
    """Render the stored Prediction for a step hash/type pair (GET)."""
    if request.method == 'GET':
        learning_form = QueryLearningForm(request.GET)
        if learning_form.is_valid():
            cd = learning_form.cleaned_data
            try:
                train = Prediction.objects.get(step_hash=cd['stephash'],
                                               type=cd['type'])
                template = loader.get_template('ui/get_learning_result.html')
                context = RequestContext(request, {
                    'hit': train,
                })
                return success(template.render(context))
            except Exception as e:  # fix: Py2-only "except Exception, e" syntax
                return error(e)
        else:
            return error(str(learning_form.errors))
def update_parameter(request):
    '''Update a protocol step's parameter string (GET with StepManipulateForm).'''
    if request.method != 'GET':
        return error('Method error')
    from urllib import unquote
    form = StepManipulateForm(request.GET)
    if not form.is_valid():
        return error(str(form.errors))
    cd = form.cleaned_data
    step = Protocol.objects.get(id=cd['id'])
    allowed = step.check_owner(request.user.id) or request.user.is_superuser
    if allowed and step.check_parent(cd['parent']):
        # The parameter arrives URL-encoded from the front-end.
        step.update_parameter(unquote(cd['parameter']))
        step.save()
        return success('Your step has been updated.')
    return error('Your are not owner of the step.')
def show_job_log(request):
    """Return the last 100 lines of a job's log, newest first, joined by <br /> (POST)."""
    if request.method == 'POST':
        query_job_form = JobManipulateForm(request.POST)
        if query_job_form.is_valid():
            cd = query_job_form.cleaned_data
            log_path = os.path.join(get_config('env', 'log'), str(cd['job']))
            try:
                # fix: context manager closes the handle even when reading
                # raises; the old open()/close() pair leaked it on error.
                with open(log_path, mode='r') as log_file:
                    log = log_file.readlines()
                log.reverse()
                log = log[:100]
                log_content = '<br />'.join(log)
                return success(log_content)
            except Exception as e:  # fix: Py2-only "except Exception, e" syntax
                return error(e)
        else:
            return error(str(query_job_form.errors))
def show_learning_steps(request):
    '''Render the steps of a protocol for the learning UI (GET with ?parent=).'''
    if request.method != 'GET':
        return error('Method error.')
    if 'parent' not in request.GET:
        return error('Wrong parameter.')
    query = Protocol.objects.filter(parent=int(request.GET['parent']))
    if not request.user.is_superuser:
        # Plain users only see their own steps.
        query = query.filter(user_id=request.user.id)
    step_list = query.all()
    template = loader.get_template('ui/show_learning_steps.html')
    context = RequestContext(request, {
        'step_list': step_list,
    })
    return success(template.render(context))
def show_step(request):
    '''Render the steps of a protocol (POST with ProtocolManipulateForm).'''
    if request.method != 'POST':
        return error('Method error.')
    query_protocol_form = ProtocolManipulateForm(request.POST)
    if not query_protocol_form.is_valid():
        return error(str(query_protocol_form.errors))
    cd = query_protocol_form.cleaned_data
    query = Protocol.objects.filter(parent=cd['parent'])
    if not request.user.is_superuser:
        # Plain users only see their own steps.
        query = query.filter(user_id=request.user.id)
    step_list = query.all()
    template = loader.get_template('ui/show_steps.html')
    context = RequestContext(request, {
        'step_list': step_list,
    })
    return success(template.render(context))
def show_job_folder(request):
    """List the files of a job's result folder (name, size, ctime, b64 trace).

    POST with JobManipulateForm; only the job's owner or a superuser may
    list the folder. Paths are made relative to the user workspace before
    rendering.
    """
    import time, base64
    if request.method == 'POST':
        query_job_form = JobManipulateForm(request.POST)
        if query_job_form.is_valid():
            cd = query_job_form.cleaned_data
            try:
                job = Queue.objects.get(id=cd['job'])
                if job.check_owner(
                        request.user.id) or request.user.is_superuser:
                    result_folder = job.get_result()
                    user_path = os.path.join(get_config('env', 'workspace'),
                                             str(request.user.id),
                                             result_folder)
                    user_files = []
                    for root, dirs, files in os.walk(user_path):
                        for file_name in files:
                            file_full_path = os.path.join(root, file_name)
                            # Strip the workspace prefix whatever the
                            # platform separator is.
                            file_path = file_full_path.replace(user_path+'\\', '')\
                                .replace(user_path+'/', '').replace(user_path, '')
                            tmp = dict()
                            tmp['name'] = file_path
                            tmp['file_size'] = os.path.getsize(file_full_path)
                            tmp['file_create'] = time.ctime(
                                os.path.getctime(file_full_path))
                            # b64-encoded relative path, used as a download token.
                            tmp['trace'] = base64.b64encode(
                                os.path.join(result_folder, file_path))
                            user_files.append(tmp)
                    template = loader.get_template('ui/show_job_folder.html')
                    context = RequestContext(request, {
                        'user_files': user_files,
                    })
                    return success(template.render(context))
                else:
                    return error('Your are not the owner of the job.')
            except Exception as e:  # fix: Py2-only "except Exception, e" syntax
                return error(e)
        else:
            return error(str(query_job_form.errors))
def add_step(request):
    """Append a step to a protocol from a POSTed CreateStepForm.

    Only the protocol's owner (or a superuser) may add steps; the step is
    stored with an md5 hash of "software parameter" for prediction lookups.
    """
    import hashlib
    if request.method == 'POST':
        step_form = CreateStepForm(request.POST)
        if step_form.is_valid():
            cd = step_form.cleaned_data
            try:
                protocol = ProtocolList.objects.get(id=cd['parent'])
                if protocol.check_owner(
                        request.user.id) or request.user.is_superuser:
                    m = hashlib.md5()
                    m.update(cd['software'] + ' ' + cd['parameter'].strip())
                    step = Protocol(software=cd['software'],
                                    parameter=cd['parameter'],
                                    parent=cd['parent'],
                                    user_id=request.user.id,
                                    hash=m.hexdigest())
                    step.save()
                    return success('Your step have been created.')
                else:
                    return error('You are not owner of the protocol.')
            except Exception as e:  # fix: Py2-only "except Exception, e" syntax
                return error(str(e))
def share_with_peer(request):
    '''Share a protocol with another user by deep-copying it and its steps (POST).'''
    if request.method == 'POST':
        share_form = ShareProtocolForm(request.POST)
        if share_form.is_valid():
            cd = share_form.cleaned_data
            if cd['peer'] == request.user.username:
                return error('You can not share a protocol with yourself.')
            to_user = check_user_existence(cd['peer'])
            if to_user:
                from copy import deepcopy
                protocol = ProtocolList.objects.get(id=cd['pro'])
                if protocol.check_owner(
                        request.user.id) or request.user.is_superuser:
                    # Clearing the pk before save() makes Django INSERT a
                    # fresh row owned by the target user.
                    new_protocol = deepcopy(protocol)
                    new_protocol.id = None
                    new_protocol.user_id = to_user
                    new_protocol.save()
                    steps = Protocol.objects.filter(parent=cd['pro'])
                    for step in steps:
                        # Same pk-clearing trick for every step, re-parented
                        # to the freshly created protocol copy.
                        new_step = deepcopy(step)
                        new_step.id = None
                        new_step.parent = new_protocol.id
                        new_step.save()
                    return success(
                        'You have successfully shared a protocol with a peer(%s).'
                        % cd['peer'])
                else:
                    return error('Your are not owner of the protocol.')
            else:
                return error(
                    'The person you want to share with doesn\'t exist.')
        else:
            return error(str(share_form.errors))
    else:
        return error('Error Method.')
def compute_town_with_districts_population(db, filename):
    """Recompute Paris/Marseille/Lyon population as the sum of their districts.

    (Refactored: the three identical per-city passes now share one helper.)
    """
    def _aggregate_population(name, town_id, district_ids):
        # Sum the district populations and store the result on the town.
        info('Computing {0} town districts population'.format(name))
        districts = db.find({'_id': {'$in': district_ids}})
        population = sum(district['population'] for district in districts)
        db.find_one_and_update(
            {'_id': town_id},
            {'$set': {'population': population}})
        success('Computed population for {0}'.format(name))

    _aggregate_population('Paris', 'fr/town/75056', PARIS_DISTRICTS)
    _aggregate_population('Marseille', 'fr/town/13055', MARSEILLE_DISTRICTS)
    _aggregate_population('Lyon', 'fr/town/69123', LYON_DISTRICTS)
def attach_counties_to_subcountries(db, filename):
    """Attach French counties (and their children) to country subsets.

    Metro/DOM/DOM-TOM counties, plus every zone having one of them as a
    parent, gain the matching 'country-subset/fr/...' parent.
    (Refactored: the three identical passes now share one helper.)
    """
    def _attach(label, county_codes, subset):
        # Match the counties themselves OR any zone parented to them.
        info('Attaching French {0} counties'.format(label))
        ids = ['fr/county/{0}'.format(c) for c in county_codes]
        result = db.update_many(
            {'$or': [{'_id': {'$in': ids}}, {'parents': {'$in': ids}}]},
            {'$addToSet': {'parents': subset}}
        )
        success('Attached {0} French {1} children'.format(
            result.modified_count, label))

    _attach('Metropolitan', FR_METRO_COUNTIES, 'country-subset/fr/metro')
    _attach('DOM', FR_DOM_COUNTIES, 'country-subset/fr/dom')
    _attach('DOM/TOM', FR_DOMTOM_COUNTIES, 'country-subset/fr/domtom')
### # @file reproduce-appendix.py # @author Sébastien Rouault <*****@*****.**> # # @section LICENSE # # Copyright © 2019-2021 École Polytechnique Fédérale de Lausanne (EPFL). # See LICENSE file. # # @section DESCRIPTION # # Reproduce the (missing) experiments and plots (supplementary experiments). ### import tools tools.success("Module loading...") import argparse import pathlib import signal import sys import torch import experiments # ---------------------------------------------------------------------------- # # Miscellaneous initializations tools.success("Miscellaneous initializations...") # "Exit requested" global variable accessors
def add_ue_to_parents(db, filename):
    '''Tag every EU member country with the EU country-group parent.'''
    info('Adding European Union to countries parents')
    res = db.update_many(
        {'level': country.id, 'code': {'$in': UE_COUNTRIES}},
        {'$addToSet': {'parents': 'country-group/ue'}},
    )
    success('Added European Union as parent to {0} countries'.format(
        res.modified_count))
def status(ctx):
    '''Display some informations and statistics.

    Prints the configured home directory, the registered levels, the
    presence of each downloadable dataset in DL_DIR, and per-level zone
    counts with population/area/wikipedia coverage.
    '''
    title('Current status')
    section('Settings')
    click.echo('GEOZONES_HOME: {0}'.format(ctx.obj['home']))
    section('Levels')
    for level in ctx.obj['levels']:
        click.echo('{id}: {label}'.format(**level.__dict__))
    section('downloads')
    # Flatten the per-level URL lists and deduplicate them.
    urls = (level.urls for level in ctx.obj['levels'] if level.urls)
    urls = set([url for lst in urls for url in lst])
    for url in urls:
        filename = basename(url).strip()
        click.echo('{0} ... '.format(filename), nl=False)
        if os.path.exists(os.path.join(DL_DIR, filename)):
            success('present')
        else:
            error('absent')
    section('coverage')
    zones = DB()
    total = 0
    properties = ('population', 'area', 'wikipedia')
    totals = dict((prop, 0) for prop in properties)

    def countprop(name):
        # Count, per level, how many zones define the given property.
        results = zones.aggregate([
            {'$match': {
                name: {'$exists': True},
                'level': {'$in': [l.id for l in ctx.obj['levels']]}
            }},
            {'$group': {'_id': '$level', 'value': {'$sum': 1}}}
        ])
        return dict((r['_id'], r['value']) for r in results)

    def display_prop(name, count, total):
        # Color the ratio: red when empty, green when complete, yellow otherwise.
        click.echo('\t{0}: '.format(name), nl=False)
        if count == 0:
            func = error
        elif count == total:
            func = success
        else:
            func = warning
        func('{0}/{1}'.format(count, total))

    counts = dict((p, countprop(p)) for p in properties)
    for level in ctx.obj['levels']:
        count = zones.count({'level': level.id})
        total += count
        click.echo('{0}: {1}'.format(level.id, count))
        for prop in properties:
            prop_count = counts[prop].get(level.id, 0)
            totals[prop] += prop_count
            display_prop(prop, prop_count, count)
    click.secho('TOTAL: {0}'.format(total), bold=True)
    for prop in properties:
        prop_total = totals[prop]
        display_prop(prop, prop_total, total)
### # @file train.py # @author Sébastien Rouault <*****@*****.**> # # @section LICENSE # # Copyright © 2019-2021 École Polytechnique Fédérale de Lausanne (EPFL). # See LICENSE file. # # @section DESCRIPTION # # Simulate a training session under attack. ### import tools tools.success("Module loading...") import argparse import collections import json import math import os import pathlib import random import signal import sys import torch import torchvision import traceback import aggregators
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. # # @section DESCRIPTION # # Main execution script. ### if __name__ != "__main__": raise tools.UserException("Script " + repr(__file__) + " is to be used as the main module only") import tools tools.success("Python module loading phase...") import argparse import fractions import math import os import pathlib import signal import sys import threading import time import numpy as np import tensorflow as tf import config
parameter=cd['parameter'], parent=cd['parent'], user_id=request.user.id, hash=m.hexdigest()) step.save() return success('Your step have been created.') else: return error('You are not owner of the protocol.') except Exception, e: return error(str(e)) elif request.method == 'GET': template = loader.get_template('ui/add_step.html') context = RequestContext(request, { 'parent': request.GET['protocol'], }) return success(template.render(context)) else: return error('Method error') @login_required def batch_job(request): if request.method == 'POST': form = BatchJobForm(request.POST, request.FILES) if form.is_valid(): file_name = handle_uploaded_file(request.FILES['job_list']) try: protocol_cache = dict() with open(file_name) as f: jobs = f.readlines() job_list = []