def __buildClusters(self, task, results, result):
    """
    Build and persist a Cluster object for every group of sequences.

    :param task: unused here; kept for interface compatibility — TODO confirm callers
    :param results: iterable of sequence groups; each sequence exposes a ``dna`` profile
    :param result: the Result object every created cluster is linked to
    :return: None
    """
    for group in results:
        cluster = Cluster()
        cluster.result = result
        # Stack all DNA profiles so the centers can be computed column-wise.
        # (The original pre-initialized centerMean/centerMedian to [] only to
        # overwrite them immediately — dropped as dead code.)
        profiles = np.array([sequence.dna for sequence in group])
        cluster.centerMean = np.mean(profiles, axis=0).tolist()
        cluster.centerMedian = np.median(profiles, axis=0).tolist()
        # First save assigns a primary key so sequences can reference the cluster.
        cluster.save()
        for sequence in group:
            cluster.sequences.add(sequence)
        cluster.save()
def create(self, payload):
    """
    Create a new cluster unless one with the same name already exists.

    :param payload: dict with 'cluster_name' and 'cluster_id' keys
    :return: dict with 'msg' and 'code' (1 on success, 0 on failure)
    """
    try:
        if payload.get('cluster_name') == "":
            # Typo fixed: was "requerid".
            return {'msg': "Cluster name is required", 'code': 0}
        records = Cluster.objects(
            cluster_name__exact=payload.get('cluster_name'))
        if records.count() > 0:
            # Typo fixed: was "Aready".
            return {'msg': 'Cluster Already Exists', 'code': 0}
        cluster = Cluster(
            cluster_name=payload.get('cluster_name'),
            cluster_id=payload.get('cluster_id'),
        )
        _id = Cluster.objects.insert(cluster)
        return {'msg': _id.pk, 'code': 1}
    except Exception as err:
        print(err)
        # Return a dict shaped like the success path instead of the raw
        # exception object, so callers can always read 'msg'/'code'.
        return {'msg': str(err), 'code': 0}
def clusters_post():
    """
    CREATE a new cluster from POST data.

    Expected request json: {'name': str, 'notes': str}.
    Returns a JSON payload with the created cluster and HTTP 200, or
    aborts with 422 on any error.
    """
    try:
        payload = request.json
        cluster = Cluster()
        # Copy only the whitelisted keys that are actually present.
        for attr in ('name', 'notes'):
            if attr in payload:
                setattr(cluster, attr, payload[attr])
        cluster.insert()
        return jsonify({'success': True, 'clusters': [cluster.as_dict()]}), 200
    except Exception as err:
        abort(422, str(err))
def index_view(self):
    """Render a paginated list of clusters for the admin view."""
    # Extra list args parsed from the URL (page number, sorting, search).
    page, sort_idx, sort_desc, search = self._get_list_extra_args()

    page_size = 10
    count = Cluster._get_collection().count()
    # Ceiling division without floats: same result as floor-div + remainder check.
    num_pages = (count + page_size - 1) // page_size

    data = (Cluster._get_collection()
            .find({}, {})
            .skip(page * page_size)
            .limit(page_size))

    def pager_url(p):
        # The first page is addressed without an explicit page number.
        return self._get_url('.index_view', None if p == 0 else p,
                             sort_idx, sort_desc, search)

    return self.render('admin/shreds.html',
                       data=data,
                       count=count,
                       pager_url=pager_url,
                       num_pages=num_pages,
                       page=page)
def calc_clusters(request):
    """Kick off cluster calculation for the selected compound collection
    (POST with a valid form) or display the calculation form (GET)."""
    if request.method == 'POST':
        form = CalcClusterForm(request.POST)
        if form.is_valid():
            collection_id = str(request.POST["collection_id"])
            compound_group = CompoundCollection.objects().with_id(collection_id)
            compounds = Compound.objects(compound_group=collection_id)
            # NOTE(review): hard-coded input path — presumably a dev fixture;
            # confirm before deploying.
            input_text = "/home/mjs/Test/approved.gspan.gz"
            Cluster.calcualte_clusters(compound_group, input_text, "EDEN")
            return HttpResponseRedirect('/cluster/')
    else:
        form = CalcClusterForm()
    return render_to_response(
        'calc_clusters.html',
        {"view_titel": "Calculate Clusters", "form": form},
        context_instance=RequestContext(request))
def next():
    """Record the submitted tags for a shred (on POST) and render the
    next shred for the current user to tag."""
    if request.method == "POST":
        # TODO: helper
        # Normalize submitted tags to lowercase and deduplicate.
        tags = set(map(unicode.lower, request.form.getlist("tags")))
        # Attach this user's tagging to the cluster and count the pass.
        Cluster.objects(pk=request.form["_id"]).update_one(
            push__tags=ShredTags(
                user=g.user.id,
                tags=list(tags),
                recognizable_chars=request.form.get("recognizable_chars", ""),
                angle=int(request.form.get("angle", 0))),
            inc__users_count=1,
            add_to_set__users_processed=g.user.id)
        # Mirror the activity onto the user document.
        User.objects(pk=g.user.id).update_one(
            inc__processed=1,
            inc__tags_count=len(tags),
            add_to_set__tags=list(tags))
        session["processed"] = session.get("processed", 0) + 1
        # Upsert global per-tag usage stats; set_on_insert only fires when
        # the tag document is created for the first time.
        for tag in tags:
            Tags.objects(pk=tag).update_one(
                set_on_insert__is_base=False,
                set_on_insert__created_by=g.user.id,
                set_on_insert__created_at=Tags().created_at,
                inc__usages=1,
                add_to_set__shreds=request.form["_id"],
                upsert=True)
        # Measure how long the user spent tagging this shred.
        start = datetime.strptime(request.form["tagging_start"],
                                  '%Y-%m-%d %H:%M:%S.%f')
        end = datetime.utcnow()
        TaggingSpeed.objects.create(
            user=g.user.id,
            cluster=request.form["_id"],
            tags_count=len(tags),
            msec=(end - start).total_seconds() * 1000)
    # Serve the next shred (may be None when the pool is exhausted).
    cluster = Cluster.next_for_user(g.user, app.config['USERS_PER_SHRED'])
    auto_tags = cluster and cluster.get_auto_tags() or []
    return render_template(
        "_shred.html",
        cluster=cluster,
        auto_tags=auto_tags,
        all_tags=get_tags(),
        tagging_start=datetime.utcnow(),
        # TODO: move to context processor
        processed_per_session=session.get("processed", 0),
        processed_total=User.objects(id=g.user.id).first()["processed"],
        rating=list(User.objects.order_by(
            "-processed").values_list("id")).index(g.user.id) + 1
    )
def _gen_cluster(zone_name, zone_cluster_details, latency, pricing,
                 datacenters_map, services_config):
    """Build a Cluster for the given zone and populate it with the
    services listed in the zone's cluster details."""
    cluster = Cluster(_gen_zone(zone_name, latency, pricing, datacenters_map))
    # Each service entry carries its config name and an rps capacity cap.
    for svc_spec in zone_cluster_details["services"].values():
        svc = _gen_service(services_config[svc_spec["name"]],
                           svc_spec["rps_capacity"])
        cluster.add_service(svc)
    return cluster
def rest_clusters(request):
    """
    REST endpoint for the current user's clusters.

    GET  -> JSON list of the user's clusters.
    POST -> create a cluster from the JSON body, forward its config to the
            deployment service, and return the created cluster (or the
            deployment error).
    Any other method -> HTTP 404.
    """
    if request.method == 'GET':
        clusters = Cluster.objects.filter(user_id=request.user.id)
        return JsonResponse(clusters, encoder=QuerySetJSONEncoder, safe=False)
    elif request.method == 'POST':
        data = json.loads(request.body)
        cluster = Cluster()
        cluster.id = data['id']
        cluster.name = data['name']
        cluster.systems = data['systems']['storage'] + data['systems']['compute']
        cluster.created = datetime.now()
        cluster.user = request.user
        cluster.save()
        data['config']['id'] = cluster.id
        response = requests.post(url=DEPLOY_URL, data=json.dumps(data['config']))
        result = json.loads(response.text)
        if result.get('result') == 'success':
            return JsonResponse(model_to_dict(cluster))
        else:
            # BUG FIX: the original indexed the requests.Response object
            # (response['error_message']), which raises TypeError. Read the
            # error message from the decoded JSON body instead.
            return JsonResponse({'error': result.get('error_message')})
    else:
        return HttpResponse(status=404)
def pages():
    """On POST, attach the selected shreds to a page (existing or newly
    created) and record the page on each shred's tags for the current
    user; then render the user's pages."""
    if request.method == "POST":
        shreds = set(request.form.getlist("shreds"))
        page_name = request.form.get("page_name")
        page_id = request.form.get("page_id")

        if page_id:
            page = Pages.objects.get(pk=page_id)
        else:
            page, _ = Pages.objects.get_or_create(
                created_by=g.user.id, name=page_name)
        page.update(add_to_set__shreds=shreds)

        for shred in Cluster.objects(id__in=shreds):
            user_tags = shred.get_user_tags(g.user)
            if user_tags is not None:
                user_tags.pages = list(set(user_tags.pages + [page]))
            # TODO: else 404?
            shred.save()
    user_pages = Pages.objects(created_by=g.user.id)
    return render_template("_pages.html", pages=user_pages)
def cluster_settings(request, username):
    """
    Show and process the cluster-settings page.

    Handles deletion of the selected clusters ("delete" in POST) and
    saving a new/edited cluster ("save" in POST); otherwise renders a
    blank form.

    :param request: the Django request
    :param username: display name used only for logging
    """
    state = "Change Settings"
    if request.method == "POST":
        form = ClusterForm(request.user, request.POST)
        if "delete" in request.POST:
            # BUG FIX: the original logged i+1, which reports one deletion
            # even when the loop never ran. Count deletions explicitly.
            deleted = 0
            for cluster in get_clusters_from_request(request):
                cluster.delete()
                deleted += 1
            logger.info("%s deleted %d clusters(s)" % (username, deleted))
            state = "Settings Successfully Saved"
            form = ClusterForm(request.user)
        elif "save" in request.POST:
            if form.is_valid():
                obj = form.save(commit=False)
                obj.creator = request.user
                obj.save()
                state = "Settings Successfully Saved"
                form = ClusterForm(request.user)
    else:
        form = ClusterForm(request.user)
    c = {
        "pages": PAGES,
        "page": "clusters",
        "state": state,
        "form": form,
        "clusters": Cluster.get_clusters(request.user),
    }
    return render(request, "cluster/cluster_settings.html", c)
def cluster_settings(request, username):
    """
    Show and process the cluster-settings page.

    NOTE(review): this function is a verbatim duplicate of another
    cluster_settings definition in this codebase — consider consolidating.

    Handles deletion of the selected clusters ("delete" in POST) and
    saving a new/edited cluster ("save" in POST); otherwise renders a
    blank form.

    :param request: the Django request
    :param username: display name used only for logging
    """
    state = "Change Settings"
    if request.method == "POST":
        form = ClusterForm(request.user, request.POST)
        if "delete" in request.POST:
            # BUG FIX: the original logged i+1, which reports one deletion
            # even when the loop never ran. Count deletions explicitly.
            deleted = 0
            for cluster in get_clusters_from_request(request):
                cluster.delete()
                deleted += 1
            logger.info("%s deleted %d clusters(s)" % (username, deleted))
            state = "Settings Successfully Saved"
            form = ClusterForm(request.user)
        elif "save" in request.POST:
            if form.is_valid():
                obj = form.save(commit=False)
                obj.creator = request.user
                obj.save()
                state = "Settings Successfully Saved"
                form = ClusterForm(request.user)
    else:
        form = ClusterForm(request.user)
    c = {
        "pages": PAGES,
        "page": "clusters",
        "state": state,
        "form": form,
        "clusters": Cluster.get_clusters(request.user),
    }
    return render(request, "cluster/cluster_settings.html", c)
def test_skipping(self):
    """Skipping shreds must record each skip on the user and the shred;
    once the pool of fresh shreds is exhausted, already-seen shreds are
    served again."""
    self.create_user_and_login("user")
    user = User.objects.get(username="******")
    # Fresh user: no activity recorded yet.
    self.assertEqual(user.skipped, 0)
    self.assertEqual(user.processed, 0)
    # Seed the database with fixture shreds and fetch the first one.
    self.client.post(url_for("fixtures.create_shreds"))
    res = self.client.get(url_for("next"))
    self.assert200(res)
    body = res.get_data(as_text=True)
    current_shred_id = first_shred_id = self.parse_shred_id(body)
    seen_shreds = {current_shred_id}
    # Skip nine times; each time a not-yet-seen shred must be served.
    for i in xrange(9):
        res = self.client.post(url_for("skip"),
                               data={"_id": current_shred_id},
                               follow_redirects=True)
        body = res.get_data(as_text=True)
        self.assert200(res)
        current_shred_id = self.parse_shred_id(body)
        self.assertNotIn(current_shred_id, seen_shreds)
        seen_shreds.add(current_shred_id)
    # The first shred now carries exactly one skip record.
    self.assertEqual(
        len(Cluster.objects(id=first_shred_id).first().users_skipped), 1)
    # One more skip: the pool is exhausted, so an already-seen shred is
    # served again.
    res = self.client.post(url_for("skip"),
                           data={"_id": current_shred_id},
                           follow_redirects=True)
    body = res.get_data(as_text=True)
    self.assert200(res)
    current_shred_id = self.parse_shred_id(body)
    self.assertIn(current_shred_id, seen_shreds)
    # NOTE(review): the re-served shred is expected to show no skips here —
    # presumably skip records are reset when a shred re-enters rotation;
    # confirm against the application logic.
    self.assertEqual(
        len(Cluster.objects(id=current_shred_id).first().users_skipped), 0)
    user.reload()
    # All ten skips were recorded; nothing was processed.
    self.assertEqual(user.skipped, 10)
    self.assertEqual(user.processed, 0)
def _launch_cluster_from_thread(request, node_count, roxie_nodes, launch_config):
    """Create a Cluster record for this request and launch it on a daemon
    thread (or inline when debugging). Returns the new cluster's id."""
    # Pull the caller's credentials and region from the session/config.
    session = request.session
    access_key_id = session.get('access_key_id')
    secret_access_key = session.get('secret_access_key')
    owner_id = session.get('owner_id')
    region = launch_config['region']

    # Persist the cluster row first so the launch worker has an id to use.
    cluster = Cluster(owner_id=owner_id, node_count=node_count)
    cluster.requesting_ip = request.META.get('REMOTE_ADDR')
    cluster.region = region
    cluster.is_launching = True
    cluster.save()
    cluster_id = cluster.pk

    launch_args = (node_count, roxie_nodes, cluster_id, launch_config,
                   access_key_id, secret_access_key, owner_id)
    debug = False
    if debug:
        # Synchronous launch; handy when stepping through with a debugger.
        launch_cluster(*launch_args)
    else:
        worker = Thread(target=launch_cluster, args=launch_args)
        worker.daemon = True
        worker.start()
    return cluster_id
def update(self, payload):
    """
    Update a cluster's name.

    :param payload: dict expected to carry '_id' (the cluster id) and
                    'cluster_name' (the new name)
    :return: dict with 'msg' and 'code' (1 on success, 0 on failure)
    """
    try:
        # BUG FIX: the original queried Cluster.objects(_id=id), where `id`
        # was the *builtin function* (the signature only receives payload),
        # so the lookup could never match a document. Use the id carried in
        # the payload instead.
        # NOTE(review): '_id' as the payload key is assumed — confirm callers.
        cluster = Cluster.objects(_id=payload['_id']).get()
        cluster.update(cluster_name=payload['cluster_name'], )
        return {'msg': "User updated!", 'code': 1}
    except Exception as err:
        print(err)
        return {'msg': "User not updated!", 'code': 0}
def calc_clusters(request):
    """Trigger cluster calculation for a compound collection when a valid
    form is POSTed; otherwise show the calculation form."""
    if request.method == 'POST':
        form = CalcClusterForm(request.POST)
        if form.is_valid():
            cid = str(request.POST["collection_id"])
            compound_group = CompoundCollection.objects().with_id(cid)
            compounds = Compound.objects(compound_group=cid)
            # NOTE(review): hard-coded sample input file — presumably a dev
            # fixture; confirm before deploying.
            input_text = "/home/mjs/Test/approved.gspan.gz"
            Cluster.calcualte_clusters(compound_group, input_text, "EDEN")
            return HttpResponseRedirect('/cluster/')
    else:
        form = CalcClusterForm()
    context = {"view_titel": "Calculate Clusters", "form": form}
    return render_to_response('calc_clusters.html', context,
                              context_instance=RequestContext(request))
def get_user_compensar(self, id, id_type): """ Get Usuarios active or inactive :param id: :return: """ #records = Usuarios.objects(id_usuario__exact=id) #rows = Usuarios.objects(id_usuario='1000000765') try: elemento_usos = [] record = records = Usuarios.objects( id_usuario__exact=id, tipo_id_usurio__exact=id_type.upper()) if record.count() == 0: return {'msg': 'User Not Activated', 'code': 0} records = Usuarios.objects(id_usuario__exact=id, tipo_id_usurio__exact=id_type.upper()).aggregate(*[ { '$lookup': { 'from': Usos._get_collection_name(), 'localField': 'id_trabajador', 'foreignField': 'id_trabajador', 'as': 'Usos' }, }, { '$lookup': { 'from': Cluster._get_collection_name(), 'localField': 'cluster_id', 'foreignField': 'cluster_id', 'as': 'Cluster' } }, ]) for element in records: elemento_usos = element elemento_usos = copy.deepcopy(elemento_usos) if len(element['Usos']) == 0: elemento_usos['Usos'] = {'red': 0, 'aliados': 0} else: elemento_usos['Usos'] = [] elemento_usos['Usos'] = { "red": element['Usos'][0]['usos_red'], "aliados": element['Usos'][0]['usos_aliados'], } return elemento_usos except Exception as err: print(err) return err
def asymsched_test():
    """
    Correctness test.

    Simulates a CPU with four nodes in total. Two apps run; each app has
    one cluster, and each cluster uses two nodes.
    """
    # Node-to-node bandwidth matrix (symmetric, zero diagonal).
    test_bandwidths = [[0, 3000000000, 1000000000, 1000000000],
                       [3000000000, 0, 1000000000, 1000000000],
                       [1000000000, 1000000000, 0, 7000000000],
                       [1000000000, 1000000000, 7000000000, 0]]
    # Remote-access traffic between node pairs.
    test_remote_access = [[0, 3000000000, 0, 0],
                          [3000000000, 0, 0, 0],
                          [0, 0, 0, 5000],
                          [0, 0, 5000, 0]]
    test_apps = []
    # App 0: one cluster with memory on nodes 0 and 1.
    test_apps.append(App())
    test_apps[0].tt = 50000
    test_apps[0].clusters.append(Cluster())
    test_apps[0].clusters[0].memories = [200, 400]
    test_apps[0].clusters[0].current_nodes = [0, 1]
    # App 1: one cluster with memory on nodes 2 and 3.
    test_apps.append(App())
    test_apps[1].tt = 50000
    test_apps[1].clusters.append(Cluster())
    test_apps[1].clusters[0].memories = [30, 300]
    test_apps[1].clusters[0].current_nodes = [2, 3]
    # Run the scheduler and show each cluster's node movement.
    _, _, test_min_pid, test_do_migration = asymsched(test_apps,
                                                      test_bandwidths,
                                                      test_remote_access)
    print(test_min_pid, test_do_migration)
    for app in test_apps:
        for cluster in app.clusters:
            print(cluster.origin_nodes, "==>", cluster.current_nodes)
def list_batches():
    """
    Return per-batch shred counts.

    :return: list of {'name': batch_name, 'shreds_created': count} dicts,
             ordered by batch name
    """
    # NOTE(review): the ['result'] access matches pymongo 2.x, where
    # aggregate() returned a dict; pymongo 3+ returns a CommandCursor and
    # this line must become a plain iteration — confirm the pinned version.
    batch_counts = Cluster._get_collection().aggregate([
        {
            '$group': {
                '_id': '$batch',
                'shreds_created': {'$sum': 1}
            }
        },
        {
            # BUG FIX: grouped documents only have '_id' and
            # 'shreds_created'; sorting on the nonexistent 'name' field was
            # a no-op. Sort on '_id' (the batch name) instead.
            '$sort': {
                '_id': 1,
            },
        }])['result']
    return [{'name': item['_id'], 'shreds_created': item['shreds_created']}
            for item in batch_counts]
def get_cluster(cluster_id=None):
    """Render a single cluster as json. Picks a random one when no
    cluster id is provided."""
    batch = request.args.get("batch")
    # Random pick within the optional batch, or direct lookup by pk.
    if cluster_id is None:
        cluster = Cluster.get_some(batch=batch)
    else:
        cluster = Cluster.objects.get_or_404(pk=cluster_id)
    payload = {
        "success": True,
        "data": {"cluster": json.loads(cluster.to_json())},
    }
    return jsonify(payload)
def _fetch_normalized_shreds_tags(repeats):
    """Collect normalized tags for every cluster.

    Args:
        repeats: minimum number of tag occurrences for a tag to be
            included in the result.

    Returns:
        Dict {obj_id: frozenset(tags)} mapping cluster ids to tag sets;
        clusters without qualifying tags are omitted.
    """
    clusters = Cluster.objects().timeout(False).only(
        'id', 'tags.tags', 'members.shred')[:SHREDS_CAP]
    result = {}
    # TODO: every iteration queries mongodb for
    # cluster->member->shred->auto_tags. That's too slow.
    for cluster in clusters:
        repeated = cluster.get_repeated_tags(repeats)
        if repeated:
            result[cluster.id] = frozenset(repeated)
    return result
def create_cluster():
    """Merges two clusters.

    Processes a POST request whose body looks like:

        { "cluster": { "parents": ["parent1_id", "parent2_id"],
                       "members": [{ "shred": "shred1_id",
                                     "position": [100, 500],
                                     "angle": 35 }, ...] } }

    Returns a success payload with the new cluster id, or a 400 response
    describing the validation failure.
    """
    def _bad_request(message):
        # Uniform 400 response for all validation failures.
        resp = jsonify({"success": False, "message": message})
        resp.status_code = 400
        return resp

    req = request.get_json().get('cluster')
    parents = [Cluster.objects.get_or_404(pk=pk) for pk in req['parents']]
    if len(parents) != 2:
        return _bad_request(
            "Wrong number of good parents: %s" % req['parents'])
    if parents[0].batch != parents[1].batch:
        return _bad_request(
            "Parents are from different batches: %s %s" % (
                parents[0].batch, parents[1].batch))

    member_fields = ['shred', 'position', 'angle']
    for member in req['members']:
        for field in member_fields:
            if field not in member:
                return _bad_request(
                    "One of the members doesn't have all required "
                    "fields (%s): %s" % (member_fields, member))

    # All checks passed: build the merged cluster from the two parents.
    cluster = Cluster(
        id=str(uuid.uuid1()),
        users_count=0,
        users_skipped=[],
        users_processed=[],
        batch=parents[0].batch,
        tags=[],
        parents=parents,
        members=[ClusterMember(shred=m['shred'],
                               position=m['position'],
                               angle=m['angle'])
                 for m in req['members']],
    )
    cluster.save()
    return jsonify({
        "success": True,
        "id": cluster.id,
    })
def add_cluster():
    """Create, persist and return a new Cluster with a fresh jobflow id."""
    new_cluster = Cluster(jobflow_id=str(uuid.uuid1()))
    new_cluster.save()
    return new_cluster
def skip():
    """Mark the posted shred as skipped by the current user, bump the
    user's skip counter, and redirect to the next shred."""
    shred_id = request.form["_id"]
    Cluster.objects(pk=shred_id).update_one(
        add_to_set__users_skipped=g.user.id)
    User.objects(pk=g.user.id).update_one(inc__skipped=1)
    return redirect(url_for("next"))
def load_new_batch(fname_glob, batch):
    """Import a batch of scanned sheets: split each sheet into shreds,
    upload the shred images to storage, and create one Shred document and
    one single-member Cluster document per shred.

    :param fname_glob: glob pattern of source sheet files in storage
    :param batch: batch name; used in ids, output paths and for cleanup
    """
    # Choose the storage backend from config.
    if app.config["S3_ENABLED"]:
        storage = S3Storage(app.config)
    else:
        storage = LocalFSStorage(app.config)
    pages_processed = 0
    shreds_created = 0
    out_dir = os.path.join(app.config["SPLIT_OUT_DIR"], "batch_%s" % batch)
    # Start clean: remove previous output and any clusters left over from
    # an earlier run of the same batch.
    storage.clear(out_dir)
    Cluster.objects(batch=batch).delete()
    for src_key in storage.list(fname_glob):
        fname = storage.get_file(src_key)
        sheet_name = os.path.splitext(os.path.basename(fname))[0]
        echo("\n\nProcessing file %s from %s" % (fname, sheet_name))
        sheet = SheetIO(fname, sheet_name,
                        [GeometryFeatures, ColourFeatures],
                        out_dir, "png")
        # Fields holding image paths that must be uploaded to storage.
        image_path_fields = ["piece_fname", "mask_fname",
                             "piece_in_context_fname"]
        # TODO: Remove when all field names match unshred's.
        field_name_map = {
            # Unshred-tag name: unshred name.
            "mask_fname": "features_fname",
        }
        # Raw fields/features not persisted on the Shred document.
        drop_fields = ['simplified_contour', 'img_roi']
        drop_features = ['on_sheet_height', 'on_sheet_width',
                         'on_sheet_angle', 'bottommost', 'topmost',
                         'on_sheet_x', 'on_sheet_y']
        pages_processed += 1
        for shred in sheet.get_shreds():
            shred = shred._asdict()
            # Globally unique shred id: batch:sheet_name.
            shred["id"] = "%s:%s_%s" % (batch, shred["sheet"], shred["name"])
            shreds_created += 1

            def _convert_opencv_contour(contour):
                """Converts opencv contour to a list of pairs."""
                return contour.reshape((len(contour), 2)).tolist()

            shred["contour"] = _convert_opencv_contour(
                shred["simplified_contour"])
            shred['tags'] = shred.pop('tags_suggestions')
            for field in drop_fields:
                del shred[field]
            for field in drop_features:
                del shred['features'][field]
            # Each imported shred gets its own one-member cluster.
            cluster = {}
            cluster["id"] = shred["id"]
            cluster["users_count"] = 0
            cluster["batch"] = batch
            # Upload images and replace local paths with storage handles.
            for model_field_name in image_path_fields:
                import_field_name = field_name_map.get(model_field_name,
                                                       model_field_name)
                image_path = shred.pop(import_field_name)
                res = storage.put_file(image_path)
                shred[model_field_name] = res
            cluster["parents"] = []
            try:
                shred_obj = Shred.objects.create(**shred)
                cluster_member = ClusterMember(shred=shred_obj,
                                               position=[0, 0],
                                               angle=0)
                cluster["members"] = [cluster_member]
                Cluster.objects.create(**cluster)
            except bson.errors.InvalidDocument:
                # Dump the offending document before propagating the error.
                echo(shred)
                raise
    # Indexes used by the tagging queries.
    Cluster.ensure_index(["users_processed", "users_count", "batch"])
    Cluster.ensure_index(["users_skipped", "users_count", "batch"])