def create_or_update_region(db_session, region_name, server_repositories, created_by, region=None):
    """Create a new Region, or update an existing one, and attach its server repositories.

    :param db_session: active database session used for lookups and the final commit.
    :param region_name: desired region name; validated by check_acceptable_string().
    :param server_repositories: comma-delimited string of server names that must
        already exist in CSM; may be empty/None.
    :param created_by: user recorded as creator when a new Region is made.
    :param region: existing Region to update; when None a new Region is created.
    :return: the created or updated Region (already committed).
    :raises ValueNotFound: if any listed server repository does not exist.
    """
    region_name = check_acceptable_string(region_name)
    if region is None:
        region = Region(created_by=created_by)
        db_session.add(region)
    region.name = region_name
    # Rebuild the server list from scratch so entries removed by the caller are dropped.
    region.servers = []
    if not is_empty(server_repositories):
        for server_name in server_repositories.split(','):
            valid_server = get_server(db_session, server_name.strip())
            if valid_server is not None:
                region.servers.append(valid_server)
            else:
                raise ValueNotFound("Server repository '{}' does not exist in the database.".format(server_name))
    db_session.commit()
    return region
def post(self, version):
    """Create a Service, or update the one whose slug matches name+region.

    Requires 'name', 'description' and 'region' form fields; responds with the
    service's REST representation, 400 on missing fields, 404 on bad version.
    """
    logging.debug("ServicesListHandler#post")
    if (self.valid_version(version)):
        name = self.request.get('name', default_value=None)
        description = self.request.get('description', default_value=None)
        region = self.request.get('region', default_value=None)
        if name and description and region:
            slug = Service.slugify(name, region)
            existing_s = Service.get_by_slug(slug)
            # Update existing resource
            if existing_s:
                existing_s.description = description
                if region:  # always truthy here; inner check kept from original flow
                    existing_s.region = Region.get_by_name(region)
                    existing_s.slug = Service.slugify(name, region)
                existing_s.put()
                self.json(existing_s.rest(self.base_url(version)))
            # Create new service
            else:
                region = Region.get_by_name(region)
                s = Service(name=name, slug=slug, description=description, region=region)
                s.put()
                self.json(s.rest(self.base_url(version)))
        else:
            self.error(400, "Bad Data: Name: %s, Description: %s, Region: %s" % (name, description, region))
    else:
        self.error(404, "API Version %s not supported" % version)
def addnew(request):
    """Django view: add a new cloudman Region.

    Only users whose ADFS groups grant super-user privileges may add regions.
    GET renders the creation form; POST validates the form, creates the Region
    (and its admin e-group row if needed), logs the action inside a manual
    transaction, and redirects/renders an HTML status message.
    """
    groups = request.META.get('ADFS_GROUP', '')
    groupsList = groups.split(';') ;
    userIsSuperUser = isSuperUser(groupsList)
    ## Check user cloudman resource manager privileges
    if not userIsSuperUser:
        message = "You don't have cloudman resource manager privileges. Hence you are not authorized to add new Region";
        html = "<html><body> %s.</body></html>" % message
        return HttpResponse(html)
    if request.method == 'POST':
        form = RegionForm(request.POST)
        ### Check whether all the fields for creating a region are provided with non-empty values
        if form.is_valid():
            redirectURL = '/cloudman/message/?msg='
            name = form.cleaned_data['name']
            regionExists = checkNameIgnoreCase(name)
            if regionExists:
                msgAlreadyExists = 'Region ' + name + ' already exists. Hence Add Region Operation Stopped'
                return HttpResponseRedirect(redirectURL + msgAlreadyExists)
            description = form.cleaned_data['description']
            admin_group = form.cleaned_data['admin_group']
            comment = form.cleaned_data['comment']
            ## Check that name provided as admin_group exists in the EGroups TABLE.
            ## If not, then check its existence in the external egroup database through ldap.
            ## If not present there either, raise an alert; otherwise add the group name to the EGroups table too.
            egroup = None
            try:
                egroup = Egroups.objects.get(name=admin_group)
            except Egroups.DoesNotExist:
                if not (checkEGroup(admin_group)):
                    errorMessage = 'Selected Admin E-Group ' + admin_group + ' does not exists'
                    return HttpResponseRedirect(redirectURL + errorMessage)
                egroup = Egroups(name=admin_group)
                egroup.save()
            ## Create the Region with all the required values
            regionObj = Region(name=name, description=description, admin_group=egroup)
            regionObj.save()
            regionObj = Region.objects.get(name=name)
            if addLog(request, name, comment, regionObj, None, 'region', 'add', True):
                transaction.commit()
                ## Return the Success message
                msgSuccess = 'New region ' + name + ' added successfully'
            else:
                transaction.rollback()
                msgSuccess = 'Error in creating New region ' + name
            html = "<html><HEAD><meta HTTP-EQUIV=\"REFRESH\" content=\"4; url=/cloudman/region/list/\"></HEAD><body> %s.</body></html>" % msgSuccess
            return HttpResponse(html)
    else:
        ## If not POST operation, then return the Region Creation Form
        form = RegionForm()
    return render_to_response('region/addnew.html', locals(), context_instance=RequestContext(request))
def view_events_serialise(request, country, region):
    """AJAX: return the serialised list of events for a country and region.

    If the user is registered, the serialisation also carries favourite and
    RSVP state. 'all' for country/region means "no filter"; an unknown name
    yields an empty response. Anything but a GET csv request gets a 404.
    """
    if request.method == "GET":
        # NOTE: dict.has_key() is Python 2 only (legacy App Engine runtime).
        if request.GET.has_key("format") and request.GET["format"] == "csv":
            r = None
            c = None
            if country != u"all":
                c = Country.get_by_key_name(country.strip())
                if c is None:
                    return HttpResponse("")
            if region != u"all":
                r = Region.get_by_key_name(region.strip())
                if r is None:
                    return HttpResponse("")
            # if r defined, then c has to be defined as well
            assert not r or (r and c)
            q = Event.all()
            if c:
                q.filter("country =", c)
            if r:
                q.filter("region =", r)
            in_iframe = request.GET.has_key("in_iframe")
            return HttpResponse(serialise_events_for_query(q, request.user, in_iframe, True, True))
    return HttpResponseNotFound()
def get(self):
    """Render the status dashboard for a validated date window."""
    logging.debug("RootHandler#get")
    now = datetime.datetime.today()
    default_start = now - timedelta(days=5)
    start_date = dateparser.parse(self.request.get('start', default_value=str(default_start)))
    end_date = dateparser.parse(self.request.get('end', default_value=str(now)))
    history_size = config.SITE['history_size']
    # Fall back to the default five-day window on any out-of-range request.
    out_of_range = (
        end_date > now
        or start_date > end_date
        or now.toordinal() - history_size > start_date.toordinal()
    )
    if out_of_range:
        end_date = now
        start_date = end_date - timedelta(days=5)
    td = default_template_data()
    td["start_date"] = start_date - timedelta(days=1)
    td["end_date"] = end_date - timedelta(days=1)
    td["history_size"] = history_size
    td["regions"] = Region.all_regions()
    self.render(td, 'index.html')
def post(self, version, service_slug):
    """Partially update an existing Service (name / description / region).

    Any subset of the three fields may be posted; the slug is recomputed and
    the entity saved only when at least one field was supplied. 404 when the
    service or API version is unknown.
    """
    logging.debug("ServiceInstanceHandler#post")
    name = self.request.get('name', default_value=None)
    description = self.request.get('description', default_value=None)
    region = self.request.get('region', default_value=None)
    if (self.valid_version(version)):
        service = Service.get_by_slug(service_slug)
        if service:
            if description:
                service.description = description
            if name:
                service.name = name
            if region:
                service.region = Region.get_by_name(region)
            if name or description or region:
                # NOTE(review): assumes service.region is already set; if a
                # service can exist without a region, service.region.name
                # raises AttributeError here — confirm against the model.
                service.slug = Service.slugify(service.name, service.region.name)
                service.put()
                self.json(service.rest(self.base_url(version)))
        else:
            self.error(404, "Service %s does not exist" % service_slug)
    else:
        self.error(404, "API Version %s not supported" % version)
def get(self, version):
    """Emit every region as JSON; 404 for unsupported API versions."""
    logging.debug("RegionsListHandler#get")
    if not self.valid_version(version):
        self.error(404, "API Version %s not supported" % version)
        return
    self.json({"regions": Region.all_regions()})
def post(self, version):
    """Create an announcement, or update the message of an existing one.

    'message' is required; 'key' selects an existing announcement to modify;
    'region' (by name) scopes a newly created announcement, else it is global.
    """
    logging.debug("AnnouncementsListHandler#post")
    if not self.valid_version(version):
        self.error(404, "API Version %s not supported" % version)
        return
    message = self.request.get('message', default_value=None)
    region_name = self.request.get('region', default_value=None)
    key = self.request.get('key', default_value=None)
    if not message:
        self.error(400, "Bad Data: Missing Message")
        return
    if key:
        # modify an existing announcement
        announcement = Announcement.get(key)
        announcement.message = message
    else:
        # create a new announcement; resolve the optional region scope
        announcement = Announcement(message=message)
        announcement.region = Region.get_by_name(region_name) if region_name else None
    announcement.put()
    self.json(announcement.to_json())
def get_agencies(cls, truncate=True):
    """Fetch the agency list from the upstream API and mirror it locally.

    :param truncate: when True, wipe the Agency table first so the stored
        rows exactly mirror the upstream list.
    :return: list of Agency instances (empty when the API returned nothing).
    """
    # Serialise concurrent refreshes so two workers don't truncate/insert
    # the Agency table at the same time.
    with Lock("agencies"):
        request_params = {
            'command': 'agencyList',
        }
        agencies_xml, api_call = cls.request(request_params, 'agency')
        if not agencies_xml:
            return []
        db.session.begin()
        if truncate:
            db.session.query(Agency).delete()
        agencies = []
        for agency in agencies_xml:
            # Each agency's region is created on first sight.
            region = Region.get_or_create(db.session, title=agency.get('regionTitle'))
            a = Agency.get_or_create(db.session,
                                     tag = agency.get('tag'),
                                     title = agency.get('title'),
                                     short_title = agency.get('shortTitle'),
                                     region = region,
                                     api_call = api_call)
            agencies.append(a)
        db.session.commit()
        return agencies
def post(self, version):
    """Create a region with a unique name; 400 on a duplicate or missing name."""
    logging.debug("RegionListHandler#post")
    if not self.valid_version(version):
        self.error(404, "API Version %s not supported" % version)
        return
    name = self.request.get('name', default_value=None)
    if not name:
        self.error(400, "Bad Data: Name: %s" % name)
        return
    if Region.get_by_name(name):
        self.error(400, 'Bad Data: Name %s already in use.' % name)
    else:
        # Create new region
        Region(name=name).put()
class TestEmployee(TestCase):
    """Employee/organization hierarchy tests over region -> company -> store."""

    def setUp(self):
        # One organization at each level, plus one employee attached to each.
        self.region = Region(name="test_region")
        self.region.save()
        self.company = Company(code="1001", name="test_company", region=self.region)
        self.company.save()
        self.store = Store(code="10011001", name="test_store", company=self.company)
        self.store.save()
        self.emp_region = Employee(username="******", organization=self.region)
        self.emp_company = Employee(username="******", organization=self.company)
        self.emp_store = Employee(username="******", organization=self.store)

    def test_organizations(self):
        # organizations() returns the chain from the root region down to the
        # employee's own organization.
        organizations = self.emp_region.organizations()
        self.assertEquals([self.region], organizations)
        organizations = self.emp_company.organizations()
        self.assertEquals([self.region, self.company], organizations)
        organizations = self.emp_store.organizations()
        self.assertEquals([self.region, self.company, self.store], organizations)

    def test_in_xxx_org(self):
        # employee in region
        self.assertTrue(self.emp_region.in_region())
        self.assertFalse(self.emp_region.in_company())
        self.assertFalse(self.emp_region.in_store())
        # employee in company
        self.assertFalse(self.emp_company.in_region())
        self.assertTrue(self.emp_company.in_company())
        self.assertFalse(self.emp_company.in_store())
        # employee in store
        self.assertFalse(self.emp_store.in_region())
        self.assertFalse(self.emp_store.in_company())
        self.assertTrue(self.emp_store.in_store())

    def test_org(self):
        # org() returns the employee's own (leaf) organization.
        self.assertEqual(self.emp_region.org(), self.region)
        self.assertEqual(self.emp_company.org(), self.company)
        self.assertEqual(self.emp_store.org(), self.store)
def setUp(self):
    """Build one region with two companies and a store for hierarchy tests."""
    def saved(obj):
        obj.save()
        return obj
    self.region = saved(Region(name="test_region"))
    self.company = saved(Company(code="1001", name="test_company", region=self.region))
    self.company2 = saved(Company(code="1002", name="test_company_2", region=self.region))
    self.store = saved(Store(code="10011001", name="test_store", company=self.company))
def get(self, version):
    """Return active announcements, optionally scoped to a named region."""
    logging.debug("AnnouncementsListHandler#get")
    if not self.valid_version(version):
        self.error(404, "API Version %s not supported" % version)
        return
    region_name = self.request.get('region', default_value=None)
    region = Region.get_by_name(region_name) if region_name else None
    self.json({"announcements": Announcement.get_active(region)})
def setUp(self):
    """Create region/company/store plus one employee attached to each level."""
    def saved(obj):
        obj.save()
        return obj
    self.region = saved(Region(name="test_region"))
    self.company = saved(Company(code="1001", name="test_company", region=self.region))
    self.store = saved(Store(code="10011001", name="test_store", company=self.company))
    self.emp_region = Employee(username="******", organization=self.region)
    self.emp_company = Employee(username="******", organization=self.company)
    self.emp_store = Employee(username="******", organization=self.store)
class TestOrganization(TestCase):
    """Tests for the organization hierarchy helpers ancestor() and belong_to()."""

    def setUp(self):
        # region -> (company, company2); the store belongs to company only.
        self.region = Region(name="test_region")
        self.region.save()
        self.company = Company(code="1001", name="test_company", region=self.region)
        self.company.save()
        self.company2 = Company(code="1002", name="test_company_2", region=self.region)
        self.company2.save()
        self.store = Store(code="10011001", name="test_store", company=self.company)
        self.store.save()

    def test_ancestor(self):
        # ancestor() climbs to the hierarchy root (the region).
        self.assertTrue(self.region.pk == self.company.ancestor().pk)
        self.assertTrue(self.region.pk == self.store.ancestor().pk)

    def test_belong_to(self):
        # belong_to() is true only along the direct ancestry chain.
        self.assertTrue(self.store.belong_to(self.company))
        self.assertFalse(self.region.belong_to(self.company))
        self.assertFalse(self.store.belong_to(self.company2))
def __init__(self, *args, **kwargs):
    """Populate the country and region choice lists from the datastore."""
    super(BaseEventFilterForm, self).__init__(*args, **kwargs)
    # Countries render as "Long Name [SHORT]"; both lists lead with 'All'.
    country_rows = Country.all().order('long_name')
    self.fields['country'].choices = [('all', 'All')] + [
        (c.key().name(), '%s [%s]' % (c.long_name, c.key().name()))
        for c in country_rows
    ]
    region_rows = Region.all().order('long_name')
    self.fields['region'].choices = [('all', 'All')] + [
        (r.key().name(), '%s' % (r.long_name))
        for r in region_rows
    ]
def get(self, version):
    """List services (optionally filtered by region name) as JSON."""
    logging.debug("ServicesListHandler#get")
    if not self.valid_version(version):
        self.error(404, "API Version %s not supported" % version)
        return
    query = Service.all().order('name')
    region_name = self.request.get('region', default_value=None)
    if region_name:
        query.filter("region = ", Region.get_by_name(region_name))
    base = self.base_url(version)
    self.json({"services": [s.rest(base) for s in query]})
def view_subscribe(request):
    """Create an event subscription for the current user from GET filters.

    Enforces the per-user subscription cap, translates the filter form's
    country/region/forwho/forfree fields into a Subscription, and redirects
    back to the event list (to the new subscription's view on success).
    """
    # login is required here
    # if not ( request.user and request.user.is_authenticated() ):
    #     return HttpResponseRedirect()
    if request.user.subscription_set.count() >= Subscription._MAX_SUBSCRIPTIONS_PER_USER:
        Message(
            user=request.user,
            message=ugettext("You've reached maximum number of subscriptions. Delete some and try again."),
        ).put()
    elif request.method == "GET":
        search_form = AdvancedEventFilterForm(request.GET)
        if search_form.is_valid():
            country = Country.get_by_key_name(search_form.cleaned_data["country"])
            region = Region.get_by_key_name(search_form.cleaned_data["region"])
            free = search_form.cleaned_data["forfree"]
            if search_form.cleaned_data["forwho"] == "custom":
                # Build the event-type bitmask from explicitly selected types.
                event_types = 0
                for t in search_form.cleaned_data["typelist"]:
                    event_types += 1 << int(t)
            elif search_form.cleaned_data["forwho"] == "everyone":
                event_types = Event.OPEN_EVENTS_MASK
            else:
                # assert(search_form.cleaned_data['forwho'] == 'members')
                event_types = Event.MEMBERS_EVENTS_MASK
            s = Subscription(user=request.user, country=country, region=region, event_types=event_types, free=free)
            s.put()
            Message(user=request.user, message=ugettext("Subscription created")).put()
            return HttpResponseRedirect(
                reverse("events.views.view_event_list", kwargs={"subscription_id": s.key().id()})
            )
    return HttpResponseRedirect(reverse("events.views.view_event_list"))
def post(self, version):
    """Reorder regions.

    Expects 'regions' as a comma-separated list of region names; each named
    region's index becomes its position in that list. Regions not named in
    the list are left untouched. Responds 400 when the list is missing and
    404 for unsupported API versions.
    """
    logging.debug("RegionIndexHandler#post")
    if not self.valid_version(version):
        self.error(404, "API Version %s not supported" % version)
        return
    order = self.request.get('regions', default_value=None)
    if not order:
        self.error(400, "Bad Data: Missing Order")
        return
    indexes = {name: idx for idx, name in enumerate(order.split(','))}
    updated = []
    for region in Region.all().fetch(100):
        # BUG FIX: the old code used indexes[region.name] != None, which
        # raised KeyError for any region absent from the submitted list.
        new_index = indexes.get(region.name)
        if new_index is not None:
            region.index = new_index
            updated.append(region)
    db.put(updated)
def save(self): obj = super(_EventForm, self).save() # check if this country exists and create if necessary country_raw = self.cleaned_data['country'] # assuming 'country name [SHORT]' format of the name result = re.match('(.*)\[(.*)\]', country_raw) try: country_long = result.group(1).strip() country_short = result.group(2).strip() except (IndexError, AttributeError): country_long = '' country_short = country_raw country = Country.get_or_insert( country_short, long_name = country_long ) obj.country = country # check region region_raw = self.cleaned_data['region'] if region_raw != '': result = re.match('(.*)\[(.*)\]', region_raw) try: region_long = result.group(1).strip() region_short = result.group(2).strip() except (IndexError, AttributeError): region_long = region_raw region_short = region_raw region = Region.get_or_insert( region_short, long_name = region_long, country=country ) obj.region = region else: obj.region = None return obj
# The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. from google.appengine.ext import db from models import Status, Service, Event, Region from datetime import datetime, timedelta, date na = Region(name="North America") na.put() emea = Region(name="EMEA") emea.put() foo = Service(name="Service Foo", slug="service-foo", region=na, description="Scalable and reliable foo service across the globe") foo.slug = Service.slugify(foo.name, foo.region.name) foo.put() bar = Service(name="Service Bar", slug="service-bar", region=emea, description="Scalable and reliable foo service") bar.slug = Service.slugify(bar.name, bar.region.name) bar.put()
def topic_filter(region_id: str, unit: str, search: str = None,
                 start: datetime = None, end: datetime = None,
                 topic_limit=100, sum: bool = False, lw: float = 0,
                 vw: float = 0, cw: float = 0, rw: float = 1, dw: float = 0):
    """Aggregate per-tag trend metrics for a region over a date window.

    :param region_id: id of the Region to report on.
    :param unit: one of 'week'/'day'/'month'/'year'; sizes the default window.
    :param search: optional full-text filter applied to tags/metrics.
    :param start: window start; defaults to `end - unit_value[unit] - 2` days.
    :param end: window end; defaults to today (midnight).
    :param lw, vw, cw, rw, dw: like/view/comment/rank/dislike weight factors.
    :return: dict with region id/name/geo and a 'topic' list of aggregated
        per-(tag, date) records carrying a combined 'weight'.
    :raises ValueError: on an invalid unit.
    """
    if unit not in ['week', 'day', 'month', 'year']:
        raise ValueError("Invalid unit value")
    today = datetime.now()
    today = datetime(year=today.year, month=today.month, day=today.day)
    if end is None:
        end = today
    else:
        end = datetime(year=end.year, month=end.month, day=end.day)
    if start is None:
        start = end - relativedelta(days=unit_value[unit] + 2)
    region = Region.get(Region.region_id == region_id)
    result = {
        'id': region.region_id,
        'name': region.name,
        'topic': [],
        'geo': {
            'lat': region.lat,
            'lon': region.lon
        }
    }
    daily_trends = DailyTrend.select().where((DailyTrend.time >= start)
                                             & (DailyTrend.time <= end)
                                             & (DailyTrend.region == region))
    if search is not None and len(search) > 0:
        # Push the text filter into the DB via the jsonb tsvector helper.
        exp = NodeList([
            SQL("jsonb_message_to_tsvector("),
            DailyTrend.metrics,
            SQL(") @@ '{}'".format(search))
        ], glue='')
        daily_trends = daily_trends.where(exp)
    daily_metrics = []
    for trend in daily_trends:
        stats = []
        for metric in trend.metrics:
            m_ = metric['stats']
            m_['tag'] = metric['tag'].replace('#', '')
            m_['date'] = trend.time
            m_['category'] = metric['category']
            stats.append(m_)
        df = pd.DataFrame(stats)
        # df['date'] = pd.to_datetime(df['date'])
        daily_metrics.append(df)
    if end >= today:
        # The current day isn't persisted yet; merge in the live cache.
        from cache import LatestTrend
        try:
            trend = LatestTrend.get(LatestTrend.region_id == region_id)
            today_stats = trend.metrics
        except:
            today_stats = []
        stats = []
        for metric in today_stats:
            m_ = metric['stats']
            m_['date'] = today
            m_['tag'] = metric['tag'].replace('#', '')
            if 'category' not in metric:
                m_['category'] = [-1]
            else:
                m_['category'] = metric['category']
            stats.append(m_)
        if len(stats):
            df = pd.DataFrame(stats)
            daily_metrics.append(df)
    if len(daily_metrics) > 0:
        df = pd.concat(daily_metrics, axis=0)
        if search is not None and len(search) > 0:
            df = df.loc[df['tag'].str.contains(search, regex=False)]
        df.set_index('tag')
        has_col = False
        if 'category' in df.columns:
            # Category lists must be stringified so groupby can key on them.
            df['category'] = [','.join(map(str, l)) for l in df['category']]
            has_col = True
            df = df.groupby(['tag', 'date', 'category']).mean()
        else:
            df = df.groupby(['tag', 'date']).mean()
        df['weight'] = (101 - df['rank']) * rw + (
            (df['view']) * vw + (df['comment']) * cw + (df['like']) * lw
            - (df['dislike'] * dw)) / df['view']
        df['tag'] = list([r[0] for r in df.index])
        # BUG FIX: the old format string '%Y-%m-%dT%HH:%MM:%SS' rendered
        # e.g. '14H:30M:07S'; emit proper ISO-8601 time instead.
        df['date'] = list(
            [r[1].strftime("%Y-%m-%dT%H:%M:%S") for r in df.index])
        if has_col:
            df['category'] = list([[int(float(l)) for l in r[2].split(',')]
                                   for r in df.index])
        topics = df.to_dict(orient='records')
        result['topic'] = topics
    return result
def import_region_json(r, frame_index, video_id, event_id, segment_index=None):
    """Build (without saving) a Region from its exported JSON dict."""
    region = Region()
    region.frame_index = frame_index
    region.video_id = video_id
    region.event_id = event_id
    region.object_name = r['object_name']
    region.region_type = r.get('region_type', Region.ANNOTATION)
    region.full_frame = r.get('full_frame', False)
    if segment_index:
        region.segment_index = segment_index
    # Geometry and confidence default to zero when absent from the export.
    for attr in ('x', 'y', 'w', 'h'):
        setattr(region, attr, r.get(attr, 0))
    region.confidence = r.get('confidence', 0.0)
    region.text = r['text'] if r.get('text', None) else ""
    region.metadata = r.get('metadata', None)
    return region
def import_legacy_annotation(a, video_obj, frame, vdn_dataset=None):
    """Import a legacy annotation dict as a saved Region (plus AppliedLabel)."""
    region = Region()
    region.region_type = Region.ANNOTATION
    region.video = video_obj
    region.x = a['x']
    region.y = a['y']
    region.h = a['h']
    region.w = a['w']
    region.vdn_key = a['id']
    if vdn_dataset:
        region.vdn_dataset = vdn_dataset
    label = a['label']
    if label.strip():
        region.object_name = label
    region.frame = frame
    region.full_frame = a['full_frame']
    region.metadata_text = a['metadata_text']
    region.metadata_json = a['metadata_json']
    region.save()
    # A non-blank label is additionally recorded as an AppliedLabel.
    if label.strip():
        applied = AppliedLabel()
        applied.region = region
        applied.label_name = label
        applied.video = region.video
        applied.frame = region.frame
        applied.save()
    return region
def import_legacy_annotation(a, video_obj, frame, vdn_dataset=None):
    """Import a legacy annotation as a saved Region, resolving its VLabel parent."""
    region = Region()
    region.region_type = Region.ANNOTATION
    region.video = video_obj
    for attr in ('x', 'y', 'h', 'w'):
        setattr(region, attr, a[attr])
    region.vdn_key = a['id']
    if vdn_dataset:
        region.vdn_dataset = vdn_dataset
    label = a['label']
    if label.strip():
        region.label = label
        # VDN imports attribute the label to the dataset; otherwise to the UI.
        if vdn_dataset:
            label_object, created = VLabel.objects.get_or_create(
                label_name=a['label'], source=VLabel.VDN, video=video_obj,
                vdn_dataset=vdn_dataset)
        else:
            label_object, created = VLabel.objects.get_or_create(
                label_name=a['label'], source=VLabel.UI, video=video_obj)
        region.label_parent = label_object
    region.frame = frame
    region.full_frame = a['full_frame']
    region.metadata_text = a['metadata_text']
    region.metadata_json = a['metadata_json']
    region.save()
    return region
def test_add_new_region_to_admin_list(self):
    """Adding a region to the admin list grows the list and redlists it."""
    add_admin_list_item('region', 10000050, current_user=self.admin)
    refreshed = get_admin_list('region', current_user=self.admin)
    self.assertEqual(len(refreshed['info']), 1)
    self.assertTrue(Region.get(10000050).redlisted)
def create_region(a, video_obj, vdn_dataset, event_to_pk=None):
    """Build (without saving) a Region from a serialized annotation dict."""
    region = Region()
    region.video = video_obj
    for attr in ('x', 'y', 'h', 'w'):
        setattr(region, attr, a[attr])
    region.vdn_key = a['id']
    region.metadata_text = a['metadata_text']
    region.metadata_json = a['metadata_json']
    region.materialized = a.get('materialized', False)
    region.png = a.get('png', False)
    region.region_type = a['region_type']
    region.confidence = a['confidence']
    region.object_name = a['object_name']
    region.full_frame = a['full_frame']
    # Remap the exporter's event id onto the local Event primary key.
    if a.get('event', None):
        region.event_id = event_to_pk[a['event']]
    region.parent_frame_index = a['parent_frame_index']
    region.parent_segment_index = a.get('parent_segment_index', -1)
    if vdn_dataset:
        region.vdn_dataset = vdn_dataset
    return region
def import_frame_regions_json(regions_json, video, event_id):
    """
    Import regions from a JSON list whose frames are identified by immutable
    identifiers (filename/path for datasets, frame index for videos).

    :param regions_json: list of region dicts; each carries a 'target' of
        'filename' or 'index' plus geometry and metadata fields.
    :param video: Video instance the regions belong to.
    :param event_id: event the created regions are attributed to.
    :return: None; the regions are bulk-created.
    :raises ValueError: on an unknown 'target' value.
    """
    video_id = video.pk
    filename_to_pk = {}
    frame_index_to_pk = {}
    if video.dataset:
        # For dataset frames are identified by subdir/filename
        filename_to_pk = {
            df.original_path(): (df.pk, df.frame_index)
            for df in Frame.objects.filter(video_id=video_id)
        }
    else:
        # For videos frames are identified by frame index
        frame_index_to_pk = {
            df.frame_index: (df.pk, df.segment_index)
            for df in Frame.objects.filter(video_id=video_id)
        }
    regions = []
    for k in regions_json:
        r = Region()
        if k['target'] == 'filename':
            fname = k['filename']
            if not fname.startswith('/'):
                fname = '/{}'.format(fname)
            pk, findx = filename_to_pk[fname]
            r.frame_id = pk
            r.frame_index = findx
        elif k['target'] == 'index':
            pk, sindx = frame_index_to_pk[k['frame_index']]
            r.frame_id = pk
            r.frame_index = k['frame_index']
            r.segment_index = sindx
        else:
            raise ValueError('invalid target: {}'.format(k['target']))
        r.video_id = video_id
        r.event_id = event_id
        r.region_type = k['region_type']
        r.materialized = k.get('materialized', False)
        r.full_frame = k.get('full_frame', False)
        r.x = k['x']
        r.y = k['y']
        r.w = k['w']
        r.h = k['h']
        r.metadata = k['metadata']
        r.text = k['text']
        # BUG FIX: the region was built but never collected, so bulk_create
        # always received an empty list and nothing was ever imported.
        regions.append(r)
    Region.objects.bulk_create(regions, 1000)
def create_region(a, video_obj, vdn_dataset, event_to_pk=None):
    """Construct an unsaved Region from its exported dict representation."""
    da = Region()
    da.video = video_obj
    # Required fields copied verbatim from the export.
    for key in ('x', 'y', 'h', 'w', 'metadata_text', 'metadata_json',
                'region_type', 'confidence', 'object_name', 'full_frame',
                'parent_frame_index'):
        setattr(da, key, a[key])
    da.vdn_key = a['id']
    da.materialized = a.get('materialized', False)
    da.png = a.get('png', False)
    da.parent_segment_index = a.get('parent_segment_index', -1)
    # Translate the exporter's event id into a local primary key.
    if a.get('event', None):
        da.event_id = event_to_pk[a['event']]
    if vdn_dataset:
        da.vdn_dataset = vdn_dataset
    return da
def import_region(a, video_obj, frame, detection_to_pk, vdn_dataset=None):
    """Import one exported region dict, save it, and record detection pks."""
    region = Region()
    region.video = video_obj
    for key in ('x', 'y', 'h', 'w'):
        setattr(region, key, a[key])
    region.vdn_key = a['id']
    region.metadata_text = a['metadata_text']
    region.metadata_json = a['metadata_json']
    region.region_type = a['region_type']
    region.confidence = a['confidence']
    region.object_name = a['object_name']
    region.full_frame = a['full_frame']
    if vdn_dataset:
        region.vdn_dataset = vdn_dataset
    region.frame = frame
    region.save()
    # Detections are tracked so later objects can reference them by pk.
    if region.region_type == Region.DETECTION:
        detection_to_pk[a['id']] = region.pk
    return region
def create_annotation(form, object_name, labels, frame):
    """Save a Region annotation from a validated form and apply its labels."""
    annotation = Region()
    annotation.object_name = object_name
    if form.cleaned_data['high_level']:
        # Full-frame annotations carry no bounding box.
        annotation.full_frame = True
        annotation.x, annotation.y = 0, 0
        annotation.h, annotation.w = 0, 0
    else:
        annotation.full_frame = False
        annotation.x = form.cleaned_data['x']
        annotation.y = form.cleaned_data['y']
        annotation.h = form.cleaned_data['h']
        annotation.w = form.cleaned_data['w']
    annotation.metadata_text = form.cleaned_data['metadata_text']
    annotation.metadata_json = form.cleaned_data['metadata_json']
    annotation.frame = frame
    annotation.video = frame.video
    annotation.region_type = Region.ANNOTATION
    annotation.save()
    # Every non-blank label becomes an AppliedLabel sourced from the UI.
    for raw in labels:
        name = raw.strip()
        if name:
            applied = AppliedLabel()
            applied.video = annotation.video
            applied.frame = annotation.frame
            applied.region = annotation
            applied.label_name = name
            applied.source = applied.UI
            applied.save()
def create_region(a, video_obj, vdn_dataset):
    """Construct an unsaved Region from its exported dict.

    :param a: exported region dict.
    :param video_obj: Video the region belongs to.
    :param vdn_dataset: optional VDN dataset to attribute the region to.
    :return: the populated, unsaved Region.
    """
    da = Region()
    da.video = video_obj
    da.x = a['x']
    da.y = a['y']
    da.h = a['h']
    da.w = a['w']
    da.vdn_key = a['id']
    da.metadata_text = a['metadata_text']
    da.metadata_json = a['metadata_json']
    da.region_type = a['region_type']
    da.confidence = a['confidence']
    da.object_name = a['object_name']
    da.full_frame = a['full_frame']
    da.parent_frame_index = a['parent_frame_index']
    if 'parent_segment_index' in a:
        # BUG FIX: this previously overwrote parent_frame_index with the
        # segment index; assign the value to parent_segment_index instead.
        da.parent_segment_index = a['parent_segment_index']
    if vdn_dataset:
        da.vdn_dataset = vdn_dataset
    return da
def topic_interest(region_id, unit: str, search: str = None,
                   start: datetime = None, end: datetime = None,
                   sum: bool = False, topic_limit=100, lw: float = 0,
                   vw: float = 0, cw: float = 0, rw: float = 1, dw: float = 0):
    """Aggregate per-tag interest weights for a region over a date window.

    Collects per-video stats published within [start, end], clusters them by
    tag/date, and emits up to topic_limit topic entries with their categories.
    The lw/vw/cw/rw/dw factors weight likes/views/comments/rank/dislikes.

    :raises ValueError: on an invalid unit.
    """
    if unit not in ['week', 'day', 'month', 'year']:
        raise ValueError("Invalid unit value")
    region = Region.get(Region.region_id == region_id)
    result = {
        'id': region.region_id,
        'name': region.name,
        'topic': [],
        'geo': {
            'lat': region.lat,
            'lon': region.lon
        }
    }
    if end is None:
        end = datetime.now()
        # end = datetime(year=end.year, month=end.month, day=end.day, hour=end.hour)
    if start is None:
        start = end - relativedelta(days=unit_value[unit] + 2)
    videos = Video.select().where((Video.published >= start) & (Video.published <= end))
    # for v in videos:
    statistic = Stats.select().where((Stats.trending_region == region) & Stats.video.in_(videos))
    stats = []
    for s in statistic:
        v = s.video
        if 'data' not in s.stats:
            continue
        sub_stats = s.stats['data']
        t = pd.DataFrame(sub_stats)
        v.tags = extract_video_unique_keyword(v)
        t['video'] = v
        stats.append(t)
    if len(stats) == 0:
        return result
    df = pd.concat(stats, axis=0)
    df['date'] = pd.to_datetime(df['date'])
    df.set_index('date')
    df = df[(df['date'] > start) & (df['date'] < end)]
    tag_data = cluster_stats_date(df, unit)
    total_weight = 0
    # Keep only tags of a plausible length (filters noise and long phrases).
    for key, data in tag_data.items():
        if len(key) > 3 and len(key) < 30:
            df = pd.DataFrame(data)
            df['norm_view'] = df['view'] / df['view'].sum()
            # df['weight'] = (df['like'] + df['dislike'])/df['view'] + ((101-df['rank'])*1000)*df['norm_view']
            df['weight'] = (101 - df['rank']) * rw + (
                (df['comment'] * cw) + (df['view'] * vw) + (df['like'] * lw)
                - (df['dislike'] * dw)) / df['view']
            interest_weight = df['weight'].mean()
            total_weight += interest_weight
            result['topic'].append({
                'tag': key,
                # NOTE(review): 'stats' here is the full per-video DataFrame
                # list, not this tag's `data` — looks like a bug; confirm.
                'stats': stats,
                'category': list(set(df['category'].tolist())),
            })
    result['topic'] = result['topic'][:topic_limit]
    # NOTE(review): topic entries are dicts, so key=lambda x: x[1] raises at
    # runtime; likely intended key=lambda x: x['weight'] (with the weight
    # stored per topic) and sorting BEFORE the topic_limit slice — confirm.
    result['topic'].sort(key=lambda x: x[1], reverse=True)
    return result
def import_frame_json(f, frame_index, event_id, video_id):
    """Build an unsaved Frame plus its unsaved Regions from frame JSON."""
    frame = Frame()
    frame.video_id = video_id
    frame.event_id = event_id
    frame.frame_index = frame_index
    frame.name = f['path']
    regions = []
    for r in f.get('regions', []):
        region = Region()
        region.frame_index = frame_index
        region.video_id = video_id
        region.event_id = event_id
        region.object_name = r['object_name']
        region.region_type = r.get('region_type', Region.ANNOTATION)
        region.full_frame = r.get('full_frame', False)
        # Geometry defaults to the origin with zero size when absent.
        for attr in ('x', 'y', 'w', 'h'):
            setattr(region, attr, r.get(attr, 0))
        region.confidence = r.get('confidence', 0.0)
        region.text = r.get('text', None)
        region.metadata = r.get('metadata', None)
        regions.append(region)
    return frame, regions
def create_annotation(form, object_name, labels, frame):
    """Save a Region annotation; non-blank labels become RegionLabel rows."""
    annotation = Region()
    annotation.object_name = object_name
    if form.cleaned_data['high_level']:
        # High-level annotations span the whole frame; no bounding box.
        annotation.full_frame = True
        annotation.x = annotation.y = 0
        annotation.h = annotation.w = 0
    else:
        annotation.full_frame = False
        annotation.x = form.cleaned_data['x']
        annotation.y = form.cleaned_data['y']
        annotation.h = form.cleaned_data['h']
        annotation.w = form.cleaned_data['w']
    annotation.text = form.cleaned_data['text']
    annotation.metadata = form.cleaned_data['metadata']
    annotation.frame = frame
    annotation.video = frame.video
    annotation.region_type = Region.ANNOTATION
    annotation.save()
    for raw in labels:
        if raw.strip():
            # Labels live in the shared "UI" label set.
            label, _ = Label.objects.get_or_create(name=raw, set="UI")
            link = RegionLabel()
            link.video = annotation.video
            link.frame = annotation.frame
            link.region = annotation
            link.label = label
            link.save()
def import_regions_json(regions_json, video_id, event_id):
    """Import regions whose frames are identified by filename.

    NOTE(review): incomplete implementation — each built Region is never
    appended to `regions`, so bulk_create receives an empty list, and the
    trailing `raise NotImplementedError` makes the function unusable as-is.
    """
    # Map frame names to primary keys for the given video.
    fname_to_pk = {
        df.name: df.pk
        for df in Frame.objects.filter(video_id=video_id)
    }
    regions = []
    for k in regions_json:
        if k['target'] == 'filename':
            r = Region()
            r.frame_id = fname_to_pk[k['filename']]
            r.video_id = video_id
            r.event_id = event_id
            r.x = k['x']
            r.y = k['y']
            r.w = k['w']
            r.h = k['h']
            r.metadata = k['metadata']
            r.text = k['text']
        else:
            # Only the 'filename' target is supported here.
            raise ValueError
    Region.objects.bulk_create(regions, 1000)
    raise NotImplementedError
def view_region(environ):
    """Render all regions as HTML <option> elements, led by a blank choice."""
    options = [u'<option value="">---</option>']
    for entry in Region.all():
        options.append(u'<option value="{0}">{1}</option>'.format(entry.id, entry.name))
    return HttpResponse(u''.join(options))
def get_region(region_id):
    """Fetch one region's detail via the Region client and return it as JSON."""
    detail = Region().get_region(kwargs={'id': region_id})
    return json.dumps(detail)
def import_region(a, video_obj, frame, detection_to_pk, vdn_dataset=None):
    """Import and save a region dict; resolve its label and track detections."""
    region = Region()
    region.video = video_obj
    for key in ('x', 'y', 'h', 'w'):
        setattr(region, key, a[key])
    region.vdn_key = a['id']
    region.metadata_text = a['metadata_text']
    region.metadata_json = a['metadata_json']
    region.region_type = a['region_type']
    region.confidence = a['confidence']
    region.object_name = a['object_name']
    region.full_frame = a['full_frame']
    if vdn_dataset:
        region.vdn_dataset = vdn_dataset
    if 'label' in a and a['label'].strip():
        region.label = a['label']
        # VDN-sourced labels are attributed to the dataset, UI otherwise.
        if vdn_dataset:
            label_object, created = VLabel.objects.get_or_create(
                label_name=a['label'], source=VLabel.VDN, video=video_obj,
                vdn_dataset=vdn_dataset)
        else:
            label_object, created = VLabel.objects.get_or_create(
                label_name=a['label'], source=VLabel.UI, video=video_obj)
        region.label_parent = label_object
    region.frame = frame
    region.save()
    # Remember detection pks so later importers can reference them.
    if region.region_type == Region.DETECTION:
        detection_to_pk[a['id']] = region.pk
    return region
def create_region(self, a):
    """Build an unsaved Region for self.video from an exported dict."""
    region = Region()
    region.video_id = self.video.pk
    for key in ('x', 'y', 'h', 'w', 'text', 'metadata', 'region_type',
                'confidence', 'object_name', 'full_frame', 'frame_index'):
        setattr(region, key, a[key])
    region.png = a.get('png', False)
    # Exported event ids are remapped onto local Event primary keys.
    region.event_id = self.event_to_pk[a['event']]
    region.segment_index = a.get('segment_index', -1)
    return region
def create_annotation(form, object_name, labels, frame):
    """Save a user annotation Region for ``frame`` from a validated form,
    then save one AppliedLabel per non-blank entry in ``labels``."""
    annotation = Region()
    annotation.object_name = object_name
    if form.cleaned_data['high_level']:
        # Frame-level annotation: no bounding box.
        annotation.full_frame = True
        annotation.x = annotation.y = annotation.h = annotation.w = 0
    else:
        annotation.full_frame = False
        for coord in ('x', 'y', 'h', 'w'):
            setattr(annotation, coord, form.cleaned_data[coord])
    annotation.text = form.cleaned_data['text']
    annotation.metadata = form.cleaned_data['metadata']
    annotation.frame = frame
    annotation.video = frame.video
    annotation.region_type = Region.ANNOTATION
    annotation.save()
    for raw in labels:
        name = raw.strip()
        if not name:
            continue
        applied = AppliedLabel()
        applied.video = annotation.video
        applied.frame = annotation.frame
        applied.region = annotation
        applied.label_name = name
        applied.source = applied.UI
        applied.save()
def import_legacy_detection(d, video_obj, frame, vdn_dataset=None):
    """Save a legacy-format detection dict ``d`` as a Region and return it."""
    detection = Region()
    detection.region_type = Region.DETECTION
    detection.video = video_obj
    for axis in ('x', 'y', 'h', 'w'):
        setattr(detection, axis, d[axis])
    detection.frame = frame
    detection.confidence = d['confidence']
    detection.object_name = d['object_name']
    detection.metadata_json = d['metadata']
    if vdn_dataset:
        # NOTE(review): copies the video's vdn_dataset rather than the
        # vdn_dataset argument (unlike import_region) — presumably
        # intentional for legacy imports, but confirm.
        detection.vdn_dataset = detection.video.vdn_dataset
    detection.vdn_key = d['id']
    detection.save()
    return detection
def create_region(self, a):
    """Build (but do not save) a Region for ``self.video`` from dict ``a``,
    accepting both current and legacy key names for text/metadata and
    frame/segment indices."""
    region = Region()
    region.video_id = self.video.pk
    region.x, region.y = a['x'], a['y']
    region.h, region.w = a['h'], a['w']
    region.vdn_key = a['id']
    # Legacy payloads used 'metadata_text' / 'metadata_json'.
    if 'text' in a:
        region.text = a['text']
    elif 'metadata_text' in a:
        region.text = a['metadata_text']
    if 'metadata' in a:
        region.metadata = a['metadata']
    elif 'metadata_json' in a:
        region.metadata = a['metadata_json']
    region.materialized = a.get('materialized', False)
    region.png = a.get('png', False)
    region.region_type = a['region_type']
    region.confidence = a['confidence']
    region.object_name = a['object_name']
    region.full_frame = a['full_frame']
    if a.get('event', None):
        region.event_id = self.event_to_pk[a['event']]
    # Prefer the parent indices when present.
    region.frame_index = a['parent_frame_index'] if 'parent_frame_index' in a else a['frame_index']
    region.segment_index = a.get('parent_segment_index', a.get('segment_index', -1))
    return region
def get_regions_list():
    """Render the template with the full list of regions.

    The template context key must stay ``list`` (the template references it
    by that name), but the local variable no longer shadows the builtin.
    """
    regions = Region().get_regions()
    return render_template('template.html', list=regions)
def api_import_hosts():
    """Import hosts from a CSV payload posted in the request form.

    Expects form fields: region_id, jump_host_id, software_profile_id, and
    data_list (CSV text whose first row is the header).  Runs in three
    phases: (1) validate the selected region/jump host/software profile and
    the CSV header, (2) validate every data row and auto-create any regions
    named in the rows, (3) create or update each host.  Returns a JSON
    {'status': ...} — 'OK' on success, otherwise an error description.
    """
    region_id = int(request.form['region_id'])
    jump_host_id = int(request.form['jump_host_id'])
    software_profile_id = int(request.form['software_profile_id'])
    data_list = request.form['data_list']
    db_session = DBSession()
    # -1 means the user never picked a region at all.
    if region_id == -1:
        return jsonify({'status': 'Region has not been specified.'})
    # Re-verify the selected records still exist (they may have been
    # deleted by another user since the page was loaded).
    if region_id > 0:
        region = get_region_by_id(db_session, region_id)
        if region is None:
            return jsonify({'status': 'Region is no longer exists in the database.'})
    if jump_host_id > 0:
        jump_host = get_jump_host_by_id(db_session, jump_host_id)
        if jump_host is None:
            return jsonify({'status': 'Jump Host is no longer exists in the database.'})
    if software_profile_id > 0:
        software_profile = get_software_profile_by_id(db_session, software_profile_id)
        if software_profile is None:
            return jsonify({'status': 'Software Profile is no longer exists in the database.'})
    error = []
    reader = csv.reader(data_list.splitlines(), delimiter=',')
    header_row = next(reader)
    # header_row: ['hostname', 'location', 'roles', 'ip', 'username', 'password', 'connection', 'port']
    # Check mandatory data fields
    if HEADER_FIELD_HOSTNAME not in header_row:
        error.append('"hostname" is missing in the header.')
    if HEADER_FIELD_IP not in header_row:
        error.append('"ip" is missing in the header.')
    if HEADER_FIELD_CONNECTION not in header_row:
        error.append('"connection" is missing in the header.')
    # Reject any header field that is not one of the known columns.
    for header_field in header_row:
        if header_field not in HEADER_FIELDS:
            error.append('"' + header_field + '" is not a correct header field.')
    if error:
        return jsonify({'status': '\n'.join(error)})
    error = []
    data_list = list(reader)
    # Cache of region name -> id, extended below as new regions are created.
    region_dict = get_region_name_to_id_dict(db_session)
    # Check if each row has the same number of data fields as the header
    row = 2  # 1-based line numbers for error messages; row 1 is the header.
    for row_data in data_list:
        if len(row_data) != len(header_row):
            error.append('line {} has wrong number of data fields - {}.'.format(row, row_data))
        else:
            hostname = get_acceptable_string(get_row_data(row_data, header_row, HEADER_FIELD_HOSTNAME))
            if is_empty(hostname):
                error.append('line {} has invalid hostname - {}.'.format(row, row_data))
            # Validate the connection type
            connection_type = get_row_data(row_data, header_row, HEADER_FIELD_CONNECTION)
            if is_empty(connection_type) or connection_type not in [ConnectionType.TELNET, ConnectionType.SSH]:
                error.append('line {} has a wrong connection type (should either be "telnet" or "ssh") - {}.'.format(row, row_data))
            region_name = get_acceptable_string(get_row_data(row_data, header_row, HEADER_FIELD_REGION))
            if region_name is not None:
                # No blank region is allowed
                if len(region_name) == 0:
                    error.append('line {} has no region specified - {}.'.format(row, row_data))
                else:
                    if region_name not in region_dict.keys():
                        # Create the new region
                        try:
                            region = Region(name=region_name, created_by=current_user.username)
                            db_session.add(region)
                            db_session.commit()
                            # Add to region dictionary for caching purpose.
                            region_dict[region_name] = region.id
                        except Exception as e:
                            logger.exception('api_import_hosts() hit exception')
                            error.append('Unable to create region {} - {}.'.format(region_name, e.message))
        row += 1
    if error:
        return jsonify({'status': '\n'.join(error)})
    # Import the data
    row = 2
    for row_data in data_list:
        try:
            created_by = current_user.username
            hostname = get_acceptable_string(get_row_data(row_data, header_row, HEADER_FIELD_HOSTNAME))
            # Check if the host already exists in the database.
            host = get_host(db_session, hostname)
            region_name = get_acceptable_string(get_row_data(row_data, header_row, HEADER_FIELD_REGION))
            # A per-row region overrides the region selected on the form.
            if region_name is None:
                alternate_region_id = region_id
            else:
                alternate_region_id = region_dict[region_name]
            # For each optional column: a missing value on an existing host
            # retains the host's current value instead of blanking it.
            location = get_row_data(row_data, header_row, HEADER_FIELD_LOCATION)
            if host and location is None:
                location = host.location
            roles = get_row_data(row_data, header_row, HEADER_FIELD_ROLES)
            if host and roles is None:
                roles = host.roles
            host_or_ip = get_row_data(row_data, header_row, HEADER_FIELD_IP)
            if host and host_or_ip is None:
                host_or_ip = host.connection_param[0].host_or_ip
            connection_type = get_row_data(row_data, header_row, HEADER_FIELD_CONNECTION)
            if host and connection_type is None:
                connection_type = host.connection_param[0].connection_type
            username = get_row_data(row_data, header_row, HEADER_FIELD_USERNAME)
            if host and username is None:
                username = host.connection_param[0].username
            password = get_row_data(row_data, header_row, HEADER_FIELD_PASSWORD)
            if host and password is None:
                password = host.connection_param[0].password
            enable_password = get_row_data(row_data, header_row, HEADER_FIELD_ENABLE_PASSWORD)
            if host and enable_password is None:
                enable_password = host.connection_param[0].enable_password
            port_number = get_row_data(row_data, header_row, HEADER_FIELD_PORT)
            if host and port_number is None:
                port_number = host.connection_param[0].port_number
            # If no software profile is selected, retain the existing one instead of overwriting it.
            if host and (software_profile_id is None or software_profile_id <= 0):
                alternate_software_profile_id = host.software_profile_id
            else:
                alternate_software_profile_id = software_profile_id
            # If no jump host is selected, retain the existing one instead of overwriting it.
            if host and (jump_host_id is None or jump_host_id <= 0):
                alternate_jump_host_id = host.connection_param[0].jump_host_id
            else:
                alternate_jump_host_id = jump_host_id
            create_or_update_host(db_session, hostname, alternate_region_id, location, roles,
                                  alternate_software_profile_id, connection_type, host_or_ip,
                                  username, password, enable_password, port_number,
                                  alternate_jump_host_id, created_by, host)
        except Exception as e:
            # Abort the whole import on the first failing row.
            return jsonify({'status': 'Line {} - {} - {}.'.format(row, e.message, row_data)})
        row += 1
    return jsonify({'status': 'OK'})
def trending_topic(region_id, unit: str, search: str = None, start: datetime = None,
                   end: datetime = None, sum: bool = False, topic_limit=100,
                   lw: float = 1, vw: float = 1, cw: float = 1, rw: float = 1,
                   dw: float = 1):
    """Compute the trending topics for a region over a time window.

    :param region_id: id of the Region to query
    :param unit: window-size key into ``unit_value`` (used when ``start`` is None)
    :param search: optional substring/tsquery filter on tags
    :param start: window start; defaults to ``end - (unit_value[unit] + 2)`` days
    :param end: window end; defaults to today (midnight)
    :param sum: unused; kept for interface compatibility
    :param topic_limit: maximum number of topics returned
    :param lw, vw, cw, rw, dw: like/view/comment/rank/dislike weights
    :return: dict with region id/name/geo and a ``topic`` list sorted by weight
    """
    today = datetime.now()
    today = datetime(year=today.year, month=today.month, day=today.day)
    if end is None:
        end = today
    if start is None:
        start = end - relativedelta(days=unit_value[unit] + 2)
    print(start, end)
    region = Region.get(Region.region_id == region_id)
    result = {
        'id': region.region_id,
        'name': region.name,
        'topic': [],
        'geo': {
            'lat': region.lat,
            'lon': region.lon
        }
    }
    daily_trends = DailyTrend.select().where((DailyTrend.time >= start)
                                             & (DailyTrend.time <= end)
                                             & (DailyTrend.region == region))
    if search is not None and len(search) > 0:
        # Full-text filter pushed into SQL on the jsonb metrics column.
        exp = NodeList([
            SQL("jsonb_message_to_tsvector("), DailyTrend.metrics,
            SQL(") @@ '{}'".format(search))
        ], glue='')
        daily_trends = daily_trends.where(exp)
    print('size', len(daily_trends))
    daily_metrics = []
    for trend in daily_trends:
        stats = []
        for metric in trend.metrics:
            m_ = metric['stats']
            m_['tag'] = metric['tag'].replace('#', '')
            m_['date'] = trend.time
            # [-1] marks "no category" so the groupby aggregation still works.
            if 'category' not in metric:
                m_['category'] = [-1]
            else:
                m_['category'] = metric['category']
            stats.append(m_)
        df = pd.DataFrame(stats)
        if len(df) > 0:
            daily_metrics.append(df)
    if end >= today:
        # Today's data is not in DailyTrend yet; merge in the live cache.
        from cache import LatestTrend
        try:
            trend = LatestTrend.get(LatestTrend.region_id == region_id)
            today_stats = trend.metrics
        except Exception:  # narrowed from bare except: cache miss is expected
            today_stats = []
        stats = []
        for metric in today_stats:
            m_ = metric['stats']
            m_['tag'] = metric['tag'].replace('#', '')
            m_['date'] = today
            if 'category' not in metric:
                m_['category'] = [-1]
            else:
                m_['category'] = metric['category']
            stats.append(m_)
        if len(stats):
            df = pd.DataFrame(stats)
            if len(df) > 0:
                daily_metrics.append(df)
    print('m size', len(daily_metrics))
    if len(daily_metrics) > 0:
        df = pd.concat(daily_metrics, axis=0)
        if search is not None and len(search) > 0:
            df = df.loc[df['tag'].str.contains(search, regex=False)]
        # (removed a no-op `df.set_index('tag')` whose result was discarded;
        # making it effective would break the column-based groupbys below)
        df = df.drop(columns=["date"])
        if 'category' in df.columns:
            f2 = lambda x: [z for y in x for z in y]  # flatten category lists
            f1 = lambda x: ', '.join(x.dropna())
            d = dict.fromkeys(
                df[['tag', 'category']].columns.difference(['tag', 'category']), f1)
            d['category'] = f2
            df1 = df.groupby('tag', as_index=False).agg(d)
            df2 = df[['tag', 'rank', 'view', 'comment', 'like',
                      'dislike']].groupby(['tag'], as_index=False).mean()
            df = pd.concat([df1.set_index('tag'), df2.set_index('tag')],
                           axis=1, join='inner').reset_index()
        else:
            df = df.groupby(['tag'], as_index=False).mean()
        # Weight: rank bonus plus engagement normalized by views.
        df['weight'] = (101 - df['rank']) * rw + (
            (df['view']) * vw + (df['comment']) * cw + (df['like']) * lw -
            (df['dislike'] * dw)) / df['view']
        topics = df.to_dict(orient='records')
        topics.sort(key=lambda x: x['weight'], reverse=True)
        result['topic'] = []
        for t in topics[:topic_limit]:
            e = {
                'tag': t['tag'],
                'weight': t['weight'],
                'rank': t['rank'],
                'view': t['view'],
                'like': t['like'],
                # Fix: was t['like'] — dislike counts were reported as likes.
                'dislike': t['dislike'],
                'comment': t['comment']
            }
            if 'category' in t:
                e['category'] = list(set(t['category']))
            result['topic'].append(e)
    return result