Example #1
def register(request):
    if request.method == 'POST':
        username = request.POST['username']
        password = request.POST['password1']
        domain = request.POST['domain']
        print "=== User: "******" password: ", password, 'domain: ', domain

        new_user = User.objects.create_user(username, '', password)

        # create hash for new user
        hasher = hashlib.md5()
        hasher.update(username + domain)
        h = hasher.hexdigest()
        site = Site(user=new_user, hash=h, domain=domain)
        site.save()
        new_user.save()

        return login_to_page(request, username, password)

    else:
        form = RegistrationForm()
        return render_to_response(
            'registration/registration_form.html',
            {'form' : form},
            context_instance=RC(request, {}),
        )
Example #2
	def delete(self, site_id = None):
		site = Site.query.filter_by(id = site_id).first()
		if site: 
			Site.delete(site.id)
			return jsonify(ApiObjects(site.json()))
		else:
			return jsonify(ApiObjects())
Example #3
def _create_example_site(user):
    site = Site(
        name='Example Site',
        owner = user,
        users = [user],
        admins = [user],
        example = True,
    )
    site.put()

    for v in [4, 5]:
        name = 'Html%d Example' % v
        style = Style(site=site, name=name)
        style.put()
        rev = StyleRevision(parent=style, rev=0)
        rev.put()
        rev.update(render_template('examples/blog-html%d.css' % v))
        style.published_rev = rev
        rev = StyleRevision(parent=style, rev=1)
        rev.put()
        rev.update(render_template('examples/blog-html%d-preview.css' % v))
        style.preview_rev = rev
        style.put()
        page = Page(site=site, name=name, url=url_for('example%d' % v, page_key=0, _external=True), _styles=[style.key()])
        page.put()
        page.url = url_for('example%d' % v, page_key=page.key(), _external=True)
        page.put()
        page.queue_refresh()
Example #4
 def add_site(self):
     self.template_path = 'site_add.html'
     
     # get the site code
     site_code = self.request.get('site_code').strip()
     self.vars['site_code'] = site_code
     
     # return if there is no site_code, or we are not a post
     if (not site_code) or self.request.method != 'POST' :
         return
                 
     # check that the site_code is valid
     if not Site.is_code_valid( site_code ):
         self.vars['site_code_error'] = "site code is not valid"
         return
     
     # check that the site_code is not already taken
     if Site.is_code_taken( site_code ):
         self.vars['site_code_error'] = "site code already exists"
         return
     
     # ok to create
     site = Site( site_code=site_code, owner=self.person )
     site.put()
     
     # create the first shared secret
     SharedSecret.new_for_site( site )
     
     self.redirect('/site/' + site_code )
Example #5
def load_json_file(filename):
    site = Site()
    rv = site.load_json(filename, True)
    if rv is None:
        print 'Error', filename
    else :
        print filename
Example #6
def getcurrentsite(http_post, path_info, query_string):
    """ Returns the site id and the page cache key based on the request.
    """
    url = u'http://%s/%s' % (smart_unicode(http_post.rstrip('/')), \
      smart_unicode(path_info.lstrip('/')))
    pagecachekey = '%s?%s' % (smart_unicode(path_info), \
      smart_unicode(query_string))
    hostdict = hostcache_get()

    if not hostdict:
        hostdict = {}
    if url not in hostdict:
        default, ret = None, None
        for site in Site.objects.all():
            if url.startswith(site.url):
                ret = site
                break
            if not default or site.default_site:
                default = site
        if not ret:
            if default:
                ret = default
            else:
                # Somebody is requesting something, but the user didn't create
                # a site yet. Creating a default one...
                ret = Site(name='Default Feedjack Site/Planet', \
                  url='www.feedjack.org', \
                  title='Feedjack Site Title', \
                  description='Feedjack Site Description. ' \
                    'Please change this in the admin interface.')
                ret.save()
        hostdict[url] = ret.id
        hostcache_set(hostdict)

    return hostdict[url], pagecachekey
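
getcurrentsite relies on two cache helpers, hostcache_get and hostcache_set, that are not shown in this example. A minimal sketch of what they could look like, assuming Django's cache framework (the cache key name is hypothetical, and the original project may implement them differently):

from django.core.cache import cache

HOST_CACHE_KEY = 'feedjack.hostdict'  # hypothetical cache key

def hostcache_get():
    # return the cached {url: site_id} mapping, or None on a cache miss
    return cache.get(HOST_CACHE_KEY)

def hostcache_set(hostdict):
    # store the {url: site_id} mapping for subsequent requests
    cache.set(HOST_CACHE_KEY, hostdict)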
Example #7
def RetriveSongsBySite(site_name):
    # if you don't pass anything in, it gets all the sites
    if site_name == '':
        site = Site.all().fetch(150)
    else:
        site = Site.all().filter('name =', site_name).fetch(1)
        
    return site
Example #8
    def get_site():
        try:
            site = Site.objects.get(name=EbaySpider.name)
        except DoesNotExist as e:
            logging.info(">>>> Creating site")
            site = Site()
            site.name = EbaySpider.name

        return site
Example #9
def rewrite_result(result):
    '''\
    Rewrites the HTML in this result (question, answers and comments) so
    links to other StackExchange sites that exist in Stackdump are rewritten,
    links elsewhere are decorated with a CSS class, and all images are replaced
    with a placeholder.
    
    The JSON must have been decoded first.
    '''
    app_url_root = settings.APP_URL_ROOT
    
    # get a list of all the site base URLs
    sites = list(Site.select())
    sites_by_urls = dict([ (s.base_url, s) for s in sites ])
    
    # rewrite question
    question = result.get('question')
    if question:
        question['body'] = _rewrite_html(question.get('body'), app_url_root, sites_by_urls)
        for c in question.get('comments', [ ]):
            c['text'] = _rewrite_html(c.get('text'), app_url_root, sites_by_urls)
    
    # rewrite answers
    answers = result.get('answers')
    if answers:
        for a in answers:
            a['body'] = _rewrite_html(a.get('body'), app_url_root, sites_by_urls)
            for c in a.get('comments', [ ]):
                c['text'] = _rewrite_html(c.get('text'), app_url_root, sites_by_urls)
Example #10
def view_question(site_key, question_id, answer_id=None):
    context = { }
    
    try:
        context['site'] = Site.selectBy(key=site_key).getOne()
    except SQLObjectNotFound:
        raise HTTPError(code=404, output='No site exists with the key %s.' % site_key)
    
    # get the question referenced by this question id
    query = 'id:%s siteKey:%s' % (question_id, site_key)
    results = solr_conn().search(query)
    if len(results) == 0:
        raise HTTPError(code=404, output='No question exists with the ID %s for the site, %s.' % (question_id, context['site'].name))
    
    decode_json_fields(results)
    retrieve_users(results)
    retrieve_sites(results)
    
    result = results.docs[0]
    convert_comments_to_html(result)
    if settings.REWRITE_LINKS_AND_IMAGES:
        rewrite_result(result)
    sort_answers(result)
    context['result'] = result

    context['answer_id'] = answer_id
    
    return render_template('question.html', context)
Example #11
def login_recovery(site, host, netloc, csrf):
    """
    Recovery post url
    """

    # Logout
    session = request.environ.get('beaker.session')
    session['logged_in'] = False
    session['user_id_logged_in'] = None

    # POST parameters
    email = request.POST.getunicode('email')

    # User
    try:
        user = User.get(User.email == email)
    except User.DoesNotExist:
        return dict(status=False, info='Usuário não encontrado')

    # Site
    try:
        site = Site.get(Site.user == user)
    except Site.DoesNotExist:
        return dict(status=False, info='Site não encontrado')

    # Send recovery email
    send_recovery_email(host, DEFAULT_SENDER, user)

    # Return OK
    return dict(status=True, info='Solicitado senha com sucesso. Acesse seu email para recuperar sua senha.')
Example #12
def latest_obs_and_forecast(site_id):
    result = memcache.get(site_id, "site_latest")
    if result:
        return result

    site = Site.get_by_key_name(site_id)
    if site is None:
        return None

    obs = ObservationTimestep.find_latest_by_site(site, limit=6)
    result = None

    if len(obs) > 0:
        forecasts = ForecastTimestep.find_by_site_closest_by_date(site, first(obs).observation_datetime,
                                                                  limit=50)
        closest_forecast = first(forecasts)
        if closest_forecast:
            matching_obs = first(filter(lambda o: o.observation_datetime == closest_forecast.forecast_datetime, obs))
            matching_forecasts = ifilter(lambda f: f.forecast_datetime == closest_forecast.forecast_datetime, forecasts)
            if matching_obs:
                #finally have both... a single obs report and multiple forecasts

                obs_dict = to_dict_excl_sites(matching_obs)
                obs_dict['best_forecast'] = map(to_dict_excl_sites,  make_five_day_list(matching_forecasts))
                result = {
                    'site': site.to_dict(),
                    'observation': obs_dict
                }
                memcache.set(site_id, result, 60 * 60, namespace='site_latest')

    return result
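
latest_obs_and_forecast assumes a first() helper (alongside itertools.ifilter for the forecasts) that is not shown here. A minimal sketch, under the assumption that it simply returns the first element or a default:

def first(iterable, default=None):
    # return the first element of an iterable, or default when it is empty
    for item in iterable:
        return item
    return default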
Example #13
	def seed_node(site_id = None, alias = None, latitude = None, longitude = None):
		if site_id:
			site = Site.query.filter_by(id = site_id).first()
		else:
			site = Site.create(name = 'Techrice site {}'.format(uuid4().hex))
		
		if not alias: 
			alias = 'Techrice node {}'.format(uuid4().hex)
		node = Node.create(name = alias, site = site, latitude = latitude, longitude = longitude)
		

		solar = Sensor.create(node = node, sensortype = SensorType.query.filter_by(name = 'solar voltage').first(), name = 'vsol')
		battery = Sensor.create(node = node, sensortype = SensorType.query.filter_by(name = 'battery voltage').first(), name = 'vbat')
		temperature = Sensor.create(node = node, sensortype = SensorType.query.filter_by(name = 'DHT11 temperature').first(), name = 'temperature')
		humidity = Sensor.create(node = node, sensortype = SensorType.query.filter_by(name = 'DHT11 humidity').first(), name = 'humidity')
		sonar = Sensor.create(node = node, sensortype = SensorType.query.filter_by(name = 'sonar HC SR-04').first(), name = 'sonar')

		header = Header.get_header(node_id = node.id)
		print header

		return {
			'node': 'name: {}, id: {}, longitude: {}, latitude: {}'.format(node.name, node.id, node.longitude, node.latitude),
			'sensors': map(lambda s: 'name: {}, id: {}'.format(s.name, s.id), node.sensors),
			'header' : header
			}
Example #14
def forecast_update2(site_key):
    site = Site.get_by_key_name(site_key)
    if site is None:
        return Response(status = 404)

    forecast_url = "http://www.metoffice.gov.uk/public/data/PWSCache/BestForecast/Forecast/%s?format=application/json" % site_key

    result = urlfetch.fetch(forecast_url)
    if result.status_code == 200:
        forecast = parse_forecast(result.content)
        issued_date = parse_date(forecast["@dataDate"])
        for date, data in timesteps(forecast):
            forecast_timestep = ForecastTimestep.find_by_site_and_dates(site, date, issued_date)
            if forecast_timestep is None:
                forecast_timestep = ForecastTimestep(site = site, forecast_datetime = date, issued_datetime = issued_date, forecast_date = date.date())

                for k,v in data.items():
                    prop_name = snake_case(k)
                    if hasattr(forecast_timestep, prop_name):
                        if v == "missing":
                            v = None
                        setattr(forecast_timestep, prop_name, v)

                forecast_timestep.save()

    return Response(status = 204)
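
forecast_update2 (and the observation handler further down) map JSON keys onto model properties with a snake_case helper that is not shown. A hedged sketch of one possible implementation, assuming plain camel-case keys:

import re

def snake_case(name):
    # e.g. 'FeelsLikeTemperature' -> 'feels_like_temperature' (assumed behaviour)
    s = re.sub(r'(.)([A-Z][a-z]+)', r'\1_\2', name)
    return re.sub(r'([a-z0-9])([A-Z])', r'\1_\2', s).lower()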
Example #15
 def site_update(self, request):
     """Update info a site"""
     entity = Site.site_update(request)
     if entity:
         return entity
     else:
         raise endpoints.NotFoundException()
Example #16
    def get(self):

        sites = Site.all()

        context = {
            'sites': sites,
        }

        sites_for_output = {}
    
        # loop over the sites
        for site in sites:
            # and store each one in the output variable
            site_for_output = {
                "url": site.url,
                "issues": site.issue_set.count(),
            }
            sites_for_output[site.name] = site_for_output
    
        # create the JSON object we're going to return
        json = simplejson.dumps(sites_for_output, sort_keys=False)

        # serve the response with the correct content type
        #self.response.headers['Content-Type'] = 'application/json'
        # write the json to the response
        self.response.out.write(json)
Example #17
def observation_update2(site_key):
    site = Site.get_by_key_name(site_key)
    if site is None:
        return Response(status = 404)

    url = "http://www.metoffice.gov.uk/public/data/PWSCache/BestForecast/Observation/%s?format=application/json" % site_key

    result = urlfetch.fetch(url)
    if result.status_code == 200:
        observations = parse_observation(result.content)
#        issued_date = parse_date(forecast["@dataDate"])
        for date, data in timesteps(observations):
            obs_timestep = ObservationTimestep.get_by_site_and_datetime(site, date)
            if obs_timestep is None:
                obs_timestep = ObservationTimestep(site = site, observation_datetime = date, observation_date = date.date())

                for k,v in data.items():

                    prop_name = snake_case(k)
                    if hasattr(obs_timestep, prop_name):
                        if v == "missing":
                            v = None
                        elif prop_name == 'temperature':
                            v = float(v)
                        setattr(obs_timestep, prop_name, v)

                obs_timestep.save()
            #logging.info("%s, %s" % (str(date), str(ObservationTimestep)))

    return Response(status = 204)
Example #18
def user_recovery_page(site, host, netloc, csrf, user_id, user_hash):
    """
    User recovery page url
    """

    # Logout
    session = request.environ.get('beaker.session')
    session['logged_in'] = False
    session['user_id_logged_in'] = None

    # User
    try:
        user = User.get(User.id == user_id, User.user_hash == user_hash)
    except User.DoesNotExist:
        return dict(site=site, host=host, csrf=csrf, recovery=False)

    # Site
    try:
        site = Site.get(Site.user == user)
    except Site.DoesNotExist:
        return dict(site=site, host=host, csrf=csrf, recovery=False)

    # Verify actived user and actived site
    if (not user.active) or (not site.active):
        return dict(site=site, host=host, csrf=csrf, recovery=False)

    # Login
    session['logged_in'] = True
    session['user_id_logged_in'] = user.get_id()

    # Return OK
    return dict(site=site, host=host, csrf=csrf, recovery=True, user=user)
Example #19
def add_site():
    g.site = Site.get_by_hostname(request.host, app.config.get("DOMAIN_ROOT"))
    if g.site is None:
        if not app.config.get("DOMAIN_ROOT"):
            return _("Config is missing a DOMAIN_ROOT: the domain name of your " "main site.")
        if not app.config.get("ADMINS"):
            return _("Config is missing ADMINS: used for login to your " "first site and recipient of error messages.")
        name = app.config.get("ROOT_SITE_NAME", _("Your new site"))
        description = _(
            "This is your new site. This is also your root site, " "and this can be used to create other sites."
        )

        root_site, created = Site.objects.get_or_create(
            domain=app.config.get("DOMAIN_ROOT"),
            defaults={
                "name": name,
                "description": "<h1>%s</h1><p>%s</p>" % (name, description),
                "owner_email": None,
                "verified_email": True,
            },
        )
        port = app.config.get("PORT", None)
        url = "//%s" % root_site.domain
        if port:
            url += ":%d" % port
        if created:
            return redirect(url)
        else:
            return redirect("%s%s" % (url, url_for("sites")))

    g.user = session.get("username", None)
    if "menu" in g.site.active_modules:
        g.menu, created = Menu.objects.get_or_create(site=g.site.domain)
    else:
        g.menu = None
Example #20
def forecast_update(site_key):
    site = Site.get_by_key_name(site_key)
    if site is None:
        return Response(status = 404)

    forecast_url = "http://www.metoffice.gov.uk/public/data/PWSCache/BestForecast/Forecast/%s?format=application/json" % site_key

    result = urlfetch.fetch(forecast_url)
    if result.status_code == 200:
        forecast = parse_forecast(result.content)
        issued_date = parse_date(forecast["@dataDate"])
        for date, day in days(forecast):
            forecast_day = ForecastDay.get_by_key_name(make_key_name(site,date))
            if forecast_day is None:
                forecast_day = ForecastDay(key_name=make_key_name(site,date), forecast_date = date, site = site)
            forecast_day.site = site
            for timestep, data in day_timesteps(day):
                w = Forecast()
                w.issued = issued_date
                for k,v in data.items():
                    prop_name = snake_case(k)
                    if hasattr(w, prop_name):
                        if v == "missing":
                            v = None
                        setattr(w, prop_name, v)

                forecast_day.forecasts.add(timestep,w)

            forecast_day.save()
        site.save()
    return Response(status = 204)
Example #21
 def site_delete(self, request):
     """Delete site"""
     entity = Site.get_by_id(request.id)
     if entity:
         entity.key.delete()
         return BaseResponse(messages="")
     else:
         raise endpoints.NotFoundException()
Example #22
def create(request):
	url = request.GET.get('url', None)
	
	if url and url[:4] != 'http':
		url = 'http://' + url
	
	if url:
		try:
			site = Site()
			site.create(url=url)
		except ValueError, e:
			return HttpResponse(e)
		
		return HttpResponse(
			simplejson.dumps({'hash': site.hash}),
			content_type = 'application/javascript; charset=utf8'
		)
Example #23
	def post(self):
		parser = reqparse.RequestParser(bundle_errors = True)
		parser.add_argument('name', type=str, location='form', required=True, help='<str> name required')
		args = parser.parse_args()
		site = Site.create(name = args['name'])
		if site:
			return jsonify(ApiObjects(site.json()))
		else:
			return jsonify(ApiObjects())
Example #24
 def _tx():
     site = Site.get_by_key_name(loc["id"])
     if site is None:
         site = Site(key_name=loc["id"])
     site.location = GeoPt(lat = loc["location"][0], lon = loc["location"][1])
     site.name = loc["name"]
     site.region = loc["region"]
     site.save()
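
_tx is written in the style of an App Engine datastore transaction function; a hedged sketch of how it might be invoked with the old db API (loc is assumed to already be in scope, as it is in the closure above):

from google.appengine.ext import db

# run the closure atomically against the datastore
db.run_in_transaction(_tx)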
Example #25
    def get(self):
        "Ping each site in turn and record the response"
        # get all the sites we're going to ping
        sites = Site.all()

        # loop over all of the current sites
        for site in sites:
            # ping each site
            ping_site(site)

        self.redirect("/")
Example #26
def site_new(request, task_id):
    user = request.user
    task = Task.objects.get(id=task_id)
    if task.user != user and not user.has_perm('apps.list_all_task'):
        raise PermissionDenied
    if request.method == 'POST':
        new_site = Site()
        new_site.task = task
        #post_site = json.loads(request.POST['site'])
        form = SiteForm(request.POST, instance=new_site)
        if form.is_valid():
            new_site = form.save()
            return HttpResponseRedirect(reverse("site", args=(task.pk, new_site.pk)))
    else:
        form = SiteForm()
    return render_to_response('site.html', {
        'post_url':reverse("site_new", args=(task.pk,)),
        'form': form,
        'task': task
    }, RequestContext(request))
Example #27
    def get(self):
        
        query = self.request.get("q")
        
        if not query:
            sites = Site.all()
        else:
            sites = Site.gql("WHERE url=:1", "http://%s" % query)

        context = {
            'sites': sites,
            'query': query,
        }

        # prepare the context for the template
        # calculate the template path
        path = os.path.join(os.path.dirname(__file__), 'templates',
            'index.html')
        # render the template with the provided context
        self.response.out.write(template.render(path, context))
Example #28
def leave_site():
    site_id = int(request.form.get('site_id', -1))
    site = Site.get_or_404(site_id)
    user = users.get_current_user()
    them = [u for u in site.users if u.user_id() == user.user_id()]
    for user in them:
        site.users.remove(user)
        if user in site.admins:
            site.admins.remove(user)
    site.put()
    return 'OK'
Example #29
def index():
    user = users.get_current_user()
    if user:
        if not UserSettings.has_seen_example() and GqlQuery('SELECT __key__ FROM Site WHERE owner=:1 AND example=true', user).count(1) == 0:
            _create_example_site(user)
            UserSettings.mark_example_as_seen()
        sites = Site.gql('WHERE users=:1 ORDER BY name', user)
        site_form = PageForm()
        page_form = PageForm()
        return render_template('index.html', sites=sites, new_site_form=site_form, page_form=page_form)
    else:
        return home()
Example #30
    def save_to_db(self, dic):
        assert all(map(dic.has_key, ['title', 'original_price', 'price', 'detail', 'url'])),\
            "Information incomplete."
        
        url = dic['url']
        original_price = dic['original_price'].text.encode('utf8')
        price = dic['price'].text.encode('utf8')
        title = dic['title'].text # title is unicode
        detail = dic['detail'].renderContents(encoding='utf8')
        detail = utils.clean_detail(detail, self.home_url)
            
        # Data formatting & validation.
        try:
            original_price, price = map(lambda s: int(re.search(r'(\d+)', s).group()),
                                        [original_price, price])
        except TypeError:
            logging.error("Price conversion failed. Detailed info: %s", [original_price, price])
            return
        except AttributeError:
            logging.error("Regex failed on %s", [original_price, price])
            return
        
        if len(title) > 500 or len(title) < 10:
            logging.error("Title length too short or too long : %s", title)
            return
        
        if len(detail) < 20:
            logging.error("Detail too short. %s", detail)
            return

        # Save to db.
        try:
            site = Site.select(Site.q.url == self.home_url)
            assert(site.count() == 1), "%s not found or dups." % self.home_url
            
            title = utils.lstrip(title, [s.decode('utf8') for s in ('今日团购', '今日精选', ':')])
            title = title.strip()
            title='[%s] %s' % (site[0].name, title)
            
            city_name = self.index_urls[url]
            city = City.select(City.q.name == city_name.decode('utf8'))
            assert city.count() == 1, "%s not found or dups." % city_name
            cityID = city[0].id
            
            if Deal.select(AND(Deal.q.title == title, Deal.q.cityID == cityID)).count() > 0:
                logging.info("Title dups %s" % title)
                return
            deal = Deal(url=url, title=title, price=price, originalPrice=original_price,
                        detail=detail.decode('utf8'),cityID=cityID, siteID=site[0].id)
            logging.info('%s OK', url)
        except:
            # Simple handling for the moment.
            logging.error("Error occured while saving data : %s", sys.exc_info())
Example #31
    def test_gather_negative_sites(self):

        p = Protein(refseq='NM_007',
                    sequence='X---------X------------YXY--------')
        g = Gene(isoforms=[p], preferred_isoform=p)

        # one-based
        s = Site(position=11,
                 types={SiteType(name='methylation')},
                 residue='X',
                 protein=p)

        db.session.add_all([g, p, s])

        negative_sites = gather_negative_sites(residues={'X'}, exclude={s})

        # zero-based
        assert negative_sites == {NegativeSite(p, 0), NegativeSite(p, 24)}
Example #32
def forecast_import(site_id):
    site = Site.get_by_key_name(site_id)
    if site is None:
        return Response(status = 404)
    today = date.today()
    url = "http://metofficewatch.appspot.com/sites/%s/forecasts?day=%s" % (site_id, today.isoformat())

    result = urlfetch.fetch(url)

    if result.status_code == 200:
        forecasts = json.loads(result.content)
        forecast_day = ForecastDay.get_by(site, today, not_found_return_new = True)
        forecast_day.forecasts = Forecasts.from_json(forecasts['forecasts'])
        forecast_day.lastdata_datetime = parse_date(forecasts['lastdata_datetime']) if forecasts['lastdata_datetime'] is not None else None

        forecast_day.save()

    return Response(status = 204)
Example #33
def observation_import(site_id):
    site = Site.get_by_key_name(site_id)
    if site is None:
        return Response(status = 404)
    today = date.today()
    url = "http://metofficewatch.appspot.com/sites/%s/observations?day=%s" % (site_id, today.isoformat())

    result = urlfetch.fetch(url)

    if result.status_code == 200:
        obs = json.loads(result.content)
        obs_day = ObservationDay.get_by(site, today, not_found_return_new = True)
        obs_day.observations = Observations.from_json(obs['observations'])
        obs_day.lastdata_datetime = parse_date(obs['lastdata_datetime'])

        obs_day.save()

    return Response(status = 204)
Example #34
def site_search(site_key):
    context = { }
    # the template uses this to allow searching on other sites
    context['sites'] = get_sites()
    
    try:
        context['site'] = Site.selectBy(key=site_key).getOne()
    except SQLObjectNotFound:
        raise HTTPError(code=404, output='No site exists with the key %s.' % site_key)
    
    # perform the search limited by this site
    search_context = perform_search(site_key)
    if not search_context:
        raise HTTPError(code=500, output='Invalid query attempted.')
    
    context.update(search_context)
    
    return render_template('site_results.html', context)
Example #35
    def parser(line):

        refseq, position, residue, kinases_str, pmid, mod_type = line

        site_kinase_names = filter(bool, kinases_str.split(','))

        site_kinases, site_groups = get_or_create_kinases(
            site_kinase_names, known_kinases, known_groups)

        site = Site(position=int(position),
                    residue=residue,
                    pmid=pmid,
                    protein=proteins[refseq],
                    kinases=list(site_kinases),
                    kinase_groups=list(site_groups),
                    type=mod_type)

        sites.append(site)
Example #36
def cadastrarTribunal():
    form = FormularioCadastroTribunal()
    if form.validate_on_submit():
        tribunal_novo = Tribunal(nome=form.nome.data,
                                 sigla=form.sigla.data,
                                 codigo=form.codigo.data)
        site_novo = Site(grau=form.grau_1.data,
                         url=form.site_1.data,
                         tribunal=tribunal_novo)
        # print(tribunal_novo)
        # print(site_novo)
        db.session.add(tribunal_novo)
        db.session.add(site_novo)
        db.session.commit()
        flash("Tribunal cadastrado com sucesso!")
        return redirect(url_for('tribunais'))
    return render_template('cadastro_tribunal.html',
                           title='Cadastro',
                           form=form)
Example #37
    def test_sequence(self):

        p = Protein(refseq='NM_007',
                    id=1,
                    sequence='ABCDEFGHIJKLMNOPQRSTUVWXYZ')
        db.session.add(p)

        data = {
            0: '-------ABCDEFGH',
            10: 'DEFGHIJKLMNOPQR',
            25: 'STUVWXYZ-------'
        }

        for position, expected_sequence in data.items():
            site = Site(position=position + 1,
                        type='methylation',
                        residue=p.sequence[position],
                        protein=p)
            assert site.sequence == expected_sequence
Example #38
 def import_sites(modeladmin, request, queryset):
     with open('D:/Users/mcpherro/PycharmProjects/Sites/sites.csv', 'rb') as f:
         reader = csv.reader(f)
         for row in reader:
             country_name = row[2]
             for c in countries:
                 if row[2]==c[1]:
                     country_name = c[0]
             s = Site(id=row[0], site=row[1], country=country_name, data_source=row[3], site_type=row[7], 
                      display=row[8])
             if row[4] != 'NA':
                 s.latitude = row[4]
             if row[5] != 'NA':
                 s.longitude = row[5]
             if row[6] != 'NA':
                 s.altitude = row[6]
             if row[4] != 'NA' and row[5] != 'NA':
                 pnt = django.contrib.gis.geos.GEOSGeometry('POINT(%s %s)' % (row[5], row[4]))
                 s.map_location = pnt
             s.save()
     return None         # Return None to display the change list page again.
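
import_sites looks the CSV country name up in a countries iterable of (code, name) pairs; a minimal stand-in for illustration (the entries are hypothetical):

countries = [
    ('CA', 'Canada'),
    ('GB', 'United Kingdom'),
    ('US', 'United States'),
]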
Example #39
def site_detail(site_id):
    site = Site.get_by_key_name(site_id)
    if site is None:
        return Response(status = 404)

    obs = ObservationTimestep.find_latest_by_site(site = site, limit = 24)
    forecasts = []
    if len(obs) > 0:
        first_obs = first(obs)
        last_obs = last(obs)

        forecasts = ForecastTimestep.find_by_site_between_dates( site = site,
                                                                 from_dt = last_obs.observation_datetime,
                                                                 to_dt = first_obs.observation_datetime)
    return Response(json.dumps({
       'site': site.to_dict(),
       'observations': map(lambda o: o.to_dict(excluding = ['site']), obs),
       'forecasts': map(lambda f: f.to_dict(excluding = ['site']), forecasts)
    }), content_type = "application/json")
Example #40
def write_sites_db(session, sites_list):
    """
    Cache the AGIS site information in the PanDA database
    """
    try:
        _logger.debug("Starting write_sites_db")
        for site in sites_list:
            _logger.debug("Site: {0}".format(site['site_name']))
            session.merge(
                Site(site_name=site['site_name'],
                     role=site['role'],
                     tier_level=site['tier_level']))
        session.commit()
        _logger.debug("Done with write_sites_db")
    except exc.SQLAlchemyError:
        session.rollback()
        _logger.critical(
            'write_sites_db: Could not persist information --> {0}'.format(
                sys.exc_info()))
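
A minimal usage sketch for write_sites_db, assuming a standard SQLAlchemy session factory (the engine URL and the sites_list entries are hypothetical):

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

engine = create_engine('sqlite:///panda_meta.db')  # hypothetical database
Session = sessionmaker(bind=engine)

write_sites_db(Session(), [
    {'site_name': 'SITE_A', 'role': 'production', 'tier_level': 1},
    {'site_name': 'SITE_B', 'role': 'analysis', 'tier_level': 2},
])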
Example #41
def insert_all(s):
    site = Site(domain='evem.gov.si',
                robots_content='Allow: /',
                sitemap_content='<html><p>hello</p></html>')
    s.add(site)
    s.commit()
    site_id = site.id

    now = datetime.datetime.now().date()
    b = bytes("<html></html>", 'utf-8')
    m = hashlib.md5()
    m.update(b)
    hashed = m.digest()
    page = Page(
        site_id=site_id,
        page_type_code='HTML',
        url='https://www.rtvslo.si',  # UNIQUE
        html_content='<div>Hello</div>',
        http_status_code=200,
        accessed_time=now,
        hash=hashed)
    s.add(page)
    s.commit()
    page_id = page.id

    with open('data/chrome.png', "rb") as image_file:
        encoded = base64.b64encode(image_file.read())
    image = Image(page_id=page_id,
                  filename='image.jpeg',
                  content_type='JPEG',
                  data=encoded,
                  accessed_time=now)
    s.add(image)
    s.commit()

    page_data = PageData(page_id=page_id, data_type_code='PDF', data=encoded)
    s.add(page_data)
    s.commit()

    link = Link(from_page=page_id, to_page=page_id)
    s.add(link)
    s.commit()
Example #42
    def test_in_disordered(self):
        methylation = SiteType(name='methylation')

        p = Protein(refseq='NM_007',
                    id=1,
                    disorder_map='10000000001000000000000001',
                    sequence='ABCDEFGHIJKLMNOPQRSTUVWXYZ')
        db.session.add(p)

        sites_in_disordered = {0, 10, 25}
        sites_not_disordered = {1, 9, 11, 24}

        sites = {}

        for position in sites_in_disordered | sites_not_disordered:
            print(position)
            print(len(p.sequence[position]))
            sites[position] = Site(position=position + 1,
                                   types={methylation},
                                   residue=p.sequence[position],
                                   protein=p)

        # Python side

        for position in sites_in_disordered:
            site = sites[position]
            assert site.in_disordered_region

        for position in sites_not_disordered:
            site = sites[position]
            assert not site.in_disordered_region

        # SQL side
        assert {
            site.position - 1
            for site in Site.query.filter_by(in_disordered_region=True)
        } == sites_in_disordered
        assert {
            site.position - 1
            for site in Site.query.filter_by(in_disordered_region=False)
        } == sites_not_disordered
Example #43
def retrieve_sites(results):
    '''\
    Retrieves the site objects associated with the results.
    '''
    # get a list of all the site keys
    site_keys = set()
    for r in results:
        site_keys.add(r['siteKey'])
    
    # retrieve the site objects from the database
    site_objects = Site.select(IN(Site.q.key, list(site_keys)))
    
    # convert results into a dict with site key as the key
    sites = { }
    for s in site_objects:
        sites[s.key] = s
    
    # place site objects into the dict
    for r in results:
        site_key = r['siteKey']
        r['site'] = sites[site_key]
Example #44
def create_test_models():
    protein = Protein(refseq='NM_0001',
                      gene=Gene(name='SOMEGENE'),
                      sequence='ABCD')
    mutation = Mutation(protein=protein, position=1, alt='E')

    MC3Mutation(mutation=mutation,
                cancer=Cancer(code='CAN'),
                samples='Some sample')
    InheritedMutation(
        mutation=mutation,
        clin_data=[ClinicalData(disease=Disease(name='Some disease'))])

    protein_kinase = Protein(refseq='NM_0002',
                             gene=Gene(name='OTHERGENE'),
                             sequence='ABCD')
    kinase = Kinase(name='Kinase name', protein=protein_kinase)
    site = Site(protein=protein, position=1, residue='A', kinases=[kinase])
    protein.sites = [site]

    return locals()
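
Because create_test_models returns locals(), a test can pull any of the constructed objects out of the returned dict; a small usage sketch based only on what the function builds:

models = create_test_models()

protein = models['protein']
site = models['site']

# the site is attached to the first protein and carries the kinase
assert site in protein.sites
assert models['kinase'] in site.kinases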
Example #45
def sites():
    if request.method == 'GET':
        query = Site.query()
        compact_sites = []
        for site in query:
            report = Report.query().filter(
                Report.site == site.key).order(-Report.created_on).fetch(1)
            compact_site = Site.to_compact(site)
            compact_site.update(Report.to_dict(report[0]))
            compact_sites.append(compact_site)
        return jsonify({'siteList': compact_sites}), 200
    elif request.method == 'POST':
        request_body = request.get_json()
        name = request_body.get('name', '')
        url = request_body.get('url', '')
        id_info = get_user_id_info(request)
        email = id_info['email']
        try:
            report_results = PageSpeedInights.run(url)
            user_key = User.query().filter(User.email == email).fetch(
                keys_only=True)[0]
            site = Site(name=name, url=url)
            site.created_by = user_key
            site.last_edited_by = user_key
            site_key = site.put()
            report = Report(
                site=site_key,
                accessibility_score=report_results['accessibility_score'],
                best_practices_score=report_results['best_practices_score'],
                desktop_performance_score=report_results[
                    'desktop_performance_score'],
                mobile_performance_score=report_results[
                    'mobile_performance_score'],
                seo_score=report_results['seo_score'],
                pwa_score=report_results['pwa_score'],
            )
            report.put()
            return jsonify({'success': True}), 200
        except:
            raise Exception('Page Speed Insights API returned an error')
    else:
        raise Exception('Method not supported')
Example #46
def get_rez(url):
    rez = {}
    data = get_data_by_url(url)
    if data is None:
        return None

    rez['head_url'] = get_head_url(url)
    rez['icon'] = data['icon']
    exists_site = db.session.query(
        Site.url).filter_by(url=rez['head_url']).scalar() is not None
    if not exists_site:
        site = Site(prob=0, num=0, url=rez['head_url'], icon=rez['icon'])
        db.session.add(site)
        db.session.commit()
    else:
        site = db.session.query(Site).filter_by(url=rez['head_url']).first()
    text = data['text']
    rez['prob'] = predict(get_vec(text))[0][0]
    rez['url'] = url
    rez['title'] = data['title']
    page = Page(prob=rez['prob'],
                uri=rez['url'],
                site_id=site.id,
                title=rez['title'])
    exists_page = db.session.query(
        Page.uri).filter_by(uri=rez['url']).scalar() is not None
    if not exists_page:
        db.session.add(page)
        db.session.commit()
        db.session.query(Site) \
            .filter_by(url=rez['head_url']) \
            .update({Site.prob: (Site.prob * Site.num + rez['prob']) / (Site.num + 1),
                     Site.num: Site.num + 1})
        db.session.commit()

    rez['site_stat'] = db.session.query(Site).filter_by(
        url=rez['head_url']).first().prob

    return rez
Example #47
def site(id):
    if request.method == 'DELETE':
        try:
            site_key = ndb.Key(urlsafe=id)
            site_key.delete()
            return jsonify({'success': True}), 200
        except:
            raise Exception('Site not found')
    elif request.method == 'GET':
        try:
            site_key = ndb.Key(urlsafe=id)
            site = site_key.get()
            return jsonify(Site.to_compact(site)), 200
        except:
            raise Exception('Site not found')

    elif request.method == 'PATCH':
        # Update the site with provided ID. Only support certain types of
        # update.
        pass
    else:
        raise Exception('Method not supported')
Example #48
    def test_sequence(self):
        methylation = SiteType(name='methylation')

        p = Protein(refseq='NM_007',
                    id=1,
                    sequence='ABCDEFGHIJKLMNOPQRSTUVWXYZ')
        db.session.add(p)

        data = {
            0: '-------ABCDEFGH',
            10: 'DEFGHIJKLMNOPQR',
            25: 'STUVWXYZ-------'
        }

        sites = {}

        for position in data:
            sites[position] = Site(position=position + 1,
                                   types={methylation},
                                   residue=p.sequence[position],
                                   protein=p)

        db.session.add_all(sites.values())
        db.session.commit()

        for position, expected_sequence in data.items():
            site = sites[position]
            # Python side
            assert site.sequence == expected_sequence
            # SQL side
            assert Site.query.filter_by(
                sequence=expected_sequence).one() == site

        sequences = [
            s for (s, ) in db.session.query(Site.sequence).select_from(
                Site).join(Protein)
        ]
        assert set(sequences) == set(data.values())
Example #49
    def add_site(self,
                 refseq,
                 position: int,
                 residue,
                 mod_type,
                 pubmed_ids=None,
                 kinases=None):

        protein = self.proteins[refseq]
        site_key = (protein.id, position, residue)
        site_type = self.site_types_map[mod_type]

        if site_key in self.known_sites:
            site = self.known_sites[site_key]
            created = False
        else:
            site = Site(position=position,
                        residue=residue,
                        protein_id=protein.id)
            self.known_sites[site_key] = site
            created = True

        site.types.add(self.site_types_map[mod_type])
        site.sources.add(self.source)

        if pubmed_ids:
            site.pmid.update(pubmed_ids)

        if kinases:
            site_kinases, site_kinase_groups = get_or_create_kinases(
                kinases, self.known_kinases, self.known_groups)
            site.kinases.update(site_kinases)
            site.kinase_groups.update(site_kinase_groups)

            for kinase_or_group in chain(site_kinases, site_kinase_groups):
                kinase_or_group.is_involved_in.add(site_type)

        return site, created
Example #50
def update_all_sites():
    appengine_cron_header = request.headers.get('X-Appengine-Cron')
    if not appengine_cron_header:
        raise Exception(
            'This is a cron task which can only be called from within Appengine'
        )
    sites = Site.query()
    for site in sites:
        report_results = PageSpeedInights.run(site.url)
        report = Report(
            site=site.key,
            accessibility_score=report_results['accessibility_score'],
            best_practices_score=report_results['best_practices_score'],
            desktop_performance_score=report_results[
                'desktop_performance_score'],
            mobile_performance_score=report_results[
                'mobile_performance_score'],
            seo_score=report_results['seo_score'],
            pwa_score=report_results['pwa_score'],
        )
        report.put()

    return jsonify({'success': True}), 200
Example #51
def list_site_availability(camp_area, start_date, end_date, equipment_type):
    """
    Retrieve the Availability for all Sites in a Camp Area which can host the selected Equipment within a date range
    :param camp_area:
    :param start_date:
    :param end_date:
    :param equipment_type:
    :return:
    """
    data = {
        'mapId': camp_area.map_id,
        'bookingCategoryId': 0,
        'startDate': start_date.isoformat(),
        'endDate': end_date.isoformat(),
        'isReserving': True,
        'getDailyAvailability': True,
        'partySize': 1,
        'equipmentId': equipment_type,
        'subEquipmentId': equipment_type,
        'generateBreadcrumbs': False,
    }
    results = post_json('MAPDATA', data)
    sites_availability = {}
    for entry in results['resourcesOnMap']:
        site = Site(entry['resourceId'], entry['localizedValues'][0]['name'],
                    entry['localizedValues'][0]['description'])
        allowed_equipment = [
            Equipment(e['item1'], e['item2'], None)
            for e in entry['allowedEquipment']
        ]
        availability = [
            SiteAvailability(site, e['availability'], allowed_equipment)
            for e in results['resourceAvailabilityMap'][str(site.resource_id)]
        ]
        sites_availability[site] = availability
    return OrderedDict(
        sorted(sites_availability.items(), key=lambda sa: sa[0].name.zfill(3)))
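
A hedged usage example for list_site_availability; the camp area stand-in, the map id, the equipment id and the dates are all hypothetical values:

from collections import namedtuple
from datetime import date

CampArea = namedtuple('CampArea', ['map_id'])   # stand-in for the real camp area model
camp_area = CampArea(map_id=-2147483648)        # hypothetical map id

availability = list_site_availability(
    camp_area,
    start_date=date(2022, 7, 1),
    end_date=date(2022, 7, 3),
    equipment_type=-32768,                      # hypothetical equipment id
)
for site, entries in availability.items():
    print(site.name, len(entries))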
Example #52
        def wrapper(*args, **kwargs):
            # Request host
            scheme = request.urlparts.scheme
            netloc = request.urlparts.netloc

            # Normalized host
            host = '%s://%s' % (scheme, netloc)
            if netloc.startswith('www.'):
                netloc = netloc[4:]
            normalized_host = 'http://%s' % netloc

            # Get site information
            try:
                site = Site.get(Site.id == 1)
            except Site.DoesNotExist:
                msg = 'Site %s não existe neste servidor' % host
                if json_response:
                    return dict(status=False, info=msg)
                else:
                    abort(404, msg)

            # callback function with site and host arguments injected
            kwargs.update({'site': site, 'host': host, 'netloc': netloc})
            return func(*args, **kwargs)
Example #53
def add_site():
    g.site = Site.get_by_hostname(request.host, app.config.get("DOMAIN_ROOT"))
    if g.site is None:
        if not app.config.get("DOMAIN_ROOT"):
            return _(
                "Config is missing a DOMAIN_ROOT: the domain name of your "
                "main site.")
        if not app.config.get("ADMINS"):
            return _("Config is missing ADMINS: used for login to your "
                     "first site and recipient of error messages.")
        name = app.config.get("ROOT_SITE_NAME", _("Your new site"))
        description = _("This is your new site. This is also your root site, "
                        "and this can be used to create other sites.")

        root_site, created = Site.objects.get_or_create(
            domain=app.config.get("DOMAIN_ROOT"),
            defaults={
                "name": name,
                "description": "<h1>%s</h1><p>%s</p>" % (name, description),
                "owner_email": None,
                "verified_email": True
            })
        port = app.config.get("PORT", None)
        url = "//%s" % root_site.domain
        if port:
            url += ":%d" % port
        if created:
            return redirect(url)
        else:
            return redirect("%s%s" % (url, url_for("sites")))

    g.user = session.get("username", None)
    if "menu" in g.site.active_modules:
        g.menu, created = Menu.objects.get_or_create(site=g.site.domain)
    else:
        g.menu = None
Example #54
    def test_has_motif(self):

        engine = get_engine('bio')
        load_regex_support(engine)

        p = Protein(refseq='NM_007',
                    id=1,
                    sequence='ABCDEFGHIJKLMNOPQRSTUVWXYZ')
        s = Site(position=3, residue='C', protein=p)

        db.session.add(s)
        db.session.commit()

        # Python side
        assert s.has_motif('.{7}C[DX].{6}')
        assert not s.has_motif('.{7}C[XY].{6}')

        # SQL side
        assert Site.query.filter(Site.has_motif('.{7}C[DX].{6}')).one()
        assert not Site.query.filter(Site.has_motif('.{7}C[XY].{6}')).all()
Example #55
def load_activities(filename):
    def load_coordinators(activity_id, coordinators):
        for coordinator in coordinators:
            adder = DBModelAdder()
            adder.start()

            user = User.by_account_id(coordinator)
            print "User %s" % str(user)
            if user is None:
                print "Coordinator (%s) for activity (%s) was not found. Try to update next time." % (
                    str(coordinator), str(activity_id))
                continue
            if Activist.is_exist(adder, activity.id, user.id):
                print "Coordinator (%s) for activity (%s) already exists." % (
                    str(coordinator), str(activity_id))
                adder.done()
                continue

            activist = Activist.coordinator(user.id, activity_id)
            adder.add(activist)
            adder.done()
            print "Added coordinator (%s) for activity (%s)." % (
                str(coordinator), str(activity_id))

    with open(filename) as json_file:
        data = json.load(json_file)
        for item in data["activities"]:
            activity_type = int(item["activity_type"])
            site_api_name = item["site_api_name"]
            tab_name = item["tab_name"]
            title = item["title"]
            description = item["description"]
            meta_post_url = item["meta_post_url"]
            meta_post_title = item["meta_post_title"]
            chat_url = item["chat_url"]
            chat_name = item["chat_name"]

            coordinators = list()
            for coordinator in item["coordinators"]:
                coordinators.append(int(coordinator["account_id"]))

            site = Site.by_api_name(site_api_name)

            pg_session = db_session()
            activity = Activity.by_site_id_and_activity_type(
                site.id, activity_type)
            if activity is not None:
                print "Activity (%s;%s) found" % (str(activity_type),
                                                  site_api_name)

                activity.title = title
                activity.tab_name = tab_name
                activity.description = description
                activity.meta_post_url = meta_post_url
                activity.meta_post_title = meta_post_title
                activity.chat_url = chat_url
                activity.chat_name = chat_name

                pg_session.add(activity)
                pg_session.commit()
                activity_id = activity.id
                pg_session.close()
                load_coordinators(activity_id, coordinators)
                continue

            activity = Activity(site.id, title, description, activity_type,
                                meta_post_url, meta_post_title, chat_url,
                                chat_name, tab_name)
            pg_session.add(activity)
            pg_session.commit()
            activity_id = activity.id
            pg_session.close()
            print "Activity (%s;%s) added with id %s" % (
                str(activity_type), site_api_name, str(activity_id))
            load_coordinators(activity_id, coordinators)
Example #56
def site_by_id(site_id):
    site = Site.get_by_key_name(site_id)
    if site is None:
        return Response(status = 404)
    return json_response(site)
Example #57
def handle_mqtt_message(client, userdata, message):

    dic = {}
    topic = message.topic
    topicEnding = topic.split("/")
    payload = None

    ##################################################################
    ##     Audio Server                                             ##
    ##################################################################
    if 'hermes/audioServer/' in topic:
        # payload could be just wav data, so we don't load the payload here at all
        try:
            dic['siteId'] = topicEnding[2]  #contains siteid
            dic['hermesTopic'] = topicEnding[3]

            if topicEnding[3] == 'playFinished':
                temppayload = json.loads(message.payload.decode('utf-8'))
                if 'sessionId' in temppayload:
                    dic['sessionId'] = temppayload['sessionId']

            dic["hermes"] = HermesComponent.audioServer.value

        except Exception as e:
            print(
                "ERROR: mqtt.on_message - AudioServer {}:".format(
                    topicEnding[3]), e)

    else:

        #these items in here have a payload

        payload = json.loads(message.payload.decode('utf-8'))
        topicEnding = topicEnding[len(topicEnding) - 1]

        ##################################################################
        ##     Hotword                                                  ##
        ##################################################################
        if 'hermes/hotword/' in topic:
            try:
                dic = {
                    "hermes": HermesComponent.hotword.value,
                    "hermesTopic": topicEnding
                }
            except Exception as e:
                print(
                    "ERROR: mqtt.on_message - Hotword {}:".format(topicEnding),
                    e)

        ##################################################################
        ##     ASR                                                      ##
        ##################################################################
        elif 'hermes/asr/' in topic:
            try:
                dic = {
                    "hermes": HermesComponent.asr.value,
                    "hermesTopic": topicEnding
                }
            except Exception as e:
                print("ERROR: mqtt.on_message ASR {}:".format(topicEnding), e)

        ##################################################################
        ##     ERROR / NLU                                              ##
        ##################################################################
        elif 'hermes/error/' in topic:
            try:
                dic = {
                    "hermes": HermesComponent.error.value,
                    "hermesTopic": topicEnding
                }
            except Exception as e:
                print("ERROR: mqtt.on_message ERROR nlu: ", e)

        ##################################################################
        ##     INTENT                                                   ##
        ##################################################################
        elif 'hermes/intent/' in topic:
            try:
                dic = {
                    "hermes": HermesComponent.intent.value,
                    "hermesTopic": topicEnding
                }
            except Exception as e:
                print("ERROR: mqtt.on_message INTENT : ", e)

        ##################################################################
        ##     NLU                                                      ##
        ##################################################################
        elif 'hermes/nlu/' in topic:
            try:
                dic = {
                    "hermes": HermesComponent.nlu.value,
                    "hermesTopic": topicEnding
                }
            except Exception as e:
                print("ERROR: mqtt.on_message NLU {}: ".format(topicEnding), e)

        ##################################################################
        ##     Dialogue Manager                                         ##
        ##################################################################
        elif 'hermes/dialogueManager/' in topic:
            try:
                dic = {
                    "hermes": HermesComponent.dialogueManager.value,
                    "hermesTopic": topicEnding
                }
            except Exception as e:
                print(
                    "ERROR: mqtt.on_message DialogueManager {}: ".format(
                        topicEnding), e)

    #convert payload json data into database fields
    intentSlots = []
    if payload:
        if 'siteId' in payload:
            dic['siteId'] = payload['siteId']
        if 'modelId' in payload:
            dic['modelId'] = payload['modelId']
        if 'sessionId' in payload:
            dic['sessionId'] = payload['sessionId']
        if 'reactivatedFromSessionId' in payload:
            dic['reactivatedFromSessionId'] = payload[
                'reactivatedFromSessionId']
        if 'text' in payload:
            dic['text'] = payload['text']
        if 'customData' in payload:
            dic['customData'] = payload['customData']
        if 'input' in payload:
            dic['intentinput'] = payload['input']
        if 'intentFilter' in payload:
            dic['intentFilter'] = str(payload['intentFilter'])
        if 'sendIntentNotRecognized' in payload:
            dic['sendIntentNotRecognized'] = payload['sendIntentNotRecognized']

        if 'init' in payload:
            # startSession has added info inside an enclosed dict
            dic['startsessiontype'] = payload['init']['type']  #not optional
            if 'text' in payload['init']:
                dic['text'] = payload['init']['text']
            if 'canBeEnqueued' in payload['init']:
                dic['canBeEnqueued'] = payload['init']['canBeEnqueued']
            if 'intentFilter' in payload['init']:
                dic['intentFilter'] = str(payload['init']['intentFilter'])
            if 'sendIntentNotRecognized' in payload['init']:
                dic['sendIntentNotRecognized'] = payload['init'][
                    'sendIntentNotRecognized']

        if 'intent' in payload:
            dic['intent'] = payload['intent']['intentName']
            dic['confidenceScore'] = payload['intent']['confidenceScore']
            if 'slots' in payload:
                for slotitem in payload['slots']:
                    new_slot = Slots(
                        rawValue=slotitem['rawValue'],
                        kind=slotitem['value']['kind'],
                        value=slotitem['value']['value'],
                        entity=slotitem['entity'],
                        slotName=slotitem['slotName'],
                        confidenceScore=slotitem['confidenceScore'])
                    intentSlots.append(new_slot)

        if 'termination' in payload:
            #sessionEnded carries extra info inside an enclosed 'termination' dict
            dic['termination'] = SessionEndedReason[
                payload['termination']['reason']].value  #not optional
            if 'error' in payload['termination']:
                dic['terminationerror'] = payload['termination']['error']

    #check whether the device (siteId) is already known;
    #if it is a new device that the database has not seen yet,
    #we add it
    global devices
    if 'siteId' in dic:
        if not dic['siteId'] in devices:
            new_site = Site(name=dic['siteId'])
            db.session.add(new_site)
            db.session.flush()
            #reload list to get new item ID
            loadSiteDictionary()

        #set the siteId to the INTEGER id field in the db
        dic['siteId'] = devices[dic['siteId']]

    logEntry = Mqttlog(**dic)  #create new db row record

    #db.session.flush()
    if len(intentSlots) > 0:
        for s in intentSlots:
            logEntry.slots_id.append(s)
            #s.mqttlogId = logEntry.id
            #db.session.add(s)

    # commit data
    db.session.add(logEntry)  # add new record
    db.session.commit()  #save database
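
# --- Usage sketch (not part of the original example). The error strings above
# --- suggest this excerpt is the body of a paho-mqtt on_message callback; the
# --- wiring below assumes a handler named on_message, a broker on localhost,
# --- and the paho-mqtt 1.x Client() constructor.
import paho.mqtt.client as mqtt

client = mqtt.Client()
client.on_message = on_message     # the callback whose body is shown above
client.connect("localhost", 1883)
client.subscribe("hermes/#")       # all Hermes topics handled by the callback
client.loop_forever()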
Exemple #58
0
    def gather_siteinfo(self):
        user = app.config['SITEINFO_DATABASE_USER']
        password = app.config['SITEINFO_DATABASE_PASS']
        address = app.config['SITEINFO_DATABASE_HOST']
        DEBUG = True

        # Connect to gather the db list
        con = connect(host=address, user=user, passwd=password)
        curs = con.cursor()

        # find all the databases with a siteinfo table
        find = ("SELECT table_schema, table_name "
                "FROM information_schema.tables "
                "WHERE table_name =  'siteinfo' "
                "OR table_name = 'mdl_siteinfo';")

        curs.execute(find)
        check = curs.fetchall()
        con.close()

        # store the db names and table name in an array to sift through
        db_sites = []
        if len(check):
            for pair in check:
                db_sites.append(pair)

            # for each relevant database, pull the siteinfo data
            for database in db_sites:
                cherry = connect(user=user,
                                 passwd=password,
                                 host=address,
                                 db=database[0])

                # use DictCursor here to get column names as well
                pie = cherry.cursor(DictCursor)

                # Grab the site info data
                pie.execute("select * from `%s`;" % database[1])
                data = pie.fetchall()
                cherry.close()

                # For all the data, shove it into the central db
                for d in data:
                    # what version of moodle is this from?
                    version = d['siterelease'][:3]

                    # what is our school domain? take the protocol
                    # off the baseurl
                    school_re = r'https?://'
                    school_url = re.sub(school_re, '', d['baseurl'])

                    # try to figure out what machine this site lives on
                    if 'location' in d:
                        if d['location'][:3] == 'php':
                            location = 'platform'
                        else:
                            location = d['location']
                    else:
                        location = 'unknown'

                    # get the school
                    school = School.query.filter_by(domain=school_url).first()
                    # if no school exists, create a new one with
                    # name = sitename, district_id = 0 (special 'Unknown'
                    # district)
                    if school is None:
                        school = School(name=d['sitename'],
                                        shortname=d['sitename'],
                                        domain=school_url,
                                        license='')
                        school.district_id = 0
                        db.session.add(school)
                        db.session.commit()

                    # find the site
                    site = Site.query.filter_by(baseurl=school_url).first()
                    # if no site exists, make a new one, school_id = school.id
                    if site is None:
                        site = Site(name=d['sitename'],
                                    sitetype=d['sitetype'],
                                    baseurl='',
                                    basepath='',
                                    jenkins_cron_job=None,
                                    location='')

                    site.school_id = school.id

                    site.baseurl = school_url
                    site.basepath = d['basepath']
                    site.location = location
                    db.session.add(site)
                    db.session.commit()

                    # create new site_details table
                    # site_id = site.id, timemodified = now()
                    now = datetime.datetime.now()
                    site_details = SiteDetail(siteversion=d['siteversion'],
                                              siterelease=d['siterelease'],
                                              adminemail=d['adminemail'],
                                              totalusers=d['totalusers'],
                                              adminusers=d['adminusers'],
                                              teachers=d['teachers'],
                                              activeusers=d['activeusers'],
                                              totalcourses=d['totalcourses'],
                                              timemodified=now)
                    site_details.site_id = site.id

                    # if there are courses on this site, try to
                    # associate them with our catalog
                    if d['courses']:
                        # quick and ugly check to make sure we have
                        # a json string
                        if d['courses'][:2] != '[{':
                            continue

                        """
                        @TODO: create the correct association
                               model for this to work

                        courses = json.loads(d['courses'])
                        associated_courses = []

                        for i, course in enumerate(courses):
                            if course['serial'] != '0':
                                course_serial = course['serial'][:4]
                                orvsd_course = Course.query
                                                     .filter_by(serial=
                                                                course_serial)
                                                     .first()
                                if orvsd_course:
                                    # store this association
                                    # delete this course from the json string
                                    pass

                        # put all the unknown courses back in the
                        # site_details record
                        site_details.courses = json.dumps(courses)
                        """

                        site_details.courses = d['courses']

                    db.session.add(site_details)
                    db.session.commit()
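
# --- Illustration (not in the original source). The protocol-stripping step in
# --- gather_siteinfo above turns a siteinfo baseurl into a bare domain; a minimal
# --- check with a hypothetical URL:
import re

assert re.sub(r'https?://', '', 'https://moodle.example.org/') == 'moodle.example.org/'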
Exemple #59
0
def sites():
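    # App Engine-style datastore query: fetch up to 200 Site entities for the template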
    return render_template("sites.html", sites=Site.all().fetch(limit=200))
Exemple #60
0
    # connect to solr
    print('Connecting to solr...')
    solr = Solr(settings.SOLR_URL)
    # pysolr doesn't try to connect until a request is made, so we'll make a ping request
    try:
        solr._send_request('GET', 'admin/ping')
    except socket.error as e:
        print('Failed to connect to solr - error was: %s' % str(e))
        print('Aborting.')
        sys.exit(2)
    print('Connected.\n')

    # ensure required tables exist
    print("Creating tables if they don't exist...")
    Site.createTable(ifNotExists=True)
    Badge.createTable(ifNotExists=True)
    User.createTable(ifNotExists=True)
    print('Created.\n')

    # SITE INFO
    # only look if they were not specified at the command line; also only if
    # readme.txt exists (they don't in dumps after Aug 2012)
    readme_path = get_file_path(xml_root, 'readme.txt')
    if not (site_name and dump_date) and readme_path:
        # get the site name from the first line of readme.txt. This could be fragile.
        with open(readme_path, 'r') as f:
            site_readme_desc = f.readline().strip()

        # assume if there's a colon in the name, the name part is before, and the date
        # part is after.