def main():
    """Configure and launch the Tornado web application.

    Does not return: once the listen socket is bound, control is handed
    to the IOLoop, which runs until the process is killed.
    """
    starlight.init()
    early_init()

    # Environment-driven configuration.
    dev_mode = os.environ.get("DEV")
    image_host = os.environ.get("IMAGE_HOST", "")

    tornado.options.parse_command_line()

    app = tornado.web.Application(
        dispatch.ROUTES,
        template_path="webui",
        static_path="static",
        image_host=image_host,
        debug=dev_mode,
        is_dev=dev_mode,
        tle=models.TranslationEngine(starlight),
        enums=enums,
        starlight=starlight,
        tlable=webutil.tlable,
        webutil=webutil,
        analytics=analytics.Analytics(),
    )

    # xheaders=1 so the server honors X-Real-Ip/X-Forwarded-For from a proxy.
    server = tornado.httpserver.HTTPServer(app, xheaders=1)
    bind_addr = os.environ.get("ADDRESS", "0.0.0.0")
    bind_port = int(os.environ.get("PORT", 5000))
    server.listen(bind_port, bind_addr)

    print("Current APP_VER:",
          os.environ.get("VC_APP_VER",
                         "1.9.1 (warning: Truth updates will fail in the future if an accurate VC_APP_VER "
                         "is not set. Export VC_APP_VER to suppress this warning.)"))
    print("Ready.")
    tornado.ioloop.IOLoop.current().start()
def setUp(self):
    """Prepare an isolated Analytics instance backed by stubbed services.

    Stands up the App Engine testbed, replaces the BigQuery factory and
    the clock with deterministic fakes, and builds a field-name -> type
    map from the on-disk BigQuery schema.
    """
    # First, create and activate the testbed, which prepares the
    # service stubs for use.
    self.testbed = testbed.Testbed()
    self.testbed.activate()

    # Inject our own instance of bigquery so no real API calls happen.
    self.build_big_query_replacement = ReplaceFunction(
        analytics.Analytics,
        '_build_bigquery_object',
        self.fake_build_bigquery_object)

    # Freeze the clock at "now" so timestamp-dependent behavior is
    # deterministic across the test run.
    self.now = time.time()
    self.time_replacement = ReplaceFunction(time, 'time', lambda: self.now)

    # The object under test.
    self.tics = analytics.Analytics()

    # BUG FIX: the original opened the schema file without closing it,
    # leaking the file handle; use a context manager instead.
    schema_path = os.path.join(
        os.path.dirname(__file__), 'bigquery', 'analytics_schema.json')
    with open(schema_path) as schema_file:
        bqschema = json.load(schema_file)

    # Map each schema field name to its python type via BQ_TYPE_MAP.
    self.bq_field_type = {
        field['name']: self.BQ_TYPE_MAP[field['type']]
        for field in bqschema
    }
def main():
    """Configure and launch the Tornado web application.

    Does not return: once the listen socket is bound, control is handed
    to the IOLoop, which runs until the process is killed.
    """
    early_init()

    # Environment-driven configuration.
    in_dev_mode = os.environ.get("DEV")
    image_server = os.environ.get("IMAGE_HOST", "")
    version = get_ssdb_version()

    application = tornado.web.Application(
        dispatch.ROUTES,
        template_path="webui",
        static_path="static",
        image_host=image_server,
        autoreload=1 if in_dev_mode else 0,
        is_dev=in_dev_mode,
        tle=tl_models.TranslationEngine(
            cached_keyed_db(private_data_path("names.csv")),
            use_satellite=1),
        enums=enums,
        starlight=starlight,
        tlable=endpoints.tlable,
        icon=endpoints.icon,
        audio=endpoints.audio,
        analytics=analytics.Analytics(),
        version=version)

    # xheaders=1 so the server honors X-Real-Ip/X-Forwarded-For from a proxy.
    http_server = tornado.httpserver.HTTPServer(application, xheaders=1)
    addr = os.environ.get("ADDRESS", "0.0.0.0")
    port = int(os.environ.get("PORT", 5000))
    http_server.listen(port, addr)
    print("Ready.")
    # FIX: IOLoop.instance() has been a deprecated alias of current()
    # since Tornado 5; use the supported spelling.
    tornado.ioloop.IOLoop.current().start()
def main():
    """Run the coin-toss analysis and write a textual report.

    Expects exactly one command-line argument: the path of the
    observations file to analyze. The report is saved to
    ``config.report_name``.
    """
    # FIX: the original called bare exit() on bad usage, which exits
    # with status 0 and no explanation. Print a usage hint and exit
    # with a non-zero status instead.
    if len(sys.argv) != 2:
        sys.exit(f"usage: {sys.argv[0]} <observations-file>")

    coins = analytics.Research(sys.argv[1])
    side = analytics.Analytics(coins.read_file())

    side_summ = side.summ()                    # total observations
    side_counts = side.counts()                # [tails, heads] counts
    side_fract = side.fractions(side_counts)   # empirical probabilities
    count_rand = side.count_random(side.predict_random(config.num_of_steps))

    string = f"""Report We have made {side_summ} observations of tossing a coin: {side_counts[0]} \
of them were tails and {side_counts[1]} of them were heads. \
The probabilities are {side_fract[0]} and {side_fract[1]}, respectively. \
Our forecast is that in the next {config.num_of_steps} observations we will have: \
{count_rand[0]} tail and {count_rand[1]} heads."""
    side.save_file(string, config.report_name)
def setUp(self):
    """Prepare an isolated Analytics instance backed by stubbed services."""
    # Stand up the App Engine testbed and activate its service stubs.
    self.testbed = testbed.Testbed()
    self.testbed.activate()

    # Replace the BigQuery factory with our fake so nothing hits the
    # real API during tests.
    self.buildBigQueryReplacement = ReplaceFunction(
        analytics.Analytics,
        '_build_bigquery_object',
        self.fake_build_bigquery_object)

    # Freeze the clock at "now" so timestamp-dependent behavior is
    # deterministic across the test run.
    self.now = time.time()
    self.timeReplacement = ReplaceFunction(time, 'time', lambda: self.now)

    # The object under test.
    self.tics = analytics.Analytics()
def data_file2():
    """Prompt for a JSON file and load it into an Analytics object.

    Side effects: stores the instance on the function object
    (``data_file2.analytics``) and appends a timestamped log line to the
    ``scr3`` text widget.
    """
    fname = tk.filedialog.askopenfilename(
        filetypes=[("JSON", ".json")], defaultextension='.json')
    # BUG FIX: the original tested ``fname is not ''`` — an identity
    # comparison against a str literal, which is not guaranteed to work
    # and raises SyntaxWarning on CPython 3.8+. A truthiness check is
    # correct and also covers a cancelled dialog.
    if fname:
        data_file2.analytics = analytics.Analytics(fname)
        scr3.insert(
            tk.END,
            datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
            + '\n打开文件 ' + fname + '\n\n')
def buildGraph(option, dep):
    """Build the death/department graph for the given selection.

    Returns a ``dcc.Graph`` when both ``option`` and ``dep`` are
    provided; otherwise falls through and returns None (Dash renders
    nothing in that case).
    """
    if option is not None and dep != '':
        # FIX: removed a leftover ``print(option)`` debug statement.
        graph = analytics.Analytics()
        return dcc.Graph(figure=graph.DeathDepGraph(option, dep))
def test_report_pageview(self):
    """A basic pageview report should come back with HTTP 200."""
    tracker = analytics.Analytics(ANALYTICS_ID)
    status = tracker.report_pageview('/unit-test')
    self.assertEqual(status, 200)
def test_report_pageview_extra_info(self):
    """A pageview report with referrer, IP, and user agent should succeed."""
    tracker = analytics.Analytics(ANALYTICS_ID)
    status = tracker.report_pageview(
        '/unit-test', '/', '127.0.0.1', 'Test-Agent')
    self.assertEqual(status, 200)
# -*- coding: utf-8 -*- import dash from dash.dependencies import Input, Output, Event import dash_core_components as dcc import dash_html_components as html import datetime import plotly import consumer import analytics app = dash.Dash(__name__) consumer = consumer.TweetConsumer() analytics = analytics.Analytics() app.layout = html.Div( html.Div([ html.H4('Live Tweet Monitoring'), html.Div(id='live-update-text'), dcc.Graph(id='live-update-graph'), dcc.Interval( id='interval-component', interval=1 * 3000 # in milliseconds ) ])) @app.callback(Output('live-update-text', 'children'), events=[Event('interval-component', 'interval')]) def update_tweet(): print "trying to fetch messages" tweets = consumer.get_tweets(5)