def create_server(self, region_dcid, plan_id, osid, project_name, server_num):
    """Create the requested number of servers on this platform."""
    count = int(server_num)
    for _ in range(count):
        # NOTE(review): project_name is passed for two consecutive
        # positional arguments -- confirm against server.create_server's
        # signature.
        server.create_server(
            self.platform_name,
            self.name,
            region_dcid,
            plan_id,
            osid,
            project_name,
            project_name,
        )
    # Hop back to the GUI thread to refresh the display.
    wx.CallAfter(self.callback_update)
def init_server(host=None, port=None):
    """Start a server on a background thread.

    Returns the bound (host, port) plus the server and thread objects so
    the caller can shut them down later.
    """
    srv = create_server(host, port) if host and port else create_server()
    worker = Thread(target=srv.serve_forever)
    worker.start()
    host, port = srv.server_address
    return host, port, srv, worker
def make_server(config, debug=False):
    """Build the WSGI app and wrap it in proxy/header-rewriting middleware."""
    app = create_server(config, debug)
    # Wrap the WSGI callable: honor proxy headers, drop Date, and
    # advertise the stack via X-Powered-By.
    wrapped = ProxyFix(app.wsgi_app)
    wrapped = HeaderRewriterFix(
        wrapped,
        remove_headers=['Date'],
        add_headers=[('X-Powered-By', 'WSGI')],
    )
    app.wsgi_app = wrapped
    return app
def main(args):
    """Entry point for the CI build bot service.

    Parses command-line options from *args*, wires a rotating log file
    into the Flask app, and serves it on all interfaces.
    """
    parser = argparse.ArgumentParser(
        description='Continuous integration build bot service.')
    parser.add_argument('-p', '--port',
                        help='port for server to start',
                        type=int, default=8080)
    parser.add_argument('-l', '--log-file',
                        help='log file to dump requests payload',
                        type=str, default='log.log')
    # Bug fix: honor the argv passed by the caller instead of silently
    # re-reading sys.argv (the `args` parameter was previously ignored).
    args = parser.parse_args(args)
    port = args.port
    log_file = args.log_file

    app = server.create_server(tc, phab, slackbot, cirrus,
                               db_file_no_ext=db_file_no_ext)

    formatter = logging.Formatter(
        '[%(asctime)s] %(levelname)s in %(module)s: %(message)s')
    # Rotate the request log so it never grows unbounded.
    file_handler = RotatingFileHandler(log_file, maxBytes=10000, backupCount=1)
    file_handler.setFormatter(formatter)
    app.logger.addHandler(file_handler)

    app.run(host="0.0.0.0", port=port)
def server_setup():
    """Start the app server on a daemonized background thread.

    A daemon thread lets the interpreter exit without joining the
    server loop.
    """
    instance = server.create_server()
    worker = Thread(target=instance.serve_forever)
    # Fix: Thread.setDaemon() is deprecated since Python 3.10;
    # assign the daemon attribute directly instead.
    worker.daemon = True
    worker.start()
def setUp(self):
    """Create mocked collaborators and a Flask test client around them."""
    self.phab = test.mocks.phabricator.instance()
    self.slackbot = test.mocks.slackbot.instance()
    self.teamcity = test.mocks.teamcity.instance()
    self.travis = test.mocks.travis.instance()
    app = server.create_server(
        self.teamcity,
        self.phab,
        self.slackbot,
        self.travis,
        test.mocks.fixture.MockJSONEncoder,
    )
    self.app = app.test_client()
async def testBalancing(self):
    """Requests keep succeeding when traffic fails over between servers."""
    s1 = create_server(['127.0.0.1:9902'])
    s2 = create_server(['127.0.0.1:9903'])
    # A single channel balancing over both backend addresses.
    self.channel = aiogrpc.insecure_channel(
        'ipv4:///127.0.0.1:9902,127.0.0.1:9903', loop=self.loop)
    self.stub = TestServiceStub(self.channel)

    s1.start()
    try:
        result = await self.stub.NormalMethod(StandardRequest(name='test1'))
        self.assertEqual(result.message, 'test1')
    finally:
        s1.stop(None)

    s2.start()
    try:
        result = await self.stub.NormalMethod(StandardRequest(name='test1'))
        self.assertEqual(result.message, 'test1')
    finally:
        # Bug fix: the second round must stop s2 (s1 was already
        # stopped above); the original stopped s1 twice and leaked s2.
        s2.stop(None)
def test():
    """End-to-end smoke test of the key/value server over a client."""
    kv_server = create_server()
    worker = Thread(target=kv_server.serve_forever)
    worker.start()
    host, port = kv_server.server_address
    client = KeyValueClient(host, port)

    # Store is empty: lookups return None.
    test_values = client.get_keys(["test"])
    assert test_values is None, f"Should be empty: {test_values}"

    # Insert one key and read it back.
    test_data = {"test": "data"}
    assert client.set_keys(test_data), "Can't set keys"
    response_1 = client.get_keys(["test"])
    assert response_1 == test_data, f"Data not equal: {response_1} != {test_data}"

    # A lookup including a missing key yields None.
    response_2 = client.get_keys(["test", "test2"])
    assert response_2 is None, f"Should be empty: {response_2}"

    # Add a second key; both are now retrievable together.
    test_data["test2"] = ["array"]
    assert client.set_keys(test_data), "Can't set keys"
    response_3 = client.get_keys(["test", "test2"])
    assert response_3 == test_data, f"Data not equal: {response_3} != {test_data}"

    # Delete the first key; querying it again yields None.
    test_data.pop("test")
    assert client.delete_keys(["test"]), "Can't delete keys"
    response_4 = client.get_keys(["test", "test2"])
    assert response_4 is None, f"Should be empty: {response_4}"

    # The remaining key is still present.
    response_5 = client.get_keys(["test2"])
    assert response_5 == test_data, f"Data not equal: {response_5} != {test_data}"

    # Key enumeration matches the surviving keys.
    response_6 = client.get_all_keys()
    assert response_6 == list(
        test_data), f"Data not equal: {response_6} != {test_data}"

    print("Tests OK")
    kv_server.shutdown()
    worker.join()
def __init_server(self):
    """Create the webhook HTTP server and configure its access controls."""
    listen_ip = self.__cfg['webhook_listen_ip']
    listen_port = int(self.__cfg['webhook_listen_port'])
    self.__server = server.create_server(listen_ip, listen_port)
    server.set_payload_handler(self.__receive_payload)

    allowed_networks = self.__cfg['webhook_allowed_networks']
    if not allowed_networks:
        # Refuse to run wide open: an allow-list is mandatory.
        logger.fatal('No webhook/allowed_networks defined')
        sys.exit(1)
    server.set_allowed_networks(
        [ip.strip() for ip in allowed_networks.split(',')])
def main():
    """Run the thermostat worker thread alongside its control server."""
    home_thermostat = Thermostat(update_server)
    thermostat_server = create_server(home_thermostat.update_set_temperature)
    try:
        thermostat_thread = Thread(target=home_thermostat.run)
        thermostat_thread.start()
        # Blocks until the server exits or raises.
        thermostat_server.run(host='0.0.0.0')
    except Exception as e:
        print('An Exception Occured!')
        # Bug fix: Exception has no .message attribute in Python 3
        # (printing the exception itself shows the same text).
        print(e)
    finally:
        home_thermostat.shutdown()
def setUp(self):
    """Boot a local Cloud Tasks server and point a gRPC client at it."""
    self._server = create_server("localhost", 9022, 10123)
    self._server.start()
    time.sleep(1)  # give the server a moment to bind before connecting
    channel = grpc.insecure_channel("127.0.0.1:9022")
    transport = CloudTasksGrpcTransport(channel=channel)
    self._client = CloudTasksClient(
        transport=transport,
        client_options=ClientOptions(api_endpoint="127.0.0.1:9022"))
    self._parent = self._client.location_path('[PROJECT]', '[LOCATION]')
def setUp(self):
    """Reset the output directory and build a test client over mocks."""
    shutil.rmtree(self.test_output_dir, ignore_errors=True)
    os.makedirs(self.test_output_dir, exist_ok=True)

    self.phab = test.mocks.phabricator.instance()
    self.slackbot = test.mocks.slackbot.instance()
    self.teamcity = test.mocks.teamcity.instance()
    self.cirrus = test.mocks.cirrus.instance()

    app = server.create_server(
        self.teamcity,
        self.phab,
        self.slackbot,
        self.cirrus,
        db_file_no_ext=self.db_file_no_ext,
        jsonEncoder=test.mocks.fixture.MockJSONEncoder,
    )
    self.app = app.test_client()
def run():
    """Windows entry point: set up logging, player backends, and the server."""
    config = server.load_config("swisher.conf")
    current_dir = winstart.find_current_dir()

    # Redirect all output to a log file next to the install directory.
    log_path = os.path.dirname(current_dir) + "\\swisher-log.txt"
    log = server.Logger(open(log_path, "a"))
    sys.stdout = log
    sys.stderr = log

    webcontrolx = webcontrol.create_factory(config)
    spotifyx = spotify.create_factory(config)
    itunesx = itunesplayer.create_factory(config)
    players = [webcontrolx, spotifyx, itunesx]

    instance = server.create_server(current_dir, config, players)
    winstart.run(instance, [])
def test_default_queue_name(self):
    """A server created with a default queue exposes it via list_queues."""
    server = create_server("localhost", 9023, 10124,
                           "projects/[P]/locations/[L]/queues/[Q]")
    server.start()
    try:
        time.sleep(1)  # let the server bind before connecting
        transport = CloudTasksGrpcTransport(
            channel=grpc.insecure_channel("127.0.0.1:9023"))
        client = CloudTasksClient(
            transport=transport,
            client_options=ClientOptions(api_endpoint="127.0.0.1:9023"))
        queues = list(client.list_queues(parent="projects/[P]/locations/[L]"))
        self.assertEqual(len(queues), 1)
        queue = queues[0]
        self.assertEqual(queue.name, "projects/[P]/locations/[L]/queues/[Q]")
    finally:
        # Robustness fix: stop the server even when an assertion fails,
        # so later tests can bind the same port.
        server.stop()
def run(config):
    """Linux entry point: wire the card reader to the server and block."""
    current_dir = os.path.dirname(os.path.abspath(__file__))

    webcontrolx = webcontrol.create_factory(config)
    mpdplayerx = mpdplayer.create_factory(config)
    instance = server.create_server(current_dir, config,
                                    [mpdplayerx, webcontrolx])

    grabdevice = config.get("grab-device", "")
    cardreader = linuxcardreader.LinuxCardReader(
        grabdevice,
        instance.cardmanager.on_card,
        instance.cardmanager.update_devices_count,
    )

    def signal_handler(signal, frame):
        # Stop both long-running components on SIGINT/SIGTERM.
        instance.stop()
        cardreader.stop()

    signal.signal(signal.SIGINT, signal_handler)
    signal.signal(signal.SIGTERM, signal_handler)

    instance.start()
    # Sleep until a signal arrives; the handler above tears things down.
    signal.pause()
def setUp(self):
    """Start a server on an ephemeral port and connect a client to it."""
    # Port 0 asks the OS for any free port; read the real one back.
    self.server = server.create_server('localhost', 0, performance=False)
    self.host, self.port = self.server.server_address
    self.client = client.Client((self.host, self.port))
    # Two sample guest/peer addresses used by the tests.
    self.gest_a = '192.168.1.1'
    self.gest_b = '192.168.1.2'
def test_persist_diff_targets(self):
    """Diff-target build state survives a server restart via the shelve db.

    Triggers a build, checks its state is persisted, restarts the server,
    reports the build as finished, and checks the state is then cleared.
    """
    queryData = buildRequestQuery()
    queryData.abcBuildName = BUILD_NAME
    queryData.buildTypeId = BUILD_TYPE_ID
    queryData.PHID = BUILD_TARGET_PHID

    # Mock TeamCity's response to the build-trigger request.
    triggerBuildResponse = test.mocks.teamcity.buildInfo(
        test.mocks.teamcity.buildInfo_changes(['test-change']),
        buildqueue=True)
    self.teamcity.session.send.return_value = triggerBuildResponse

    response = self.app.post('/build{}'.format(queryData),
                             headers=self.headers)
    self.assertEqual(response.status_code, 200)

    # Check the diff target state was persisted
    with shelve.open(self.db_file_no_ext, flag='r') as db:
        self.assertIn('diff_targets', db)
        self.assertIn(BUILD_TARGET_PHID, db['diff_targets'])
        self.assertIn(DEFAULT_BUILD_ID,
                      db['diff_targets'][BUILD_TARGET_PHID].builds)
        self.assertEqual(
            db['diff_targets']
            [BUILD_TARGET_PHID].builds[DEFAULT_BUILD_ID].build_id,
            DEFAULT_BUILD_ID)
        self.assertEqual(
            db['diff_targets']
            [BUILD_TARGET_PHID].builds[DEFAULT_BUILD_ID].status,
            BuildStatus.Queued)
        self.assertEqual(
            db['diff_targets']
            [BUILD_TARGET_PHID].builds[DEFAULT_BUILD_ID].name,
            BUILD_NAME)

    # Restart the server, which we expect to restore the persisted state
    del self.app
    self.app = server.create_server(
        self.teamcity,
        self.phab,
        self.slackbot,
        self.cirrus,
        db_file_no_ext=self.db_file_no_ext,
        jsonEncoder=test.mocks.fixture.MockJSONEncoder).test_client()

    # Report the build as finished against the restarted server.
    data = statusRequestData()
    data.buildName = BUILD_NAME
    data.buildId = DEFAULT_BUILD_ID
    data.buildTypeId = BUILD_TYPE_ID
    data.buildTargetPHID = BUILD_TARGET_PHID
    statusResponse = self.app.post('/status', headers=self.headers, json=data)
    self.assertEqual(statusResponse.status_code, 200)

    # The restored state must have produced the Harbormaster artifact.
    self.phab.harbormaster.createartifact.assert_called_with(
        buildTargetPHID=BUILD_TARGET_PHID,
        artifactKey="{}-{}".format(BUILD_NAME, BUILD_TARGET_PHID),
        artifactType="uri",
        artifactData={
            "uri": self.teamcity.build_url(
                "viewLog.html",
                {
                    "buildTypeId": BUILD_TYPE_ID,
                    "buildId": DEFAULT_BUILD_ID,
                },
            ),
            "name": BUILD_NAME,
            "ui.external": True,
        },
    )

    # Check the diff target was cleared from persisted state
    with shelve.open(self.db_file_no_ext, flag='r') as db:
        self.assertNotIn(BUILD_TARGET_PHID, db['diff_targets'])
if __name__ == '__main__':
    # Build the product repository from a gzipped CSV (argv[1]) and a
    # streaming JSON feed (argv[2]), then serve it over HTTP.
    repository_factory = RepositoryFactory(
        ignore_duplicates=config.IGNORE_DUPLICATES)
    with gzip_open(argv[1], mode='rt') as file:
        # skip header
        next(file)
        repository_factory.add_products(
            DictReader(
                file,
                fieldnames=RepositoryFactory.field_names(),
                quotechar='"',
                quoting=QUOTE_ALL,
                delimiter=',',
                skipinitialspace=True
            )
        )
    # ijson parses the remote JSON incrementally, item by item.
    with urlopen(argv[2]) as response:
        repository_factory.add_products(ijson.items(response, 'item'))
    # Optional third argument overrides the default port.
    if len(argv) > 3:
        port = int(argv[3])
    else:
        port = 8080
    repository = repository_factory.get_repository()
    app = create_server(repository, config.DEFAULT_LENGTH)
    app.listen(port)
    print(f'Server is listening on port {port} with {len(repository)} products')
    tornado.ioloop.IOLoop.current().start()
s1 = create_server(['127.0.0.1:9902']) s2 = create_server(['127.0.0.1:9903']) self.channel = aiogrpc.insecure_channel( 'ipv4:///127.0.0.1:9902,127.0.0.1:9903', loop=self.loop) self.stub = TestServiceStub(self.channel) s1.start() try: result = await self.stub.NormalMethod(StandardRequest(name='test1') ) self.assertEqual(result.message, 'test1') finally: s1.stop(None) s2.start() try: result = await self.stub.NormalMethod(StandardRequest(name='test1') ) self.assertEqual(result.message, 'test1') finally: s1.stop(None) if __name__ == "__main__": #import sys;sys.argv = ['', 'Test.testName'] s = create_server(['127.0.0.1:9901']) s.start() try: unittest.main() finally: waiter = s.stop(None) waiter.wait()
def server_setup():
    """Generator fixture: create a server process around the test body."""
    instance = server.create_server()
    process = Process(target=instance.serve_forever)
    yield
    # NOTE(review): start() after the yield looks inverted -- a fixture
    # usually starts the process before yielding. Confirm the intended
    # ordering before relying on this fixture.
    process.start()
    process.terminate()
""" SeoulAI October Hackaton Checkers Board Arena Emilio Coronado, [email protected] seoulai.com 2018 """ from server import create_server if __name__ == '__main__': app = create_server() app.run(debug=True)
"""
:license: MIT, see LICENSE for more details.
"""
import os

from flask_migrate import Migrate, MigrateCommand
from flask_script import Manager, Server, Shell

from server import create_server
from server.exts import db
from server.models import User, Article, Topic, Comment, Subscription
from algorithm import start_processing
from spider import start_spider

# Build the Flask app for the configured environment (default: development).
server = create_server(os.getenv('BBK_SERVER_ENV') or 'development')
manager = Manager(server)
migrate = Migrate(server, db)


def make_shell_context():
    """Make references in shell interactive context."""
    return dict(server=server, db=db, User=User, Article=Article,
                Topic=Topic, Comment=Comment, Subscription=Subscription)
def create_server_func():
    """Zero-argument wrapper around server.create_server().

    The created server object is discarded; this function returns None
    (useful where a plain callable with no return value is expected).
    """
    server.create_server()
def main(self):
    """Create the RPC server and serve requests until interrupted."""
    # bind_host='' binds every local interface; the port comes from config.
    self.server = server.create_server(bind_host='', bind_port=cfg.RPC_PORT)
    self.server.serve_forever()  # blocks for the lifetime of the process
def setUp(self):
    """Start a server on an OS-assigned port and open a channel to it."""
    # '[::]:0' binds an ephemeral port; create_server returns the port used.
    self._server, port = server.create_server('[::]:0')
    self._server.start()
    self._channel = grpc.insecure_channel('localhost:%d' % port)
def __init__(self, t_onset): self.t_onset = t_onset self.f_onset = np.ones_like(t_onset) # self.percent = 0 # self.idx = np.arange(C.shape[0]) # self.onsets = beatViz # self.chroma = C[:,0] # self.curbeat = 0 # # # synchronizes with song in browser # def update_chroma(self): # new_idx = int(self.percent * C.shape[1]) # self.chroma = C[:,new_idx] # self.curbeat = self.onsets[new_idx] # song_duration = len(y)/sr # dt = song_duration/C.shape[1] # for idx in range(new_idx,C.shape[1]): # time.sleep(dt) # self.chroma = C[:,idx] # self.curbeat = self.onsets[idx] # # def main(self): # pass main = Main(t_onset) if __name__ == '__main__': app = create_server([main]) app.run(threaded=True)
from werkzeug.middleware.dispatcher import DispatcherMiddleware

from components_page import register_apps as register_component_apps
from examples import register_apps as register_example_apps
from markdown_to_html import convert_all_markdown_files
from server import create_server

# Render the documentation markdown up front so the server can serve it.
convert_all_markdown_files()

server = create_server()
component_routes = register_component_apps()
example_routes = register_example_apps()
routes = {**component_routes, **example_routes}

# Mount every registered app under its slug, dispatching by URL prefix.
application = DispatcherMiddleware(
    server, {slug: app.server for slug, app in routes.items()}
)

if __name__ == "__main__":
    import os

    from werkzeug.serving import run_simple

    # Development mode: auto-reload on file changes.
    os.environ["DBC_DOCS_MODE"] = "dev"
    run_simple("localhost", 8815, application, use_reloader=True)
def get_app(self):
    """Return a fresh application instance under test."""
    return server.create_server()
'timeline': timeline, 'paste': paste_count, 'undo': undo_count, 'add': total_add, 'del': total_del }) tla.finished = True analysis_api.tl_analysis_controller_update_by_id(rid, timeline_analysis_partial=tla) pprint('record %s successfully updated' % rid) except ApiException as e1: pprint("Bad record at id: %s\n" % rid) # print("Exception when calling Api: %s\n" % e1) # api_response = api.ping_controller_ping() except ApiException as e: pprint("Exception when calling Api: %s\n" % e) if __name__ == '__main__': # Defining host is optional and default to http://localhost:3000 conf = configuration.Configuration() conf.host = BACKEND_URL conf.verify_ssl = SSL api_client = openapi_client.ApiClient(conf) with create_server(PORT) as server: pprint('listening on *:%s' % PORT) while True: socket, address = server.accept() handle_request(socket, address) do(openapi_client.TlAnalysisControllerApi(api_client), openapi_client.RecordControllerApi(api_client))
fs = self.frameCapture.fs face = [] face = self.faceTracker.crop_to_face(frame) skin, pixels = self.SkinClassifier.apply_skin_classifier(face) self.sensor.sense_ppg(skin, pixels) self.pulseDetector.detect_pulse(fs, self.sensor.rppg) self.fps = 1 / (time.time() - self.tprev) self.tprev = time.time() if self.display == VideoOutput.PostSkin: return skin elif self.display == VideoOutput.PostFace: return face else: return frame main = Main() #serialization.LoadFromJson(main.SkinClassifier) video_capture.main = main #host="0.0.0.0" if __name__ == '__main__': app = create_server( [main, main.SkinClassifier, main.pulseDetector, main.faceTracker], lambda: video_capture.Camera()) app.run(threaded=True)
def setUp(self):
    """Create an app from the test config and build a fresh database."""
    self.app = create_server(TestConfig)
    # Push an application context so db operations can resolve the app.
    self.context = self.app.app_context()
    self.context.push()
    db.create_all()
from server import create_server

load_dotenv()

# Prefer the processed-data bucket from the environment; otherwise fall
# back to a CSV shipped next to this file.
BUCKET = os.getenv("AWS_PROCESSED_BUCKET")
if BUCKET is not None:
    data_latest_path = f"s3://{BUCKET}/latest.csv"
else:
    logging.warning(
        "No bucket name provided via the AWS_PROCESSED_BUCKET env var. " +
        "Trying to load data locally.")
    data_latest_path = str(Path(__file__).parent / "latest.csv")
data = Dataset(data_latest_path)

server, oidc = create_server()
register_pages(data, server)

# Add authentication requirements for all dashboard pages
for view_func in server.view_functions:
    if view_func.startswith("/pages/"):
        server.view_functions[view_func] = oidc.require_login(
            server.view_functions[view_func])

# Scheduled tasks
scheduler = APScheduler()
scheduler.init_app(server)
scheduler.start()


@scheduler.task("interval",