def test_add_score_to_solution(self):
    """A score added under a solution's name is retrievable by that name."""
    backend = Backend()
    backend.add_solution('Team 1')
    backend.add_score_by_solution_name('Team 1', 4)
    score = backend.get_score_by_solution_name('Team 1')
    self.assertEqual(score, 4)
def main():
    """Scan the video given on argv[1] and print timestamps of violent scenes."""
    video_path = sys.argv[1]
    if not os.path.exists(video_path):
        return
    backend = Backend()
    loader = VideoLoader(video_path)
    fps = loader.fps
    # Process the clip chunk by chunk until the loader is exhausted.
    # TODO: use threading to handle it when having gpus
    while True:
        frames = loader.get_frames()
        if frames is None:  # end of video
            return
        predictions = backend.predict(frames_to_tensor(frames))
        for label in predictions:
            if label != 'violent':
                continue
            seconds = loader.pos / fps
            h, m, s = sec_to_hms(seconds)
            print('violent scene at time:\n%d:%d:%d' % (h, m, s))
class EditContactScreen(Screen):
    """Kivy screen for editing an existing contact stored in ``Backend``."""

    # Raw string: in a plain literal '\E' is an invalid escape sequence
    # (SyntaxWarning/DeprecationWarning on modern Python). The raw string
    # produces the exact same path value.
    Builder.load_file(r'.\EditContact.kv')
    # Shared backend handle for all instances of this screen.
    db = Backend()

    def on_enter(self):
        """Populate the form fields when the screen becomes active."""
        self.getData()

    def check_requirements(self):
        """Enable the save button once name and phone have at least 2 characters."""
        if len(self.ids.name_field.text) > 1 and len(self.ids.phone_field.text) > 1:
            self.ids.save.disabled = False

    def getData(self):
        """Load the selected contact's details into the input fields.

        The contact to edit is the one most recently selected on the main
        screen (last element of ``mainScreen.contactID``).
        """
        self.ID = mainScreen.contactID[-1]
        details = self.db.fetch(self.ID)[0]
        self.ids.name_field.text = f"{details['name']}"
        self.ids.email_field.text = f"{details['email']}"
        self.ids.phone_field.text = f"{details['phone']}"
        self.ids.address_field.text = f"{details['address']}"

    def edit_contact_backend(self):
        """Persist the edited fields to the backend, then clear the form."""
        details = {}
        details['name'] = self.ids.name_field.text
        details['email'] = self.ids.email_field.text
        details['phone'] = self.ids.phone_field.text
        details['address'] = self.ids.address_field.text
        details['contact_id'] = self.ID
        self.db.edit_contact(**details)
        # Reset the form for the next edit.
        self.ids.name_field.text = ""
        self.ids.email_field.text = ""
        self.ids.phone_field.text = ""
        self.ids.address_field.text = ""
def main():
    """Parse CLI arguments and set the password for a stored AppleID account.

    Finds the account whose appleId matches --appleId, stores the given
    password on it (unencrypted, as the CLI help warns), and reports whether
    the update succeeded.
    """
    import argparse
    # Typo fixed in the help text ("at you own risk" -> "at your own risk").
    parser = argparse.ArgumentParser(
        description='set password for AppleID. CAUTION: Use AppleIDs with '
                    'payment credentials at your own risk!!! EVERY purchase '
                    'will be done if possible!!! The password will be stored '
                    'UNENCRYPTED!!!')
    parser.add_argument('-b', '--backend', required=True, help='the backend url')
    parser.add_argument('-a', '--appleId', required=True, help='the AppleId')
    parser.add_argument('-p', '--password', required=True, help='the password')
    args = parser.parse_args()
    logger.debug(args)
    backend = Backend(args.backend)
    accounts = backend.get_accounts()
    passwordUpdated = False
    for accId, acc in accounts.items():
        if 'appleId' in acc and acc['appleId'] == args.appleId:
            logger.debug(str(acc))
            acc['password'] = args.password
            passwordUpdated = backend.post_account(acc)
            break
    # print() with a single argument is valid in both Python 2 and 3;
    # the original used Python-2-only print statements.
    if passwordUpdated:
        print("password updated for AppleId '%s'" % args.appleId)
    else:
        print("unable to update password for AppleId '%s'" % args.appleId)
def __init__(self, array, lifts):
    """Build the 3D parking scene: tile objects, lifts, and the backend.

    array -- 3D grid indexed (z, y, x) of (tile_type, c) cells
    lifts -- iterable of lift coordinates; reversed() below suggests they are
             stored in the opposite axis order to worldPosition — TODO confirm
    """
    scene = bge.logic.getCurrentScene()
    self._scene = scene
    self._array = array
    # One set of scene objects per vertical level.
    self._levels = [set() for _ in range(len(array))]
    # Tile type -> template object name. Hoisted out of the triple loop:
    # the mapping is invariant per cell (it was rebuilt on every iteration).
    tile_objects = {
        parking.FLOOR: "floor",
        parking.ENTRANCE: "entrance",
        parking.EXIT: "exit",
    }
    for zi, z in enumerate(array):
        for yi, y in enumerate(z):
            for xi, x in enumerate(y):
                t, c = x
                if t in tile_objects:
                    obj = scene.addObject(tile_objects[t])
                    obj.worldPosition = (xi, yi, zi)
                    self._levels[zi].add(obj)
    self._cars = {}
    self._lifts = {}
    for i, l in enumerate(lifts):
        lift = scene.addObject("lift")
        lift.worldPosition = [*reversed(l)]
        self._lifts[i] = lift
    self._backend = Backend(array, lifts)
    self._tick_period = scene.objects["control"]["tick_period"]
def test_add_client_with_role(self):
    """Two clients registered with distinct roles are both counted."""
    backend = Backend()
    for ip, role in (('192.168.1.1', 'ANALYZER'), ('192.168.1.2', 'CONVEYOR')):
        backend.add_client_with_role(ip, role)
    self.assertEqual(self.get_clients_number(backend), 2)
def setup(user):
    """Interactive first-time setup for sshproxy.

    Creates the configuration directory if needed, runs the setup Wizard,
    initialises plugins, and prints follow-up instructions for the admin.

    user -- optional user name to pass along via the -u option
    """
    os.environ['SSHPROXY_WIZARD'] = 'running'
    import config
    configdir = config.inipath
    if not os.path.isdir(configdir):
        # print() with a single argument works on both Python 2 and 3;
        # the original used Python-2-only print statements.
        print('Creating config dir %s' % configdir)
        os.makedirs(os.path.join(configdir, 'log'))
    config.get_config = config.Config(config.inifile)
    cfg = config.get_config('sshproxy')
    Wizard()
    print('Setup done.')
    from backend import Backend
    from plugins import init_plugins
    init_plugins()
    clients = Backend().list_clients()
    print('')
    options = ' -c %s' % configdir
    if user:
        options += ' -u ' + user
    if not len(clients):
        print('You can now add an administrator:')
        print(' sshproxy-setup%s --add-admin admin' % options)
        print('')
    print('To start sshproxy daemon, run the following command:')
    print(' ' + os.environ.get('INITD_STARTUP', 'sshproxyd%s' % (options)))
def test_add_solution(self):
    """Adding two distinct solutions yields a count of two."""
    backend = Backend()
    for name in ('Team 1', 'Team 2'):
        backend.add_solution(name)
    self.assertEqual(backend.get_solution_count(), 2)
def __init__(self):
    """Wire up the JACK client, interface, metronome and backend, then start."""
    # JACK client (no_start_server: requires an already-running JACK server)
    self.client = jack.Client("palette", no_start_server=True)
    # user interface over the available entities
    ui_entities = [Entity.KEYBOARD, Entity.SAMPLER, Entity.DRUM_MACHINE, Entity.PUSH]
    self.display = Interface(ui_entities)
    # metronome
    self.metronome = Metronome(self.display, self.client)
    # backend, built from one instrument constructor per entity
    instrument_classes = [Keyboard, Sampler, DrumMachine, Push]
    self.be = Backend(self.client, self.metronome, instrument_classes)
    # misc state
    self.pressed_keys = []
    self.fifo = open("palette.pipe", mode="rt")
    self.current_inst_number = 0
    # let's go
    self.client.activate()
    self.display.paint_pad(0)
    self.metronome.sync_transport()
def __init__(self): QtWidgets.QApplication.__init__(self, sys.argv) # --- members self.presentation = MainWindow() self.backend = Backend() self.backend_thread = QtCore.QThread() self.backend.moveToThread(self.backend_thread) # --- signal/slot connections # front to back self.presentation.set_cache_path.connect(self.backend.set_cache_path) self.presentation.refresh.connect(self.backend.refresh) self.presentation.rebuild.connect(self.backend.rebuild) self.presentation.request_preview.connect(self.backend.preview_request) self.presentation.request_save.connect(self.backend.save_bitmap) # back to front self.backend.thumbnail_available.connect( self.presentation.thumbnail_view.add_thumbnail) self.backend.bitmap_available.connect(self.presentation.save_bitmap) self.backend.preview_available.connect(self.presentation.show_preview) # --- start backend thread self.backend_thread.start() # --- misc setup self.setStyle(APPLICATION_STYLE)
def _fetch_ceph_files(host, filenames):
    """Copy each named file from REMOTE_CEPH_PATH on *host* into LOCAL_CEPH_PATH.

    The SSH-like Ceph connection is always closed, even if a transfer fails.
    """
    conn = Ceph(ceph_host=host, ceph_user=CEPH_USER, ceph_key_file=CEPH_KEYPAIR)
    try:
        for name in filenames:
            LOG.info("get %s from %s" % (name, host))
            conn.get_file(LOCAL_CEPH_PATH + "/" + name,
                          REMOTE_CEPH_PATH + "/" + name)
    finally:
        # Original code leaked the connection when get_file raised.
        conn.close()


def backup_az(az_domain, backup_az_domain, ceph_host, backup_ceph_host):
    """Configure the cinder ceph backend for an AZ backup pairing.

    Fetches conf/keyring files from the primary and backup ceph hosts,
    updates the ceph backend parameters, runs the volume_backend_name
    script, and restarts the cinder volume/backup services.
    """
    # get ceph conf and keyring
    LOG.info("connect to ceph: host=%s" % ceph_host)
    _fetch_ceph_files(ceph_host, (CEPH_CONF, CEPH_KEYRING))
    # get backup ceph conf and keyring
    LOG.info("connect to backup_ceph: host=%s" % backup_ceph_host)
    _fetch_ceph_files(backup_ceph_host, (CEPH_BACKUP_CONF, CEPH_BACKUP_KEYRING))
    backend = Backend()
    # update volume_backend_name
    volume_backend_name = CEPH_VOLUME_PREFIX + ":" + az_domain + ":" + backup_az_domain
    LOG.info("ceph storage backend update: volume_backend_name = %s"
             % volume_backend_name)
    backend.update_ceph_param("volume_backend_name", volume_backend_name)
    # update iscsi_server_ip
    LOG.info("ceph storage backend update:iscsi_server_ip=%s" % ceph_host)
    backend.update_ceph_param("iscsi_server_ip", ceph_host)
    # NOTE(review): backend.commit() was commented out in the original —
    # confirm whether update_ceph_param persists on its own.
    # set volume_type key via helper script
    shell_file = CURRENT_PATH + "/script/volume_backend_name.sh"
    # NOTE(review): string-built shell command; fine for trusted config
    # values, but consider subprocess.run([...]) if inputs can be untrusted.
    os.system("/bin/bash " + shell_file + " " + az_domain + " " + backup_az_domain)
    # restart services so the new backend parameters take effect
    restart_component("cinder", "cinder-volume")
    restart_component("cinder", "cinder-backup")
def test_add_repeated_client(self):
    """Re-adding an already-registered IP must not increase the client count."""
    backend = Backend()
    for ip in ('192.168.1.1', '192.168.1.2', '192.168.1.1'):
        backend.add_client(ip)
    self.assertEqual(self.get_clients_number(backend), 2)
def __init__(self, *args):
    """Initialise config, backend and timer, then load all known projects."""
    GObject.GObject.__init__(self)
    self.config = Config()
    self.backend = Backend(self.config)
    self.timer = Timer(self.config)
    # preload every project from the database
    self.projects.extend(models.Project.select())
def __init__(self, main_dict=None, backend=None, frontend=None):
    """Assemble a frontend/backend pair.

    Either build both from *main_dict*, or (when main_dict is None) use the
    pre-built *frontend* and *backend* supplied by the caller.
    """
    super(AssembleModel, self).__init__()
    if main_dict is None:
        # injection path: take the components as given
        self.frontend = frontend
        self.backend = backend
    else:
        # construction path: build both parts from the config dict
        self.frontend = Frontend_mine(main_dict["frontend_dict"])
        self.backend = Backend(main_dict)
def test_add_repeated_client_2(self):
    """Re-adding an existing IP leaves the stored IP list without duplicates."""
    backend = Backend()
    for ip in ('192.168.1.1', '192.168.1.2', '192.168.1.1'):
        backend.add_client(ip)
    expected = ['192.168.1.1', '192.168.1.2']
    self.assertEqual(self.get_clients_ips(backend), expected)
class TestBackend(unittest.TestCase):
    """Unit tests for Backend's JSON classification and comparison helpers."""

    # The helpers under test don't mutate state, so one shared instance is fine.
    backend = Backend()

    def test_is_special_json(self):
        """Scalars and single-key {'name': ...} dicts (recursively) are special."""
        self.assertTrue(self.backend._is_special_json(1))
        # Duplicate assertion on 1.0 removed (it appeared twice in the original).
        self.assertTrue(self.backend._is_special_json(1.0))
        self.assertTrue(self.backend._is_special_json({"name": 4}))
        self.assertTrue(self.backend._is_special_json({"name": "string"}))
        self.assertTrue(self.backend._is_special_json({"name": {"name": 4}}))
        self.assertFalse(
            self.backend._is_special_json({"name": {"not_name": 4}}))

    def test_list_valid(self):
        """Lists of plain scalars are valid; a list containing a dict is not."""
        self.assertTrue(self.backend._list_valid([1, 2, 3]))
        self.assertTrue(
            self.backend._list_valid(["string", "string2", "string3"]))
        self.assertFalse(
            self.backend._list_valid(
                ["string", "string2", {"string3": "string4"}]))

    def test_cmp(self):
        """_cmp returns 1/-1 for ordered pairs; x > y for every pair below."""
        inputs = [
            [2, 1],
            ["b", "a"],
            [{"name": 4}, {"name": 3}],
            [{"name": "b"}, {"name": "a"}],
            [{"name": "b"}, {"name": 1}],
            [{"name": "b"}, 1],
            [{"name": "b"}, "some string"],
            [{"name": {"name": 4}}, 1],
            [{"name": {"name": 4}}, "some string"],
        ]
        for x, y in inputs:
            self.assertEqual(self.backend._cmp(x, y), 1)
            # comparison must be antisymmetric
            self.assertEqual(self.backend._cmp(y, x), -1)
def test_rr_ctx_one_dead():
    """Round-robin context skips a dead backend and balances over the rest.

    Three worker threads each request LOOP_COUNT backends; the dead backend
    must never be chosen, and the two live ones must split the picks evenly
    (3 threads * 10 picks / 2 live backends = 15 each).
    """
    LOOP_COUNT = 10
    DEAD_BACKEND = 2
    EXPECTED_COUNT = 15
    # set up backends, then mark one dead
    backends = [
        Backend({"host": f"backend-{i}"}, alive=True) for i in range(3)
    ]
    backends[DEAD_BACKEND].set_alive(False)
    # create context and a queue collecting each thread's picks
    test_ctx = RoundRobinContext(backends)
    result_queue = queue.Queue()
    # one worker thread per backend (just a count — threads don't own a backend)
    workers = [
        threading.Thread(target=rr_thread,
                         args=(test_ctx, result_queue, LOOP_COUNT))
        for _ in backends
    ]
    for worker in workers:
        worker.start()
    for worker in workers:
        worker.join()
    # drain the queue into a flat list of picked backends
    picked = []
    while not result_queue.empty():
        picked.extend(result_queue.get())
    # tally picks per host, seeding zeros so unpicked hosts still appear
    counts = defaultdict(int)
    for backend in backends:
        counts[backend.host] = 0
    for choice in picked:
        counts[choice.host] += 1
    for idx, count in enumerate(counts.values()):
        if idx == DEAD_BACKEND:
            assert count == 0
        else:
            assert count == EXPECTED_COUNT
def test_add_client_with_repeated_role_2(self):
    """A repeated (ip, role) registration changes neither IPs nor roles."""
    backend = Backend()
    registrations = (
        ('192.168.1.1', 'ANALYZER'),
        ('192.168.1.2', 'CONVEYOR'),
        ('192.168.1.2', 'CONVEYOR'),  # duplicate on purpose
    )
    for ip, role in registrations:
        backend.add_client_with_role(ip, role)
    self.assertEqual(self.get_clients_ips(backend),
                     ['192.168.1.1', '192.168.1.2'])
    self.assertEqual(self.get_clients_roles(backend),
                     ['ANALYZER', 'CONVEYOR'])
def __init__(self, backendUrl):
    """Register this host with the backend and remember its worker id.

    Raises Exception if the backend has no record with an '_id' for this host.
    """
    super(Worker, self).__init__()
    self.name = socket.gethostname()
    self.backend = Backend(backendUrl)
    record = self.backend.worker_for_name(self.name)
    # guard clause: refuse to run without a backend-assigned id
    if '_id' not in record:
        raise Exception('Worker has no id!!!!')
    self.workerId = record['_id']
    self.backend.workerId = self.workerId
    self._stop = Event()
def get_result(task_id):
    """Dequeue the next result for *task_id* and wrap it in an object.

    Returns an object exposing .state and .meta, mirroring the fields of the
    JSON payload stored on the queue.
    """
    backend = Backend()
    raw = backend.dequeue(queue_name=task_id)
    payload = json.loads(raw)

    class Info:
        # minimal result holder with attribute access
        def __init__(self, state, meta):
            self.state = state
            self.meta = meta

    return Info(payload["state"], payload["meta"])
def check_backend():
    """Open an SSH connection to the cluster from db.json and wrap it in a Backend.

    Prompts interactively for the cluster credentials.
    """
    # TODO: Sibling files not recognizing each other when called from another
    # file path.
    # with-block fixes the file handle leak from the bare open().read().
    with open('src/backend/db.json') as f:
        db_dict = json.load(f)
    ip = db_dict['ip']
    # NOTE(review): the credential prompts were corrupted in the source
    # ("******" redaction); reconstructed below — confirm against history.
    from getpass import getpass
    user = input("cluster username: ")
    password = getpass("cluster password: ")
    ssh_connection = SSH(host=ip)
    ssh_connection.connect(username=user, password=password)
    backend = Backend(ssh=ssh_connection)
    return backend
def backend(self):
    '''Return D-BUS backend client interface.

    This gets initialized lazily.

    Set self.search_only to True to suppress a full system hardware
    detection, especially if you use only search_driver() to find a remote
    driver for a selected, already detected device.
    '''
    if self._dbus_iface is None:
        # First access: create the client (in-process with --no-dbus,
        # otherwise a D-BUS client) and run the initial driver search.
        try:
            if self.argv_options.no_dbus:
                self._dbus_iface = Backend()
            else:
                self._dbus_iface = Backend.create_dbus_client()
        except Exception as e:
            # Only a missing/unreachable D-BUS server gets the friendly
            # message + exit; every other failure propagates.
            if hasattr(e, '_dbus_error_name') and e._dbus_error_name in (
                    'org.freedesktop.DBus.Error.FileNotFound',
                    'org.freedesktop.DBus.Error.NoServer'):
                if self.have_ui:
                    self.error_message(
                        self._(
                            'Cannot connect to D-BUS,'
                            ' please use the --no-dbus option as root to'
                            ' use jockey without it.'), str(e))
                else:
                    self.error_msg(str(e))
                sys.exit(1)
            else:
                raise
        self._check_repositories()
        # In search_only mode only initialise the driver DB; otherwise run
        # full hardware detection.
        self._call_progress_dialog(
            self._('Searching for available drivers...'),
            self.search_only and self._dbus_iface.db_init or self._dbus_iface.detect,
            timeout=600)
    else:
        # handle backend timeouts: ping the existing client and recreate it
        # (plus re-run the search) if the service went away
        try:
            self._dbus_iface.handler_info(' ')
        except Exception as e:
            if hasattr(e, '_dbus_error_name') and e._dbus_error_name == \
                    'org.freedesktop.DBus.Error.ServiceUnknown':
                self._dbus_iface = Backend.create_dbus_client()
                self._check_repositories()
                self._call_progress_dialog(
                    self._('Searching for available drivers...'),
                    self.search_only and self._dbus_iface.db_init or self._dbus_iface.detect,
                    timeout=600)
    return self._dbus_iface
def run(self):
    """Bring up the model accessor, then construct and run the Backend."""
    self.create_model_accessor()
    self.wait_for_ready()
    # Don't import backend until after the model accessor has been
    # initialized: sync steps doing
    # `from xossynchronizer.modelaccessor import ...` need the accessor ready
    # before their modules are imported.
    from backend import Backend
    bound_log = self.log.bind(synchronizer_name=Config().get("name"))
    Backend(log=bound_log, model_accessor=self.model_accessor).run()
def func_authenticate(self, _chan, *args, **kw):
    """Authenticate a channel against the backend.

    On success, lazily registers the channel's namespace and backend and
    stores the client tags; returns True. Returns False on auth failure.
    """
    backend = Backend()
    if not backend.authenticate(username=kw['username'], auth_tokens=kw,
                                ip_addr=kw['ip_addr']):
        return False
    # dict.has_key() was removed in Python 3; the `in` operator is the
    # equivalent on both Python 2 and 3.
    if _chan not in self.namespaces:
        self.namespaces[_chan] = {}
    if _chan not in self.backend:
        self.backend[_chan] = backend
    self.namespaces[_chan]['client'] = backend.get_client_tags()
    return True
def add_contact_backend(self):
    """Create a new contact from the form fields, then reset the form."""
    db = Backend()
    details = {
        'name': self.ids.name_field.text,
        'email': self.ids.email_field.text,
        'phone': self.ids.phone_field.text,
        'address': self.ids.address_field.text,
    }
    db.add_contact(**details)
    # clear every field for the next entry
    for field in (self.ids.name_field, self.ids.email_field,
                  self.ids.phone_field, self.ids.address_field):
        field.text = ""
def add_product_get(**kwargs):
    """Render the 'add product' page via the shared events_add template."""
    backend = Backend()
    body = template(
        'tpl/events_add',
        tags=backend.get_tags(),
        extra_attributes=extra_attributes['product'],
        event_type='product',
        helptext=helptext['product'],
        recommended_tags=[],
        handler='vimeo_product',
        **kwargs)
    return page(config, backend, state, body=body, page='add_product', **kwargs)
def test_handler_put(mock_http_server, mock_connection): """ testing PUT method """ # set up request REQUEST_COMMAND = "PUT" REQUEST_PATH = "/" REQUEST_CONTENT = "a=1&b=2" REQUEST_CONTENT_BYTES = bytes(REQUEST_CONTENT, "utf-8") REQUEST_HEADERS = { "Content-Length": str(len(REQUEST_CONTENT)), "Content-Type": "application/x-www-form-urlencoded", } REQUEST_HEADER_STR = "\n".join( [f"{key}: {val}" for key, val in REQUEST_HEADERS.items()]) request_str = f"{REQUEST_COMMAND} {REQUEST_PATH} HTTP/1.1\n{REQUEST_HEADER_STR}\n\n{REQUEST_CONTENT}" request_bytes = bytes(request_str, "utf-8") mock_request = Mock() mock_request.makefile.return_value = io.BytesIO(request_bytes) # set up response conn = MagicMock() conn.getresponse.return_value = DummyResponse(200, [], b"dummy") mock_connection.return_value = conn # set up backend context_mock = MagicMock() context_mock.get_next_backend.return_value = Backend( { "host": "dummy", "port": 1234 }, alive=True) # set up handler HandlerClass = partial(LoadBalancerHandler, context_mock) handler = HandlerClass(mock_request, ("request_host", 1234), mock_http_server) # make assertions assert handler.command == REQUEST_COMMAND assert handler.path == REQUEST_PATH for header in handler.headers: assert REQUEST_HEADERS[header] == handler.headers[header] assert handler.body == REQUEST_CONTENT_BYTES
def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) # MENU AND APPEARANCE SETUP logger.info("GUI init") self.title("Auto Basya") self.configure(bg="grey") self.columnconfigure((0, ), weight=1) self.main_menu = Menu(self) self.config(menu=self.main_menu) self.options_menu = Menu(self.main_menu, tearoff=0) self.set_options_menu() self.backend = Backend() # MAIN VARIABLES self.pf_names = self.backend.get_pf_names() self.pf_count = len(self.pf_names) self.path_to_config = 'campaigns_config.yml' self.input_data = self.load_config() self.rows = {pf: row for row, pf in enumerate(self.pf_names)} self.vars = { pf: BooleanVar(value=(pf in self.input_data)) for pf in self.pf_names } self.frames = { pf: PlatformFrame(self, name=pf, row=self.rows[pf], init_states=self.input_data[pf]) for pf in self.input_data } # WELCOME MESSAGE self.welcome_lbl = None if not self.input_data: logger.info('Welcome Message') self.welcome_lbl = Label( self, text= u"Добро пожаловать в Авто Басю.\n\nПохоже, что это первый запуск программы (либо не было сохранено ни одной кампании).\n\nЧтобы приступить к работе, зайдите в\nНастройки > Платформы,\nчтобы выбрать платформы, с которыми предстоит работать, а также в\nНастройки > Google Таблица\nчтобы внести параметры Google таблицы, в которую предстоит экспортировать данные.", padx=10, pady=10) self.welcome_lbl.grid(row=0, column=0, padx=20, pady=30) # must be in the end of init self.set_footer()
def __init__(self, **kwargs):
    """Build the messenger window: username label, chat log, input box, send button."""
    super(MessengerWindow, self).__init__(size=(500, 300), **kwargs)
    self.backend = Backend()
    self.cols = 2
    # NOTE(review): the two widget constructions below were corrupted in the
    # source ("******" redaction swallowed part of the code). Reconstructed
    # so the username label and the chat log both exist before being added —
    # confirm exact text/layout against version history.
    self.username = Label(text="Username: ")
    self.add_widget(self.username)
    self.chat_log = TextInput(text="", size_hint=(.1, .3), pos=(100, 490))
    self.add_widget(self.chat_log)
    # poll the backend for new messages once per second
    Clock.schedule_interval(self.show_messages, 1.0)
    self.input_box = TextInput(size_hint=(.7, .5), pos=(220, 10), multiline=False)
    self.add_widget(self.input_box)
    self.send = Button(text="Send", size_hint=(.25, .5), pos=(10, 10))
    self.send.bind(on_press=self.btn_pressed)
    self.add_widget(self.send)
def add_analytics_get(**kwargs):
    """Render the 'add analytics' page via the shared events_add template."""
    backend = Backend()
    body = template(
        'tpl/events_add',
        tags=backend.get_tags(),
        extra_attributes=extra_attributes['analytics'],
        event_type='analytics',
        helptext=helptext['analytics'],
        recommended_tags=[],
        handler='vimeo_analytics',
        timestamp_feeder=True,
        **kwargs)
    return page(config, backend, state, body=body, page='add_analytics', **kwargs)