def __init__(self, app, machine_name_prefix):
    """Capture shared app services and set up empty instance-tracking state."""
    self.app = app
    self.instance_id = app['instance_id']
    self.log_store = app['log_store']
    self.scheduler_state_changed = app['scheduler_state_changed']
    self.db = app['db']
    self.gservices = app['gservices']
    self.machine_name_prefix = machine_name_prefix

    # Configuration values; populated later by async_init.
    self.worker_type = None
    self.worker_cores = None
    self.worker_disk_size_gb = None
    self.max_instances = None
    self.pool_size = None

    # Instances ordered by last-update time (stalest first).
    self.instances_by_last_updated = sortedcontainers.SortedSet(
        key=lambda instance: instance.last_updated)
    # Healthy instances ordered by available millicores.
    self.healthy_instances_by_free_cores = sortedcontainers.SortedSet(
        key=lambda instance: instance.free_cores_mcpu)

    self.n_instances_by_state = {
        'pending': 0,
        'active': 0,
        'inactive': 0,
        'deleted': 0
    }

    # Free millicores aggregated over pending and active instances.
    self.live_free_cores_mcpu = 0
    self.name_instance = {}
def medianSlidingWindow(self, nums: List[int], k: int) -> List[float]:
    """Return the median of every length-k sliding window of nums.

    Two index sets ordered by value split each window into a lower half
    (`small`) and an upper half (`large`, never smaller than `small`).
    """
    by_value = lambda idx: nums[idx]
    small = sortedcontainers.SortedSet(key=by_value)  # lower half (indices)
    large = sortedcontainers.SortedSet(key=by_value)  # upper half (indices)

    # Seed with the first window.
    large.update(range(k))
    for _ in range(k // 2):
        small.add(large.pop(0))

    def window_median():
        if k % 2:
            return float(nums[large[0]])
        return (nums[small[-1]] + nums[large[0]]) / 2

    result = [window_median()]
    for right in range(k, len(nums)):
        outgoing = right - k
        # Remove the index leaving the window from whichever half holds it.
        if outgoing in small:
            small.remove(outgoing)
        else:
            large.remove(outgoing)
        # Insert the new index via `large`, then rebalance the halves.
        large.add(right)
        small.add(large.pop(0))
        while len(small) > len(large):
            large.add(small.pop(-1))
        result.append(window_median())
    return result
def test_shifted_sparse_circuit(self):
    # Build a 3-gate circuit with sparse, partly negative time and qubit
    # indices to exercise the sparse/dense bookkeeping.
    circuit = quasar.Circuit()
    circuit.add_gate(quasar.Gate.H, -2, times=-2, copy=False)
    circuit.add_gate(quasar.Gate.CX, (0, 1), times=(2, ), copy=False)
    circuit.add_gate(quasar.Gate.CX, qubits=(3, 4), times=(4, ), copy=False)
    # Gate counts, total and by qubit arity.
    self.assertEqual(circuit.ngate, 3)
    self.assertEqual(circuit.ngate1, 1)
    self.assertEqual(circuit.ngate2, 2)
    self.assertEqual(circuit.ngate3, 0)
    self.assertEqual(circuit.ngate4, 0)
    self.assertEqual(circuit.ngate_nqubit(0), 0)
    self.assertEqual(circuit.ngate_nqubit(1), 1)
    self.assertEqual(circuit.ngate_nqubit(2), 2)
    self.assertEqual(circuit.ngate_nqubit(3), 0)
    self.assertEqual(circuit.ngate_nqubit(4), 0)
    self.assertEqual(circuit.max_gate_nqubit, 2)
    self.assertEqual(circuit.max_gate_ntime, 1)
    # Time window spans -2..4 (7 slots dense) but only 3 are occupied.
    self.assertEqual(circuit.min_time, -2)
    self.assertEqual(circuit.max_time, 4)
    self.assertEqual(circuit.ntime, 7)
    self.assertEqual(circuit.ntime_sparse, 3)
    # Qubit window spans -2..4 (7 indices dense) but only 5 are occupied.
    self.assertEqual(circuit.min_qubit, -2)
    self.assertEqual(circuit.max_qubit, 4)
    self.assertEqual(circuit.nqubit, 7)
    self.assertEqual(circuit.nqubit_sparse, 5)
    self.assertFalse(circuit.is_controlled)
    self.assertFalse(circuit.is_composite)
    # The circuit's bookkeeping containers are sorted containers.
    self.assertIsInstance(circuit, quasar.Circuit)
    self.assertIsInstance(circuit.gates, sortedcontainers.SortedDict)
    self.assertIsInstance(circuit.times, sortedcontainers.SortedSet)
    self.assertIsInstance(circuit.qubits, sortedcontainers.SortedSet)
    self.assertIsInstance(circuit.times_and_qubits,
                          sortedcontainers.SortedSet)
    # Exact contents: gates are keyed by ((times...), (qubits...)) tuples.
    self.assertEqual(
        circuit.gates,
        sortedcontainers.SortedDict([
            (((-2, ), (-2, )), quasar.Gate.H),
            (((2, ), (0, 1)), quasar.Gate.CX),
            (((4, ), (3, 4)), quasar.Gate.CX),
        ]))
    self.assertEqual(circuit.times, sortedcontainers.SortedSet([-2, 2, 4]))
    self.assertEqual(circuit.qubits,
                     sortedcontainers.SortedSet([-2, 0, 1, 3, 4]))
    self.assertEqual(
        circuit.times_and_qubits,
        sortedcontainers.SortedSet([(-2, -2), (2, 0), (2, 1), (4, 3),
                                    (4, 4)]))
    self.assertIsInstance(str(circuit), str)
def solve(input_, k=100):
    """Greedily assign rides to drivers, preferring the longest feasible ride.

    Drivers become available at times stored in `times`; at each event time,
    each freed driver scans up to `k` candidate rides (ordered by start time)
    and takes the longest one it can finish before the ride's deadline.

    Bug fix: the earliest pickup time must account for the travel time from
    the driver's current position to the ride origin (`to_start_dist`); the
    original code used the ride's own length (`c_dist`) there and left
    `to_start_dist` unused.

    :param input_: tuple (field, rides) describing the problem instance.
    :param k: candidate-scan cutoff once a feasible ride has been found.
    :return: list of per-driver lists of assigned ride ids.
    """
    field, rides = input_
    drivers = {0: [i for i in range(field.cars)]}
    drivers_positions = [(0, 0) for _ in range(field.cars)]
    times = s.SortedSet([0])
    rides_by_time = s.SortedSet(rides, key=lambda r: r.start)
    answer = [[] for i in range(field.cars)]
    while len(times) > 0:
        t = times[0]
        times.pop(0)
        if t >= field.maxtime:
            break
        # Drop rides that can no longer be completed by their deadline.
        while len(rides_by_time) > 0 and (rides_by_time[0].finish - dist(
                rides_by_time[0].origin, rides_by_time[0].destination)) < t:
            rides_by_time.pop(0)
        for driver in drivers[t]:
            max_dist = 0
            best_ride = None
            real_free = -1
            for i, ride in enumerate(rides_by_time):
                # Stop scanning past k candidates once something is feasible.
                if i > k and best_ride is not None:
                    break
                to_start_dist = dist(drivers_positions[driver], ride.origin)
                c_dist = dist(ride.origin, ride.destination)
                # Earliest pickup: ride start, or arrival at its origin.
                real_start_time = max(ride.start, t + to_start_dist)
                real_finish_time = real_start_time + c_dist
                if real_finish_time > ride.finish:
                    continue
                if c_dist > max_dist:
                    max_dist = c_dist
                    best_ride = ride
                    real_free = real_finish_time
            if best_ride is not None:
                rides_by_time.remove(best_ride)
                drivers_positions[driver] = best_ride.destination
                times.add(real_free)
                if drivers.get(real_free) is None:
                    drivers[real_free] = []
                drivers[real_free].append(driver)
                answer[driver].append(best_ride.id)
    logger.info(f"Answer {answer}")
    return answer
def test_ghz_circuit(self):
    # Build the standard 3-qubit GHZ preparation circuit: H then two CX.
    circuit = quasar.Circuit()
    circuit.add_gate(quasar.Gate.H, 0, copy=False)
    circuit.add_gate(quasar.Gate.CX, (0, 1), copy=False)
    circuit.add_gate(quasar.Gate.CX, qubits=(1, 2), copy=False)
    # Gate counts, total and by qubit arity.
    self.assertEqual(circuit.ngate, 3)
    self.assertEqual(circuit.ngate1, 1)
    self.assertEqual(circuit.ngate2, 2)
    self.assertEqual(circuit.ngate3, 0)
    self.assertEqual(circuit.ngate4, 0)
    self.assertEqual(circuit.ngate_nqubit(0), 0)
    self.assertEqual(circuit.ngate_nqubit(1), 1)
    self.assertEqual(circuit.ngate_nqubit(2), 2)
    self.assertEqual(circuit.ngate_nqubit(3), 0)
    self.assertEqual(circuit.ngate_nqubit(4), 0)
    self.assertEqual(circuit.max_gate_nqubit, 2)
    self.assertEqual(circuit.max_gate_ntime, 1)
    # Dense layout: times 0..2 and qubits 0..2 are all occupied, so the
    # sparse and dense counts coincide.
    self.assertEqual(circuit.min_time, 0)
    self.assertEqual(circuit.max_time, 2)
    self.assertEqual(circuit.ntime, 3)
    self.assertEqual(circuit.ntime_sparse, 3)
    self.assertEqual(circuit.min_qubit, 0)
    self.assertEqual(circuit.max_qubit, 2)
    self.assertEqual(circuit.nqubit, 3)
    self.assertEqual(circuit.nqubit_sparse, 3)
    self.assertFalse(circuit.is_controlled)
    self.assertFalse(circuit.is_composite)
    # The circuit's bookkeeping containers are sorted containers.
    self.assertIsInstance(circuit, quasar.Circuit)
    self.assertIsInstance(circuit.gates, sortedcontainers.SortedDict)
    self.assertIsInstance(circuit.times, sortedcontainers.SortedSet)
    self.assertIsInstance(circuit.qubits, sortedcontainers.SortedSet)
    self.assertIsInstance(circuit.times_and_qubits,
                          sortedcontainers.SortedSet)
    # Exact contents: gates are keyed by ((times...), (qubits...)) tuples.
    self.assertEqual(
        circuit.gates,
        sortedcontainers.SortedDict([
            (((0, ), (0, )), quasar.Gate.H),
            (((1, ), (0, 1)), quasar.Gate.CX),
            (((2, ), (1, 2)), quasar.Gate.CX),
        ]))
    self.assertEqual(circuit.times, sortedcontainers.SortedSet([0, 1, 2]))
    self.assertEqual(circuit.qubits, sortedcontainers.SortedSet([0, 1, 2]))
    self.assertEqual(
        circuit.times_and_qubits,
        sortedcontainers.SortedSet([(0, 0), (1, 0), (1, 1), (2, 1),
                                    (2, 2)]))
    self.assertIsInstance(str(circuit), str)
def __init__(self, cfiles, prefixes, deps):
    """Store the collection inputs and create empty sorted indexes."""
    self.cfiles = cfiles
    self.prefixes = prefixes
    self.deps = deps
    # NOTE(review): attribute name contains a typo ('possble'); kept as-is
    # because external callers may reference it.
    self.possble_path_prefixes = sortedcontainers.SortedSet()
    self.dep_paths = sortedcontainers.SortedSet()
    # Indexes of discovered source entities, keyed by name.
    self._source_functions = sortedcontainers.SortedDict()
    self._source_vars = sortedcontainers.SortedDict()
    self._macros = sortedcontainers.SortedDict()
    # Memoized results of function-call lookups.
    self.__function_calls_cache = sortedcontainers.SortedDict()
def __init__(self, client, refresh_time, max_size=100):
    """Bounded cache of secrets and service accounts with refresh tracking."""
    self.client = client
    self.refresh_time = refresh_time
    self.max_size = max_size

    # Cached secrets: id -> entry tuple; ids are ordered by entry[1]
    # (presumably a fetch timestamp — confirm against the update path).
    self.secrets = {}
    self.secret_ids = sortedcontainers.SortedSet(
        key=lambda id: self.secrets[id][1])
    self.secret_locks = {}

    # Service accounts use the same layout.
    self.service_accounts = {}
    self.service_account_ids = sortedcontainers.SortedSet(
        key=lambda id: self.service_accounts[id][1])
    self.service_account_locks = {}
def extract_relevant_automata(logger, automata, automata_peers, peers, sb_type=None):
    """
    Determine which automata can receive signals from the given instance or send signals to it.

    :param logger: Logger object.
    :param automata: List with Automaton objects.
    :param automata_peers: Dictionary {'Automaton.identifier string' ->
                           {'states': ['relevant State objects'], 'automaton': 'Automaton object'}
    :param peers: List of relevant Process objects:
                  [{'process': 'Process obj', 'action': 'Receive or Dispatch obj'}]
    :param sb_type: Receive or Dispatch class to choose only those automata that receive
                    or send signals to the given one
    :return: None, since it modifies the first argument.
    """
    for peer in peers:
        matched = [a for a in automata if a.process == peer["process"]]
        if not matched:
            logger.debug("No automata peers found for {!r}, total available: {}".
                         format(str(peer["process"]),
                                ', '.join({str(a.process) for a in automata})))
            continue
        for automaton in matched:
            # Register the automaton on first sight with an empty action set.
            entry = automata_peers.setdefault(
                automaton,
                {"automaton": automaton,
                 "actions": sortedcontainers.SortedSet()})
            for action in automaton.process.actions.filter(include={Action}):
                if action == peer["action"] and (not sb_type
                                                 or isinstance(action, sb_type)):
                    entry["actions"].add(action)
def read_config(self):
    """Load user configuration from the config file and the exclusions file.

    Invalid option values are silently skipped (via suppress(ValueError))
    so that a malformed config never crashes startup.

    Fix: the exclusions file was opened with a bare `open(...)` whose handle
    was never closed; it is now read inside a `with` block.
    """
    parser = configparser.ConfigParser()
    parser.optionxform = str  # preserve option-name case
    parser.read(self.user.config_file)
    if Config._SECTION_CONFIG in parser:
        config_section = parser[Config._SECTION_CONFIG]
        if Config._KEY_APPEND_EXTENSION in config_section:
            with contextlib.suppress(ValueError):
                self.append_extension = bool(
                    distutils.util.strtobool(
                        config_section[Config._KEY_APPEND_EXTENSION]))
        if Config._KEY_USE_ORIGINAL_FILENAME in config_section:
            with contextlib.suppress(ValueError):
                self.use_original_filename = bool(
                    distutils.util.strtobool(
                        config_section[Config._KEY_USE_ORIGINAL_FILENAME]))
        if Config._KEY_UPDATE_DELAY in config_section:
            with contextlib.suppress(ValueError):
                update_delay = int(
                    config_section[Config._KEY_UPDATE_DELAY])
                # Reject non-positive delays and values whose millisecond
                # equivalent would overflow a 32-bit timer.
                if update_delay <= 0 or (update_delay * 1000
                                         ) > numpy.iinfo(numpy.int32).max:
                    raise ValueError()
                self.update_delay = update_delay
    # Non-empty lines of the exclusions file become the excluded set.
    with open(self.user.excluded_file) as excluded_file:
        self.excluded_desktop_entries = sortedcontainers.SortedSet(
            filter(None, excluded_file.read().splitlines()))
def __init__(self, db, k8s, batch_bucket, batch_gsa_key=None):
    """Driver state: work queues, ready-pod index, instance pool, GCP creds."""
    self.db = db
    self.k8s = k8s
    self.batch_bucket = batch_bucket

    self.pods = None  # populated in run
    self.complete_queue = asyncio.Queue()
    self.ready_queue = asyncio.Queue(maxsize=1000)
    # Ready pods ordered by requested millicores.
    self.ready = sortedcontainers.SortedSet(key=lambda pod: pod.cores_mcpu)
    self.ready_cores_mcpu = 0
    self.changed = asyncio.Event()
    self.pool = None  # created in run

    deploy_config = get_deploy_config()
    self.base_url = deploy_config.base_url('batch2')

    self.inst_pool = InstancePool(self)

    # Service-account credentials: explicit arg, env var, or mounted default.
    if batch_gsa_key is None:
        batch_gsa_key = os.environ.get('BATCH_GSA_KEY',
                                       '/batch-gsa-key/privateKeyData')
    credentials = google.oauth2.service_account.Credentials.from_service_account_file(
        batch_gsa_key)
    self.gservices = GServices(self.inst_pool.machine_name_prefix,
                               credentials)
def qubits(self):
    """Return the sorted set of qubit indices used by any key string."""
    # TODO: Might want to dynamically memoize this
    occupied = sortedcontainers.SortedSet()
    for string in self.keys():
        occupied.update(string.qubits)
    return occupied
def __init__(self, app, name, machine_name_prefix, is_pool):
    """State for one named instance collection within the batch driver."""
    self.app = app
    self.db: Database = app['db']
    self.compute_client: aiogoogle.GoogleComputeClient = self.app[
        'compute_client']
    self.zone_monitor: ZoneMonitor = self.app['zone_monitor']

    self.name = name
    # Collection name is baked into every machine name.
    self.machine_name_prefix = f'{machine_name_prefix}{self.name}-'
    self.is_pool = is_pool

    self.name_instance: Dict[str, Instance] = {}

    self.live_free_cores_mcpu_by_zone: Dict[
        str, int] = collections.defaultdict(int)
    # Instances ordered stalest-first for the periodic monitor.
    self.instances_by_last_updated = sortedcontainers.SortedSet(
        key=lambda instance: instance.last_updated)

    self.n_instances_by_state = {
        'pending': 0,
        'active': 0,
        'inactive': 0,
        'deleted': 0
    }

    # Aggregates over pending and active instances.
    self.live_free_cores_mcpu = 0
    self.live_total_cores_mcpu = 0

    # Configuration; populated later.
    self.boot_disk_size_gb = None
    self.max_instances = None
    self.max_live_instances = None

    self.task_manager = aiotools.BackgroundTaskManager()
def __init__(self, n: int, entries: List[List[int]]):
    """Index rental entries by (shop, movie) price and by movie availability."""
    # (shop, movie) -> price.
    self.t_price = {}
    # movie -> sorted set of (price, shop) for unrented copies.
    self.t_valid = defaultdict(sortedcontainers.SortedSet)
    # Currently rented copies.
    self.t_rent = sortedcontainers.SortedSet()
    for shop, movie, price in entries:
        self.t_price[(shop, movie)] = price
        self.t_valid[movie].add((price, shop))
def __init__(self, k8s_client: kube.client.CoreV1Api):
    """Expiring cache of address lists fetched through the k8s client."""
    self.k8s_client: kube.client.CoreV1Api = k8s_client
    self.entries: Dict[str, CacheEntry[List[AddressAndPort]]] = dict()
    # Per-key locks serialize concurrent refreshes of the same entry.
    self.locks: DefaultDict[str, asyncio.Lock] = defaultdict(asyncio.Lock)
    # Keys ordered by their entry's expiry time, soonest first.
    self.keys = sortedcontainers.SortedSet(
        key=lambda key: self.entries[key].expire_time)
    self.task_manager = aiotools.BackgroundTaskManager()
def __copy__(self): new = Actions() # Copy items new.data = {n: copy.copy(v) for n, v in self.data.items()} # Explicitly clear operators (replacement forbidden by the API) # todo: Avoid using private methods. But now this is the simplest wat to clean values for action in new.data.values(): action.my_operator = None if isinstance(action, Receive) or isinstance(action, Dispatch): # They contain references to other processes in peers action.parameters = copy.copy(action.parameters) elif isinstance(action, Parentheses) or isinstance(action, Subprocess): action._action = None elif isinstance(action, Concatenation): action._actions = collections.deque() elif isinstance(action, Choice): action._actions = sortedcontainers.SortedSet() # Set new references for action in self.data.values(): if isinstance(action, Parentheses) or isinstance(action, Subprocess): new.data[action.name].action = new.data[action.action.name] elif isinstance(action, Concatenation): new.data[action.name].actions = [new.data[act.name] for i, act in enumerate(action.actions)] elif isinstance(action, Choice): new.data[action.name].actions = {new.data[act.name] for act in action.actions} return new
def unused_labels(self):
    """Return the sorted set of labels never referenced by any action."""
    used_labels = set()

    def collect(expr):
        # Record every label name matched in the expression.
        for m in self.label_re.finditer(expr):
            used_labels.add(m.group(1))

    for action in self.actions.filter(include={Action}):
        if isinstance(action, (Call, CallRetval)) and action.callback:
            collect(action.callback)
        if isinstance(action, Call):
            for param in action.parameters:
                collect(param)
        if isinstance(action, (Receive, Dispatch)):
            for param in action.parameters:
                collect(param)
        if isinstance(action, CallRetval) and action.retlabel:
            collect(action.retlabel)
        if isinstance(action, Block):
            for statement in action.statements:
                collect(statement)
        if action.condition:
            for statement in action.condition:
                collect(statement)
    return sortedcontainers.SortedSet(self.labels.keys()).difference(used_labels)
def add_function(self, func, scope, fs, deps, cfiles):
    """Build a Function descriptor for `func` from the collected data `fs`
    and register it against the C files that declare it.

    :param func: Function name.
    :param scope: File where the function is defined, or 'unknown'.
    :param fs: Nested dict of collected function data: fs[scope][func].
    :param deps: Mapping of files to the files that depend on them.
    :param cfiles: Collection of target C files.
    """
    fs_desc = fs[scope][func]
    if scope == 'unknown':
        # No known definition: take the signature from the first declaration.
        key = list(fs_desc['declarations'].keys())[0]
        signature = fs_desc['declarations'][key]['signature']
        func_intf = Function(func, signature)
        # Do not set definition file since it is out of scope of the target program fragment
    else:
        signature = fs_desc.get('signature')
        func_intf = Function(func, signature)
        func_intf.definition_file = scope

    # Set static
    if fs_desc.get('type') == "static":
        func_intf.static = True
    else:
        func_intf.static = False

    # Add declarations: the definition file plus any declaring files that are
    # known and relevant to the dependency graph.
    files = sortedcontainers.SortedSet()
    if func_intf.definition_file:
        files.add(func_intf.definition_file)
    if fs_desc['declarations']:
        files.update({
            f for f in fs_desc['declarations']
            if f != 'unknown' and f in deps
        })
    for file in files:
        # Non-C files that declare the function are treated as headers.
        if file not in cfiles and file not in func_intf.header_files:
            func_intf.header_files.append(file)
        for cfile in deps[file]:
            self.set_source_function(func_intf, cfile)
            func_intf.declaration_files.add(cfile)
def add_image(self, image):
    '''
    Adds a new image name into the trace and gives it an empty sorted set.
    '''
    self.images.append(image)
    self.set_per_image[image] = sc.SortedSet()
def __init__(self, periodType=AbstractPeriod, periods=None):
    """Hold a set of periods kept sorted by timestamp (index 0 = newest)."""
    # periodType is the class used when emplacing new periods.
    self.periodType = periodType
    self.periods = sc.SortedSet(periods)
def qubits(self):
    """SortedSet: The unique occupied qubit indices over all strings in the Pauli object."""
    # TODO: Might want to dynamically memoize this
    occupied = sortedcontainers.SortedSet()
    for string in self.keys():
        occupied.update(string.qubits)
    return occupied
def __init__(self):
    """Set up timer bookkeeping and the (not yet started) worker thread."""
    self._timers = sc.SortedSet()
    self._cancelling_timers = {}
    self._lock = threading.Lock()
    # Signals the worker thread that the timer set changed.
    self._wakeup_queue = Queue.Queue()
    self._thr = threading.Thread(target=self._check_and_execute)
    self._thr.daemon = True  # do not block interpreter exit
    self._started = False
def __init__(self, pool):
    """
    Waiting room tracking the jobs queued for one worker pool, with a fixed
    budget of tokens that bounds how many jobs may run at once.

    Parameters
    ----------
    pool: multiprocessing.pool.Pool (or the multiprocessing.pool.ThreadPool subclass)
    """
    self._alive = True
    # `global_priorities` contains all the methods necessary to establish the priority of a
    # `prod_job` or a `cache_job`. This object is updated by
    # `receive_global_priorities_update` as soon as there is an update.
    self._global_priorities = dummy_priorities  # type: Priorities

    # Tokens *****************************************************
    # One token per pool process plus OVERLOAD extras.
    pool_id = id(pool)
    self._pool_id = pool_id
    self._token_count = pool._processes + OVERLOAD
    short_id = short_id_of_id(pool_id)
    self._tokens = {
        # This has no particular meaning, the only hard requirement is just to have
        # different tokens in a pool.
        _PoolToken(short_id * 1000 + i)
        for i in range(self._token_count)
    }
    self._all_tokens = set(self._tokens)

    # Rank 0 jobs ************************************************
    self._jobs_maxprio = set()  # type: Set[MaxPrioJobWaiting]

    # Rank 1 jobs ************************************************
    # For low complexity operations
    self._jobs_prod = set()  # type: Set[ProductionJobWaiting]
    self._jobs_cache = set()  # type: Set[CacheJobWaiting]
    # Priority bookkeeping: per-job priority tuple, the sorted set of all
    # priorities in use, and the reverse map priority -> jobs.
    self._dict_of_prio_per_r1job = {
    }  # type: Dict[PoolJobWaiting, Tuple[int, ...]]
    self._sset_of_prios = sortedcontainers.SortedSet()
    self._dict_of_r1jobs_per_prio = {
    }  # type: Dict[Tuple[int], Set[PoolJobWaiting]]
    self._prod_jobs_of_query = {
    }  # type: Dict[CachedQueryInfos, Set[ProductionJobWaiting]]
    self._cache_jobs_of_cache_fp = {
    }  # type: Dict[Tuple[uuid.UUID, Footprint], Set[CacheJobWaiting]]

    # Shortcuts **************************************************
    # For fast iteration / cleanup
    self._job_sets = [
        self._jobs_maxprio, self._jobs_prod, self._jobs_cache
    ]
    self._data_structures = self._job_sets + [
        self._dict_of_prio_per_r1job,
        self._sset_of_prios,
        self._dict_of_r1jobs_per_prio,
        self._prod_jobs_of_query,
        self._cache_jobs_of_cache_fp,
    ]
    self.address = '/Pool{}/WaitingRoom'.format(self._pool_id)
def __init__(self, driver):
    """Instance-pool configuration derived from module-level worker settings."""
    self.driver = driver
    self.worker_type = WORKER_TYPE
    self.worker_cores = WORKER_CORES

    # GB of memory per core by GCE machine family.
    if WORKER_TYPE == 'standard':
        memory_per_core = 3.75
    elif WORKER_TYPE == 'highmem':
        memory_per_core = 6.5
    else:
        assert WORKER_TYPE == 'highcpu', WORKER_TYPE
        memory_per_core = 0.9
    # Reserve 10% of memory for overhead.
    self.worker_memory = 0.9 * memory_per_core

    self.worker_capacity = 2 * WORKER_CORES
    self.worker_disk_size_gb = WORKER_DISK_SIZE_GB
    self.pool_size = POOL_SIZE
    self.max_instances = MAX_INSTANCES

    log.info(f'WORKER_CORES={WORKER_CORES}')
    log.info(f'WORKER_TYPE={WORKER_TYPE}')
    log.info(f'WORKER_DISK_SIZE_GB={WORKER_DISK_SIZE_GB}')
    log.info(f'POOL_SIZE={POOL_SIZE}')
    log.info(f'MAX_INSTANCES={MAX_INSTANCES}')

    self.token = new_token()
    self.machine_name_prefix = f'batch2-worker-{BATCH_NAMESPACE}-'

    self.worker_logs_directory = f'gs://{self.driver.batch_bucket}/{BATCH_NAMESPACE}/{INSTANCE_ID}'
    log.info(f'writing worker logs to {self.worker_logs_directory}')

    # All instances, unhealthy and stalest first.
    self.instances = sortedcontainers.SortedSet(
        key=lambda inst: (inst.healthy, inst.last_updated))

    # for active instances only
    self.instances_by_free_cores = sortedcontainers.SortedSet(
        key=lambda inst: inst.free_cores)

    self.n_pending_instances = 0
    self.n_active_instances = 0

    # for pending and active
    self.free_cores = 0

    self.token_inst = {}
def __init__(self): """Create a new MemoryAccessor.""" super(_MemoryAccessor, self).__init__("memory") self._metric_to_points = collections.defaultdict( sortedcontainers.SortedDict) self._name_to_metric = {} self._directory_names = sortedcontainers.SortedSet() self.__downsampler = _downsampling.Downsampler() self.__delayed_writer = _delayed_writer.DelayedWriter(self)
def kEmptySlots(self, bulbs: List[int], K: int) -> int:
    """Return the 1-based day when two lit bulbs first have exactly K unlit
    slots between them, or -1 if that never happens."""
    lit = sortedcontainers.SortedSet()
    for day, bulb in enumerate(bulbs):
        lit.add(bulb)
        pos = lit.index(bulb)
        # Compare against the nearest lit neighbor on each side.
        if pos > 0 and bulb - lit[pos - 1] == K + 1:
            return day + 1
        if pos + 1 < len(lit) and lit[pos + 1] - bulb == K + 1:
            return day + 1
    return -1
def __init__(self, edges=None, nodes=None, frequency=1, start=None, end=None):
    """
    Instanciate a dynamic graph

    A start and end dates can be used to give a "duration" to the graph
    independently from its nodes and edges (for instance, to study activity
    during a whole year, the graph might start on January 1st at 00:00 while
    the first recorded activity occurs in the afternoon or on another day)

    Fixes: use `is None` instead of `== None` for sentinel checks and remove
    a duplicated nested `if start == None or end == None:` in the edges
    branch; behavior is otherwise unchanged.

    :param start: set a start time, by default will be the first added time
    :param end: set an end time, by default will be the last added time
    :param frequency: minimal time difference between two observations. Default: 1
    :param edges: data to initialize the dynamic graph, dictionary {(n1,n2):[int]}.
                  Keys are edges, time is ordered list of int
    :param nodes: data to initialize the dynamic graph, dictionary {n:time}.
                  Keys are nodes, time is Intervals object (see interval graph)
    """
    self._start = start
    self._end = end
    self.frequency(frequency)

    # Sentinels so that min/max comparisons work before any time is added.
    if start is None:
        self._start = math.inf
    if end is None:
        self._end = -math.inf

    self._graph = nx.Graph()

    if nodes is not None:
        self._graph.add_nodes_from(nodes.keys())
        nx.set_node_attributes(self._graph, nodes, "t")
        if start is None or end is None:
            # Derive missing bounds from the node interval endpoints.
            times = set([x.start() for x in nodes.values()] +
                        [x.end() for x in nodes.values()])
            if start is None:
                self._start = min(times)
                start = min(times)
            if end is None:
                self._end = max(times)
                end = max(times)

    if edges is not None:
        self._graph.add_weighted_edges_from(
            [(k[0], k[1], sortedcontainers.SortedSet(v))
             for k, v in edges.items()], "t")
        if start is None or end is None:
            # Derive missing bounds from the edge presence times.
            times = set([min(x) for x in edges.values()] +
                        [max(x) for x in edges.values()])
            if start is None:
                self._start = min(times)
            if end is None:
                self._end = max(times)
def clear_cache(time): """ Deletes old information from interpretation cache. All interpretations with time_point < time are removed, and therefore cannot be eligible for merging. """ global _INCACHE _INCACHE = sortedcontainers.SortedSet( (i for i in _INCACHE if i.time_point >= time), key=lambda i: (len(i.observations), len(i.focus), len( i.unintelligible), len(i.abstracted)))
def nodes_interactions(self):
    """Map each node to the sorted set of times at which it interacts."""
    interactions = {}
    for edge, presence in self.edge_presence().items():
        endpoints = list(edge)
        # Credit the interaction times to both endpoints of the edge.
        for node in (endpoints[0], endpoints[1]):
            interactions.setdefault(node, [])
            interactions[node] += list(presence)
    for node, times in interactions.items():
        interactions[node] = sortedcontainers.SortedSet(times)
    return interactions
def test_empty_circuit(self):
    # A freshly constructed circuit has no gates, empty containers, and the
    # documented degenerate bounds (max_time/max_qubit of -1).
    circuit = quasar.Circuit()
    self.assertEqual(circuit.ngate, 0)
    self.assertEqual(circuit.ngate1, 0)
    self.assertEqual(circuit.ngate2, 0)
    self.assertEqual(circuit.ngate3, 0)
    self.assertEqual(circuit.ngate4, 0)
    self.assertEqual(circuit.ngate_nqubit(0), 0)
    self.assertEqual(circuit.ngate_nqubit(1), 0)
    self.assertEqual(circuit.ngate_nqubit(2), 0)
    self.assertEqual(circuit.ngate_nqubit(3), 0)
    self.assertEqual(circuit.ngate_nqubit(4), 0)
    self.assertEqual(circuit.max_gate_nqubit, 0)
    self.assertEqual(circuit.max_gate_ntime, 0)
    # Empty window: min is 0 while max is -1.
    self.assertEqual(circuit.min_time, 0)
    self.assertEqual(circuit.max_time, -1)
    self.assertEqual(circuit.ntime, 0)
    self.assertEqual(circuit.ntime_sparse, 0)
    self.assertEqual(circuit.min_qubit, 0)
    self.assertEqual(circuit.max_qubit, -1)
    self.assertEqual(circuit.nqubit, 0)
    self.assertEqual(circuit.nqubit_sparse, 0)
    self.assertFalse(circuit.is_controlled)
    self.assertFalse(circuit.is_composite)
    # Bookkeeping containers exist and are empty sorted containers.
    self.assertIsInstance(circuit, quasar.Circuit)
    self.assertIsInstance(circuit.gates, sortedcontainers.SortedDict)
    self.assertIsInstance(circuit.times, sortedcontainers.SortedSet)
    self.assertIsInstance(circuit.qubits, sortedcontainers.SortedSet)
    self.assertIsInstance(circuit.times_and_qubits,
                          sortedcontainers.SortedSet)
    self.assertEqual(circuit.gates, sortedcontainers.SortedDict([]))
    self.assertEqual(circuit.times, sortedcontainers.SortedSet([]))
    self.assertEqual(circuit.qubits, sortedcontainers.SortedSet([]))
    self.assertEqual(circuit.times_and_qubits,
                     sortedcontainers.SortedSet([]))
    self.assertIsInstance(str(circuit), str)
def polarize_controls(circuit, ):
    """
    Add necessary X corrector gates to polarize all ControlledGate controls to True.

    Preconditions: Non-composite Circuit
    Transformation class: Idempotent
    """
    if circuit.is_composite:
        raise RuntimeError('circuit is composite')

    # Which starting times need X gates?
    # A time needs expanding when a ControlledGate there has any False control.
    expanders = sortedcontainers.SortedSet()
    for key, gate in circuit.gates.items():
        times, qubits = key
        if isinstance(gate, ControlledGate) and not all(gate.controls):
            expanders.add(times[0])

    # Map of old time -> new time with room for X corrector gates
    # Expanded times occupy 3 slots (X before, gate, X after); others 1.
    time_map = []
    counter = circuit.min_time
    for time in circuit.times:
        if time in expanders:
            time_map.append(counter + 1)
            counter += 3
        else:
            time_map.append(counter)
            counter += 1

    # Circuit with room for X corrector gates
    circuit2 = circuit.slice(
        times=circuit.times,
        times_to=time_map,
    )

    # Perform the control polarization and mark X correctors
    X_gate_positions = []
    for key, gate in circuit2.gates.items():
        times, qubits = key
        if not isinstance(gate, ControlledGate) or all(gate.controls):
            continue
        for index, control in enumerate(gate.controls):
            if control:
                continue
            # Mark X correctors: one before and one after on the control qubit.
            X_gate_positions.append((times[0] - 1, qubits[index]))
            X_gate_positions.append((times[0] + 1, qubits[index]))
        # Polarize the controls (mutates the gate in circuit2 in place)
        gate.controls = [True] * gate.ncontrol

    # Add the X correctors
    for time, qubit in X_gate_positions:
        circuit2.X(qubit=qubit, times=time)
    return circuit2