Example #1
    def __init__(self,
                 logger,
                 env,
                 packageName,
                 jobName,
                 endpoint,
                 jobMetaData,
                 ocpCertFile,
                 sendToKafka,
                 parameters,
                 protocolType,
                 protocols,
                 shellConfig=None):
        """Initialize supporting variables for job execution (I/O and status).

		Arguments:
		  logger             : handler for the client log
		  jobName (str)      : name of the discovery/integration job
		  endpoint (str)     : target endpoint for client
		  jobMetaData (dict) : input parameters for the job
		  sendToKafka (dict) : function for sending results to kafka topic

		"""
        self.jobStatus = 'UNKNOWN'  ## value in statusEnums
        self.statusEnums = {
            1: 'SUCCESS',
            2: 'WARNING',
            3: 'FAILURE',
            4: 'UNKNOWN',
            5: 'INFO'
        }
        self.jobMessages = set()  ## set instead of list, avoids duplicate msgs
        self.setupComplete = False
        self.startTime = datetime.datetime.now()
        self.packageName = packageName
        self.jobName = jobName
        self.results = Results(jobName,
                               realm=jobMetaData.get('realm', 'default'))
        self.endpoint = endpoint
        self.env = env
        self.jobMetaData = jobMetaData
        self.ocpCertFile = ocpCertFile
        self.parameters = parameters
        self.protocolType = protocolType
        self.protocols = protocols
        self.shellConfig = shellConfig
        self.clearMessages()
        self.endTime = None
        self.sendToKafka = sendToKafka
        ## Create a log utility that, in addition to the standard log functions,
        ## conditionally reports messages based on the setting of a job parameter
        self.logger = jobParameterizedLogger(
            logger, self.parameters.get('printDebug', False))
        if self.jobMetaData.get('updateParameters', False):
            ## Update parameters for endpoint (globals, config group, OS type)
            self.setParameters()
        self.setupComplete = True
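A minimal, hypothetical instantiation of this constructor; the enclosing class name is not shown above, so Job and every argument value below are illustrative only.

    # Hypothetical usage; "Job" stands in for the unshown class name.
    job = Job(logger=clientLogger,              # assumed pre-existing logger
              env='dev',
              packageName='contentPackage',
              jobName='endpointDiscovery',
              endpoint='10.0.0.5',
              jobMetaData={'realm': 'default', 'updateParameters': False},
              ocpCertFile='/path/to/cert.pem',
              sendToKafka=sendToKafkaFn,        # assumed callable
              parameters={'printDebug': True},
              protocolType='snmp',
              protocols={},
              shellConfig=None)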
Example #2
def matrixRepresentation():
    if request.method == 'POST':
        r = Results(c.circuit)
        try:
            matrix = r.matrixRepresentation(decimals=4)
            matrix = c.reversedMatrix(matrix, c.num_qubits)
            return jsonify({"error": False, "matrixRepresentation": matrix})
        except Exception:
            return jsonify({"error": True})
Example #3
 def conditionalLoop(self, column, index):
     r = Results(self.circuit)
     newInitialization = Normalization.buildLoopCond(
         self.num_qubits, column, r.reversedStatevector)
     if all(ele == 0 for ele in newInitialization):
         raise Exception("conditional loop at column " + str(index) +
                         " will never be satisfied (infinite loop)")
     self.circuit = QuantumCircuit(self.num_qubits, self.num_qubits)
     self.circuit.initialize(newInitialization,
                             list(range(self.num_qubits - 1, -1, -1)))
Example #4
    def build(self):

        sm = ScreenManager()
        sm.add_widget(Login(name='login'))
        sm.add_widget(MainMenu(name='main_menu'))
        sm.add_widget(Profile(name='profile'))
        sm.add_widget(Results(name='results'))
        sm.add_widget(Tests(name='tests'))

        return sm
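For context, build() here is the standard entry point of a Kivy App subclass; a minimal sketch of the enclosing application, assuming a hypothetical app class name:

    from kivy.app import App

    class QuizApp(App):      # hypothetical name; its build() is the method shown above
        def build(self):
            ...

    if __name__ == '__main__':
        QuizApp().run()      # Kivy displays the ScreenManager returned by build()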
Example #5
def test_results_sorting(driver):
    driver.maximize_window()
    driver.get("https://shop.by/")
    actions = ActionChains(driver)
    laptops = get_catalog_item(driver, COMPUTERS_MENU_ITEM,
                               LAPTOP_SUBMENU_ITEM)
    actions.move_to_element(laptops).click().perform()
    actions.reset_actions()

    if "noutbuki" not in driver.current_url:
        raise Exception("Redirect to /%s page doesn't happened" % "noutbuki")

    set_price_range(driver, min_price, max_price)
    manufacture_filter = BaseFilterMenu(driver, MANUFACTURE_EXPAND_XPATH)
    manufacture_filter.expand()
    manufacture_filter.click_on_items(MANUFACTORY_LIST)

    time.sleep(COMMON_TIMEOUT)

    display_size_filter = BaseFilterMenu(driver, DISPLAY_SIZE_EXPAND_XPATH)
    display_size_filter.expand()
    display_size_filter.click_on_items(DISPLAY_SIZE_LIST)

    time.sleep(COMMON_TIMEOUT)

    filter_url = driver.current_url
    apply_button = driver.find_element_by_class_name(SHOW_RES_BUTTON)
    apply_button.click()

    if driver.current_url == filter_url:
        raise Exception("Redirect on result page is not happened.")

    sorter = SortMenu(driver, SORTER_XPATH, SORTER_ITEMS)
    sorter.select("price_asc")

    results = Results(driver, RESULTS_XPATH)
    assert results, "Got empty result list"
    print("%s results on the page" % len(results))

    flink = result_item_link(results[0])

    sorter.select("price_desc")
    time.sleep(3)

    paginator = Paginator(driver, PAGINATOR_XPATH)
    paginator.move_to_the_last()

    time.sleep(COMMON_TIMEOUT)

    llink = result_item_link(results[-1])
    print(flink)
    print(llink)
    assert flink == llink, "First item in results sorted by price ascending is not equal " \
                           "to the last item in results sorted by price descending"
Example #6
 def search_concept_predicate_object(self, concept, predication, object):
     concept_node = self.graph.get_node_by_name(concept)
     object_node = self.graph.get_node_by_name(object)
     self.graph.get_edge_by_predication(concept_node, predication, object_node)
     self.graph.load_nodes_from_source(object_node, max_level=1)
     self.graph.load_source_edges(object_node)
     self.graph.load_source_edges(concept_node)
     # Get the results
     results = Results(self.config, self.graph)
     results.create_table(concept_node)
     return results
Example #7
 def __init__(
     self, ideology_low, ideology_high, no_party
 ):  # Maybe make ideology modular so it could be multi-dimensional
     self.ideology_low = ideology_low
     self.ideology_high = ideology_high
     self.no_agent = 169
     self.no_party = no_party
     #self.agent = []
     self.party = []  #store in environment
     self.environment = None
     self.results = Results()
Example #8
	def is_receptacle_overloaded(self, receptacle_id):
		receptacle = self.e_model.get_receptacle_by_id(receptacle_id)
		total_amp_load = receptacle.get_amp_load()
		amp_rating = receptacle.get_amp_rating()
		res = (total_amp_load > amp_rating)
		if DEBUG:
			print('Support.is_receptacle_overloaded() total_amp_load:{0} > amp_rating:{1} => {2}'.format(total_amp_load, amp_rating, res))

		r = Results()
		r.set_object_id(res)
		return r
Example #9
    def __init__(self, data_set='ny'):
        self.data_set = data_set
        if data_set[0] == '.':
            self.data = DataSet(pat=data_set[1:])
        else:
            self.data = DataSet(data_set)

        self.results = Results()
        self.tests = {}
        self.end = 0
        self.first = None
        self.snd = None
Example #10
 def test_get_results_paginated_endpoint_one_page(self, get_mock):
     self.assertEqual(Results([{
         'a': 1
     }, {
         'b': 2
     }]), TVmaze.get_results('shows'))
     self.assertEqual(2, get_mock.call_count)
     get_mock.assert_has_calls([
         call('http://api.tvmaze.com/shows?page=0'),
         call('http://api.tvmaze.com/shows?page=1'),
     ],
                               any_order=False)
Example #11
	def is_light_string_overloaded(self, light_string_id):
		ls = self.e_model.get_light_string_by_id(light_string_id)
		res = False
		if ls is not None:
			total_amp_load = ls.get_amp_load()
			amp_rating = ls.get_amp_rating()
			res = (total_amp_load > amp_rating)
			if DEBUG:
				print('Support.is_light_string_overloaded() total_amp_load:{0} > amp_rating:{1} => {2}'.format(total_amp_load, amp_rating, res))
		r = Results()
		r.set_object_id(res)
		return r
Example #12
    def bt(self, s):
        """
        Main method for backtesting.
        :return: Results object
        """

        print('Launching backtest')

        # Initialization
        for st in self.strats:
            st.prices = PU.get_prices(st, s)
            st.ind, st.signals = st.get_ind_and_signals()
        [earliest_start, latest_end] = self.timespan()
        bt_timespan = pd.date_range(s.start_dt_utc, latest_end, freq=s.freq)
        results = Results(self.strats, bt_timespan)
        schedules = []

        if s.instr != 'crypto':
            for st in self.strats:
                schedules.append(st.cal.schedule(earliest_start, latest_end))

        print('Backtest started!')
        now = dt.datetime.now()

        # Signal algos
        for st in self.strats:
            for algo in s.signal_algos:
                st.signals = algo.treat_signal(st, s)

        # Proper backtest bars
        for bar in bt_timespan:
            if s.lb.do_lb:
                self.lb_bt()
            for st_index, st in enumerate(self.strats):
                # default to 0 (no signal) on bars the strategy has no entry for
                signal = st.signals.get(bar, 0)
                trade = self.get_trade(signal, bar, st_index, s)
                if trade != "No trade":
                    self.dashboard.update_trade(trade)
                    results.update_trade(trade)
            self.dashboard.update_bar(self.strats, bar)
            results.update_bar(self.dashboard, self.strats, bar)

        elapsed = dt.datetime.now() - now
        print('Backtest performed. Computation time: ' + str(elapsed))
        print('Backtest contained ' + str(len(self.strats)) + ' strategies.')

        return results
Example #13
    def ASTAR(self):
        # Time how long it takes to get a path
        results = Results()
        start = time.perf_counter()  # time.clock() was removed in Python 3.8
        results.path = self.__ASTAR(results)
        end = time.perf_counter()

        # Determine results
        results.runningTime = end - start
        if results.path:
            results.problemsSolved = 1
            results.avgPathLen = self.getPathDistance(results.path)
        return results
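A short usage sketch, assuming ASTAR() lives on a path-planner object (the planner name is hypothetical):

    planner = AStarPlanner()    # hypothetical owner of the ASTAR() method above
    results = planner.ASTAR()
    if results.problemsSolved:
        print('Found a path of length %.2f in %.4f s'
              % (results.avgPathLen, results.runningTime))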
Example #14
    def get_results(cls, endpoint: Union[None, str], data_file_path: Union[None, str]=None) -> Results:
        '''Returns an instance of Results fetched from either the specified endpoint or file.
        Note that exactly one of (`endpoint`, `data_file_path`) must be a string and the other must be None.'''
        # Do we want to load the data from a file instead of downloading it?
        if data_file_path is not None:
            assert endpoint is None
            return cls.get_results_from_file(data_file_path)

        assert endpoint is not None
        sanitized_endpoint = cls._sanitize_endpoint(endpoint)
        uri = f'{cls.API_BASE_URL}/{sanitized_endpoint}'

        # This is the only paginated API endpoint.
        if endpoint == 'shows':
            return Results(cls._get_paginated_results(uri))

        # Endpoint is not paginated.
        results_page = cls._get_results_page(uri)
        if not results_page.is_found:
            # TODO: Use a more appropriate exception name or define a custom one.
            raise ValueError(f'Resource "{uri}" not found')
        return Results(results_page.results)
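A usage sketch of the exactly-one-of contract documented above (the file name is illustrative):

    # Fetch live data; 'shows' is the one paginated endpoint.
    shows = TVmaze.get_results('shows')

    # Or load a previously saved payload instead; endpoint must then be None.
    cached = TVmaze.get_results(None, data_file_path='shows.json')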
Example #15
 def search(self, criteria):
     """ ElasticSearch simple search (only query lesser than 10000 results)
      :params string criteria: simple query criterias
      :returns: objects in elasticsearch result
      """
     results = Results(self.logger,
                       current=str(inspect.stack()[0][1]) + "." +
                       str(inspect.stack()[0][3]))
     result = self.es.search(index=self.index,
                             q=criteria,
                             request_timeout=self.timeout)
     results.add_success(result)
     return [result, results.results]
Example #16
	def is_outlet_overloaded(self, outlet_id):
		o = self.e_model.get_outlet_by_id(outlet_id)
		total_amp_load = o.get_amp_load()
		amp_rating = (0.8 * o.get_amp_rating())
		max_amp_load = o.get_max_receptacle_amp_load()

		too_much_total_amp_load = (total_amp_load > amp_rating)
		too_much_amp_load_one_receptacle = (max_amp_load > amp_rating)
		res = too_much_total_amp_load or too_much_amp_load_one_receptacle
		if DEBUG:
			print('Support.is_outlet_overloaded() total_amp_load:{0}, max_amp_load:{1}, amp_rating:{2} => {3}'.format(total_amp_load, max_amp_load, amp_rating, res))
		r = Results()
		r.set_object_id(res)
		return r
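As a worked example of the 80% derating above: a 15 A outlet derates to 0.8 * 15 = 12 A, so a combined load above 12 A, or any single receptacle drawing more than 12 A, flags the outlet as overloaded.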
Example #17
def alter(building, location, standard):
	sys.stdout.write('%s, %s, %s\n' % (building, location, standard))
	row = Row.objects.get(set=set, building=building, standard=standard, location=location)
	costs = row.costs.all()
	new = min(map(residual, costs))
	sys.stdout.write('\t')
	for cost in costs:
		r = Results(building, standard, location, cost.period, carbon, '0.03')
		old = r.base_residual.cost()
		cost.lifecycle_cost -= old
		cost.lifecycle_cost += new
		cost.save()
		sys.stdout.write('%d,' % int(cost.period.item()))
	sys.stdout.write('Success\n')
Example #18
 def update(self, data, item_id, dtype="doc", parent=None):
     """ Update existing object
     :params dic data: object data to update
     :params string item_id: id of object to update
     :params string dtype: object type **source** or **doc**
     :params string parent: parent unique identifier (mandatory for type doc; it's the source id)
     :returns: elasticsearch updated object
     """
     results = Results(self.logger, 1,
                       str(inspect.stack()[0][1]) + "." + str(inspect.stack()[0][3]))
     # With a parent-child relationship you must pass the parent whenever you
     # access a child, since routing depends on the parent.
     # JSON-serialize with a special date parser, otherwise ES indexing fails.
     result = self.es.update(index=self.index, doc_type=dtype, id=item_id,
                             parent=parent, routing=parent,
                             body=json.dumps(data, default=self.serializer.to_json),
                             ignore=400)
     results.add_success(result["_id"])
     return results.results
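A hypothetical call illustrating the parent routing described in the comment ('storage' and both ids are illustrative):

    updated = storage.update({'title': 'revised'}, item_id='doc-42',
                             dtype='doc', parent='source-7')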
Example #19
    def __init__(self, config_element):
        pysage.Arena.__init__(self, config_element)

        self.exploitation_rate = 1 if config_element.attrib.get(
            "exploitation_rate") is None else float(
                config_element.attrib["exploitation_rate"])

        self.timestep_length = 0.5 if config_element.attrib.get(
            "timestep_length") is None else float(
                config_element.attrib.get("timestep_length"))

        self.integration_step = 0.001 if config_element.attrib.get(
            "integration_step") is None else float(
                config_element.attrib.get("integration_step"))

        self.size_radius = 0.7506 if config_element.attrib.get(
            "size_radius") is None else float(
                config_element.attrib.get("size_radius"))

        self.results_filename = "CRWLEVY" if config_element.attrib.get(
            "results") is None else config_element.attrib.get("results")
        self.results = Results()

        # initialise num runs from the configuration file
        nnruns = config_element.attrib.get("num_runs")
        if nnruns is not None:
            self.num_runs = int(nnruns)
        else:
            self.num_runs = 1

        #  size_radius
        ssize_radius = config_element.attrib.get("size_radius")
        if ssize_radius is not None:
            self.dimensions_radius = float(ssize_radius)
        else:
            self.dimensions_radius = float(self.dimensions.x / 2.0)

        # initialise targets from the configuration file
        self.targets = []
        self.num_targets = 0
        for target_element in config_element.iter("target"):  # python 2.7
            num_targets_to_configure = 1 if target_element.attrib.get(
                "num_elements") is None else int(
                    target_element.attrib.get("num_elements"))
            for i in range(num_targets_to_configure):
                new_target = Target(target_element)
                self.targets.append(new_target)
                new_target.id = self.num_targets
                self.num_targets += 1
                print "Initalised target", new_target.id, "(quality value:", new_target.value, ")"
Example #20
 def get(self, item_id, parent=None):
     """ Get an object from its Elasticsearch id
     :params string item_id: unique identifier string
     :returns: elasticsearch object
     """
     results = Results(self.logger, 1,
                       str(inspect.stack()[0][1]) + "." + str(inspect.stack()[0][3]))
     if parent is not None:
         result = self.es.get(index=self.index, doc_type='_all', routing=parent,
                              id=item_id, ignore=[400, 404])
     else:
         result = self.es.get(index=self.index, doc_type='_all', id=item_id,
                              ignore=[400, 404])
     results.add_success({"id": result["_id"]})
     results.finish()
     return [result, results.results]
Example #21
    def __init__(self, logger, redisHost, redisPort):
        """
        Class constructor.

        :param logger logger: the logger
        :param str redisHost: Redis host where the Redis Q is running
        :param int redisPort: Redis port where the Redis Q is running
        """
        self.logger = logger
        self.config = Config()
        self.redis_host = redisHost
        self.redis_port = redisPort
        self.results = Results(logger, redisHost, redisPort)
        self.workloadTracker = WorkloadTracker(self.logger)
Example #22
    def bulk(self, doc_list):
        """ Simple elasticsearch bulk  wrapper

        :params doc doc_list of elasticsearch docs to update
        :returns: elasticsearch bulk result
        """
        results = Results(
            self.logger, len(doc_list),
            str(inspect.stack()[0][1]) + "." + str(inspect.stack()[0][3]))
        ready_doc_list = []
        for doc in doc_list:
            if "origin" in doc:
                doc['_routing'] = doc['origin']
                doc['_parent'] = doc['origin']
            if "_index" not in doc:
                doc['_index'] = self.index
            if "_type" not in doc:
                doc['_type'] = 'doc'
            if "_id" not in doc:
                if "link" in doc:
                    result_uuid = self.generate_uuid(doc)
                    doc['_id'] = result_uuid[0]
                    results.add_success(result_uuid[0])
                else:
                    result_uuid = self.generate_uuid(doc["doc"])
                    doc['_id'] = result_uuid[0]
                    results.add_success(result_uuid[0])
            if "doc" in doc:
                # JSON-serialize dates with a special parser, otherwise ES indexing fails
                doc["_source"] = json.loads(
                    json.dumps(doc["doc"], default=self.serializer.to_json))
                # remove the original "doc" payload now copied into _source
                doc.pop("doc")

            ready_doc_list.append(doc)
        try:
            for result in helpers.parallel_bulk(self.es, ready_doc_list):
                if int(result[1]["index"]["status"]) >= 200 and int(
                        result[1]["index"]["status"]) < 300:
                    results.add_success(result)
                else:
                    results.add_fail(result)
        except Exception as e:
            results.add_error(e)
        del ready_doc_list, doc_list
        results.finish()
        gc.collect()
        return results.results
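A sketch of a doc_list entry this wrapper accepts, following the key handling above ('storage' and all values are illustrative):

    import datetime

    doc_list = [{
        'doc': {'title': 'hello', 'date': datetime.datetime.utcnow()},
        'link': 'http://example.com/hello',  # used to derive the _id
        'origin': 'source-1',                # becomes _routing and _parent
    }]
    summary = storage.bulk(doc_list)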
Example #23
def main(argv):

    long_option_list = ['plec=', 'srednia=',
                        'zdawalnosc', 'najlepsze=',
                        'regresja', 'wojewodztwo=',
                        'lokalne']
    try:
        options, args = getopt.getopt(argv, 'p:s:zn:rw:x:y:l', long_option_list)
    except getopt.GetoptError as err:
        print(err)
        options = []

    cmd_args_dic = {}
    local_data = False
    data = []

    for opt, arg in options:
        if opt in ('-p', '--plec'):
            cmd_args_dic['gender'] = arg
        if opt in ('-s', '--srednia'):
            cmd_args_dic['average'] = arg
        if opt in ('-z', '--zdawalnosc'):
            cmd_args_dic['pass_rate'] = ''
        if opt in ('-n', '--najlepsze'):
            cmd_args_dic['best'] = arg
        if opt in ('-r', '--regresja'):
            cmd_args_dic['regression'] = ''
        if opt in ('-w', '--wojewodztwo'):
            cmd_args_dic['state'] = arg
        if opt == '-x':
            cmd_args_dic['state1'] = arg
        if opt == '-y':
            cmd_args_dic['state2'] = arg

        # choose local or remote data
        if opt in ('-l', '--lokalne'):
            local_data = True
            if os.path.isfile('matura.db'):
                data = Data.load_from_local_db()
            else:
                data_obj = Data()
                data_obj.load_to_local_db()
                data = data_obj.load_from_local_db()

    if not local_data:
        data = Data().data_dicts

    Results(data, cmd_args_dic)
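A hypothetical invocation of main() (script name and flag values illustrative):

    # Equivalent to: python matura.py --plec k -s 2015 --lokalne
    main(['--plec', 'k', '-s', '2015', '--lokalne'])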
Example #24
     def query(self, criteria):
         """ Elasticsearch complex query (handles queries returning more than 10,000 results)
         :params string criteria: complex query criteria
         :returns: objects in elasticsearch result
         """
         global_results = Results(self.logger,
                                  current=str(inspect.stack()[0][1]) + "." +
                                  str(inspect.stack()[0][3]))

         limit = self.limit
         max_retry = 10

         header_criteria = criteria.copy()
         header_criteria['size'] = 0
         results = self.es.search(index=self.index, body=header_criteria,
                                  request_timeout=self.timeout)

         if "size" in criteria:
             query_size = criteria["size"]
         else:
             query_size = results['hits']['total']

         global_results.set_total(query_size)

         # init loop variables
         results_status = 0
         current_retry = 0
         current_timeout = self.timeout
         timeout_exit = False

         # Workaround: helpers.scan does not work properly with nested queries
         # (SearchParseException[failed to parse search source]).
         # Issue opened: https://github.com/elastic/elasticsearch-py/issues/466
         self.logger.debug("storage.query es.search:" + json.dumps(criteria))
         if query_size < limit or ("topics.score" in json.dumps(criteria)):
             results = self.es.search(index=self.index, body=criteria,
                                      request_timeout=self.timeout, size=query_size)
             global_results.set_total(1)
             global_results.add_success(criteria)
         else:
             self.logger.debug("storage.query helpers.scan:" + json.dumps(criteria))
             # Scroll batch sizes are capped by index.max_result_window (10000),
             # so scan in batches of 1000.
             results_gen = helpers.scan(self.es, query=criteria,
                                        scroll=self.config['ES_SEARCH_CACHING_DELAY'],
                                        preserve_order=True,
                                        request_timeout=self.timeout,
                                        size=1000, raise_on_error=False)
             global_results.add_success(criteria)

             # Materialise the scan generator into the results structure
             for result in results_gen:
                 results['hits']['hits'].append(result)
                 global_results.add_success({'id': result['_id']})
             del results_gen

         gc.collect()
         return [results,global_results]
Example #25
    def __init__(self, config_element):
        pysage.Arena.__init__(self, config_element)

        self.decision_quorum = 1 if config_element.attrib.get("decision_quorum") is None else float(config_element.attrib["decision_quorum"])

        self.timestep_length = 0.5 if config_element.attrib.get("timestep_length") is None else float(config_element.attrib.get("timestep_length"))

        self.time_scale = 0.008 if config_element.attrib.get("time_scale") is None else float(config_element.attrib.get("time_scale"))

        self.decision_step = 100 if config_element.attrib.get("decision_step") is None else int(config_element.attrib.get("decision_step"))

        self.size_radius = 0.7506 if config_element.attrib.get("size_radius") is None else float(config_element.attrib.get("size_radius"))

        # self.steps_run = int(config_element.attrib["steps_run"])
        self.time_incr = float(config_element.attrib["time_incr"])

        # is the experiment finished?
        self.has_converged = False
        self.convergence_time = float('nan')
        self.save_num = 0

        self.results_filename = "CRWLEVY.dat" if config_element.attrib.get("results") is None else config_element.attrib.get("results")
        self.results = Results(config_element)

        # initialise targets from the configuration file
        self.targets = []
        self.num_targets = 0
        for target_element in config_element.iter("target"): # python 2.7
            new_target = Target(target_element)
            self.targets.append(new_target)
            self.num_targets += 1
            print "Initalised target", new_target.id, "(quality value:", new_target.value, ")"


        # initialise num runs from the configuration file
        nnruns = config_element.attrib.get("num_runs")
        if nnruns is not None:
            self.num_runs = int(nnruns)
        else:
            self.num_runs = 1

        #  size_radius
        ssize_radius = config_element.attrib.get("size_radius");
        if ssize_radius is not None:
            self.dimensions_radius = float(ssize_radius)
        else:
            self.dimensions_radius = float(self.dimensions.x/2.0)
Example #26
 def test_get_results_paginated_endpoint_two_pages(self, get_mock):
     self.assertEqual(Results([{
         'b': 2
     }, {
         'd': 6
     }, {
         'cc': 5,
         'E': 11
     }]), TVmaze.get_results('shows'))
     self.assertEqual(3, get_mock.call_count)
     get_mock.assert_has_calls([
         call('http://api.tvmaze.com/shows?page=0'),
         call('http://api.tvmaze.com/shows?page=1'),
         call('http://api.tvmaze.com/shows?page=2'),
     ],
                               any_order=False)
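A side_effect consistent with these asserts, assuming the patched helper returns page objects shaped like results_page in Example #14 (is_found/results are inferred attribute names):

    from types import SimpleNamespace

    get_mock.side_effect = [
        SimpleNamespace(is_found=True, results=[{'b': 2}, {'d': 6}]),
        SimpleNamespace(is_found=True, results=[{'cc': 5, 'E': 11}]),
        SimpleNamespace(is_found=False, results=[]),  # page 2 ends pagination
    ]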
Example #27
    def search(self, search_terms, page_size=25, **kwargs):
        """Perform a search and get back a results object
        """
        url = self.description.get_best_template()
        query = Query(url)

        # set up initial values
        query.searchTerms = search_terms
        query.count = page_size

        # add any additional parameters to the query
        for name, value in kwargs.items():
            setattr(query, name, value)

        # run the query and wrap the response in a Results object
        return Results(query, agent=self.agent)
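A short usage sketch, assuming the surrounding class was built from an OpenSearch description document ('client' and the query term are illustrative):

    results = client.search('weather', page_size=10)
    for item in results:    # Results is assumed to page through hits lazily
        print(item)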
Example #28
	def is_splitter_overloaded(self, splitter_id):
		sp = self.e_model.get_splitter_by_id(splitter_id)
		res = False
		if sp is not None:
			amp_rating = (0.8 * sp.get_amp_rating())
			total_amp_load = sp.get_amp_load()
			max_amp_load = sp.get_max_receptacle_amp_load()

			too_much_total_amp_load = (total_amp_load > amp_rating)
			too_much_amp_load_one_receptacle = (max_amp_load > amp_rating)
			res = too_much_total_amp_load or too_much_amp_load_one_receptacle
			if DEBUG:
				print('Support.is_splitter_overloaded() total_amp_load:{0}, max_amp_load:{1}, amp_rating:{2} => {3}'.format(total_amp_load, max_amp_load, amp_rating, res))
		r = Results()
		r.set_object_id(res)
		return r
Example #29
    def put(self, data, item_id=None, type="doc", source=None):
        """ add an object to storage
         "link" field inside "data" object is used to generate "Unique Idenfifier" for the object.
         This field is mandatory for all objects of any types put in the index.
         :params dic data: data for object creation
         :params string item_id: unique identifier for the object
         :params string type: type of object **source** or **doc**
         :params string source: parent unique identifier (mandatory for type doc, it's source id)
         :returns: elasticsearch object
         """
        results = Results(
            self.logger, 1,
            str(inspect.stack()[0][1]) + "." + str(inspect.stack()[0][3]))

        if item_id is None:
            result_uuid = self.generate_uuid(data)
            item_id = result_uuid[0]
            results.add_success(result_uuid[1])

        try:
            if source is not None:
                data['origin'] = source
                result = self.es.index(index=self.index,
                                       doc_type=type,
                                       id=item_id,
                                       parent=source,
                                       body=json.dumps(
                                           data,
                                           default=self.serializer.to_json),
                                       ignore=[400, 404, 409])
                results.add_success(result)
            else:
                result = self.es.index(index=self.index,
                                       doc_type=type,
                                       id=item_id,
                                       body=json.dumps(
                                           data,
                                           default=self.serializer.to_json),
                                       ignore=[400, 404, 409])
                results.add_success(result)
        except (TransportError, ConnectionError, ConnectionTimeout,
                RequestError) as e:
            results.add_error(e)

        results.finish()
        return results.results
Example #30
def subCircuitCustomGate():
    if request.method == 'POST':
        receivedDictionary = request.get_json()
        c2 = Circuit()
        c2.gatesObjects = c.gatesObjects
        c2.subCircuitSetter(receivedDictionary)
        try:
            circuit = c2.createDraggableCircuit()
        except Exception as e:
            return jsonify({"conditionalLoopError": str(e)})
        r = Results(circuit)
        matrix = r.matrixRepresentation()
        complexMatrix = f.strToComplex(matrix)
        isUnitary = is_unitary_matrix(complexMatrix)
        if isUnitary:
            c.gatesObjects[receivedDictionary["gateName"]] = f.matrixToGateObject(
                complexMatrix, receivedDictionary["gateName"])
        return jsonify({"isUnitary": isUnitary})