Example #1
    def test__startup_services(self):

        class FakeBrokerClient(object):
            def __init__(self):
                self.messages = {}
                
            def invoke_async(self, msg):
                self.messages[msg['service']] = msg
                
        broker_client = FakeBrokerClient()
        
        startup_services = Bunch()
        for x in range(10):
            name = rand_string()
            payload = rand_string()
            startup_services[name] = payload
        
        ps = ParallelServer()
        ps.broker_client = broker_client
        ps.fs_server_config = Bunch()
        ps.fs_server_config.startup_services = startup_services
        
        ps.invoke_startup_services()
        
        for expected_service, expected_payload in startup_services.items():
            msg = Bunch(broker_client.messages[expected_service])
            eq_(msg.action, SERVICE.PUBLISH)
            eq_(msg.channel, CHANNEL.STARTUP_SERVICE)
            eq_(msg.payload, expected_payload)
            eq_(msg.service, expected_service)
            ok_(msg.cid.startswith('K'))
            self.assertEquals(len(msg.cid), 40)
Example #2
def gen_updates_rmsprop_by_graves(all_parameters, all_grads,
                                  learning_rate,
                                  rho=0.95, momentum=0.9,
                                  epsilon=0.0001,
                                  get_diffs=False,
                                  what_stats=None
                                  ):
    updates = []
    infos = []
    for param_i, grad_i in zip(all_parameters, all_grads):
        delta_i = theano.shared(get_p(param_i).get_value() * 0.)
        n_i = theano.shared(get_p(param_i).get_value() * 0.)
        g_i = theano.shared(get_p(param_i).get_value() * 0.)

        n_i_new = rho * n_i + (1 - rho) * grad_i ** 2
        g_i_new = rho * g_i + (1 - rho) * grad_i
        delta_i_new = momentum * delta_i - get_lr(param_i) * learning_rate * grad_i / T.sqrt(n_i_new - (g_i_new ** 2) + epsilon)

        updates.append((n_i, n_i_new))
        updates.append((g_i, g_i_new))
        updates.append((delta_i, delta_i_new))
        update = delta_i_new
        updates.append((get_p(param_i), get_p(param_i) + update))

        info = Bunch()
        info.grad = grad_i

        if get_diffs:
            info.diff = update

        infos.append(info)

    return final_result(updates, all_parameters, infos, what_stats)
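For reference, the update computed above is Graves' RMSProp variant; writing g for a parameter's gradient, rho for the decay rate, mu for the momentum and eta for the effective learning rate (learning_rate scaled by the per-parameter factor), each parameter theta keeps running statistics

\[ n \leftarrow \rho\, n + (1-\rho)\, g^2, \qquad \bar{g} \leftarrow \rho\, \bar{g} + (1-\rho)\, g, \]
\[ \Delta \leftarrow \mu\, \Delta - \eta\, \frac{g}{\sqrt{n - \bar{g}^2 + \epsilon}}, \qquad \theta \leftarrow \theta + \Delta. \]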
Example #3
def main():
    vct = CountVectorizer(encoding='ISO-8859-1', min_df=5, max_df=1.0, binary=True, ngram_range=(1, 1),
                          token_pattern='\\b\\w+\\b', tokenizer=StemTokenizer())
    vct_analizer = vct.build_analyzer()
    print("Start loading ...")
    # data fields: data, bow, file_names, target_names, target

    ########## NEWS GROUPS ###############
    # easy to hard. see "Less is More" paper: http://axon.cs.byu.edu/~martinez/classes/678/Presentations/Clawson.pdf
    categories = [['alt.atheism', 'talk.religion.misc'],
                  ['comp.graphics', 'comp.windows.x'],
                  ['comp.os.ms-windows.misc', 'comp.sys.ibm.pc.hardware'],
                  ['rec.sport.baseball', 'sci.crypt']]

    if "imdb" in args.train:
        ########## IMDB MOVIE REVIEWS ###########
        data = Bunch(load_imdb(args.train, shuffle=True, rnd=2356, vct=vct))  # should bring data as is
    elif "aviation" in args.train:
        raise Exception("We are not ready for that data yet")
    elif "20news" in args.train:
        ########## 20 news groups ######
        data = Bunch(load_20newsgroups(categories=categories[0], vectorizer=vct, min_size=50))  # for testing purposes
    elif "dummy" in args.train:
        ########## DUMMY DATA###########
        data = Bunch(load_dummy("C:/Users/mramire8/Documents/code/python/data/dummy", shuffle=True, rnd=2356, vct=vct))
    else:
        raise Exception("We do not know that dataset")

    print("Data %s" % args.train)
    total = len(data.train.data)
    print("Data size %s" % total)
    #print(data.train.data[0])

    ## prepare pool for the sampling
    pool = Bunch()
    pool.data = data.train.bow.tocsr()   # full words, for training
    pool.target = data.train.target
    pool.predicted = []
    pool.remaining = set(range(pool.data.shape[0]))  # indices of the pool

    bt = randomsampling.BootstrapFromEach(87654321)
    for i in range(7):
        query_index = bt.bootstrap(pool=pool, k=args.packsize)  # get instances from each class
        filename = "{0}-P{1}.txt".format(args.train,i)
        f = codecs.open(filename, 'a+', 'utf-8')
        #print documents in file
        random.shuffle(query_index)
        for di in query_index:
            x = unicode(data.train.data[di].replace("\n","<br>"))
            #y = data.train.target[di]
            y = data.train.target_names[data.train.target[di]]
            #f.write(str(i))
            #f.write("\t")
            #f.write(str(y))
            #f.write("\t")
            #f.write(x)
            #f.write("\n")

        f.close()
        pool.remaining.difference_update(query_index) # remove the used ones
Example #4
 def test_sio_list_data_type_input_json(self):
     cid = rand_string()
     data_format = DATA_FORMAT.JSON
     transport = rand_string()
     
     sio_config = {'int_parameters': [rand_string()]} # Not really used but needed
     
     service_sio = Bunch()
     service_sio.input_required = ('first_name', 'last_name', List('emails'))
     
     expected_first_name = faker.first_name()
     expected_last_name = faker.last_name()
     expected_emails = sorted([faker.email(), faker.email()])
     
     r = Request(getLogger(__name__), sio_config)
     r.payload = {
         'first_name': expected_first_name,
         'last_name': expected_last_name,
         'emails': expected_emails,
         }
     
     r.init(True, cid, service_sio, data_format, transport, {})
     
     eq_(r.input.first_name, expected_first_name)
     eq_(r.input.last_name, expected_last_name)
     eq_(r.input.emails, expected_emails)
Example #5
 def test_sio_list_data_type_input_xml(self):
     cid = rand_string()
     data_format = DATA_FORMAT.XML
     transport = rand_string()
     
     sio_config = {'int_parameters': [rand_string()]} # Not really used but needed
     
     service_sio = Bunch()
     service_sio.input_required = ('first_name', 'last_name', List('emails'))
     
     expected_first_name = faker.first_name()
     expected_last_name = faker.last_name()
     expected_emails = sorted([faker.email(), faker.email()])
     
     r = Request(getLogger(__name__), sio_config)
     r.payload = etree.fromstring("""<request>
       <first_name>{}</first_name>
       <last_name>{}</last_name>
       <emails>
        <item>{}</item>
        <item>{}</item>
       </emails>
     </request>""".format(
         expected_first_name, expected_last_name, expected_emails[0], expected_emails[1]))
     
     r.init(True, cid, service_sio, data_format, transport, {})
     
     eq_(r.input.first_name, expected_first_name)
     eq_(r.input.last_name, expected_last_name)
     eq_(r.input.emails, expected_emails)
Example #6
    def process(self, sentence, strs, inspect=False):
        if not len(strs):
            raise Exception("\"strs\" should at least have one item.")

        env = Bunch()
        env.sentence         = sentence
        env.inspect          = inspect

        # 1. generate SplitBlock list
        self.generate_SplitBlock_list(env)

        # 2. fix params strs
        env.params_strs = ParamsStrs(strs)
        env.params_strs.load_env(env.split_block_group)

        #env.base_chars_len = chars_len(env.params_strs.original_strs + [env.sentence])

        self.fix_params_strs(env)

        # 3. generate possible patterns, for multiple fill-able BLANKS.
        self.generate_possible_patterns_with_strs_list(env)

        # 4. generate possible patterns
        self.generate_candidate_patterns_vs_word_groups_s(env)

        # 5. generate regular sentence
        results = self.generate_regular_sentence(env)

        results = SentenceProcess.select_most_fit_sentence(results, read_attr_lambda=lambda item: item[0])  # , base_chars_len=env.base_chars_len)
        if env.inspect:
            print "[results] len", len(results)
        return results
Example #7
    def run(self):
        """ Handle action (other then builds) - like rename or delete of project """
        result = Bunch()
        result.id = self.data["id"]

        action_type = self.data["action_type"]

        if action_type == ActionType.DELETE:
            if self.data["object_type"] == "copr":
                self.handle_delete_copr_project()
            elif self.data["object_type"] == "build":
                self.handle_delete_build()

            result.result = ActionResult.SUCCESS

        elif action_type == ActionType.LEGAL_FLAG:
            self.handle_legal_flag()

        elif action_type == ActionType.RENAME:
            self.handle_rename(result)

        elif action_type == ActionType.CREATEREPO:
            self.handle_createrepo(result)

        if "result" in result:
            if result.result == ActionResult.SUCCESS and \
                    not getattr(result, "job_ended_on", None):
                result.job_ended_on = time.time()

            self.frontend_callback.update({"actions": [result]})
Example #8
    def capture(self):
        """
        Capture output of the whole build process.
        Return stacktrace of Python driver and output of make command, if any.
        """

        # The result object containing build success flag and error outputs.
        # This gets bound to the context manager variable.
        result = Bunch()
        self.build_result['capture'] = result

        # A temporary file to redirect make output to
        self.stream = NamedTemporaryFile()
        try:
            # Pass execution flow to context manager body,
            # effectively running the main build process
            yield result

            # Signal success if build engine completed
            result.success = True

        except Exception as ex:

            # Signal failure
            result.success = False

            # Capture Python traceback
            result.error = last_error_and_traceback()

        # Capture make output
        self.stream.seek(0)
        result.output = self.stream.read()
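A minimal usage sketch for capture above, assuming the method is exposed as a context manager (e.g. via contextlib.contextmanager); the builder object and the run_build call are illustrative names only:

# Hypothetical caller; `builder` and `run_build` are placeholders, not part of the example above.
with builder.capture() as result:
    builder.run_build()
if not result.success:
    print(result.error)
print(result.output)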
Example #9
    def testExcludeWordsInQuery(self, requestsMock):
        web.app.template_folder = "../templates"

        expectedItems = self.prepareSearchMocks(requestsMock, 1, 1)
        with web.app.test_request_context('/'):
            response = self.app.get("/api?t=search&q=query+!excluded")
            entries, _, _ = newznab.NewzNab(Bunch.fromDict({"name": "forTest", "score": 0, "host": "host"})).parseXml(response.data)
            self.assertSearchResults(entries, expectedItems)
            calledUrls = sorted([x.url for x in requestsMock.request_history])
            self.assertTrue(compare('http://www.newznab1.com/api?apikey=apikeyindexer.com&t=search&extended=1&offset=0&limit=100&q=query+!excluded', calledUrls[0]))

            response = self.app.get("/api?t=search&q=query+--excluded")
            entries, _, _ = newznab.NewzNab(Bunch.fromDict({"name": "forTest", "score": 0, "host": "host"})).parseXml(response.data)
            self.assertSearchResults(entries, expectedItems)
            calledUrls = sorted([x.url for x in requestsMock.request_history])
            self.assertTrue(compare('http://www.newznab1.com/api?apikey=apikeyindexer.com&t=search&extended=1&offset=0&limit=100&q=query+!excluded', calledUrls[0]))

            self.app.get("/internalapi/search?query=query+!excluded&category=all")
            calledUrls = sorted([x.url for x in requestsMock.request_history])
            self.assertTrue(compare('http://www.newznab1.com/api?apikey=apikeyindexer.com&t=search&extended=1&offset=0&limit=100&q=query+!excluded', calledUrls[0]))

            self.app.get("/internalapi/search?query=query+--excluded&category=all")
            calledUrls = sorted([x.url for x in requestsMock.request_history])
            self.assertTrue(compare('http://www.newznab1.com/api?apikey=apikeyindexer.com&t=search&extended=1&offset=0&limit=100&q=query+!excluded', calledUrls[0]))

            
Example #10
    def _create(self, name, config, **extra):
        """ Actually adds a new definition, must be called with self.lock held.
        """
        config_no_sensitive = deepcopy(config)
        if 'password' in config:
            config_no_sensitive['password'] = SECRET_SHADOW
        item = Bunch(config=config, config_no_sensitive=config_no_sensitive, is_created=False, impl=None)

        # It's optional
        conn = extra.get('def_', {'conn':None})['conn']

        try:
            logger.debug('Creating `%s`', config_no_sensitive)
            impl = self.create_impl(config, config_no_sensitive, **extra)

            def execute(session, statement):
                def execute_impl(**kwargs):
                    if not session:
                        raise Exception('Cannot execute the query without a session')
                    return session.execute(statement, kwargs)
                return execute_impl

            item.execute = execute(conn, impl)

            logger.debug('Created `%s`', config_no_sensitive)
        except Exception, e:
            logger.warn('Could not create `%s`, config:`%s`, e:`%s`', name, config_no_sensitive, format_exc(e))
Example #11
def load_atm(time,info,starttime,endtime):
    """Load atmospheric variable from a netcdf file"""
    
    outputdata=Bunch()

    for s,v in zip(icar_atm_var,atmvarlist):
        atmfile=find_atm_file(time,v,info)
        print(atmfile)
        sys.stdout.flush()
        nc_data=read_nc(atmfile,v,returnNCvar=True)
        outputdata[s]=nc_data.data[starttime:endtime,:,info.ymin:info.ymax,info.xmin:info.xmax]
        nc_data.ncfile.close()

    atmfile=find_atm_file(time,"PS",info)
    nc_data=read_nc(atmfile,"PS",returnNCvar=True)
    outputdata.ps=nc_data.data[starttime:endtime,info.ymin:info.ymax,info.xmin:info.xmax]
    nc_data.ncfile.close()
    del nc_data
    print(gc.collect())
    sys.stdout.flush()
    
    a=read_nc(atmfile,"hyam").data
    b=read_nc(atmfile,"hybm").data
    p0=read_nc(atmfile,"P0").data
    #p_(i,j,k)= A_k * P_0 + B_k P_s(i,j)  from http://www.cesm.ucar.edu/models/atm-cam/docs/usersguide/node25.html
    outputdata.p = a[np.newaxis,:,np.newaxis,np.newaxis]*p0+b[np.newaxis,:,np.newaxis,np.newaxis]*outputdata.ps[:,np.newaxis,:,:]
    
    outputdata.ntimes=outputdata.p.shape[0]
    
    return outputdata
Example #12
File: convert.py Project: NCAR/icar
def convert_atm(data):
    output_data=Bunch()
    output_data.u   = data.u[np.newaxis, ::-1,::-1,:]           # m/s
    output_data.v   = data.v[np.newaxis, ::-1,::-1,:]           # m/s
    output_data.hgt = data.gph[::-1,:]/g                        # (m^2/s^2) / (m/s^2) = m

    # calculate pressure in Pa from ln(sfc_press) and hybrid sigma coordinates
    output_data.p = np.zeros((output_data.u.shape))
    ps = np.exp(data.ln_p_sfc[::-1,:])                          # Pa
    for i in range(len(data.sigma_a)):
        # see http://rda.ucar.edu/datasets/ds627.0/docs/Eta_coordinate/
        # notes on http://aaron.boone.free.fr/aspdoc/node7.html might help...
        output_data.p[0,len(data.sigma_a)-i-1,:,:]=(data.sigma_a[i]*data.P0+data.sigma_b[i]*ps)

    psl=ps/((1 - 2.25577E-5*output_data.hgt)**5.25588)
    output_data.z=np.zeros(output_data.p.shape)
    for i in range(output_data.p.shape[1]):
        output_data.z[0,i,...]=((output_data.p[0,i,...]/psl)**(1/5.25588)-1) / (-2.25577E-5)

    pii=(100000.0/output_data.p)**(R/cp)
    output_data.t=data.t[np.newaxis,::-1,::-1,:]*pii                    # K (converted to potential temperature)
    
    output_data.qv    = data.qv[np.newaxis,::-1,::-1,:]                 # kg/kg
    output_data.cloud = data.cloud[np.newaxis,::-1,::-1,:]              # kg/kg
    output_data.ice   = data.ice[np.newaxis,::-1,::-1,:]                # kg/kg
    
    return output_data
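For reference, the pii factor above is the inverse Exner function, so output_data.t holds potential temperature:

\[ \theta = T \left(\frac{p_0}{p}\right)^{R/c_p}, \qquad p_0 = 10^5\ \text{Pa}. \]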
Example #13
    def __init__(self, conn, id = None, article_etree = None):
        """
        Article

        :param conn: Connection-Object
        """

        Bunch.__init__(self)

        self.conn = conn
        self.content_language = None

        self.id = id  # integer
        self.created = None  # datetime
        self.article_number = None
        self.number = None  # integer
        self.number_pre = None
        self.title = None
        self.description = None
        self.sales_price = None  # float
        self.sales_price2 = None  # float
        self.sales_price3 = None  # float
        self.sales_price4 = None  # float
        self.sales_price5 = None  # float
        self.currency_code = None
        self.unit_id = None  # integer
        self.tax_id = None  # integer
        self.purchase_price = None  # float
        self.purchase_price_net_gross = None
        self.supplier_id = None  # integer

        if article_etree is not None:
            self.load_from_etree(article_etree)
        elif id is not None:
            self.load()
Example #14
 def test_delete_after_pick_up(self):
     
     support_values = (True, False)
     
     for delete_after_pick_up in support_values:
         def ignored(*ignored, **ignored_kwargs):
             pass
         
         server = Bunch()
         server.parallel_server = Bunch()
         server.parallel_server.id = rand_int()
         server.parallel_server.hot_deploy_config = Bunch()
         server.parallel_server.hot_deploy_config.delete_after_pick_up = delete_after_pick_up
         server.parallel_server.odb = Bunch()
         
         server.parallel_server.notify_new_package = ignored
         server.parallel_server.odb.hot_deploy = ignored
         
         file_name = '{}.py'.format(uuid4().hex)
         
         processor = BasePickupEventProcessor(uuid4().hex, server)
         
         _os_remove = Mock()
         util._os_remove = _os_remove
         
         with NamedTemporaryFile(prefix='zato-test-', suffix=file_name) as tf:
             tf.flush()
             ret = processor.hot_deploy(tf.name, os.path.basename(tf.name))
             self.assertEquals(ret, True)
             
             if delete_after_pick_up:
                 _os_remove.assert_called_with(tf.name)
             else:
                 self.assertFalse(_os_remove.called)
Example #15
    def __init__(self, conn, id = None, reminder_etree = None):
        """
        Reminder

        :param conn: Connection-Object
        """

        Bunch.__init__(self)

        self.conn = conn
        self.content_language = None

        self.id = id  # integer
        self.created = None  # datetime
        self.status = None
        self.invoice_id = None  # integer
        self.contact_id = None  # integer
        self.reminder_text_id = None  # integer
        self.reminder_level = None  # integer
        self.reminder_level_name = None
        self.date = None  # date
        self.label = None
        self.subject = None
        self.intro = None
        self.note = None
        self.due_date = None  # date
        self.total_gross = None  # float
        self.is_old = None  # If this flag is set to 1, a more recent reminder exists.

        if reminder_etree is not None:
            self.load_from_etree(reminder_etree)
        elif id is not None:
            self.load()
Example #16
def load_atm(time,info):
    """Load atmospheric variable from a netcdf file"""
    
    outputdata=Bunch()

    for s,v in zip(icar_atm_var,atmvarlist):
        atmfile=find_atm_file(time,v,info)
        print(atmfile)
        sys.stdout.flush()
        nc_data=read_nc(atmfile,v)#,returnNCvar=True)
        outputdata[s]=nc_data.data[:,:,info.ymin:info.ymax,info.xmin:info.xmax]
        # nc_data.ncfile.close()

    nc_data=read_nc(atmfile,"ps")#,returnNCvar=True)
    outputdata.ps=nc_data.data[:,info.ymin:info.ymax,info.xmin:info.xmax]
    # nc_data.ncfile.close()
    del nc_data
    print(gc.collect())
    sys.stdout.flush()
    
    a=read_nc(atmfile,"a").data
    b=read_nc(atmfile,"b").data
    p0=read_nc(atmfile,"p0").data
    outputdata.p = a[np.newaxis,:,np.newaxis,np.newaxis]*p0+b[np.newaxis,:,np.newaxis,np.newaxis]*outputdata.ps[:,np.newaxis,:,:]
    
    outputdata.ntimes=outputdata.p.shape[0]
    
    return outputdata
Example #17
    def forward(self, bucket):
        """
        Receive data bucket from source, run through
        transformation machinery and emit to target.
        """

        # 1. Map/transform topology address information
        if 'transform' in self.channel:
            for entrypoint in read_list(self.channel.transform):
                try:
                    transformer = KotoriBootloader.load_entrypoint(entrypoint)
                    bucket.tdata.update(transformer(bucket.tdata))
                except ImportError as ex:
                    log.error('ImportError "{message}" when loading entrypoint "{entrypoint}"',
                        entrypoint=entrypoint, message=ex)

        # MQTT topic names should not begin with a leading forward slash, so strip it
        target_uri_tpl = self.target_uri.path.lstrip('/')

        # Compute target bus topic from url matches
        target_uri = target_uri_tpl.format(**bucket.tdata)

        # Enrich bucket by putting source and target addresses into it
        bucket.address = Bunch(source=self.source_address, target=self.target_address)

        # 2. Reporting
        bucket_logging = Bunch(bucket)
        if 'body' in bucket_logging and len(bucket_logging.body) > 100:
            bucket_logging.body = bucket_logging.body[:100] + ' [...]'
        log.debug('Forwarding bucket to {target} with bucket={bucket}. Effective target uri is {target_uri}',
            target=self.channel.target, target_uri=target_uri, bucket=dict(bucket_logging))

        # 3. Adapt, serialize and emit appropriately
        return self.target_service.emit(target_uri, bucket)
Example #18
    def __init__(self, conn, id = None, contact_etree = None):
        """
        Contact

        :param conn: Connection-Object
        """

        Bunch.__init__(self)

        self.conn = conn

        self.id = id  # integer
        self.created = None  # datetime
        self.client_id = None  # integer
        self.label = None
        self.name = None
        self.street = None
        self.zip = None
        self.city = None
        self.state = None
        self.country_code = None
        self.first_name = None
        self.last_name = None
        self.salutation = None
        self.phone = None
        self.fax = None
        self.mobile = None
        self.email = None
        self.www = None

        if contact_etree is not None:
            self.load_from_etree(contact_etree)
        elif id is not None:
            self.load()
Example #19
    def __init__(self, conn, id = None, recurring_item_etree = None):
        """
        Recurring-Item

        :param conn: Connection-Object
        """

        Bunch.__init__(self)

        self.conn = conn

        self.id = id  # integer
        self.created = None  # datetime
        self.article_id = None
        self.recurring_id = None  # integer
        self.position = None  # integer
        self.unit = None
        self.quantity = None  # float
        self.unit_price = None  # float
        self.tax_name = None
        self.tax_rate = None  # float
        self.title = None
        self.description = None
        self.total_gross = None  # float
        self.total_net = None  # float
        self.reduction = None
        self.total_gross_unreduced = None  # float
        self.total_net_unreduced = None  # float

        if recurring_item_etree is not None:
            self.load_from_etree(recurring_item_etree)
        elif id is not None:
            self.load()
Example #20
 def __init__(self, *args, **kwargs):
     warnings.warn(
         "DictContainer is deprecated.  Use the Bunch class" " from python-bunch instead.",
         DeprecationWarning,
         stacklevel=2,
     )
     Bunch.__init__(self, *args, **kwargs)
Example #21
 def test_uniq(self):
     one = Bunch.uniq(TEST_PATH)
     two = Bunch.uniq(TEST_PATH, "test")
     self.assertTrue(one.path != two.path)
     self.assertTrue(one.path != TEST_PATH)
     self.assertTrue(one.kind == GHOST)
     self.assertTrue(two.kind == "test")
Example #22
def remote(cmd, creds, curr_dir=None):
    """
    runs a command on a remote machine and returns output
    """
    if isinstance(creds, dict):
        creds = Bunch(creds)

    client = paramiko.SSHClient()
    args = {'hostname':creds.host, 'username':creds.user}
    if 'key_filename' in creds:
        creds.key = os.path.expanduser(creds.key_filename[0])
    if 'key' in creds:
        key_path = os.path.expanduser(creds.key)
        key = paramiko.RSAKey.from_private_key_file(key_path)
        args['pkey'] = key
    if 'password' in creds:
        args['password'] = creds.password

    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    args['timeout'] = SSH_TIMEOUT
    client.connect(**args)
    if curr_dir is not None:
        make_dirs(curr_dir, creds)
        cmd = 'cd "%s" && %s' % (curr_dir, cmd)
    stdout, stderr = client.exec_command(cmd)[1:3]
    output = stdout.read()
    err = stderr.read()
    client.close()
    if len(err) > 0:
        raise Exception(err)
    return output
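A minimal usage sketch for remote above; the host, user and key path are placeholder values, and any of 'key_filename', 'key' or 'password' may be supplied in creds:

# Placeholder credentials for illustration only.
creds = {'host': 'build01.example.com', 'user': 'deploy', 'key_filename': ['~/.ssh/id_rsa']}
print(remote('uname -a', creds, curr_dir='/tmp'))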
Example #23
    def test_create_channel(self):

        channel_item = uuid4().hex
        sec_info = uuid4().hex
        soap_action = uuid4().hex
        url_path = uuid4().hex
        match_target = '{}{}{}'.format(soap_action, MISC.SEPARATOR, url_path)

        def _dummy_channel_item_from_msg(*ignored):
            return channel_item

        def _dummy_sec_info_from_msg(*ignored):
            return sec_info

        msg = Bunch()
        msg.soap_action = soap_action
        msg.url_path = url_path

        ud = url_data.URLData()
        ud._channel_item_from_msg = _dummy_channel_item_from_msg
        ud._sec_info_from_msg = _dummy_sec_info_from_msg
        ud.channel_data = []
        ud.url_sec = {}

        ud._create_channel(msg, {})

        self.assertIn(match_target, ud.url_sec)
        eq_(ud.url_sec[match_target], sec_info)

        eq_(len(ud.channel_data), 1)
        eq_(ud.channel_data[0], channel_item)
Example #24
 def __next__(self):
     
     self.curpos+=1
     output_data=Bunch()
     
     filename=self.files[self._curfile]
     for v in self._var_names:
         if type(v)==str:
             curdata=self.load_data(v)
             curvarname=v
         
         elif type(v)==list:
             cur_operator=v[0]
             for varname in v[1:]: 
                 if type(varname)==str:
                     curvarname=v[1]
                     break
             
             curdata=self.load_data(v[1])
             for curv in v[2:]:
                 next_data=self.load_data(curv)
                 cur_operator(curdata,next_data)
         
         output_data[self._short_names[curvarname]]=curdata
     
     output_data.date=self.get_current_date()
     return output_data
Example #25
def load_erai_means(wind_option):
    """docstring for load_erai_means"""
    eraid="erai/"
    varlist=["p","rh","ta","ua","va","z"]
    if (wind_option=="nowind"):
        varlist=["p","rh","ta","z"]
        
    outputdata=[]
    month_mid_point_doy=(start_day_per_month[1:]+np.array(start_day_per_month[:-1]))*0.5
    
    for month in range(1,13):
        curoutput=Bunch(doy=month_mid_point_doy[month-1])
        for v in varlist:
            if v=="ta":
                ta=mygis.read_nc(eraid+"regridded_ERAi_to_cesm_month{0:02}.nc".format(month),v).data
            else:
                curoutput[v]=mygis.read_nc(eraid+"regridded_ERAi_to_cesm_month{0:02}.nc".format(month),v).data
        curoutput["theta"] = ta / units.exner(curoutput.p)
        
        # erai is probably "upside down" so reverse the vertical dimension of all variables
        if curoutput.p[0,0,0]<curoutput.p[-1,0,0]:
            for v in curoutput.keys():
                if v!="doy":
                    curoutput[v]=curoutput[v][::-1,:,:]
        outputdata.append(curoutput)
    return outputdata
Example #26
def generate_majority_vote(n = 200, m = 9):
    vote = Bunch()
    vote.DESCR = "Toy example to train a network how to do a majority vote. "\
                 "Target is 1 iff there are more 1s than 0s in the input."
    vote.data = np.random.randint(0, 2, (n, m))
    vote.target = (np.sum(vote.data, 1) > m // 2).reshape(-1,1)
    return vote
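A quick sanity check of the shapes produced by the generator above:

vote = generate_majority_vote(n=1000, m=9)
assert vote.data.shape == (1000, 9)     # n samples, m binary features
assert vote.target.shape == (1000, 1)   # 1 iff a sample contains more 1s than 0s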
Example #27
def load_xor():
    xor = Bunch()
    xor.DESCR = "The XOR function from logic. A Toy Example for Neural Networks. "\
                "Needs at least two hidden units."
    xor.data = np.array([[0,0],[0,1],[1,0],[1,1]])
    xor.target = np.array([[0, 1, 1, 0]]).T
    return xor
Example #28
    def test_on_broker_msg_CHANNEL_HTTP_SOAP_CREATE_EDIT(self):

        no_old_name_msg = uuid4().hex
        dummy_lock = DummyLock()
        dummy_delete_channel = Dummy_delete_channel(no_old_name_msg)
        dummy_create_channel = Dummy_create_channel()

        ud = url_data.URLData()
        ud.url_sec_lock = dummy_lock
        ud._delete_channel = dummy_delete_channel
        ud._create_channel = dummy_create_channel

        old_name = uuid4().hex
        key = uuid4().hex
        value = uuid4().hex

        for _old_name in(None, old_name):
            msg = Bunch()
            msg.old_name = old_name
            msg[key] = value

            ud.on_broker_msg_CHANNEL_HTTP_SOAP_CREATE_EDIT(msg)

            if msg.old_name:
                eq_(dummy_delete_channel.msg.old_name, msg.old_name)
                eq_(dummy_delete_channel.msg[key], msg[key])
            else:
                eq_(dummy_delete_channel.msg, no_old_name_msg)

            eq_(sorted(dummy_create_channel.msg.items()), sorted(msg.items()))
            eq_(dummy_lock.enter_called, True)
Example #29
    def send_matches(self, request, obj, client, lawyers, form):
        """
        Send the matches email.
        """
        logger.info('Sending project matches email')

        recipient = '"{name}" <{email}>'.format(name=client.get_full_name(), email=client.email)

        intro_content = '\r\n'.join(['<h2 style="color: #6c797c !important; font-family: \'Helvetica Neue\', Helvetica, Arial, sans-serif; font-size: 16px; font-weight: normal; line-height: 1.5em; margin: 0 0 25px; padding: 0;">{para}</h2>'.format(para=para) for para in form.cleaned_data.get('intro', '').split("\r\n") if para.strip() != ''])

        email = Bunch(
            template_name='project_matches',
            from_email=SITE_EMAIL,
            recipient_list=[recipient],
            bcc=['*****@*****.**'],
            context={
                'lawyers': lawyers,
                'to_name': client.first_name,
                'intro_content': intro_content,
                'url': reverse('dashboard:overview'),
            }
        )

        if settings.DEBUG:
            email.pop('bcc')

        send_templated_mail(**email)
Example #30
    def __init__(self, conn, id = None, reminder_item_etree = None):
        """
        Reminder-Item

        :param conn: Connection-Object
        """

        Bunch.__init__(self)

        self.conn = conn

        self.id = id  # integer
        self.created = None  # datetime
        self.article_id = None
        self.reminder_id = None  # integer
        self.position = None  # integer
        self.unit = None
        self.quantity = None  # float
        self.unit_price = None  # float
        self.title = None
        self.description = None
        self.total = None  # float

        if reminder_item_etree is not None:
            self.load_from_etree(reminder_item_etree)
        elif id is not None:
            self.load()
Example #31
 def get_response_data(self):
     return Bunch({'id': rand_int()})
Example #32
 def __init__(self, *args, **kwargs):
     super(CreateEdit, self).__init__()
     self.input = Bunch()
     self.input_dict = {}
Example #33
class CreateEdit(_BaseView):
    """ Subclasses of this class will handle the creation/updates of Zato objects.
    """
    def __init__(self, *args, **kwargs):
        super(CreateEdit, self).__init__()
        self.input = Bunch()
        self.input_dict = {}

    def __call__(self,
                 req,
                 initial_input_dict={},
                 initial_return_data={},
                 *args,
                 **kwargs):
        """ Handles the request, taking care of common things and delegating
        control to the subclass for fetching this view-specific data.
        """
        self.input_dict.clear()
        self.clear_user_message()

        try:
            super(CreateEdit, self).__call__(req, *args, **kwargs)
            self.set_input()
            self.populate_initial_input_dict(initial_input_dict)

            input_dict = {'cluster_id': self.cluster_id}
            post_id = self.req.POST.get('id')

            if post_id:
                input_dict['id'] = post_id

            input_dict.update(initial_input_dict)

            for name in chain(self.SimpleIO.input_required,
                              self.SimpleIO.input_optional):
                if name not in input_dict and name not in self.input_dict:
                    value = self.input.get(name)
                    value = self.pre_process_item(name, value)
                    if value != SKIP_VALUE:
                        input_dict[name] = value

            self.input_dict.update(input_dict)

            logger.info('Request self.input_dict %s', self.input_dict)
            logger.info('Request self.SimpleIO.input_required %s',
                        self.SimpleIO.input_required)
            logger.info('Request self.SimpleIO.input_optional %s',
                        self.SimpleIO.input_optional)
            logger.info('Request self.input %s', self.input)
            logger.info('Request self.req.GET %s', self.req.GET)
            logger.info('Request self.req.POST %s', self.req.POST)

            logger.info('Sending `%s` to `%s`', self.input_dict,
                        self.service_name)

            response = self.req.zato.client.invoke(self.service_name,
                                                   self.input_dict)

            if response.ok:
                return_data = {'message': self.success_message(response.data)}

                return_data.update(initial_return_data)

                for name in chain(self.SimpleIO.output_optional,
                                  self.SimpleIO.output_required):
                    if name not in initial_return_data:
                        value = getattr(response.data, name, None)
                        if value:
                            if isinstance(value, basestring):
                                value = value.encode('utf-8')
                            else:
                                value = str(value)
                        return_data[name] = value

                self.post_process_return_data(return_data)

                logger.info('CreateEdit data for frontend `%s`', return_data)

                return HttpResponse(dumps(return_data),
                                    content_type='application/javascript')
            else:
                msg = 'response:`{}`, details.response.details:`{}`'.format(
                    response, response.details)
                logger.error(msg)
                raise ZatoException(msg=msg)

        except Exception:
            return HttpResponseServerError(format_exc())

    def pre_process_item(self, name, value):
        return value

    def success_message(self, item):
        raise NotImplementedError('Must be implemented by a subclass')

    def post_process_return_data(self, return_data):
        return return_data

    @property
    def verb(self):
        if self.form_prefix:
            return 'updated'
        return 'created'
Example #34
 def __init__(self):
     super(Index, self).__init__()
     self.input = Bunch()
     self.items = []
     self.item = None
     self.clear_user_message()
Example #35
    def create_configuration(self, **kwargs):
        r"""Create a configuration object (set of parameter values) based on a scenario.

    :param \**kwargs: See below.

    :Keyword arguments:
      * *application*          -- Application of the intervention. 'itn' for insecticide-treated nets (exposure on day of first feed) or 'irs' (exposure on day of first indoor rest)
      * *intervention*         -- Type of intervention (characterized by combination of effects). 'dbh' for DBH, 'ins' for insecticide, or 'none' for no intervention.
      * *coverage*             -- Proportion of mosquitoes exposed on their first feed (or indoor rest). Value between 0 and 1.
      * *dose*                 -- Dose of DBH or insecticide, which determines the efficacies of the different effects. 'low', 'medium', or 'high'
      * *num_days*             -- Run the simulation for this many days.
      * *intervention_day*     -- Introduce the intervention on this day. Must be less than num_days.
      * *transmission*         -- Transmission setting, determined by the biting rate. 'low', 'medium', or 'high'
      * *larva_mortality*      -- Shape of the relationship between larva mortality and larva population size. 'linear' or 'logistic'
      * *human_infectiousness* -- Shape of the relationship between human infectiousness to mosquitoes and human prevalence. 'linear' or 'nonlinear'
    """
        kwargs = Bunch(kwargs)
        for k in ('intervention', 'coverage', 'dose', 'num_days',
                  'intervention_day', 'transmission', 'application',
                  'larva_mortality', 'human_infectiousness'):
            if k not in kwargs:
                raise ValueError('Need to specify "{}".'.format(k))

        # use parameters object as a scaffold for our configuration object
        config = copy.deepcopy(self._parameters)
        del config.efficacy  # we will be putting efficacies under "intervention"

        # add some parameters
        if kwargs.larva_mortality not in {'linear', 'logistic'}:
            raise ValueError(
                "Arguments for 'larva_mortality' are 'linear' or 'logistic'.")
        if kwargs.human_infectiousness not in {'linear', 'nonlinear'}:
            raise ValueError(
                "Arguments for 'human_infectiousness' are 'linear' or 'nonlinear'."
            )
        config.simulation = Bunch(
            num_days=kwargs.num_days,
            intervention_day=kwargs.intervention_day,
            larva_mortality=kwargs.larva_mortality,
            human_infectiousness=kwargs.human_infectiousness)
        config.intervention = Bunch(coverage=kwargs.coverage,
                                    application=kwargs.application,
                                    efficacy=Bunch(mating=0,
                                                   egg=0,
                                                   parasite=0,
                                                   persistent_mortality=[],
                                                   immediate_mortality=0))

        # select the transmission level
        try:
            config.malaria.biting_scaling_factor = self._parameters.malaria.biting_scaling_factor[
                kwargs.transmission]
        except KeyError:
            raise ValueError(
                "Arguments for 'transmission' are 'low', 'medium', or 'high'.")

        # select index corresponding to the dose level
        try:
            index = {"low": 0, "medium": 1, "high": 2}[kwargs.dose]
        except KeyError:
            raise ValueError(
                "Arguments for 'dose' are 'low', 'medium', or 'high'.")

        # then use dose index and intervention type to set combination of efficacies
        itv = kwargs.intervention
        if itv == 'dbh':
            config.intervention.efficacy.mating = self._parameters.efficacy.mating[
                index]
            config.intervention.efficacy.egg = self._parameters.efficacy.egg[
                index]
            config.intervention.efficacy.parasite = self._parameters.efficacy.parasite[
                index]
            config.intervention.efficacy.persistent_mortality = self._parameters.efficacy.persistent_mortality[
                index]
        elif itv == 'ins':
            config.intervention.efficacy.immediate_mortality = self._parameters.efficacy.immediate_mortality[
                index]
        elif itv == 'dbh_mating':
            config.intervention.efficacy.mating = self._parameters.efficacy.mating[
                index]
        elif itv == 'dbh_egg':
            config.intervention.efficacy.egg = self._parameters.efficacy.egg[
                index]
        elif itv == 'dbh_parasite':
            config.intervention.efficacy.parasite = self._parameters.efficacy.parasite[
                index]
        elif itv == 'dbh_mortality':
            config.intervention.efficacy.persistent_mortality = self._parameters.efficacy.persistent_mortality[
                index]
        elif itv == 'none':
            pass
        else:
            raise ValueError(
                "Arguments for 'intervention' are 'dbh', 'ins', 'dbh_mating', dbh_egg', 'dbh_parasite', dbh_mortality', or 'none'."
            )

        return config
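A minimal usage sketch for create_configuration above; the Scenario class name and the parameters file path are assumptions, and every keyword argument listed in the docstring is required:

# Hypothetical caller; class and file names are illustrative only.
scenario = Scenario('parameters.json')
config = scenario.create_configuration(
    application='itn', intervention='dbh', coverage=0.8, dose='medium',
    num_days=365, intervention_day=100, transmission='high',
    larva_mortality='logistic', human_infectiousness='nonlinear')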
Example #36
 def __init__(self, parameters_path):
     self._parameters_path = parameters_path
     with open(parameters_path) as par_file:
         self._parameters = json.load(par_file,
                                      object_hook=lambda d: Bunch(d))
Example #37
 def prepareIndexers(self, indexerCount):
     config.settings.indexers = []
     for i in range(1, indexerCount + 1):
         nn = Bunch()
         nn.enabled = True
         nn.name = "newznab%d" % i
         nn.type = "newznab"
         nn.host = "http://www.newznab%d.com" % i
         nn.apikey = "apikeyindexer.com"
         nn.hitLimit = None
         nn.timeout = None
         nn.score = 0
         nn.accessType = "both"
         nn.search_ids = ["imdbid", "tvdbid", "rid"]
         config.settings.indexers.append(nn)
Example #38
    def get_stats(self,
                  start,
                  stop,
                  service='*',
                  n=None,
                  n_type=None,
                  needs_trends=True,
                  stats_key_prefix=None,
                  suffixes=None):
        """ Returns statistics for a given interval, as defined by 'start' and 'stop'.
        'service' defaults to '*' (all services in that period) and may be set to return
        a one-element list of information regarding that particular service. Setting 'n'
        to a positive integer makes it return only the top n services.
        """
        if not stats_key_prefix:
            stats_key_prefix = self.stats_key_prefix

        stats_elems = {}
        all_services_stats = Bunch({'usage': 0, 'time': 0})

        # All mean values
        mean_all_services_list = []

        # A mean value of all the mean values (mean_all_services_list)
        mean_all_services = 0

        start = parse(start)
        stop = parse(stop)
        delta = (stop - start)

        if hasattr(delta, 'total_seconds'):
            delta_seconds = delta.total_seconds()
        else:
            delta_seconds = delta.seconds

        if not suffixes:
            suffixes = self.get_suffixes(start, stop)

        # We make several passes. First two passes are made over Redis keys, one gathers the services, if any at all,
        # and another one actually collects statistics for each service found. Next pass, a partly optional one,
        # computes trends for mean response time and service usage. Another one computes each of the service's
        # average rate and updates other attributes basing on values collected in the previous step.
        # Optionally, the last one will pick only top n elements of a given type (top mean response time
        # or top usage).

        # 1st pass
        for suffix in suffixes:
            keys = self.server.kvdb.conn.keys('{}{}:{}'.format(
                stats_key_prefix, service, suffix))
            for key in keys:
                service_name = key.replace(stats_key_prefix,
                                           '').replace(':{}'.format(suffix),
                                                       '')

                stats_elem = StatsElem(service_name)
                stats_elems[service_name] = stats_elem

                # When building statistics, we can't expect there will be data for all the time
                # elems built above so to guard against it, this is a dictionary whose keys are the
                # said elems and values are mean/usage for each elem. The values will remain
                # 0/0.0 if there is no data for the time elem, which may mean that in this
                # particular time slice the service wasn't invoked at all.
                stats_elem.expected_time_elems = OrderedDict(
                    (elem, Bunch({
                        'mean': 0,
                        'usage': 0.0
                    })) for elem in suffixes)

        # 2nd pass
        for service, stats_elem in stats_elems.items():
            for suffix in suffixes:
                key = '{}{}:{}'.format(stats_key_prefix, service, suffix)

                # We can convert all the values to floats here to ease with computing
                # all the stuff and convert them still to integers later on, when necessary.
                key_values = Bunch((
                    (name, float(value))
                    for (name,
                         value) in self.server.kvdb.conn.hgetall(key).items()))

                if key_values:

                    time = (key_values.usage * key_values.mean)
                    stats_elem.time += time

                    mean_all_services_list.append(key_values.mean)
                    all_services_stats.time += time
                    all_services_stats.usage += key_values.usage

                    stats_elem.min_resp_time = min(stats_elem.min_resp_time,
                                                   key_values.min)
                    stats_elem.max_resp_time = max(stats_elem.max_resp_time,
                                                   key_values.max)

                    for attr in ('mean', 'usage'):
                        stats_elem.expected_time_elems[suffix][
                            attr] = key_values[attr]

        mean_all_services = '{:.0f}'.format(
            sp_stats.tmean(
                mean_all_services_list)) if mean_all_services_list else 0

        # 3rd pass (partly optional)
        for stats_elem in stats_elems.values():

            stats_elem.mean_all_services = mean_all_services
            stats_elem.all_services_time = int(all_services_stats.time)
            stats_elem.all_services_usage = int(all_services_stats.usage)

            values = stats_elem.expected_time_elems.values()

            stats_elem.mean_trend_int = [int(elem.mean) for elem in values]
            stats_elem.usage_trend_int = [int(elem.usage) for elem in values]

            stats_elem.mean = float('{:.2f}'.format(
                sp_stats.tmean(stats_elem.mean_trend_int)))
            stats_elem.usage = sum(stats_elem.usage_trend_int)
            stats_elem.rate = float('{:.2f}'.format(
                sum(stats_elem.usage_trend_int) / delta_seconds))

            self.set_percent_of_all_services(all_services_stats, stats_elem)

            if needs_trends:
                stats_elem.mean_trend = ','.join(
                    str(elem) for elem in stats_elem.mean_trend_int)
                stats_elem.usage_trend = ','.join(
                    str(elem) for elem in stats_elem.usage_trend_int)

        # 4th pass (optional)
        if n:
            for stats_elem in self.yield_top_n(n, n_type, stats_elems):
                yield stats_elem

        else:
            for stats_elem in stats_elems.values():
                yield stats_elem
Example #39
 def add(self, name, item):
     with self.update_lock:
         self.data[name] = Bunch()
         self.data[name].config = item
         self.ns_map[name] = self.data[name].config.value
Example #40
 def get_response_data(self):
     return Bunch({'name': rand_string()})
Example #41
    myaperturenode = TeapotApertureNode(1, 10, 18, position)
    node.addChildNode(myaperturenode, node.EXIT)
    position += node.getLength()
    n += 1

#----------------------------------------------
# Add the main bunch and lost particles bunch
#----------------------------------------------
print '\nAdding main bunch ...'
Intensity = 170e+10
m0 = mass_proton  # protons ...
N_mp = 100000
Particle_distribution_file = 'Input/INITIAL-6D.dat'

macrosize = Intensity / N_mp
bunch = Bunch()
setBunchParamsPTC(bunch)
kin_Energy = bunch.getSyncParticle().kinEnergy()
print '  Momentum: ', bunch.getSyncParticle().momentum(), 'GeV'
print '  Ekin:     ', bunch.getSyncParticle().kinEnergy(), 'GeV'
print '  Gamma:    ', bunch.getSyncParticle().gamma()
print '  Beta:     ', bunch.getSyncParticle().beta()
print '  Charge:   ', bunch.charge(), 'e'
print '  Mass:     ', bunch.mass(), 'GeV'

bunch_orbit_to_pyorbit(paramsDict["length"], kin_Energy,
                       Particle_distribution_file, bunch,
                       N_mp + 1)  #read in only first N_mp particles.
print '  Number of particles in the bunch: ', bunch.getSizeGlobal()
ParticleIdNumber().addParticleIdNumbers(
    bunch)  # Give particles unique number ids
Example #42
    def _subscribe_impl(self, ctx):
        """ Invoked by subclasses to subscribe callers using input pub/sub config context.
        """
        with self.lock('zato.pubsub.subscribe.%s' % (ctx.topic_name)):

            # Emit events about an upcoming subscription
            self.pubsub.emit_about_to_subscribe({
                'stage': 'sub.sk.1',
                'sub_key': ctx.sub_key
            })

            self.pubsub.emit_about_to_subscribe({
                'stage': 'init.ctx',
                'data': ctx
            })

            self.pubsub.emit_about_to_subscribe({
                'stage': 'sub.sk.2',
                'sub_key': ctx.sub_key
            })

            # Endpoint on whose behalf the subscription will be made
            endpoint = self.pubsub.get_endpoint_by_id(ctx.endpoint_id)

            # Event log
            self.pubsub.emit_in_subscribe_impl({
                'stage': 'endpoint',
                'data': endpoint,
            })

            self.pubsub.emit_about_to_subscribe({
                'stage': 'sub.sk.3',
                'sub_key': ctx.sub_key
            })

            with closing(self.odb.session()) as session:

                with session.no_autoflush:

                    # Non-WebSocket clients cannot subscribe to the same topic multiple times
                    if not ctx.ws_channel_id:

                        # Event log
                        self.pubsub.emit_in_subscribe_impl({
                            'stage':
                            'no_ctx_ws_channel_id',
                            'data':
                            ctx.ws_channel_id
                        })

                        self.pubsub.emit_about_to_subscribe({
                            'stage':
                            'sub.sk.4',
                            'sub_key':
                            ctx.sub_key
                        })

                        if has_subscription(session, ctx.cluster_id,
                                            ctx.topic.id, ctx.endpoint_id):

                            # Event log
                            self.pubsub.emit_in_subscribe_impl({
                                'stage': 'has_subscription',
                                'data': {
                                    'ctx.cluster_id': ctx.cluster_id,
                                    'ctx.topic_id': ctx.topic.id,
                                    'ctx.endpoint_id': ctx.endpoint_id,
                                }
                            })

                            raise PubSubSubscriptionExists(
                                self.cid,
                                'Endpoint `{}` is already subscribed to topic `{}`'
                                .format(endpoint.name, ctx.topic.name))

                    # Is it a WebSockets client?
                    is_wsx = bool(ctx.ws_channel_id)

                    self.pubsub.emit_about_to_subscribe({
                        'stage': 'sub.sk.5',
                        'sub_key': ctx.sub_key
                    })

                    ctx.creation_time = now = utcnow_as_ms()
                    sub_key = new_sub_key(self.endpoint_type,
                                          ctx.ext_client_id)

                    self.pubsub.emit_in_subscribe_impl({
                        'stage': 'new_sk_generated',
                        'data': {
                            'sub_key': sub_key,
                        }
                    })

                    # Event log
                    self.pubsub.emit_in_subscribe_impl({
                        'stage': 'before_add_subscription',
                        'data': {
                            'is_wsx': is_wsx,
                            'ctx.creation_time': ctx.creation_time,
                            'sub_key': sub_key,
                            'sub_sk':
                            sorted(self.pubsub.subscriptions_by_sub_key),
                        }
                    })

                    # Create a new subscription object and flush the session because the subscription's ID
                    # may be needed for the WSX subscription
                    ps_sub = add_subscription(session, ctx.cluster_id, sub_key,
                                              ctx)
                    session.flush()

                    # Event log
                    self.pubsub.emit_in_subscribe_impl({
                        'stage': 'after_add_subscription',
                        'data': {
                            'ctx.cluster_id': ctx.cluster_id,
                            'ps_sub': ps_sub.asdict(),
                            'sub_sk':
                            sorted(self.pubsub.subscriptions_by_sub_key),
                        }
                    })

                    # Common configuration for WSX and broker messages
                    sub_config = Bunch()
                    sub_config.topic_name = ctx.topic.name
                    sub_config.task_delivery_interval = ctx.topic.task_delivery_interval
                    sub_config.endpoint_name = endpoint.name
                    sub_config.endpoint_type = self.endpoint_type
                    sub_config.unsub_on_wsx_close = ctx.unsub_on_wsx_close
                    sub_config.ext_client_id = ctx.ext_client_id

                    for name in sub_broker_attrs:
                        sub_config[name] = getattr(ps_sub, name, None)

                    #
                    # At this point there may be several cases depending on whether there are already other subscriptions
                    # or messages in the topic.
                    #
                    # * If there are subscribers, then this method will not move any messages because the messages
                    #   will have been already moved to queues of other subscribers before we are called
                    #
                    # * If there are no subscribers but there are messages in the topic then this subscriber will become
                    #   the sole recipient of the messages (we don't have any intrinsic foreknowledge of when, if at all,
                    #   other subscribers can appear)
                    #
                    # * If there are no subscribers and no messages in the topic then this is a no-op
                    #

                    move_messages_to_sub_queue(session, ctx.cluster_id,
                                               ctx.topic.id, ctx.endpoint_id,
                                               ctx.sub_pattern_matched,
                                               sub_key, now)

                    # Subscription's ID is available only now, after the session was flushed
                    sub_config.id = ps_sub.id

                    # Update current server's pub/sub config
                    self.pubsub.add_subscription(sub_config)

                    if is_wsx:

                        # Event log
                        self.pubsub.emit_in_subscribe_impl({
                            'stage': 'before_wsx_sub',
                            'data': {
                                'is_wsx': is_wsx,
                                'sub_sk': sorted(self.pubsub.subscriptions_by_sub_key),
                            }
                        })

                        # This object persists across multiple WSX connections
                        wsx_sub = add_wsx_subscription(
                            session, ctx.cluster_id, ctx.is_internal, sub_key,
                            ctx.ext_client_id, ctx.ws_channel_id, ps_sub.id)

                        # Event log
                        self.pubsub.emit_in_subscribe_impl({
                            'stage': 'after_wsx_sub',
                            'data': {
                                'wsx_sub': wsx_sub.asdict(),
                                'sub_sk': sorted(self.pubsub.subscriptions_by_sub_key),
                            }
                        })

                        # This object will be transient - dropped each time a WSX client disconnects
                        self.pubsub.add_wsx_client_pubsub_keys(
                            session, ctx.sql_ws_client_id, sub_key,
                            ctx.ws_channel_name, ctx.ws_pub_client_id,
                            ctx.web_socket.get_peer_info_dict())

                        # Let the WebSocket connection object know that it should handle this particular sub_key
                        ctx.web_socket.pubsub_tool.add_sub_key(sub_key)

                    # Commit all changes
                    session.commit()

                    # Produce response
                    self.response.payload.sub_key = sub_key

                    if is_wsx:

                        # Let the pub/sub task know it can fetch any messages possibly enqueued for that subscriber,
                        # note that since this is a new subscription, it is certain that only GD messages may be available,
                        # never non-GD ones.
                        ctx.web_socket.pubsub_tool.enqueue_gd_messages_by_sub_key(
                            sub_key)

                        gd_depth, non_gd_depth = ctx.web_socket.pubsub_tool.get_queue_depth(
                            sub_key)
                        self.response.payload.queue_depth = gd_depth + non_gd_depth
                    else:

                        # TODO:
                        # This should be read from that client's delivery task instead of SQL so as to include
                        # non-GD messages too.

                        self.response.payload.queue_depth = get_queue_depth_by_sub_key(
                            session, ctx.cluster_id, sub_key, now)

                # Notify workers of a new subscription
                sub_config.action = BROKER_MSG_PUBSUB.SUBSCRIPTION_CREATE.value

                # Append information about the current server which will let all workers
                # know whether they should create their own subscription object
                # (if they are on a different server) or not.
                sub_config.server_receiving_subscription_id = self.server.id
                sub_config.server_receiving_subscription_pid = self.server.pid
                sub_config.is_api_call = True

                logger_pubsub.info(
                    'Subscription created id=`%s`; t=`%s`; sk=`%s`; patt=`%s`',
                    sub_config['id'], sub_config['topic_name'],
                    sub_config['sub_key'], sub_config['sub_pattern_matched'])

                self.broker_client.publish(sub_config)
Example #43
0
class VaultConnAPI(object):
    """ An API through which connections to Vault are established and managed.
    """
    def __init__(self, config_list=None, requests_adapter=None):
        self.config = Bunch()
        self.lock = RLock()
        self.requests_adapter = requests_adapter

        for config in config_list or []:
            self.create(config)

# ################################################################################################################################

    def __getitem__(self, name):
        return self.config[name]

# ################################################################################################################################

    def get(self, name):
        return self.config.get(name)

# ################################################################################################################################

    def get_client(self, name):
        return self.config[name].client

# ################################################################################################################################

    def _ping(self, name):
        try:
            self.config[name].client.ping()
        except Exception:
            logger.warning('Could not ping Vault connection `%s`, e:`%s`', name, format_exc())
        else:
            logger.info('Ping OK, Vault connection `%s`', name)

    def ping(self, name):
        spawn(self._ping, name)

# ################################################################################################################################

    def _create(self, config):
        conn = _VaultConn(
            config.name, config.url, config.token, config.get('service_name'), config.tls_verify, config.timeout,
            config.allow_redirects, requests_adapter=self.requests_adapter)
        self.config[config.name] = conn

        if config.url != UNITTEST.VAULT_URL:
            self.ping(config.name)

# ################################################################################################################################

    def create(self, config):
        with self.lock:
            self._create(config)

# ################################################################################################################################

    def _delete(self, name):
        try:
            self.config[name].client.close()
        except Exception:
            logger.warning(format_exc())
        finally:
            del self.config[name]

# ################################################################################################################################

    def delete(self, name):
        with self.lock:
            self._delete(name)

# ################################################################################################################################

    def edit(self, new_config):
        with self.lock:
            self._delete(new_config.old_name)
            self._create(new_config)
Example #44
0
 def __init__(self, data={}):
     self.data = Bunch.fromDict(data)
     self.ns_map = {}
     self.update_lock = RLock()
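# A minimal sketch (assumed, not part of the original class) of what
# Bunch.fromDict does here: it recursively converts nested dicts so that
# keys become attributes while normal dict access keeps working.
from bunch import Bunch

raw = {'server': {'host': 'localhost', 'port': 8080}}
data = Bunch.fromDict(raw)
assert data.server.host == 'localhost'
assert data['server']['port'] == 8080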
Example #45
0
 def get_response_data(self):
     return Bunch()
Example #46
0
def train_eval_fn(FLAGS,
				worker_count, 
				task_index, 
				is_chief, 
				target,
				init_checkpoint,
				train_file,
				dev_file,
				checkpoint_dir,
				is_debug):

	graph = tf.Graph()
	with graph.as_default():
		import json
				
		config = json.load(open(FLAGS.config_file, "r"))

		config = Bunch(config)
		config.use_one_hot_embeddings = True
		config.scope = "bert"
		config.dropout_prob = 0.1
		config.label_type = "single_label"
		
		if FLAGS.if_shard == "0":
			train_size = FLAGS.train_size
			epoch = int(FLAGS.epoch / worker_count)
		elif FLAGS.if_shard == "1":
			train_size = int(FLAGS.train_size/worker_count)
			epoch = FLAGS.epoch

		init_lr = 2e-5

		label_dict = json.load(open(FLAGS.label_id))

		num_train_steps = int(
			train_size / FLAGS.batch_size * epoch)
		num_warmup_steps = int(num_train_steps * 0.1)

		num_storage_steps = int(train_size / FLAGS.batch_size)

		num_eval_steps = int(FLAGS.eval_size / FLAGS.batch_size)

		if is_debug == "0":
			num_storage_steps = 2
			num_eval_steps = 10
			num_train_steps = 10
		print("num_train_steps {}, num_eval_steps {}, num_storage_steps {}".format(num_train_steps, num_eval_steps, num_storage_steps))

		print(" model type {}".format(FLAGS.model_type))

		print(num_train_steps, num_warmup_steps, "=============")
		
		opt_config = Bunch({"init_lr":init_lr/worker_count, 
							"num_train_steps":num_train_steps,
							"num_warmup_steps":num_warmup_steps,
							"worker_count":worker_count,
							"opt_type":FLAGS.opt_type})

		model_io_config = Bunch({"fix_lm":False})
		
		model_io_fn = model_io.ModelIO(model_io_config)

		optimizer_fn = optimizer.Optimizer(opt_config)
		
		num_classes = FLAGS.num_classes

		model_train_fn = model_fn_builder(config, num_classes, init_checkpoint, 
												model_reuse=None, 
												load_pretrained=True,
												model_io_fn=model_io_fn,
												optimizer_fn=optimizer_fn,
												model_io_config=model_io_config, 
												opt_config=opt_config,
												exclude_scope="",
												not_storage_params=[],
												target="")
		
		model_eval_fn = model_fn_builder(config, num_classes, init_checkpoint, 
												model_reuse=True, 
												load_pretrained=True,
												model_io_fn=model_io_fn,
												optimizer_fn=optimizer_fn,
												model_io_config=model_io_config, 
												opt_config=opt_config,
												exclude_scope="",
												not_storage_params=[],
												target="")
		
		def eval_metric_fn(features, eval_op_dict):
			logits = eval_op_dict["logits"]
			print(logits.get_shape(), "===logits shape===")
			pred_label = tf.argmax(logits, axis=-1, output_type=tf.int32)
			prob = tf.nn.softmax(logits)
			accuracy = correct = tf.equal(
				tf.cast(pred_label, tf.int32),
				tf.cast(features["label_ids"], tf.int32)
			)
			accuracy = tf.reduce_mean(tf.cast(correct, tf.float32))

			return {"accuracy":accuracy, "loss":eval_op_dict["loss"], 
					"pred_label":pred_label, "label_ids":features["label_ids"]}

		def train_metric_fn(features, train_op_dict):
			logits = train_op_dict["logits"]
			print(logits.get_shape(), "===logits shape===")
			pred_label = tf.argmax(logits, axis=-1, output_type=tf.int32)
			prob = tf.nn.softmax(logits)
			accuracy = correct = tf.equal(
				tf.cast(pred_label, tf.int32),
				tf.cast(features["label_ids"], tf.int32)
			)
			accuracy = tf.reduce_mean(tf.cast(correct, tf.float32))
			return {"accuracy":accuracy, "loss":train_op_dict["loss"], 
					"train_op":train_op_dict["train_op"]}
		
		name_to_features = {
				"input_ids":
						tf.FixedLenFeature([FLAGS.max_length], tf.int64),
				"input_mask":
						tf.FixedLenFeature([FLAGS.max_length], tf.int64),
				"segment_ids":
						tf.FixedLenFeature([FLAGS.max_length], tf.int64),
				"label_ids":
						tf.FixedLenFeature([], tf.int64),
		}

		def _decode_record(record, name_to_features):
			"""Decodes a record to a TensorFlow example.
			"""
			example = tf.parse_single_example(record, name_to_features)

			# tf.Example only supports tf.int64, but the TPU only supports tf.int32.
			# So cast all int64 to int32.
			for name in list(example.keys()):
				t = example[name]
				if t.dtype == tf.int64:
					t = tf.to_int32(t)
				example[name] = t

			return example 

		params = Bunch({})
		params.epoch = FLAGS.epoch
		params.batch_size = FLAGS.batch_size

		train_features = tf_data_utils.train_input_fn(train_file,
									_decode_record, name_to_features, params, if_shard=FLAGS.if_shard,
									worker_count=worker_count,
									task_index=task_index)

		eval_features = tf_data_utils.eval_input_fn(dev_file,
									_decode_record, name_to_features, params, if_shard=FLAGS.if_shard,
									worker_count=worker_count,
									task_index=task_index)
		
		train_op_dict = model_train_fn(train_features, [], tf.estimator.ModeKeys.TRAIN)
		eval_op_dict = model_eval_fn(eval_features, [], tf.estimator.ModeKeys.EVAL)
		eval_dict = eval_metric_fn(eval_features, eval_op_dict["eval"])
		train_dict = train_metric_fn(train_features, train_op_dict["train"])
		
		def eval_fn(eval_dict, sess):
			i = 0
			total_accuracy = 0
			eval_total_dict = {}
			while True:
				try:
					eval_result = sess.run(eval_dict)
					for key in eval_result:
						if key not in eval_total_dict:
							if key in ["pred_label", "label_ids"]:
								eval_total_dict[key] = []
								eval_total_dict[key].extend(eval_result[key])
							if key in ["accuracy", "loss"]:
								eval_total_dict[key] = 0.0
								eval_total_dict[key] += eval_result[key]
						else:
							if key in ["pred_label", "label_ids"]:
								eval_total_dict[key].extend(eval_result[key])
							if key in ["accuracy", "loss"]:
								eval_total_dict[key] += eval_result[key]

					i += 1
					if np.mod(i, num_eval_steps) == 0:
						break
				except tf.errors.OutOfRangeError:
					print("End of dataset")
					break

			label_id = eval_total_dict["label_ids"]
			pred_label = eval_total_dict["pred_label"]

			result = classification_report(label_id, pred_label, 
				target_names=list(label_dict["label2id"].keys()))

			print(result, task_index)
			eval_total_dict["classification_report"] = result
			return eval_total_dict

		def train_fn(train_op_dict, sess):
			i = 0
			cnt = 0
			loss_dict = {}
			monitoring_train = []
			monitoring_eval = []
			while True:
				try:
					[train_result, step] = sess.run([train_op_dict, tf.train.get_global_step()])
					for key in train_result:
						if key == "train_op":
							continue
						else:
							if np.isnan(train_result[key]):
								print(key, "got nan loss")
								break
							else:
								if key in loss_dict:
									loss_dict[key] += train_result[key]
								else:
									loss_dict[key] = train_result[key]
					
					i += 1
					cnt += 1
					
					if np.mod(i, num_storage_steps) == 0:
						string = ""
						for key in loss_dict:
							tmp = key + " " + str(loss_dict[key]/cnt) + "\t"
							string += tmp
						print(string, step)
						monitoring_train.append(loss_dict)

						eval_finial_dict = eval_fn(eval_dict, sess)
						monitoring_eval.append(eval_finial_dict)

						for key in loss_dict:
							loss_dict[key] = 0.0
						cnt = 0

					if is_debug == "0":
						if i == num_train_steps:
							break

				except tf.errors.OutOfRangeError:
					print("==Succeeded in training model==")

		print("===========begin to train============")
		sess_config = tf.ConfigProto(allow_soft_placement=False,
									log_device_placement=False)

		checkpoint_dir = checkpoint_dir if task_index == 0 else None
		print("==checkpoint_dir==", checkpoint_dir)

		print("start training")

		hooks = []
		if FLAGS.opt_type == "ps":
			sync_replicas_hook = optimizer_fn.opt.make_session_run_hook(is_chief, num_tokens=0)
			hooks.append(sync_replicas_hook)
			sess = tf.train.MonitoredTrainingSession(master=target,
												 is_chief=is_chief,
												 config=sess_config,
												 hooks=hooks,
												 checkpoint_dir=checkpoint_dir,
												 save_checkpoint_steps=num_storage_steps)
		elif FLAGS.opt_type == "pai_soar" and pai:
			sess = tf.train.MonitoredTrainingSession(master=target,
												 is_chief=is_chief,
												 config=sess_config,
												 hooks=hooks,
												 checkpoint_dir=checkpoint_dir,
												 save_checkpoint_steps=num_storage_steps)
		elif FLAGS.opt_type == "hvd" and hvd:
			bcast_hook = hvd.BroadcastGlobalVariablesHook(0)
			hooks.append(bcast_hook)
			sess_config.gpu_options.allow_growth = True
			sess_config.gpu_options.visible_device_list = str(hvd.local_rank())
			sess = tf.train.MonitoredTrainingSession(checkpoint_dir=checkpoint_dir,
												   hooks=hooks,
												   config=sess_config,
												   save_checkpoint_steps=num_storage_steps)
		else:
			print("==single sess==")
			sess = tf.train.MonitoredTrainingSession(config=sess_config,
												   hooks=hooks,
												   checkpoint_dir=checkpoint_dir,
												   save_checkpoint_steps=num_storage_steps)
						
		step = sess.run(optimizer_fn.global_step)
		print(step)
		train_fn(train_dict, sess)

		if task_index == 0:
			print("===========begin to eval============")
			eval_finial_dict = eval_fn(eval_dict, sess)
Example #47
0
def parse_options():
    # strategy: read in command line options
    # if an options file is specified, the settings in there override
    # what is chosen on the command line. the command line parser options
    # are read to parser_options, the config file reader options to config.
    # the final options are stored in cl_options.
    parser = OptionParser()
    group_input = OptionGroup(
        parser, "input file specification",
        "These files are required to run the simulation. They "
        "may either be specified via the command line arguments "
        "or in an options file.")
    group_sim = OptionGroup(
        parser, "simulation options",
        "These options modify simulation parameters such as "
        "duration, initialisation temperature, etc.")
    parser.add_option_group(group_sim)
    parser.add_option_group(group_input)

    parser.add_option(
        "-o",
        "--options",
        dest="optfile",
        type="string",
        help=
        "path and name of a file specifying input options and simulation options",
        metavar=">file<")

    parser.add_option("-p",
                      "--path",
                      dest="outpath",
                      type="string",
                      default='./',
                      help="destination path for simulation output",
                      metavar=">path<")
    group_input.add_option("-f",
                           "--forcing",
                           dest="forcing_file",
                           type="string",
                           help="location and name of the atmospheric forcing",
                           metavar=">file<")
    group_input.add_option(
        "-z",
        "--horizon",
        dest="horizon_file",
        type="string",
        help="location and name of the horizon function file",
        metavar=">file<")
    group_input.add_option(
        "-c",
        "--car",
        dest="car_file",
        type="string",
        help="location and name of the car general information file",
        metavar=">file<")
    group_input.add_option("-b",
                           "--body",
                           dest="body_file",
                           type="string",
                           help="location and name of the car body file",
                           metavar=">file<")

    group_sim.add_option("-d",
                         "--duration",
                         dest="duration",
                         type="float",
                         default=2.0,
                         help="duration of the simulation in hours",
                         metavar=">duration<")
    group_sim.add_option(
        "-T",
        "--T0",
        dest="T0",
        type="float",
        default=20.0,
        help="initialisation temperature of all components in deg. Celsius",
        metavar=">T0<")
    group_sim.add_option("--date",
                         dest="date",
                         type="string",
                         help="begin of simulation date, format: YYYY-MM-DD",
                         metavar=">date<")
    group_sim.add_option("--time",
                         dest="time",
                         type="string",
                         help="begin of simulation time, format: HH:MM",
                         metavar=">time<")
    (parser_options, args) = parser.parse_args()

    cl_options = Bunch()

    if parser_options.optfile is not None:
        if file_exists(parser_options.optfile):
            config = configparser.ConfigParser()
            config.read(parser_options.optfile)

            print(" * reading options from specified file")

            float_keys = ["duration", "t0"]

            # copy all options to cl_options structure
            for sec in config.sections():
                for key in config.options(sec):
                    if key in float_keys:
                        if key == 't0':  # need to check this since keys are only lower case
                            cl_options["T0"] = float(config.get(sec, key))
                        else:
                            cl_options[key] = float(config.get(sec, key))
                    else:
                        cl_options[key] = config.get(sec, key)
        else:
            print(" error, submitted options file not found!")
            print(" {:s}".format(cl_options.optfile))

    option_errors = ""
    if cl_options == Bunch():
        option_errors += "  please supply location and name of an options file or other required parameters\n"
    else:
        if cl_options.forcing_file is None:
            option_errors += "  please supply location and name of a forcing file\n"
        if cl_options.car_file is None:
            option_errors += "  please supply location and name of a car general information file\n"
        if cl_options.body_file is None:
            option_errors += "  please supply location and name of a car body_file\n"
        if not "horizon_file" in cl_options:  # a horizon file is no requirement. if not submitted
            cl_options.horizon_file = None  # it's just ground and sky.

    if option_errors != "":
        print("")
        print(option_errors[:-1])
        print("  try --help for more information.")
        print("")
        print("exiting...")
        sys.exit(1)

    return cl_options
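# Hypothetical end-to-end sketch (assumed, not in the original): write a
# minimal options file and let parse_options() pick it up via "-o". It
# assumes the module's other helpers (e.g. file_exists) are in scope.
import sys

with open('sim_options.cfg', 'w') as cfg:
    cfg.write("[input]\n"
              "forcing_file = ./data/forcing.nc\n"
              "car_file = ./data/car.cfg\n"
              "body_file = ./data/body.cfg\n"
              "[simulation]\n"
              "duration = 4.5\n"
              "t0 = 15.0\n")

sys.argv = ['simulation.py', '-o', 'sim_options.cfg']
options = parse_options()
# options.duration == 4.5, options.T0 == 15.0, options.horizon_file is None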
Example #48
0
    def edit(self, new_config):
        with self.lock:
            self._delete(new_config.old_name)
            self._create(new_config)

# ################################################################################################################################

if __name__ == '__main__':

    name = 'abc'
    client_token = '5f763fa3-2872-71ab-4e5d-f1398aca6637'
    username = '******'
    password = '******'
    gh_token = ''

    config = Bunch()
    config.name = name
    config.url = 'http://localhost:49517'
    config.token = client_token
    config.service_name = 'my.service'
    config.tls_verify = True
    config.timeout = 20
    config.allow_redirects = True

    api = VaultConnAPI([config])

    import time
    time.sleep(0.1)

    response1 = api[name].client.authenticate(VAULT.AUTH_METHOD.TOKEN.id, client_token)
    logger.info('Response1 %s', response1)
Example #49
0
	elem = teapot.DriftTEAPOT("a drift")
	elem.setLength(lattice_length)
	elem.setnParts(n_parts)	
	teapot_lattice = teapot.TEAPOT_Lattice("teapot_lattice")
	teapot_lattice.addNode(elem)
	teapot_lattice.initialize()
	return teapot_lattice

lattice_length = 248.0    # the length of the drift
n_parts = 1  # number of parts on what the drift will be chopped, or the number of SC nodes
lattice = getLattice(lattice_length,n_parts)

#------------------------------
#Main Bunch init
#------------------------------
b = Bunch()
print "Read Bunch."
runName = "Benchmark_SpaceCharge"

total_macroSize=1.0e+14
b.mass(0.93827231)
energy = 1.0 #Gev
bunch_orbit_to_pyorbit(lattice.getLength(), energy, "Bm_KV_Uniform_10",b)
b.dumpBunch("bunch_init.dat")
b.getSyncParticle().kinEnergy(energy)
nParticlesGlobal = b.getSizeGlobal()
b.macroSize(total_macroSize/nParticlesGlobal)
print total_macroSize/nParticlesGlobal
#-----------------------------------
# Add Direct Force Space Charge node
#-----------------------------------
Example #50
0
#tokenizer_en = tfds.features.text.SubwordTextEncoder.load_from_file(file_path.subword_vocab_path)

hyp = {
    "num_layers" : 3,                       #number of transformer blocks
    "d_model" : 256,                        #the projected word vector dimension
    "dff" : 512,                            #feed forward network hidden parameters
    "num_heads" : 8,                        #the number of heads in the multi-headed attention unit
    "dropout_rate" : 0.0,                           
    "epsilon_ls" : 0.1,                              #label_smoothing hyper parameter
    "batch_size" : 2,
    "epochs" : 2,
    "print_chks" : 50,
    "input_vocab_size" : 8096,                  # 8094 + start and end token
    "target_vocab_size" : 8096,
    "doc_length" : 51200,
    "summ_length" : 7000,
    "beam_size" : 3,
    "test_size" : 0.20,
    "copy_gen" : True,
    "decay_lr" : False,                             #decay learning rate
    "run_tensorboard" : True,
    "from_scratch" : True,
    "write_summary_op" : True
}
#print all the model hyperparameters



config = Bunch(hyp)
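# Illustrative note (not part of the original): wrapping the dict in a Bunch
# allows attribute access next to normal dict access, and iterating over it
# is one way to honour the "print all the model hyperparameters" comment above.
assert config.num_layers == config["num_layers"] == 3
assert config.copy_gen is True

for key in sorted(config):
    print(key, config[key])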

Example #51
0
from bunch import Bunch

PORT = 15170
LISTEN_ADDRESS = "0.0.0.0"
MAX_PACKET_SIZE = 6 * 4

VELOCITY_THRESHOLD = 1
JUMP_THRESHOLD = 5

# Usage: MOVEMENT_STATE.LEFT
MOVEMENT_STATE = Bunch.fromDict({
    "LEFT": "Left",
    "STILL": "Still",
    "RIGHT": "Right"
})
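# A hypothetical helper (not in the original) showing how these Bunch
# constants might be used like enum members when classifying a velocity
# reading against the VELOCITY_THRESHOLD defined above.
def classify_movement(velocity):
    if velocity > VELOCITY_THRESHOLD:
        return MOVEMENT_STATE.RIGHT
    elif velocity < -VELOCITY_THRESHOLD:
        return MOVEMENT_STATE.LEFT
    return MOVEMENT_STATE.STILL

# classify_movement(3)  -> "Right"
# classify_movement(0)  -> "Still"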
Example #52
0
from injection import InjectParts
from injection import JohoTransverse, SNSESpreadDist
from orbit.utils.orbit_mpi_utils import bunch_orbit_to_pyorbit, bunch_pyorbit_to_orbit
print "Start."

xmin = -0.050
xmax = 0.050
ymin = -0.050
ymax = 0.050

foilparams = (xmin, xmax, ymin, ymax)

#------------------------------
#Bunch init
#------------------------------
b = Bunch()
runName = "Test_Injection"

b.mass(0.93827231)
b.macroSize(1.0e+1)
energy = 1.0  #Gev
b.getSyncParticle().kinEnergy(energy)

lostfoilbunch = Bunch()
lostfoilbunch.addPartAttr("LostParticleAttributes")

#------------------------------
#Initial Distribution Functions
#------------------------------
sp = b.getSyncParticle()
Example #53
0
        print('error')


#test
if __name__ == '__main__':
    config_dict = {
        "data_train": "<path>/dataset/mnist/mnist.npz",
        "num_class": 10,
        "input_h": 28,
        "input_w": 28,
        "input_c": 1,
        "batch_size": 5,
        "num_iter_per_epoch": 2000000,
    }
    config_dict.update(keras_format=tf.keras.backend.image_data_format())
    config = Bunch(config_dict)
    g_config.__init(config)

    class Train():
        def __init__(self):
            self.net = gen_net()

            self.optimizer = gen_optimizer()

            self.accuracy = (
                lambda logits, labels: compute_acc(logits, labels))

            self.loss = (lambda logits, labels: compute_loss(logits, labels))

        def train_step(self, model, optimizer, images, labels):
            with tf.GradientTape() as tape:
Example #54
0
    teapot_lattice.addNode(elem)
    teapot_lattice.initialize()
    return teapot_lattice


lattice_length = 248.0  # the length of the lattice
# number of parts into which the drift will be chopped:
n_parts = 1
# also the number of SC nodes
lattice = getLattice(lattice_length, n_parts)

#------------------------------
# Bunch initialization
#------------------------------

b = Bunch()
print "Read Bunch."
runName = "Benchmark_Collimator"

total_macroSize = 1.0e+16
b.mass(consts.mass_proton)

ERef = 1.0  # Gev
print "Reference energy is 1.0 (GeV). \nPlease input desired energy:"
energy = float(raw_input())
print "energy is:", energy
bunch_orbit_to_pyorbit(lattice.getLength(), energy, "Bm_KV_Uniform_10000", b)
b.getSyncParticle().kinEnergy(energy)
nParticlesGlobal = b.getSizeGlobal()
b.macroSize(total_macroSize / nParticlesGlobal)
Example #55
0
def get_config_from_json(json_file):
    with open(json_file, 'r') as config_file:
        config_dict = json.load(config_file)
    config = Bunch(config_dict)
    return config, config_dict
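# A short usage sketch (assumed, not from the original project): write a tiny
# JSON config file and load it back through the helper above.
import json
from bunch import Bunch

with open('example_config.json', 'w') as f:
    json.dump({'batch_size': 32, 'learning_rate': 0.001}, f)

config, config_dict = get_config_from_json('example_config.json')
assert config.batch_size == 32
assert config_dict['learning_rate'] == 0.001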
Example #56
0
    def on_call_finished(self, invoked_service, response, exception):

        now = invoked_service.time.utcnow()
        cid = invoked_service.wsgi_environ['zato.request_ctx.{}'.format(
            self.request_ctx_cid_key)]
        data_key = self.data_pattern.format(cid)
        counter_key = self.counter_pattern.format(cid)
        source, req_ts_utc, on_target = invoked_service.kvdb.conn.hmget(
            data_key, 'source', 'req_ts_utc', 'on_target')

        with invoked_service.lock(self.lock_pattern.format(cid)):

            data = Bunch()
            data.cid = cid
            data.resp_ts_utc = now
            data.response = response
            data.exception = exception
            data.ok = False if exception else True
            data.source = source
            data.target = invoked_service.name
            data.req_ts_utc = req_ts_utc

            # First store our response and exception (if any)
            json_data = dumps(data)
            invoked_service.kvdb.conn.hset(data_key,
                                           invoked_service.get_name(),
                                           json_data)

            on_target = loads(on_target)

            if logger.isEnabledFor(DEBUG):
                self._log_before_callbacks('on_target', on_target,
                                           invoked_service)

            # We always invoke 'on_target' callbacks, if there are any
            self.invoke_callbacks(invoked_service, data, on_target,
                                  self.on_target_channel, cid)

            # Was it the last parallel call?
            if not invoked_service.kvdb.conn.decr(counter_key):

                # Not every subclass will need final callbacks
                if self.needs_on_final:

                    payload = invoked_service.kvdb.conn.hgetall(data_key)
                    payload['data'] = {}

                    for key in (key for key in payload.keys()
                                if key not in JSON_KEYS):
                        payload['data'][key] = loads(payload.pop(key))

                    for key in JSON_KEYS:
                        if key not in ('source', 'data', 'req_ts_utc'):
                            payload[key] = loads(payload[key])

                    on_final = payload['on_final']

                    if logger.isEnabledFor(DEBUG):
                        self._log_before_callbacks('on_final', on_final,
                                                   invoked_service)

                    self.invoke_callbacks(invoked_service, payload, on_final,
                                          self.on_final_channel, cid)

                    invoked_service.kvdb.conn.delete(counter_key)
                    invoked_service.kvdb.conn.delete(data_key)
Example #57
0
quads = accLattice.getNodesOfClass(Quad)

#--- The last quad will have zero field.
#--- we will use it in the other examples 
#--- to provide tilted exit plane.
#--- It is better to have exit from the 3D region
#--- in the free space.
quads[len(quads) - 1].setParam("dB/dr",0.)

for quad in quads:
	quad.setnParts(10)
	print "quad  =",quad.getName()," gradient[T/m] = %+6.5f "%quad.getParam("dB/dr")
print "========================================================"


bunch_ini = Bunch()
eKin = 1.3
bunch_ini.getSyncParticle().kinEnergy(eKin)

#----------------------------------------------------
# Let's calculate beta functions for 90 deg FODO
# Brho[T*m] = (10/2.998)*beta_relativistic*E[GeV]
# focusing_length[m] = Brho/(g*length_of_quad) g = [T/m] quad gradient
# cappa = 2*focusing_length/L_between_quads
# Twiss beta_max = L*cappa*(cappa + 1)/sqrt(cappa**2 - 1)
# Twiss beta_min = L*cappa*(cappa - 1)/sqrt(cappa**2 - 1)
# L = L_between_quads
#-----------------------------------------------------

L_between_quads = accLattice.getNodePositionsDict()[quads[1]][1] - accLattice.getNodePositionsDict()[quads[0]][1]
print "Distance between quads [m] = ",L_between_quads
Example #58
0
def _get_feature_info(obj, request):
    params = dict((k.upper(), v) for k, v in request.params.items())
    p_bbox = list(map(float, params.get('BBOX').split(',')))
    p_width = int(params.get('WIDTH'))
    p_height = int(params.get('HEIGHT'))
    p_srs = params.get('SRS')
    p_info_format = params.get('INFO_FORMAT', b'text/html')

    p_x = float(params.get('X'))
    p_y = float(params.get('Y'))
    p_query_layers = params.get('QUERY_LAYERS').split(',')
    p_feature_count = int(params.get('FEATURE_COUNT', GFI_FEATURE_COUNT))

    bw = p_bbox[2] - p_bbox[0]
    bh = p_bbox[3] - p_bbox[1]

    qbox = dict(l=p_bbox[0] + bw * (p_x - GFI_RADIUS) / p_width,
                b=p_bbox[3] - bh * (p_y + GFI_RADIUS) / p_height,
                r=p_bbox[0] + bw * (p_x + GFI_RADIUS) / p_width,
                t=p_bbox[3] - bh * (p_y - GFI_RADIUS) / p_height)

    srs = SRS.filter_by(id=int(p_srs.split(':')[-1])).one()

    qgeom = geom_from_wkt(("POLYGON((%(l)f %(b)f, %(l)f %(t)f, " +
                           "%(r)f %(t)f, %(r)f %(b)f, %(l)f %(b)f))") % qbox,
                          srs.id)

    lmap = dict((lyr.keyname, lyr) for lyr in obj.layers)

    results = list()
    fcount = 0

    for lname in p_query_layers:
        layer = lmap[lname]
        flayer = layer.resource.feature_layer

        request.resource_permission(DataScope.read, layer.resource)
        request.resource_permission(DataScope.read, flayer)

        query = flayer.feature_query()
        query.intersects(qgeom)

        # Limit number of layer features so that we
        # don't overshoot its total number
        query.limit(p_feature_count - fcount)

        features = list(query())
        fcount += len(features)

        results.append(
            Bunch(keyname=layer.keyname,
                  display_name=layer.display_name,
                  feature_layer=flayer,
                  features=features))

        # Needed number of features found, stop search
        if fcount >= p_feature_count:
            break

    if p_info_format == 'application/json':
        result = [
            dict(
                keyname=result.keyname,
                display_name=result.display_name,
                features=[{
                    fld.display_name: feature.fields[fld.keyname]
                    for fld in result.feature_layer.fields
                } for feature in result.features],
            ) for result in results
        ]
        return Response(json.dumps(result, cls=geojson.Encoder),
                        content_type='application/json',
                        charset='utf-8')

    return Response(render_template(
        'nextgisweb:wmsserver/template/get_feature_info_html.mako',
        dict(results=results, resource=obj),
        request=request),
                    content_type='text/html',
                    charset='utf-8')
Example #59
0
def parse():
    parser = argparse.ArgumentParser(
        description='Convert ERAi files to ICAR input forcing files')
    parser.add_argument('start_date',
                        nargs="?",
                        action='store',
                        help="Specify starting date (yyyy-mm-dd)",
                        default="2000-10-01")
    parser.add_argument('end_date',
                        nargs="?",
                        action='store',
                        help="Specify end date (yyyy-mm-dd)",
                        default="2000-10-02")
    parser.add_argument('lat_n',
                        nargs="?",
                        action='store',
                        help="northern latitude boundary",
                        default="60")
    parser.add_argument('lat_s',
                        nargs="?",
                        action='store',
                        help="southern latitude boundary",
                        default="20")
    parser.add_argument('lon_e',
                        nargs="?",
                        action='store',
                        help="eastern longitude boundary",
                        default="-50")
    parser.add_argument('lon_w',
                        nargs="?",
                        action='store',
                        help="western longitude boundary",
                        default="-140")
    parser.add_argument('dir',
                        nargs="?",
                        action='store',
                        help="ERAi file location",
                        default="/glade/p/rda/data/ds627.0/")
    parser.add_argument('atmdir',
                        nargs="?",
                        action='store',
                        help="ERAi atmospheric data file location",
                        default="ei.oper.an.ml/_Y__M_/")
    parser.add_argument('sfcdir',
                        nargs="?",
                        action='store',
                        help="ERAi surface data file location",
                        default="ei.oper.fc.sfc/_Y__M_/")
    parser.add_argument('atmfile',
                        nargs="?",
                        action='store',
                        help="ERAi primary atmospheric file",
                        default="ei.oper.an.ml.regn128sc._Y__M__D__h_")
    parser.add_argument('atmuvfile',
                        nargs="?",
                        action='store',
                        help="ERAi U/V atm file",
                        default="ei.oper.an.ml.regn128uv._Y__M__D__h_")
    parser.add_argument('sfcfile',
                        nargs="?",
                        action='store',
                        help="ERAi surface file",
                        default="ei.oper.fc.sfc.regn128sc._Y__M__D__h_")
    parser.add_argument('temp_nc_dir',
                        nargs="?",
                        action='store',
                        help="temporary directory to store netCDF files in",
                        default="temp_nc_dir")

    parser.add_argument('-v',
                        '--version',
                        action='version',
                        version='ERAi2ICAR v' + version)
    parser.add_argument('--verbose',
                        action='store_true',
                        default=False,
                        help='verbose output',
                        dest='verbose')
    args = parser.parse_args()

    date0 = args.start_date.split("-")
    start_date = datetime.datetime(int(date0[0]), int(date0[1]), int(date0[2]))

    date0 = args.end_date.split("-")
    end_date = datetime.datetime(int(date0[0]), int(date0[1]), int(date0[2]))

    if args.temp_nc_dir[-1] != "/":
        args.temp_nc_dir += "/"

    info = Bunch(lat=[float(args.lat_s), float(args.lat_n)],
                 lon=[float(args.lon_w), float(args.lon_e)],
                 start_date=start_date,
                 end_date=end_date,
                 atmdir=args.dir + args.atmdir,
                 sfcdir=args.dir + args.sfcdir,
                 atmfile=args.atmfile,
                 uvfile=args.atmuvfile,
                 sfcfile=args.sfcfile,
                 nc_file_dir=args.temp_nc_dir,
                 version=version)

    return info
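# Hypothetical invocation (not in the original): parse() reads sys.argv, so
# the defaults above apply when no arguments are passed on the command line.
if __name__ == '__main__':
    info = parse()
    # e.g. info.start_date == datetime.datetime(2000, 10, 1)
    #      info.atmfile    == "ei.oper.an.ml.regn128sc._Y__M__D__h_"
    print(info.start_date, info.end_date, info.nc_file_dir)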
Example #60
0
    def __init__(cls, classname, bases, nmspc):

        # The table name is equal to the resource identity by default.
        # It will hardly ever need to be anything else, but let's keep
        # the possibility open.

        if '__tablename__' not in cls.__dict__:
            setattr(cls, '__tablename__', cls.identity)

        # A child class can set its own mapper arguments; let's keep that
        # possible. If they are not set, set our own.

        if '__mapper_args__' not in cls.__dict__:
            mapper_args = dict()
            setattr(cls, '__mapper_args__', mapper_args)
        else:
            mapper_args = getattr(cls, '__mapper_args__')

        if 'polymorphic_identity' not in mapper_args:
            mapper_args['polymorphic_identity'] = cls.identity

        # For Resource class this variable is not set yet.
        Resource = globals().get('Resource', None)

        if Resource and cls != Resource:

            # Child classes need a foreign-key field pointing to the base
            # resource class. It could be created by hand in each class, but
            # it is easier to create it here for all of them.

            if 'id' not in cls.__dict__:
                idcol = db.Column('id', db.ForeignKey(Resource.id),
                                  primary_key=True)
                idcol._creation_order = Resource.id._creation_order
                setattr(cls, 'id', idcol)

            # Automatic parent-link detection may not work if there are
            # two fields with a foreign key to resource.id.

            if 'inherit_condition' not in mapper_args:
                mapper_args['inherit_condition'] = (
                    cls.id == Resource.id)

        scope = Bunch()

        for base in cls.__mro__:
            bscope = base.__dict__.get('__scope__', None)

            if bscope is None:
                continue

            if bscope and not hasattr(bscope, '__iter__'):
                bscope = tuple((bscope, ))

            for s in bscope:
                scope[s.identity] = s

        setattr(cls, 'scope', scope)

        super(ResourceMeta, cls).__init__(classname, bases, nmspc)

        resource_registry.register(cls)