Example #1
    def __init__(self, name, n_actions, n_features, hidsizes, lr, ac_fn, vf_coef=0.1, seed=0):
        np.random.seed(seed)
        tf.set_random_seed(seed)

        self.logger = get_logger(name)

        self.n_actions = n_actions
        self.n_features = n_features
        self.hidsizes = hidsizes
        self.ac_fn = ac_fn
        self.lr = lr
        self.vf_coef = vf_coef

        self._build_net()

        config = tf.ConfigProto(allow_soft_placement=True, log_device_placement=False)
        config.gpu_options.allow_growth = True

        self.sess = tf.Session(config=config)
        self.sess.run(tf.global_variables_initializer())
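
A minimal usage sketch (hypothetical: the snippet does not show the enclosing class, so the name PolicyModel below is an assumption):

    import tensorflow as tf

    # PolicyModel is a stand-in name for the class the __init__ above belongs to
    model = PolicyModel(
        name='cartpole',
        n_actions=2,        # size of the discrete action space
        n_features=4,       # length of the observation vector
        hidsizes=[64, 64],  # hidden layer sizes
        lr=1e-3,            # learning rate
        ac_fn=tf.nn.relu,   # activation function
    )
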
Example #2
from concurrent.futures import ProcessPoolExecutor
from common import util

LOGGER = util.get_logger()
MAX_THREADS = 10
TEST_URL = 'https://lpdaac.usgs.gov/'


def _sum(*args):
    LOGGER.info(f'adding {args[0]} ...')
    return sum(*args)


def main():
    sum_inputs = [(1, 2), (3, 4)]
    all_sums = []

    with ProcessPoolExecutor() as pool:
        # the process pool creates N Python interpreter processes,
        # where N is the number of available CPUs

        # parallel calculation by mapping
        for s in pool.map(_sum, sum_inputs):
            all_sums.append(s)

    # sum of the sums
    total = sum(all_sums)
    LOGGER.info(f'total = {total}')


if __name__ == '__main__':
    main()
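
An equivalent variant of the pool.map() pattern above, reusing _sum and LOGGER but submitting tasks individually and collecting results as they complete (note: completion order is not input order):

    from concurrent.futures import ProcessPoolExecutor, as_completed

    def main_submit():
        sum_inputs = [(1, 2), (3, 4)]
        with ProcessPoolExecutor() as pool:
            # submit() returns one Future per task
            futures = [pool.submit(_sum, pair) for pair in sum_inputs]
            all_sums = [f.result() for f in as_completed(futures)]
        LOGGER.info(f'total = {sum(all_sums)}')
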
Example #3
with timed('loading data ...'):
    if args.model == 'cnn':
        X_train, y_train = get_data_and_labels('data/train.txt', False)
        X_test, y_test = get_data_and_labels('data/test.txt', False)
    else:
        X_train = np.loadtxt('data/hog_X_train.csv', delimiter=',')
        y_train = np.expand_dims(
            np.loadtxt('data/hog_y_train.csv', delimiter=','), -1)
        X_test = np.loadtxt('data/hog_X_test.csv', delimiter=',')
        y_test = np.expand_dims(
            np.loadtxt('data/hog_y_test.csv', delimiter=','), -1)

if args.model == 'lr':
    from baselines.logistic_regression import LogisticRegression
    logger = get_logger('lr_langevin' if args.langevin else 'lr')
    model = LogisticRegression(
        learning_rate=args.lr,
        total_epoches=args.total_epoches,
        langevin=args.langevin,
        seed=args.seed,
        logger=logger,
        X_test=X_test,
        y_test=y_test,
    )
    # from sklearn.linear_model.logistic import LogisticRegression
    # model = LogisticRegression()

elif args.model == 'svm':
    from baselines.svm import SVM
    logger = get_logger(f'svm_{args.kernel}')
Example #4
    # environment check
    EnvCheck()

    # get host and port from the command line (fall back to defaults)
    try:
        host = sys.argv[1]
        port = int(sys.argv[2])
    except IndexError:
        host = '0.0.0.0'
        port = setting.TcpPort
    # address
    g.addr = (host, port)
    # event queue
    g.events_queue = Queue.Queue()
    # logging
    g.logger = util.get_logger(setting.logFileName)
    # thread condition lock
    g.condLock = threading.Condition()
   
    # signal handling
    regSignal()

    # record the pid
    logPid(setting.runPidFile)

    # set the process name
    util.setProcName("AutoDeployServer")

    # record directory changes in the db
    daemonDoSomeThing(watchDirByDb)
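
regSignal() is not defined in this snippet; a hypothetical sketch of such a helper, using the standard signal module (the shutdown behavior is an assumption):

    import signal

    def regSignal():
        # turn SIGTERM/SIGINT into a graceful-shutdown event instead of
        # dying mid-deployment (handler behavior is an assumption)
        def handler(signum, frame):
            g.logger.info('signal %d received, shutting down' % signum)
            g.events_queue.put(None)  # sentinel for worker threads

        signal.signal(signal.SIGTERM, handler)
        signal.signal(signal.SIGINT, handler)
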
    
Example #5
from rl.a2c.a2c import Model
from rl.a2c.runner import Runner
from rl.common import set_global_seeds

from baselines.stochastic import Model as Stochastic
from baselines.rule import Model as Rule

import matplotlib
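# 'Agg' is a non-interactive backend, so figures can be rendered and saved
# without a display (e.g. on a headless training server)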
matplotlib.use('Agg')
import matplotlib.pyplot as plt


if __name__ == '__main__':
    set_global_seeds(args.seed)

    logger = get_logger('trading')
    logger.info(str(args))

    env = Env('train')

    # Instantiate the model objects (this creates defender_model and adversary_model)
    model = Model(
        ob_size=env.ob_size,
        act_size=env.act_size,
        learning_rate=args.lr,
        latents=args.latents,
        activation=args.activation,
        optimizer=args.optimizer,
        vf_coef=args.vf_coef,
        ent_coef=args.ent_coef,
        max_grad_norm=args.max_grad_norm
    )
Example #6
    def __init__(
            self,
            name,  # name of this model
            env,  # environment
            latents,  # network hidden layer sizes
            lr=1e-5,  # learning rate
            activation='relu',  # activation function
            optimizer='adam',  # optimization function
            vf_coef=0.1,  # vf_loss weight
            ent_coef=0.01,  # ent_loss weight
            max_grad_norm=0.5):  # gradient clipping threshold

        sess = tf_util.get_session()

        # output to both file and console
        logger = get_logger(name)
        output = logger.info
        output(args)

        activation = tf_util.get_activation(activation)
        optimizer = tf_util.get_optimizer(optimizer)

        # lr = tf.train.polynomial_decay(
        #     learning_rate=lr,
        #     global_step=tf.train.get_or_create_global_step(),
        #     decay_steps=total_epoches,
        #     end_learning_rate=lr/10,
        # )

        ob_size = env.ob_size
        act_size = env.act_size
        n_actions = env.n_actions

        # placeholders for use
        X = tf.placeholder(tf.float32, [None, ob_size], 'observation')
        A = tf.placeholder(tf.int32, [None, act_size], 'action')
        ADV = tf.placeholder(tf.float32, [None], 'advantage')
        R = tf.placeholder(tf.float32, [None], 'reward')

        with tf.variable_scope(name):
            policy = build_policy(
                observations=X,
                act_size=act_size,
                n_actions=n_actions,
                latents=latents,
                vf_latents=latents,
                activation=activation
            )

        # Calculate the loss
        # Total loss = Policy gradient loss - entropy * entropy coefficient + Value coefficient * value loss

        # Policy loss
        neglogpac = policy.neglogp(A)
        # L = A(s,a) * -logpi(a|s)
        pg_loss = tf.reduce_mean(ADV * neglogpac)

        # Entropy is used to improve exploration by limiting the premature convergence to suboptimal policy.
        entropy = tf.reduce_mean(policy.entropy())

        # Value loss
        vf_loss = losses.mean_squared_error(tf.squeeze(policy.vf), R)

        loss = pg_loss - entropy * ent_coef + vf_loss * vf_coef

        # gradients and optimizer
        params = find_trainable_variables(name)
        grads = tf.gradients(loss, params)
        if max_grad_norm is not None:
            # Clip the gradients (normalize)
            grads, grad_norm = tf.clip_by_global_norm(grads, max_grad_norm)
        grads = list(zip(grads, params))

        # Make an op for one policy and value update step of A2C
        trainer = optimizer(learning_rate=lr)

        _train = trainer.apply_gradients(grads)

        # Add ops to save and restore all the variables.
        saver = tf.train.Saver(tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope=name))

        def step(obs):
            action, value = sess.run([policy.action, policy.vf], feed_dict={
                X: np.reshape(obs, (-1, ob_size))
            })
            return action, value

        def value(obs):
            return sess.run(policy.vf, feed_dict={
                X: np.reshape(obs, (-1, ob_size))
            })

        def train(obs, rewards, actions, values):
            # Advantage A(s,a) = R + gamma*V(s') - V(s); `rewards` is assumed to
            # already hold the bootstrapped return R + gamma*V(s')
            advs = rewards - values

            td_map = {X: obs, A: actions, ADV: advs, R: rewards}
            policy_loss, value_loss, policy_entropy, _ = sess.run(
                [pg_loss, vf_loss, entropy, _train],
                td_map
            )

            return policy_loss, value_loss, policy_entropy

        def save(save_path):
            saver.save(sess, save_path)
            print(f'Model saved to {save_path}')

        def load(load_path):
            saver.restore(sess, load_path)
            print(f'Model restored from {load_path}')

        self.train = train
        self.step = step
        self.value = value
        self.output = output
        self.save = save
        self.load = load

        tf.global_variables_initializer().run(session=sess)
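
A minimal usage sketch based only on the methods bound above; env, obs and rewards are assumed to come from the caller's environment loop:

    model = Model(name='a2c', env=env, latents=[64, 64])

    actions, values = model.step(obs)    # sample actions and state values
    losses = model.train(obs, rewards, actions, values)
    model.save('checkpoints/a2c')        # hypothetical checkpoint path
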
Example #7
    def __init__(self, mosaic, mosaicConf, utilsConf):
        """
        Calculate the column density of a mosaic (applying corrections) for the
        following species: 'HI' (default), plus 'HISA', 'HI+HISA', 'HI+CO' and
        'HI+HISA+CO' (the latter available only for CGPS/SGPS)
        """
        self.survey = mosaic.survey
        self.mosaic = mosaic.mosaic
        self.species = mosaic.newspec  # specified by the user
        self.type = mosaic.type

        self.logger = get_logger(self.survey + '_' + self.mosaic + '_' + self.species + '_ColumnDensity')
        file = ''
        path = ''
        flag = ''
        sur = self.survey.lower()

        if self.species == 'HI':
            path = get_path('lustre_' + sur + '_hi_column_density')
            flag = 'HI_unabsorbed'
            if sur == 'lab':
                flag = 'HI'
        elif self.species == 'HISA':
            path = get_path('lustre_' + sur + '_hisa_column_density')
            flag = 'HISA'
        elif self.species == 'HI+HISA':
            path = get_path('lustre_' + sur + '_hi_column_density')
            flag = 'HI+HISA'
        elif self.species == 'CO':
            path = get_path('lustre_' + sur + '_co_column_density')
            flag = 'H2'
        elif self.species == 'WCO':
            path = get_path('lustre_' + sur + '_co_column_density')
            flag = 'H2'
        elif self.species == 'HI+CO':
            path = get_path('lustre_' + sur + '_hi_column_density')
            flag = 'HI+H2'
        elif self.species == 'HI+HISA+CO':
            path = get_path('lustre_' + sur + '_hi_column_density')
            flag = 'HI+HISA+H2'

        file = path + self.survey + '_' + self.mosaic + '_' + flag + '_column_density.fits'
        check_for_files([file], existence=True)

        self.logger.info("Open file and get data...")

        # Array to store results
        N = np.zeros((mosaic.ny, mosaic.nx), dtype=float)

        Ts = float(utilsConf['tspin'])  # [Excitation (or Spin) Temperature] = K (150)
        Tbg = float(utilsConf['tcmb'])  # [Cosmic Microwave Background (CMB)] = K
        if self.species != 'WCO':
            dv = np.fabs(mosaic.dz / 1000.)  # [velocity] = km s-1
        C = float(utilsConf['c'])  # [constant] = cm-2
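        # NOTE: for HI, C is typically the optically-thin conversion factor
        # ~1.823e18 cm-2 / (K km s-1); here it is read from the config file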

        # Corrections for latitude (almost negligible)
        cosdec = np.cos(np.radians(mosaic.yarray))  # [lat. correction] = rad

        if self.survey == 'Galprop':
            if self.species == 'WCO':
                # Get emission data and velocity interval
                wco = np.sum(mosaic.observation, axis=0)
                # 2 accounts for the conversion from molecules to atoms
                N = 2 * wco * float(utilsConf['xfactor']) * cosdec

        elif self.survey == 'LAB':

            # Get data
            Tb = mosaic.observation[:, :, :]
            del mosaic.observation

            # Tb must be < Ts, otherwise problems with log
            index = np.where(Tb >= Ts)
            Tb[index] = 0.999 * Ts

            # Optical depth correction
            # Without the continuum component
            cTb = np.log((Ts) / (Ts - Tb)) * Ts
            # cTb = -log(1-(Tb/(Ts-Tbg))) * Ts
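            # Background (standard radiative transfer, stated here for
            # reference): an isothermal slab gives Tb = (Ts - Tbg)(1 - e^-tau),
            # i.e. tau = -ln(1 - Tb/(Ts - Tbg)); neglecting the background
            # (Tbg << Ts) yields the form used above, tau = ln(Ts/(Ts - Tb)),
            # and the opacity-corrected temperature cTb = tau * Ts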

            # Integrated brightness temperature over velocity (axis = 0)
            ITb = np.sum(cTb, axis=0)

            self.logger.info("Calculating NHI...")
            # Column density
            N = C * ITb * dv  # [N] = cm-2
            # Corrected column density
            N = N * cosdec

        elif self.survey == 'Dame':
            if self.species == 'WCO':
                # Get emission data and velocity interval
                wco = mosaic.observation
                del mosaic.observation

                # 2 accounts for the conversion from molecules to atoms
                N = 2 * wco * float(utilsConf['xfactor']) * cosdec

        elif self.survey == 'CGPS' or self.survey == 'SGPS':

            # Used to skip calculation (see below) - do not change it!
            flagHI, flagCO, flagHISA = True, True, True

            # If the two column density files (or one of them) already exist,
            # then add them up and skip the calculation (or skip the unnecessary one)
            if self.species == 'HI+HISA':
                path1 = get_path('lustre_' + sur + '_hi_column_density')
                file1 = path1 + self.survey + '_' + self.mosaic + '_HI_unabsorbed_column_density.fits'

                path2 = get_path('lustre_' + sur + '_hisa_column_density')
                file2 = path2 + self.survey + '_' + self.mosaic + '_HISA_column_density.fits'

                N, flagHI, flagHISA = checkToGetData(self.logger, file1, file2)

            if self.species == 'HI+CO':
                path1 = get_path('lustre_' + sur + '_hi_column_density')
                file1 = path1 + self.survey + '_' + self.mosaic + '_HI_unabsorbed_column_density.fits'

                path2 = get_path('lustre_' + sur + '_co_column_density')
                file2 = path2 + self.survey + '_' + self.mosaic + '_H2_column_density.fits'

                N, flagHI, flagCO = checkToGetData(self.logger, file1, file2)

            if self.species == 'HI+HISA+CO':
                path1 = get_path('lustre_' + sur + '_hi_column_density')
                file1 = path1 + self.survey + '_' + self.mosaic + '_HI_unabsorbed_column_density.fits'

                path2 = get_path('lustre_' + sur + '_hisa_column_density')
                file2 = path2 + self.survey + '_' + self.mosaic + '_HISA_column_density.fits'

                path3 = get_path('lustre_' + sur + '_co_column_density')
                file3 = path3 + self.survey + '_' + self.mosaic + '_H2_column_density.fits'

                N, flagHI, flagHISA, flagCO = checkToGetData(self.logger, file1, file2, file3)

            # Computing Column Density
            if self.species != 'CO':
                # Get HI emission data
                Tb = mosaic.observation[0, :, :, :]
                # Setting the negative/0-values to Tcmb
                # Tb = where( (Tb<0.) | (Tb==0.),Tbg,Tb)

                # HI Column Density
                if self.species != 'HISA' and flagHI:
                    NHI = np.zeros((mosaic.ny, mosaic.nx), dtype=float)

                    self.logger.info("Initializing parameters...")
                    self.logger.info("1) Ts = %.2f K" % Ts)
                    self.logger.info("2) dV = %.2f km/s" % dv)
                    self.logger.info("3) Tb(min) = %.2f K, Tb(max) = %.2f K" % (np.amin(Tb), np.amax(Tb)))

                    self.logger.info("Calculating NHI...")
                    # Optical depth correction
                    # With the continuum component
                    # Tfunc = (Ts-Tc)/(Ts-Tc-Tb)
                    # Without the continuum component
                    Tfunc = Ts / (Ts - Tb)

                    Tfunc[Tfunc < 1.] = 1.  # <------ TO JUSTIFY
                    cTb = np.log(Tfunc) * Ts
                    # Integrated brightness temperature over velocity (axis = 0)
                    ITb = np.sum(cTb[mosaic.zmin:mosaic.zmax, :, :], axis=0)
                    # Column density
                    NHI = C * ITb * dv  # [NHI] = cm-2
                    N = NHI * cosdec

                # HISA Column Density
                if self.species not in ['HI', 'HI+CO'] and flagHISA:
                    NHISA = np.zeros(mosaic.observation.shape, dtype=float)

                    # Get HI continuum data
                    pathc = get_path(sur + '_hi_continuum')
                    continuum = pathc + self.survey + '_' + self.mosaic + '_1420_MHz_I_image.fits'
                    check_for_files([continuum])
                    data, header = fits.getdata(continuum, 0, header=True)
                    data[np.isnan(data)] = 0.
                    Tc = data
                    if self.survey == 'CGPS':
                        Tc = data[0, 0, :, :]
                    if self.survey == 'SGPS':
                        Tc = data[:, :]
                    # Tc = where( (Tc<0.) | (Tc==0.),Tbg,Tc)

                    # Get HISA data
                    path_hisa_dat = get_path(sur + '_hisa_dat')
                    amplitude = path_hisa_dat + self.survey + '_' + self.mosaic + '_HISA.dat'
                    check_for_files([amplitude])
                    with open(amplitude, 'r') as hisa_file:
                        lines = hisa_file.readlines()

                    self.logger.info("Initializing parameters...")
                    self.logger.info("1) dv = %.2f km/s" % dv)
                    self.logger.info("2) Tb(min) = %.2f K, Tb(max) = %.2f K" % (np.amin(Tb), np.amax(Tb)))
                    self.logger.info("3) Tc(min) = %.2f K, Tc(max) = %.2f K" % (np.amin(Tc), np.amax(Tc)))
                    galax_dist = open("galaxy_" + self.mosaic + "_" + self.species + ".dat", "w")
                    string = ""
                    for line in lines:
                        fields = line.split()
                        na = float(fields[0])  # HISA region
                        nb = float(fields[1])  # HISA location (packed pixel index)
                        Tu = float(fields[2])  # Unabsorbed brightness (Tu)
                        dT = float(fields[3])  # Delta T (always negative)
                        # print "%.2f\t%.2f\t%.2f\t%.2f"%(na,nb,nc,nd)

                        # nb packs the 3D pixel index as m*1024*1024 + l*1024 + k
                        m = int(nb // (1024 * 1024))
                        ha = nb - m * 1024 * 1024
                        l = int(ha // 1024)
                        k = int(ha - l * 1024)

                        # Params
                        glon = mosaic.xarray[k]
                        glat = mosaic.yarray[l]  # <--- 160? (from Martin's IDL code)
                        vlsr = mosaic.zarray[m] / 1000.

                        d = rotCurveMPohl(self.logger, glon, glat, vlsr)  # kpc
                        string = "%s %s %s %s\n" % (d, glon, glat, vlsr)
                        galax_dist.write(string)

                        theta = np.radians(mosaic.dy)  # rad
                        ds = d * np.tan(theta) * 1e3  # pc

                        A1 = float(utilsConf['pc2cm']) * float(utilsConf['poverk'])
                        A2 = float(utilsConf['fn']) * ds / (C * dv)
                        A = A1 * A2

                        B = Tc[l, k] + float(utilsConf['p']) * Tu

                        init = [1., 10.]

                        def equations(xx):
                            '''
                            Ts and tau functions - S.J.Gibson et al
                            (The Astrophysical Journal, 540:852-862, 2000)
                            '''
                            tt, T = xx
                            # Equation (6)
                            Tfunc = (T - B) / (T - B - dT)
                            if Tfunc < 1.:
                                Tfunc = 1.  # Tbg # <------ TO JUSTIFY
                            f1 = np.log(Tfunc) - tt
                            # Equation (9)
                            ttfunc = A / tt
                            if ttfunc < 0.:
                                ttfunc = 0.  # <------ TO JUSTIFY
                            f2 = np.sqrt(ttfunc) - T

                            return np.array([f1, f2], dtype=float)

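                        # scipy's fsolve with full_output=1 returns
                        # (solution, infodict, ier, mesg); ier == 1 signals
                        # convergence and is checked below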
                        (tau, Ts), infodict, ier, mesg = fsolve(equations, init, full_output=1)
                        # plotFunc(tau,Ts)
                        TsMin = Tbg
                        if Ts < TsMin:
                            Ts = TsMin
                            Tfunc = (Ts - B) / (Ts - B - dT)
                            tau = np.log(Tfunc)
                        TsMax = Tc[l, k] + (dT + Tu) + (float(utilsConf['p']) - 1.) * Tu
                        if Ts > TsMax:
                            # For Ts = TsMax, tau --> +oo
                            Ts = TsMax
                            tau = 1e4
                        if tau < 0.:
                            Ts = 0.
                            tau = np.log(B / (B + dT))

                        # print Ts,tau
                        if ier == 1:
                            # fsolve converged: store the latitude-corrected
                            # HISA column density for this pixel
                            NHISA[0, m, l, k] = tau * Ts * dv * C
                            NHISA[0, m, l, k] *= np.cos(np.radians(mosaic.yarray[l]))
                        else:
                            # no valid solution found (details in mesg)
                            NHISA[0, m, l, k] = 0.

                    galax_dist.close()

                    self.logger.info("Calculating NHI...")
                    # Corrected column density
                    N = N + np.sum(NHISA[0, mosaic.zmin:mosaic.zmax, :, :], axis=0)

            if self.species in ['CO', 'HI+CO', 'HI+HISA+CO'] and flagCO:
                # Get CO emission data
                pathco = get_path('lustre_' + sur + '_co')
                co = pathco + self.survey + '_' + self.mosaic + '_CO_line.fits'
                check_for_files([co])
                Wco, header = fits.getdata(co, 0, header=True)
                # 2 accounts for the conversion from molecules to atoms
                N = N + 2 * Wco * float(utilsConf['xfactor']) * cosdec

            if self.species == 'CO':
                kmin = 30
                kmax = 230

                # Get emission data and velocity interval
                Tb = mosaic.observation[0, :, :, :]
                wco = np.sum(Tb[kmin:kmax, :, :], axis=0) * dv

                # 2 accounts for the conversion from molecules to atoms
                N = 2 * wco * float(utilsConf['xfactor']) * cosdec

        # Store results
        newheader = fits.Header()
        newheader["ctype1"] = ("GLON-CAR", "Coordinate type")
        newheader["crval1"] = (mosaic.keyword["crval1"], "Galactic longitude of reference pixel")
        newheader["crpix1"] = (mosaic.keyword["crpix1"], "Reference pixel of lon")
        newheader["cdelt1"] = (mosaic.keyword["cdelt1"], "Longitude increment")
        newheader["crota1"] = (mosaic.keyword["crota1"], "Longitude rotation")
        newheader["cunit1"] = ("deg", "Unit type")

        newheader["ctype2"] = ("GLAT-CAR", "Coordinate type")
        newheader["crval2"] = (mosaic.keyword["crval2"], "Galactic latitude of reference pixel")
        newheader["crpix2"] = (mosaic.keyword["crpix2"], "Reference pixel of lat")
        newheader["cdelt2"] = (mosaic.keyword["cdelt2"], "Latitude increment")
        newheader["crota2"] = (mosaic.keyword["crota2"], "Latitude rotation")
        newheader["cunit2"] = ("deg", "Unit type")

        newheader["bunit"] = ("atoms cm-2", "Map units")
        newheader["datamin"] = ("%e" % np.amin(N), "Min value")
        newheader["datamax"] = ("%e" % np.amax(N), "Max value")
        newheader["object"] = ("Mosaic " + self.mosaic, self.survey + " Mosaic")

        results = fits.PrimaryHDU(N, newheader)

        # Output file
        self.logger.info('Writing data to a FITS file in...')
        results.writeto(file, output_verify='fix')
        self.logger.info('{}'.format(path))
        self.logger.info('Done')
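
A quick sanity check of the written file (assuming fits is astropy.io.fits, as the getdata/PrimaryHDU calls above suggest):

    from astropy.io import fits

    N_check = fits.getdata(file)  # `file` is the output path built above
    print(N_check.shape, N_check.min(), N_check.max())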