def _forward(self, state_below, mask_below=None, init_state=None, context=None):
    """Unroll the recurrent step over a source sequence with theano.scan.

    state_below: (src_sent_len, batch_size, embsize) — 3-d input required;
        anything else raises NotImplementedError.
    mask_below:  (src_sent_len, batch_size) 0-1 padding mask, or None.
    init_state:  optional initial hidden state; derived from `context`
        (with_contex mode) or zeros otherwise.
    context:     per-sequence context vector, only used when self.with_contex.

    Sets self.output to the full sequence of hidden states and returns it.
    """
    if state_below.ndim == 3:
        # state_below is a 3-d matrix
        batch_size = state_below.shape[1]
        n_steps = state_below.shape[0]
    else:
        raise NotImplementedError
    # state_below:(src_sent_len,batch_size,embsize),
    # mask_below:(src_sent_len,batch_size) 0-1 matrix (padding)
    # NOTE(review): `if mask_below:` tests truthiness; if callers can pass a
    # symbolic Theano variable here, this should probably be
    # `if mask_below is not None:` — confirm against call sites.
    if mask_below:
        inps = [state_below, mask_below]
        if self.with_contex:
            fn = self._step_forward_with_context
        else:
            fn = self._step_forward
    else:
        inps = [state_below]
        # No mask sequence: adapt the step functions' arity for scan by
        # injecting None in the mask slot.
        if self.with_contex:
            fn = lambda x1, x2, x3, x4, x5: self._step_forward_with_context(
                x1, None, x2, x3, x4, x5)
        else:
            fn = lambda x1, x2: self._step_forward(x1, None, x2)
    if self.with_contex:
        if init_state is None:
            # Initial state derived from the context vector.
            init_state = T.tanh(
                theano.dot(context, self.W_c_init) + self.b_init)
        # Precompute the context projections once; passed to every scan step
        # as non-sequences.
        c_z = theano.dot(context, self.W_cz)
        c_r = theano.dot(context, self.W_cr)
        c_h = theano.dot(context, self.W_ch)
        if self.ln:
            # NOTE(review): other blocks in this file call ln(x, g, b) with
            # gain and bias as two separate arguments; here the second
            # argument is `self.gcz + self.bcz` (a sum). This looks like a
            # `+` typed where a `,` was intended — verify ln's signature.
            c_z = ln(c_z, self.gcz + self.bcz)
            c_r = ln(c_r, self.gcr + self.bcr)
            c_h = ln(c_h, self.gch + self.bch)
        non_sequences = [c_z, c_r, c_h]
        rval, updates = theano.scan(fn,
                                    sequences=inps,
                                    outputs_info=[init_state],
                                    non_sequences=non_sequences,
                                    n_steps=n_steps)
    else:
        if init_state is None:
            # Zero initial state: (batch_size, n_hids).
            init_state = T.alloc(numpy.float32(0.), batch_size, self.n_hids)
        # init_state = T.unbroadcast(T.alloc(0., batch_size, self.n_hids), 0)
        rval, updates = theano.scan(fn,
                                    sequences=inps,
                                    outputs_info=[init_state],
                                    n_steps=n_steps)
    self.output = rval
    # if change like this, it only return the hidden state of the last word
    # in the sentence
    return self.output
def _init_postgresql():
    """Initialize the PostgreSQL 9.5 data folder, start the service and
    expose its logs under the Cloudify logs directory.

    Made idempotent to match the other ``_init_postgresql`` variants in this
    file: a failing ``initdb`` (typically because the data folder was already
    initialized on a previous run) is logged and ignored instead of aborting.
    """
    ctx.logger.info('Initializing PostgreSQL DATA folder...')
    postgresql95_setup = '/usr/pgsql-9.5/bin/postgresql95-setup'
    try:
        utils.sudo(command=[postgresql95_setup, 'initdb'])
    except Exception:
        # Best-effort: initdb refuses to run on an already-initialized data
        # directory; treat that as "already done".
        ctx.logger.debug('PostgreSQL DATA folder already initialized...')

    ctx.logger.info('Starting PostgreSQL server...')
    utils.systemd.enable(service_name=PS_SERVICE_NAME, append_prefix=False)
    utils.systemd.start(service_name=PS_SERVICE_NAME, append_prefix=False)

    ctx.logger.info('Setting PostgreSQL logs path...')
    # Symlink the server's pg_log directory into the central Cloudify logs
    # location so all service logs live under /var/log/cloudify.
    ps_95_logs_path = "/var/lib/pgsql/9.5/data/pg_log"
    ps_logs_path = "/var/log/cloudify/postgresql"
    utils.mkdir(ps_logs_path)
    utils.ln(source=ps_95_logs_path, target=ps_logs_path, params='-s')
def runTest():
    """Run the OpenHoldem test suite in a temp dir and analyse its logs."""
    ln()

    # Make sure the temp run dir (and its scraper subdir) exist.
    scraper_dir = join(settings.TempRunDir, 'scraper')
    if not os.path.exists(settings.TempRunDir):
        os.makedirs(settings.TempRunDir)
    if not os.path.exists(scraper_dir):
        os.makedirs(scraper_dir)

    # Copy/generate everything the run needs.
    cleanTargetDir()
    copyNeededFiles()
    createDumperDllCfg()

    # Absolute log paths, resolved before we start changing directories.
    abs_log = lambda base, name: os.path.abspath(base + '\\' + name)
    dumper_log = abs_log(settings.TempRunDir, settings.DumperDllLogFilename)
    suite_log = abs_log(settings.TempRunDir, settings.TestSuite2LogFilename)

    executeCommandInThread(settings.ManualModeXMLRPCexe, 0)
    # Give the manual-mode process a moment to come up.
    sleep(1)

    # OH copies files relative to its own dir, so it must be cwd. Don't chdir
    # earlier than this.
    os.chdir(settings.TempRunDir)

    # Launch OpenHoldem with the first .ini among the locally copied files.
    ini_files = [name for name in settings.LocalFilesToCopy
                 if name.endswith(".ini")]
    oh_ini = ini_files[0]
    executeCommandInThread(['OpenHoldem.exe', '/ini:~\\' + oh_ini],
                           settings.WaitSecondsBeforeStartingTests)
    # The wait above also ensures OH is open so MM stays in the foreground.

    ln()
    runTestSuite(suite_log)
    ln()

    # Back to the script's own dir before analysing.
    os.chdir(this_scripts_path)
    print("Testing done. Analysing results...")
    exp_dumper_log = abs_log(settings.ExpectedOutputDir,
                             settings.DumperDllExpectedLogFilename)
    exp_suite_log = abs_log(settings.ExpectedOutputDir,
                            settings.TestSuite2ExpectedLogFilename)
    analyseOutput.analyseLogs(settings, dumper_log, exp_dumper_log,
                              suite_log, exp_suite_log)
    ln()

    # Finally restore the original working dir.
    print("restoring working dir to '%s'" % (origWorkDir))
    os.chdir(origWorkDir)
    ln()
def _init_postgresql():
    """Initialize the PostgreSQL 9.5 data folder (best-effort), start the
    service and link its logs into the Cloudify logs directory.

    Uses the PGSQL_* path constants defined elsewhere in this module.
    """
    ctx.logger.info('Initializing PostreSQL DATA folder...')
    postgresql95_setup = join(PGSQL_USR_DIR, 'bin', 'postgresql95-setup')
    try:
        utils.sudo(command=[postgresql95_setup, 'initdb'])
    except Exception:
        # Deliberate best-effort: initdb fails when the data folder already
        # exists (e.g. on re-run); that is treated as "already initialized".
        ctx.logger.debug('PostreSQL DATA folder already been init...')
        pass

    ctx.logger.info('Starting PostgreSQL server...')
    utils.systemd.enable(service_name=SERVICE_NAME, append_prefix=False)
    utils.systemd.start(service_name=SERVICE_NAME, append_prefix=False)

    ctx.logger.info('Setting PostgreSQL logs path...')
    ps_95_logs_path = join(PGSQL_LIB_DIR, '9.5', 'data', 'pg_log')
    utils.mkdir(PGSQL_LOGS_DIR)
    # NOTE(review): the guard creates the symlink only when the pg_log
    # directory does NOT exist — presumably to avoid re-linking once the
    # server has created it, but it reads inverted; confirm the intended
    # condition against the other variants of this function.
    if not os.path.isdir(ps_95_logs_path):
        utils.ln(source=ps_95_logs_path, target=PGSQL_LOGS_DIR, params='-s')
def _init_postgresql():
    """Initialize the PostgreSQL 9.5 data folder (best-effort), start the
    service and link its logs into the Cloudify logs directory.

    Fixes vs. original: removes the dead ``pass`` after the debug log in the
    exception handler and corrects the "PostreSQL" typos in log messages.
    """
    ctx.logger.info('Initializing PostgreSQL DATA folder...')
    postgresql95_setup = '/usr/pgsql-9.5/bin/postgresql95-setup'
    try:
        utils.sudo(command=[postgresql95_setup, 'initdb'])
    except Exception:
        # Deliberate best-effort: initdb fails when the data folder already
        # exists (e.g. on re-run); treat that as "already initialized".
        ctx.logger.debug('PostgreSQL DATA folder already initialized...')

    ctx.logger.info('Starting PostgreSQL server...')
    utils.systemd.enable(service_name=PS_SERVICE_NAME, append_prefix=False)
    utils.systemd.start(service_name=PS_SERVICE_NAME, append_prefix=False)

    ctx.logger.info('Setting PostgreSQL logs path...')
    ps_95_logs_path = "/var/lib/pgsql/9.5/data/pg_log"
    ps_logs_path = "/var/log/cloudify/postgresql"
    utils.mkdir(ps_logs_path)
    # Link only when pg_log is not already a real directory, mirroring the
    # sibling variant of this function.
    if not os.path.isdir(ps_95_logs_path):
        utils.ln(source=ps_95_logs_path, target=ps_logs_path, params='-s')
def state_with_attend(self, h1, attended, x_m=None):
    """Second GRU half-step: blend hidden state h1 with the attended context.

    h1:       (batch_size, trg_nhids) hidden state after the first half-step.
    attended: attention context, projected through the W_c* matrices.
    x_m:      optional 0-1 mask; masked positions keep h1 unchanged.
    Returns the updated hidden state h2: (batch_size, trg_nhids).
    """
    use_ln = self.ln is not False

    # Update gate z.
    att_z = theano.dot(attended, self.W_cz) + self.b_z2
    rec_z = theano.dot(h1, self.W_hz2)
    if use_ln:
        att_z = ln(att_z, self.g1, self.b1)
        rec_z = ln(rec_z, self.g2, self.b2)
    gate_z = T.nnet.sigmoid(att_z + rec_z)

    # Reset gate r.
    att_r = theano.dot(attended, self.W_cr) + self.b_r2
    rec_r = theano.dot(h1, self.W_hr2)
    if use_ln:
        att_r = ln(att_r, self.g1, self.b1)
        rec_r = ln(rec_r, self.g2, self.b2)
    gate_r = T.nnet.sigmoid(att_r + rec_r)

    # Candidate state, with the reset gate applied to the recurrent term.
    att_h = theano.dot(attended, self.W_ch)
    rec_h = T.dot(h1, self.W_hh2) + self.b_h2
    if use_ln:
        att_h = ln(att_h, self.g3, self.b3)
        rec_h = ln(rec_h, self.g4, self.b4)
    candidate = T.tanh(att_h + rec_h * gate_r)

    # Interpolate between the previous state and the candidate.
    h2 = gate_z * h1 + (1. - gate_z) * candidate
    if x_m is not None:
        # Masked-out positions fall back to h1.
        h2 = x_m[:, None] * h2 + (1. - x_m)[:, None] * h1
    return h2
def _configure_dbus(rest_venv):
    """Symlink the system dbus-python bindings into the REST service venv so
    that `cfy status` works (the module on PyPI is far too old)."""
    # link dbus-python-1.1.1-9.el7.x86_64 to the venv for `cfy status`
    # (module in pypi is very old)
    site_packages = 'lib64/python2.7/site-packages'
    dbus_relative_path = join(site_packages, 'dbus')
    dbuslib = join('/usr', dbus_relative_path)
    dbus_glib_bindings = join('/usr', site_packages, '_dbus_glib_bindings.so')
    dbus_bindings = join('/usr', site_packages, '_dbus_bindings.so')
    if isdir(dbuslib):
        dbus_venv_path = join(rest_venv, dbus_relative_path)
        # Only (re)create the links if the venv's dbus path is not a link yet.
        if not islink(dbus_venv_path):
            utils.ln(source=dbuslib, target=dbus_venv_path, params='-sf')
            utils.ln(source=dbus_bindings, target=dbus_venv_path, params='-sf')
        # NOTE(review): a venv's site-packages is normally a real directory,
        # not a symlink, so this condition looks always-true — confirm the
        # intended check (perhaps the glib .so inside it was meant).
        if not islink(join(rest_venv, site_packages)):
            utils.ln(source=dbus_glib_bindings, target=join(
                rest_venv, site_packages), params='-sf')
    else:
        ctx.logger.warn(
            'Could not find dbus install, cfy status will not work')
def _configure_dbus(rest_venv):
    """Symlink the system dbus-python bindings into the REST service venv so
    that `cfy status` works (the module on PyPI is far too old).

    Links dbus-python-1.1.1-9.el7.x86_64 from /usr into *rest_venv*; warns
    and does nothing if the system dbus install is missing.
    """
    site_packages = 'lib64/python2.7/site-packages'
    dbus_relative_path = join(site_packages, 'dbus')
    dbuslib = join('/usr', dbus_relative_path)

    # Guard clause: without the system dbus package there is nothing to link.
    if not isdir(dbuslib):
        ctx.logger.warn(
            'Could not find dbus install, cfy status will not work')
        return

    dbus_glib_bindings = join('/usr', site_packages, '_dbus_glib_bindings.so')
    dbus_bindings = join('/usr', site_packages, '_dbus_bindings.so')

    dbus_venv_path = join(rest_venv, dbus_relative_path)
    if not islink(dbus_venv_path):
        utils.ln(source=dbuslib, target=dbus_venv_path, params='-sf')
        utils.ln(source=dbus_bindings, target=dbus_venv_path, params='-sf')

    venv_site_packages = join(rest_venv, site_packages)
    if not islink(venv_site_packages):
        utils.ln(source=dbus_glib_bindings, target=venv_site_packages,
                 params='-sf')
def _step_forward(self, x_t, x_m, h_tm1):
    '''One GRU step.

    x_t:   input at time t
           (when decoding: embedding of a target-sentence word, (1, trgw_embsz))
    x_m:   mask of x_t, or None (masked positions keep the previous state)
    h_tm1: previous hidden state (batch_size, trg_nhids)

    Returns h_t, the next hidden state (batch_size, trg_nhids).
    '''
    # Update gate z_t.
    _xz = T.dot(x_t, self.W_xz) + self.b_z
    _hz = T.dot(h_tm1, self.W_hz)
    if self.ln is not False:
        _xz = ln(_xz, self.g1, self.b1)
        _hz = ln(_hz, self.g2, self.b2)
    z_t = T.nnet.sigmoid(_xz + _hz)

    # Reset gate r_t.
    _xr = T.dot(x_t, self.W_xr) + self.b_r
    _hr = T.dot(h_tm1, self.W_hr)
    if self.ln is not False:
        _xr = ln(_xr, self.g1, self.b1)
        _hr = ln(_hr, self.g2, self.b2)
    r_t = T.nnet.sigmoid(_xr + _hr)

    # Candidate state, with the reset gate applied to the recurrent term.
    _xh = T.dot(x_t, self.W_xh) + self.b_h
    _hh = T.dot(h_tm1, self.W_hh)
    if self.ln is not False:
        _xh = ln(_xh, self.g3, self.b3)
        _hh = ln(_hh, self.g4, self.b4)
    can_h_t = T.tanh(_xh + _hh * r_t)

    # CAUTION: this exact interpolation convention must match the one used at
    # training time; the (swapped) alternative below looks equivalent but
    # silently diverges from the trained model — it once cost 6 hours to
    # track down. Do not change it.
    h_t = (1 - z_t) * h_tm1 + z_t * can_h_t
    #h_t = z_t * h_tm1 + (1. - z_t) * can_h_t
    if x_m is not None:
        # Padding positions carry the previous state forward unchanged.
        h_t = x_m[:, None] * h_t + (1. - x_m[:, None]) * h_tm1
    return h_t
##################################################### SettingNames = [ 'WaitSecondsBeforeStartingTests', 'TestcasesDir', 'ExpectedOutputDir', 'TestSuite2LogFilename', 'TestSuite2ExpectedLogFilename', 'DumperDllExpectedLogFilename', 'DumperDllLogFilename', 'OpenHoldemExeDir', 'TempRunDir', 'TestSuite2Dir', 'ManualModeXMLRPCexe', 'ManualModeXMLRPCTableMap', 'OHFilesToCopy', 'LocalFilesToCopy', 'cardsTrunkVsOld', 'cardsOldVsTrunk', 'ignoreSymbolsPrefix', 'ignoreSymbolsExact' ] ##################################################### ### loading global settings import os from utils import ln, loadSettings #print line with '-----------...' ln() #### init settings: look if passed as first arg on command line, #### or from other script as gobal or local var, use default from SettingsDefault.py otherwise import sys if __name__ == "__main__" and len(sys.argv) > 1 and not sys.argv[0].endswith(".log"): SettingsImportName = sys.argv[1] else: SettingsImportName = None settings = loadSettings(SettingsImportName, SettingNames) settings.TestcasesDir = os.path.abspath(settings.TestcasesDir) ln()