def eval_bond_coul2nd(self, occm_ch, nneocc_ch_trial):
    logger.debug("now determining charged 2nd neighbours")
    sec_nneocc_ch_trial = SupportFunctions.sec_neigh(self.args.n, self.args.m, occm_ch,
                                                     nneocc_ch_trial, self.icind)
    icsec_ch_trial = SupportFunctions.totnoneigh2(self.args.n, self.args.m,
                                                  sec_nneocc_ch_trial, True)
    ecoulrep_sec = self.args.beta_sec_coul * icsec_ch_trial
    logger.debug("2nd neighbour coulombic energy: {0}".format(ecoulrep_sec))
    return ecoulrep_sec
def LoadData(self, fromBinaryFile=True, filtering=False,
             UseExternalGeoDataNearestest=True, UseExternalGeoDataPlaces=True):
    if fromBinaryFile:
        dataTogether = np.load(self.commonPath + "dump_allData.npy")
        self.Labels = np.load(self.commonPath + "dump_labels.npy")
        self.LabelsN = sp.LabelsSimpleFromLabels(self.Labels)
        self.TrainData = dataTogether[0:len(self.Labels)]
        self.TestData = dataTogether[len(self.Labels):len(dataTogether)]
        print("data loaded from file.")
    else:
        trainDataRaw, labels = self.LoadTrainDataAndLabels()  # (firstRows = 5000)
        testDataRaw = self.LoadTestData()  # (firstRows = 5000)
        self.Labels = self.LabelsToNums(labels[:, 0])
        self.LabelsN = sp.LabelsSimpleFromLabels(self.Labels)
        dataTogether = np.concatenate((trainDataRaw, testDataRaw))
        dataTogether = self.DataNormalization(dataTogether)
        print("data normalized.")
        np.save(self.commonPath + "dump_allData", dataTogether)
        np.save(self.commonPath + "dump_labels", self.Labels)
        self.TrainData = dataTogether[0:len(trainDataRaw)]
        self.TestData = dataTogether[len(trainDataRaw):len(dataTogether)]
        print("data saved to file.")
    if UseExternalGeoDataNearestest:
        nearestData = self.LoadGeoNearestData()
        self.TrainData = np.c_[self.TrainData, nearestData[0:len(self.Labels)]]
        self.TestData = np.c_[self.TestData, nearestData[len(self.Labels):len(dataTogether)]]
    if UseExternalGeoDataPlaces:
        self.GeoData = self.GeoDataPlaces()
        self.TrainData = np.c_[self.TrainData, self.GeoData[0:len(self.Labels)]]
        self.TestData = np.c_[self.TestData, self.GeoData[len(self.Labels):len(dataTogether)]]
    # Previous half-split by alternating rows, superseded by the week-based split below:
    # self.TrainDataHalf1 = self.TrainData[::2]
    # self.TrainDataHalf2 = self.TrainData[1::2]
    # self.LabelsDataHalf1 = self.Labels[::2]
    # self.LabelsDataHalf2 = self.Labels[1::2]
    # self.LabelsNHalf1 = self.LabelsN[::2]
    # self.LabelsNHalf2 = self.LabelsN[1::2]
    self.TrainDataHalf1, self.TrainDataHalf2, self.LabelsDataHalf1, self.LabelsDataHalf2, \
        self.LabelsNHalf1, self.LabelsNHalf2 = \
        sp.SplitTrainDataByWeeks(self.TrainData, self.Labels, self.LabelsN)
def eval_bond_coul(self, occm_trial, nneocc_trial):
    logger.debug("charging effects are active")
    occm_ch, ntot_ch = SupportFunctions.identify(self.args.n, self.args.m, occm_trial,
                                                 nneocc_trial, self.args.n_thresh_ch)
    nneocc_scr_trial = SupportFunctions.occneigh2(self.args.n, self.args.m, occm_ch,
                                                  occm_trial, self.icind)
    ictot_scr_trial = SupportFunctions.totnoneigh2(self.args.n, self.args.m,
                                                   nneocc_scr_trial, False)
    logger.debug('evaluating neighbours based on the coulombic interaction')
    escreen = -self.args.beta_scr * (ictot_scr_trial - ntot_ch * self.args.n_thresh_ch)
    logger.debug("screened molecules, energy: {0} {1} {2}".format(ntot_ch, ictot_scr_trial,
                                                                  escreen))
    logger.debug("now determining charged 1st nearest neighbours")
    nneocc_ch_trial = SupportFunctions.occneigh2(self.args.n, self.args.m, occm_ch,
                                                 occm_ch, self.icind)
    logger.debug('computing the number of charged bonds and their coulombic energy')
    ictot_ch_trial = SupportFunctions.totnoneigh2(self.args.n, self.args.m,
                                                  nneocc_ch_trial, True)
    ecoulrep = self.args.beta_coul * ictot_ch_trial
    return ecoulrep, escreen, occm_ch, nneocc_ch_trial, ntot_ch, ictot_ch_trial
def setup(self):
    if self.args.brestart:
        self.occm = numpy.zeros([self.args.n * self.args.m], dtype=bool)
        self.etot = 0.0
        # restore lattice size, occupation matrix and total energy from the restart file
        with open(self.args.inp_filenm, 'rb') as ifile:
            self.args.n = numpy.load(ifile)
            self.args.m = numpy.load(ifile)
            self.ntotmol = numpy.load(ifile)
            self.occm = numpy.load(ifile)
            self.etot = numpy.load(ifile)
    # determines all positions and indices vectors
    self.allpos, self.allind, self.ic, self.pos = SupportFunctions.allpositions(
        self.args.n, self.args.m, self.args.b)
    # determines the index file of all neighbours
    self.allneigh = numpy.zeros([self.args.n * self.args.m, 6, 2], dtype=int)
    self.icind = numpy.zeros([self.args.n * self.args.m, 6], dtype=int)
    self.allneigh, self.icind = SupportFunctions.allneighbors(self.allind, self.ic,
                                                              self.args.n, self.args.m)
    if not self.args.brestart:
        # colours the molecules (occupied sites) with a random distribution
        self.occm, self.ntotmol = SupportFunctions.randdistr(self.args.n, self.args.m,
                                                             self.args.cov)
    # determines their neighbours
    self.nneocc = SupportFunctions.occneigh(self.args.n, self.args.m, self.occm, self.icind)
    # counts the bonds (shared edges)
    self.ictot = SupportFunctions.totnoneigh2(self.args.n, self.args.m, self.nneocc, True)
    # total energy, for the moment just vdW
    self.evdW = -1.0 * self.args.beta * self.ictot
    if not self.args.brestart:
        self.etot = self.evdW
    # logger.info('Temperature: {0}'.format(self.args.kt))
    logger.info('INITIAL energy/mol: {0}'.format(self.etot / float(self.ntotmol)))
    # makes a little metropolis animation
    self.entropy = 0.0
    self.escreen = 0.0
    self.ecoulrep = 0.0
    self.ecoulrep_sec = 0.0
    self.jfrom = 0
    self.jto = 0
    self.icmov = 0
    self.cluster_type = 0
def WriteChockValues(mChock, vNameSector, mChockName, sDirectoryOutput, nYear, nSectors):
    vDataSheet = []
    vSheetName = []
    vRowsLabel = []
    vColsLabel = []
    vUseHeader = []
    vNameRowsReg = vNameSector
    vNameColsReg = mChockName
    vDataSheet.append(mChock)
    vSheetName.append('ChockBase')
    vRowsLabel.append(vNameRowsReg)
    vColsLabel.append(vNameColsReg)
    vUseHeader.append(True)
    sFileSheet = 'Chock_Base_' + str(nYear) + '_' + str(nSectors) + '.xlsx'
    Support.write_data_excel(sDirectoryOutput, sFileSheet, vSheetName, vDataSheet,
                             vRowsLabel, vColsLabel, vUseHeader)
    return
def WriteMultipliers(tTuplesIa, tTuplesIb, tTuplesII, vNameSector, sDirectoryOutput,
                     nYear, nSectors):
    vDataSheet = []
    vSheetName = []
    vRowsLabel = []
    vColsLabel = []
    vUseHeader = []
    vNameCols = ['VA_I', 'VA_II', 'Occup_I', 'Occup_II', 'Remun_I', 'Remun_II',
                 'EOB_I', 'EOB_II', 'Salary_I', 'Salary_II']
    vNameMult = ['Multiplier', 'Gerador', 'Coef']
    xI = np.concatenate(tTuplesIa, axis=1)
    vDataSheet.append(xI)
    vSheetName.append(vNameMult[0])
    vRowsLabel.append(vNameSector)
    vColsLabel.append(vNameCols)
    vUseHeader.append(True)
    xI = np.concatenate(tTuplesIb, axis=1)
    vDataSheet.append(xI)
    vSheetName.append(vNameMult[1])
    vRowsLabel.append(vNameSector)
    vColsLabel.append(vNameCols)
    vUseHeader.append(True)
    vNameCols = ['VA', 'Occup', 'Remun', 'EOB', 'Salary']
    xI = np.concatenate(tTuplesII, axis=1)
    vDataSheet.append(xI)
    vSheetName.append(vNameMult[2])
    vRowsLabel.append(vNameSector)
    vColsLabel.append(vNameCols)
    vUseHeader.append(True)
    sFileSheet = 'Multip_' + str(nYear) + '_' + str(nSectors) + '.xlsx'
    Support.write_data_excel(sDirectoryOutput, sFileSheet, vSheetName, vDataSheet,
                             vRowsLabel, vColsLabel, vUseHeader)
    return
def DataNormalization(self, data):
    # streets = sp.Streets(data[:, 3])
    corner = sp.IfCorner(data[:, 3])
    coordinates = sp.Coordinates(data[:, 4:6])
    severalActionsInSameTime = sp.SeveralActionsInSameTime(coordinates, data[:, 0])
    streetsTypes = sp.StreetType(data[:, 3])
    dateTime = self.DateTimeNormalization(data[:, 0])
    distriction = sp.PDDistriction(data[:, 2])
    weekDaysBinarized = sp.WeekDaysBinarization(data[:, 1])
    # NOTE: the original concatenation referenced dataNormalized before assignment;
    # the column stack below drops that self-reference.
    dataNormalized = np.c_[dateTime, weekDaysBinarized, coordinates, distriction,
                           streetsTypes, severalActionsInSameTime, corner]
    return dataNormalized
def iteration(self, idx):
    logger.debug('conducting iteration')
    logger.debug('generating trial move')
    occm_trial, jfrom, jto = SupportFunctions.trialmove(self.args.n, self.args.m, self.occm)
    logger.debug("evaluating vdW bonds")
    evdW_trial, ictot_trial, nneocc_trial = self.eval_bond_vdw(occm_trial)
    # these are initialised here instead of further down
    # (they stay zero if self.args.bbcharg is False)
    escreen = 0
    ecoulrep = 0
    ecoulrep_sec = 0
    if self.args.bbcharg:
        ecoulrep, escreen, occm_ch, nneocc_ch_trial, ntot_ch, ictot_ch_trial = \
            self.eval_bond_coul(occm_trial, nneocc_trial)
        logger.debug("1st neighbour coulombic energy: {0}".format(ecoulrep))
        if self.args.bbtwoch:
            ecoulrep_sec = self.eval_bond_coul2nd(occm_ch, nneocc_ch_trial)
            logger.debug("2nd neighbour coulombic energy: {0}".format(ecoulrep_sec))
        else:
            logger.debug("proceeding without bbtwoch")
        logger.debug("ecoulmol, energy: {0} {1} {2}".format(ntot_ch, ictot_ch_trial, ecoulrep))
    else:
        logger.debug("proceeding without charges")
    etot_trial = evdW_trial + escreen + ecoulrep + ecoulrep_sec
    delta_e = etot_trial - self.etot
    ee = numpy.exp(-delta_e / self.args.kt)
    random_prob = numpy.random.rand()
    logger.debug("ee: {0}, random_prob: {1}, old cluster_type: {2}".format(
        ee, random_prob, self.cluster_type))
    if ee > random_prob:
        # accepts the trial move with probability ee
        logger.debug("trial move accepted!")
        self.occm = occm_trial.copy()  # and makes the trial move the current layout
        self.nneocc = nneocc_trial.copy()
        self.ictot = ictot_trial
        self.etot = etot_trial
        self.evdW = evdW_trial
        self.escreen = escreen
        self.ecoulrep = ecoulrep
        self.ecoulrep_sec = ecoulrep_sec
        logger.debug("evaluating entropy")
        self.entropy, self.cluster_type = entropy.evaluate_entropy(
            occm=self.occm, ic=self.ic, ictot=self.ictot,
            allneigh=self.allneigh, args=self.args)
        logger.debug("entropy: {0}, cluster_type: {1}".format(self.entropy, self.cluster_type))
        logger.debug("move ok, energy per molecule (eV) ({0}, {1})".format(
            idx, self.args.hartree * self.etot / float(self.ntotmol)))
        self.icmov += 1
        return True
    else:
        logger.debug("trial move rejected")
        return False
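# A minimal driver sketch (not part of the original source) showing how the
# setup()/iteration() methods above would typically be used in a Metropolis
# run. `sim` is assumed to be an instance of the simulation class these
# methods belong to, and `n_steps` is an illustrative parameter name.
def run_metropolis(sim, n_steps):
    """Run n_steps trial moves and return the acceptance ratio."""
    sim.setup()
    accepted = 0
    for step in range(n_steps):
        if sim.iteration(step):  # iteration() returns True when the move is accepted
            accepted += 1
    return accepted / float(n_steps)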
nBeginModel = time.perf_counter()
sTimeBeginModel = time.localtime()
print("+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++")
print("Running Model for ", nSectors, "sectors")
print("Begin at ", time.strftime("%d/%b/%Y - %H:%M:%S", sTimeBeginModel))
print("+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++")
if lRelativAbsolut:
    nAdjustUni = 1
else:
    nAdjustUni = 0
vCodGrupSector, vNameGrupSector = Support.load_GrupSector(
    sDirectoryInput, "SetoresGrupo.xlsx", nGrupSectors, nSectors)
mDemandShock = Support.load_DemandShock(sDirectoryInput, sFileShock, "Demanda",
                                        nSectors, nAdjustUni)
vLaborRestriction = Support.load_OfferShock(sDirectoryInput, sFileShock, "Oferta",
                                            nSectors, nAdjustUni)
# vCodStates, vNameRegions, vNameStates, vShortNameStates = \
#     Support.load_table_states(sDirectoryInput, sFileStates, nStates)
# ============================================================================================
# Import values from the National MIP from Guilhoto
# ============================================================================================
sTimeIntermediate = time.localtime()
print(time.strftime("%d/%b/%Y - %H:%M:%S", sTimeIntermediate),
      " - Reading National MIP Matrix")
print("+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++")
"+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++" ) print("Running Model by year ", nYear, " for ", nProducts, "products x ", nSectors, "sectors ") print("Begin at ", time.strftime("%d/%b/%Y - %H:%M:%S", sTimeBeginModel)) print( "+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++" ) # ============================================================================================ # Import values from TRUs # ============================================================================================ vCodProduct, vNameProduct, vCodSector, vNameSector, mIntermConsumNat = Support.load_intermediate_consumption \ (sDirectoryInput, sFileUses, sSheetIntermedConsum, nProducts, nSectors) mDemandNat, vNameDemand = Support.load_demand(sDirectoryInput, sFileUses, sSheetDemand, nProducts, nColsDemand) mAddedValueNat, vNameAddedValue = Support.load_gross_added_value \ (sDirectoryInput, sFileUses, sSheetAddedValue, nSectors, nRowsAV) mOfferNat, vNameOffer = Support.load_offer(sDirectoryInput, sFileResources, sSheetOffer, nProducts, nColsOffer) mProductionNat = Support.load_production(sDirectoryInput, sFileResources, sSheetProduction, nProducts, nSectors) vImportNat = Support.load_import(sDirectoryInput, sFileResources, sSheetImport, nProducts)
# debugPrinter.addMessageToDebugOutput(nextUrlLink + "\n")
lutron_soup = ""
try:
    # TODO: add a test for ascii-only characters in the URL
    openFileHandle = urlopen(nextUrlLink)
    lutron_soup = Bsoup(openFileHandle.read())
    openFileHandle.close()
except Exception:
    # NOTE: the original closed openFileHandle in a finally block, which raised
    # a NameError whenever urlopen() itself failed; the handle is now closed
    # inside the try instead.
    print('the "try" failed')
if lutron_soup != "":
    # create a new instance of a class to handle the processing of the soup
    bSoupProcessor = LFDM_bsoupProcessor.LutronSoupProcessor(
        lutron_soup, debugPrinter.addMessageToDebugOutput)
    allPostTokens.extend(bSoupProcessor.getTokenizedTextOfPosts())
    filteredHrefList = SupportFunctions.filteringHrefLists(bSoupProcessor.getAllHyperlinks())
    fullHrefList = SupportFunctions.returnListWithLeadingStringInEachEntry(
        filteredHrefList, baseSearchUrl)
    urlLinksVisited.append(nextUrlLink)
    unvisitedHrefList = [x for x in fullHrefList if x not in urlLinksVisited]
    pendingUrlLinks = list(set(pendingUrlLinks + unvisitedHrefList))
    print('end reached of one url')
    print(len(urlLinksVisited))
    print(len(unvisitedHrefList))
    print(len(pendingUrlLinks))
if len(urlLinksVisited) > 10:
    moreToGo = False
elif len(pendingUrlLinks) > 0:
    nextUrlLink = pendingUrlLinks.pop(0)
else:
def Equilibrium(nCountries, nSectors, nTradebleSectors, nSectorsLabor, nYears, nBeta,
                nValIntertemp, nPositionBR, mInitialMigration, mInitialLaborStock, vFactor,
                nMaxIterations, nTolerance, mInitialY, nAdjust, mCsiBrasil, isNormal,
                sDirectoryInputScenario, sDirectoryOutput, sNameScenario):
    mY = mInitialY
    # Loading trade flows from txt files
    # B - Share of value added
    # GO - Gross Output
    # IO - Input Output Matrix
    # T (Thetas) - dispersion of productivity - non-tradables = 8.22
    # Need to check the dispersion of productivity
    lRead = ['B', 'Comercio', 'Csi_total', 'GO', 'IO', 'Tarifas', 'T']
    mShareVA, mTrade, Csi_total, mGrossOutputOrigin, mIO, mTariffs, mThetasOrigin = \
        Support.read_data_txt(lRead, sDirectoryInputScenario)
    # ============================================================================================
    # Loading data from a prior run from csv files
    if isNormal:
        lRead = ['w_aux', 'wbr_aux']
    else:
        lRead = ['w_aux_C', 'wbr_aux_C']
    w_aux, wbr_aux = Support.read_data_csv(lRead, sDirectoryOutput)
    mTau = 1 + mTariffs / 100
    nIteration = 1
    Ymax = 1
    while (nIteration <= nMaxIterations) and (Ymax > nTolerance):
        nBeginInteration = time.perf_counter()
        nTimeBeginInteration = time.localtime()
        print("Running ", sNameScenario, "int = ", nIteration)
        mGrowthLabor, mMigration = Labor(mY, nCountries, nSectors, nSectorsLabor, nYears,
                                         nBeta, mInitialMigration, mInitialLaborStock)
        mGrossOutput = np.copy(mGrossOutputOrigin)
        # dispersion of productivity - non-tradables = 8.22
        # Need to check the dispersion of productivity
        mThetas = mThetasOrigin
        mThetas = np.hstack((1. / mThetas,
                             np.ones([(nSectors - nTradebleSectors)], dtype=float) * 1 / 8.22)
                            ).reshape(nSectors, 1)
        # reformatting the theta vector
        mLinearThetas = np.ones([nSectors * nCountries, 1], dtype=float)
        for j in range(nSectors):
            for n in range(nCountries):
                mLinearThetas[j * nCountries + n, :] = mThetas[j]
        # Calculating expenditures
        mTauPrev = mTau[0:nSectors * nCountries, :]
        mTauActual = mTau[nSectors * nCountries:2 * nSectors * nCountries, :]
        xbilat = np.vstack((mTrade,
                            np.zeros([(nSectors - nTradebleSectors) * nCountries, nCountries]))
                           ) * mTauPrev
        # Domestic sales
        x = np.zeros([nSectors, nCountries])
        xbilat_domestic = xbilat / mTauPrev
        for i in range(nSectors):
            # Summing partial columns (0 to 30, 31 sectors) of exports, added as rows
            x[i, :] = sum(xbilat_domestic[i * nCountries: (i + 1) * nCountries, :])
        # Taking the MAX between exports and domestic product
        mGrossOutput = np.maximum(mGrossOutput, x)
        domsales = mGrossOutput - x
        # Bilateral trade matrix
        domsales_aux = domsales.T
        aux2 = np.zeros([nSectors * nCountries, nCountries], dtype=float)
        for i in range(nSectors):
            aux2[i * nCountries: (i + 1) * nCountries, :] = np.diag(domsales_aux[:, i])
        xbilat = aux2 + xbilat
        # Calculating expenditure shares
        A = sum(xbilat.T)
        XO = np.zeros([nSectors, nCountries])
        for j in range(nSectors):
            XO[j, :] = A[j * nCountries: (j + 1) * nCountries]
        Xjn = sum(xbilat.T).T.reshape(nSectors * nCountries, 1).dot(
            np.ones([1, nCountries], dtype=float))
        Din = xbilat / Xjn
        # Calculating surpluses
        xbilattau = xbilat / mTauPrev
        M = np.zeros([nSectors, nCountries])
        E = np.zeros([nSectors, nCountries])
        for j in range(nSectors):
            # Imports
            M[j, :] = sum(xbilattau[j * nCountries: (j + 1) * nCountries, :].T).T
            for n in range(nCountries):
                # Exports
                E[j, n] = sum(xbilattau[j * nCountries: (j + 1) * nCountries, n]).T
        Sn = (sum(E).T - sum(M).T).reshape(nCountries, 1)
        # Calculating value added
        VAjn = mGrossOutput * mShareVA
        VAn = sum(VAjn).T.reshape(nCountries, 1)
        VA_Br = np.ones([nSectors, 1], dtype=float)
        for j in range(nSectors):
            VA_Br[j, 0] = VAjn[j, nPositionBR]
        Csi_teste = Csi_total
        Cap = VAjn * Csi_teste
        rem_cap = sum(Cap).T.reshape(nCountries, 1)
        Qui = sum(rem_cap)
        mIota = (rem_cap - Sn) / Qui
        num = np.zeros([nSectors, nCountries])
        for n in range(nCountries):
            num[:, n] = XO[:, n] - mIO[n * nSectors:(n + 1) * nSectors, :].dot(
                (1 - mShareVA[:, n]) * E[:, n])
        F = np.zeros([nSectors, nCountries])
        for j in range(nSectors):
            F[j, :] = sum((Din[j * nCountries: (j + 1) * nCountries, :] /
                           mTauPrev[j * nCountries: (j + 1) * nCountries, :]).T)
        mAlphas = num / (np.ones([nSectors, 1], dtype=float)).dot(
            (VAn + sum(XO * (1 - F)).T.reshape(nCountries, 1) - Sn).T)
        for j in range(nSectors):
            for n in range(nCountries):
                if mAlphas[j, n] < 0:
                    mAlphas[j, n] = 0
        mAlphas = mAlphas / np.ones([nSectors, 1]).dot(sum(mAlphas).reshape(1, nCountries))
        ##############################
        # Main program counterfactuals
        ##############################
        VAn = VAn / 100
        Sn = Sn / 100
        VA_Br = VA_Br / 100
        VABrasil = np.ones([nYears, nSectors], dtype=float)
        w_Brasil = np.ones([nYears, nSectors], dtype=float)
        P_Brasil = np.ones([nYears, nSectors], dtype=float)
        PBr = np.ones([nYears, 1], dtype=float)
        xbilat_total = np.zeros([nYears * nSectors * nCountries, nCountries], dtype=float)
        mGrossOutputTotal = np.zeros([nYears * nSectors, nCountries], dtype=float)
        mAllPrice = np.zeros([nYears * nSectors, nCountries], dtype=float)
        # ========================================================================================
        # Routine repeated for nYears years
        # ========================================================================================
        for nActualYear in range(nYears):
            LG = np.ones([nSectors, 1], dtype=float)
            for j in range(nSectors):
                LG[j, 0] = mGrowthLabor[nActualYear, j + 1]
            if nActualYear > 0:
                mTauPrev = mTau[nActualYear * nSectors * nCountries:
                                (nActualYear + 1) * nSectors * nCountries, :]
                mTauActual = mTau[(nActualYear + 1) * nSectors * nCountries:
                                  (nActualYear + 2) * nSectors * nCountries, :]
            mTauHat = mTauActual / mTauPrev
            mWages, mPriceFactor, PQ, mWeightedTariffs, mTradeShare, ZW, Snp2, mCost, DP, PF, \
                mWagesBrasil = equilibrium_LC(mTauHat, mTauActual, mAlphas, mLinearThetas,
                                              mThetas, mShareVA, mIO, Din, nSectors,
                                              nCountries, nMaxIterations, nTolerance, VAn, Sn,
                                              vFactor, LG, VA_Br, nBeta, nPositionBR,
                                              nActualYear, w_aux, wbr_aux, mCsiBrasil,
                                              Csi_teste, mIota)
            w_aux = np.ones([nCountries, nYears], dtype=float)
            wbr_aux = np.ones([nSectors, nYears], dtype=float)
            for n in range(nCountries):
                w_aux[n, nActualYear] = mWages[n, 0]
            for j in range(nSectors):
                wbr_aux[j, nActualYear] = mWagesBrasil[j, 0]
            # expenditures Xji in a long vector: PQ_vec = (X11 X12 X13 ...)'
            PQ_vec = PQ.T.reshape(nSectors * nCountries, 1, order='F').copy()
            Dinp_om = mTradeShare / mTauActual
            xbilattau = (PQ_vec.dot(np.ones((1, nCountries)))) * Dinp_om
            xbilatp = xbilattau * mTauActual
            for j in range(nSectors):
                mGrossOutput[j, :] = sum(xbilattau[j * nCountries: (j + 1) * nCountries, :])
            VAjn = mGrossOutput * mShareVA
            VAn = sum(VAjn).T.reshape(nCountries, 1)
            # difference in VA_Br 2/5/2018 00:51
            VA_Br = VAjn[:, nPositionBR].reshape(nSectors, 1)
            Din = mTradeShare
            for j in range(nSectors):
                VABrasil[nActualYear, j] = VA_Br[j, 0]
                w_Brasil[nActualYear, j] = mWagesBrasil[j, 0]
                P_Brasil[nActualYear, j] = mPriceFactor[j, nPositionBR]
            # MATLAB original:
            #   pf0_all = mPriceFactor ./ mAlphas;
            #   P = prod(pf0_all .^ mAlphas);
            #   PBr(nActualYear, 1) = P(1, nPositionBR);
            P = np.prod(mPriceFactor ** mAlphas, axis=0)
            PBr[nActualYear, 0] = P[nPositionBR]
            # Vectorised zeroing of the within-country entries; replaces the loop:
            # for j in range(nSectors):
            #     for n in range(nCountries):
            #         xbilatp[n + j * nCountries, n] = 0
            sidx = np.arange(nSectors)
            cidx = np.arange(nCountries)
            xbilatp[cidx + sidx[:, None] * nCountries, cidx] = 0
            # assert np.array_equal(xbilatp, xbilatp_old)
            # Block assignments replace the element-wise copy loops over
            # xbilat_total, mGrossOutputTotal and mAllPrice:
            n = nCountries * nSectors
            xbilat_total[nActualYear * n:(nActualYear + 1) * n] = xbilatp
            mGrossOutputTotal[nActualYear * nSectors:(nActualYear + 1) * nSectors] = \
                mGrossOutput
            mAllPrice[nActualYear * nSectors:(nActualYear + 1) * nSectors] = mPriceFactor
        # Y_aux = mY
        Y_aux = np.ones([nYears, nSectorsLabor], dtype=float)
        for i in range(nYears - 1, 0, -1):
            Y_aux[i - 1, 0] = np.dot(mMigration[(i - 1) * nSectorsLabor, :],
                                     (Y_aux[i, :] ** nBeta).T.reshape(nSectorsLabor, 1))
            for j in range(nSectors):
                Y_aux[i - 1, j + 1] = np.dot(
                    ((w_Brasil[i - 1, j] / PBr[i - 1]) ** (1 / nValIntertemp)),
                    np.dot(mMigration[(i - 1) * nSectorsLabor + j + 1, :],
                           (Y_aux[i, :] ** nBeta).T))
        Y1_ant = mY
        Y1 = Y_aux
        Y_aux2 = sum(abs(Y1 - mY))
        vYmax = np.zeros([1, 1], dtype=float)
        vYmax[0, 0] = sum(Y_aux2.T)
        Ymax = vYmax[0, 0]
        nEndInteration = time.perf_counter()
        nElapsedTimeInteration = nEndInteration - nBeginInteration
        nTimeEndinteration = time.localtime()
        print("End ", sNameScenario, "int = ", nIteration,
              " between ", time.strftime("%d/%b/%Y - %H:%M:%S", nTimeBeginInteration),
              " and ", time.strftime("%d/%b/%Y - %H:%M:%S", nTimeEndinteration),
              " Spent: %.2f secs " % nElapsedTimeInteration,
              " Ymax = ", Ymax)
        # Damped update of the guess toward the new solution
        Y2 = mY - nAdjust * (mY - Y1)
        if isNormal:
            lDataToSave = ['Y1', 'w_aux', 'wbr_aux', 'Y1_ant', 'YmaxV']
        else:
            lDataToSave = ['Y1_C', 'w_aux_C', 'wbr_aux_C', 'Y1_ant_C', 'YmaxV_C']
        lData = [Y1, w_aux, wbr_aux, Y1_ant, vYmax]
        Support.write_data_csv(lDataToSave, lData, sDirectoryOutput)
        mY = Y2
        nIteration += 1
    return VABrasil, w_Brasil, P_Brasil, mY, mGrowthLabor, PBr, xbilat_total, \
        mGrossOutputTotal, mAllPrice, mMigration, sDirectoryInputScenario, mTau, mAlphas
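# Hedged illustration (not from the original source): the outer-loop update
# above, mY <- mY - nAdjust * (mY - Y1), is a damped fixed-point iteration;
# for 0 < nAdjust <= 1 it moves the guess a fraction nAdjust toward the new
# solution each pass. A minimal scalar demo of the same scheme, with g() as
# an assumed stand-in for the mapping being solved:
def damped_fixed_point(g, y0, adjust=0.5, tol=1e-8, max_iter=1000):
    """Iterate y <- y - adjust * (y - g(y)) until |y - g(y)| < tol."""
    y = y0
    for _ in range(max_iter):
        y_new = g(y)
        if abs(y - y_new) < tol:
            break
        y = y - adjust * (y - y_new)
    return y

# Example: solving y = cos(y) converges to the Dottie number (~0.739):
# import math; damped_fixed_point(math.cos, 1.0)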
def eval_bond_vdw(self, occm_trial):
    nneocc_trial = SupportFunctions.occneigh(self.args.n, self.args.m, occm_trial, self.icind)
    ictot_trial = SupportFunctions.totnoneigh2(self.args.n, self.args.m, nneocc_trial, True)
    evdW_trial = -self.args.beta * ictot_trial
    return evdW_trial, ictot_trial, nneocc_trial
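# Hedged sketch (illustrative only): the vdW term above is just -beta times
# the count of occupied nearest-neighbour bonds, so the energy of any
# configuration with a precomputed edge count reduces to one multiplication.
# `beta` and `n_bonds` are assumed names for this example.
def vdw_energy(beta, n_bonds):
    """Return the van der Waals energy for n_bonds shared edges."""
    return -beta * n_bonds

# e.g. vdw_energy(0.1, 10) == -1.0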
column_names = gen_data.columns
gen_data = gen_data.astype(str)
insert_data = gen_data.values.tolist()
del gen_data

# Clear existing results in the table for the given circuit
sql = 'DELETE FROM ' + table_name + ' WHERE ADDED IS NULL AND ERRORMESSAGE IS NULL'
cur.execute(sql)
con.commit()

# Insert new results using positional bind variables (:1, :2, ...)
sql = 'INSERT INTO ' + table_name + ' (' + ', '.join(column_names) + ') VALUES ('
for col_number in range(1, len(column_names) + 1):
    if col_number != 1:
        sql += ', '
    sql += ':' + str(col_number)
sql += ')'
cur.prepare(sql)
cur.executemany(None, insert_data)
con.commit()
return None


if __name__ == "__main__":
    import SupportFunctions as Support

    cmate_con, cmate_cur = Support.oracle_conn('CMATE')
    create_generator(cmate_con, cmate_cur)
    cmate_cur.close()
    cmate_con.close()
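# Hedged illustration (column names assumed for the example): for a frame with
# columns (CIRCUITID, VALUE), the string-building loop above produces
#     INSERT INTO <table_name> (CIRCUITID, VALUE) VALUES (:1, :2)
# cur.prepare() parses the statement once, and cur.executemany(None, insert_data)
# then binds each row of insert_data positionally, executing the whole batch in
# a single call rather than one execute() per row.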