def calcRandStartTime(Result, unit, approachName, houseNumber):
    """ Get the load information based on random numbers for each request
    from each 'house', then decide the start time of each house for the
    different appliances.
    [request,...], int, string, int => {str : [float, float,...], ...}
    """
    ideal_shiftable_load = modify_ideal_load(Result[1][-1], unit)  # get ideal shift-able load
    signal_load = list(ideal_shiftable_load)  # make ideal shift-able load equal to signal load
    ## first of all, get requests from reading result
    requests = Result[0]
    requests = updateProfile(requests, unit)  # update profile
    requests = getProbDistribution(requests, signal_load, unit, approachName)  # update probabilities
    requestTypeNumberDict = getEachRequetsTypeNumber(Result[0])  # dict with key-value: requestName to this Type requests Number
    requestTypeGroupDict = getRequestTypeGroupNumber(requestTypeNumberDict)  # distribute the request types into four big groups
    requestNameRequestsMap = orderRequests(requests)  # use to save request with same request name
    # NOTE(review): applianceDict is overwritten on every iteration, so only the
    # last group visited effectively decides the returned value -- confirm intent.
    # Initialized here so an empty requestTypeGroupDict no longer raises NameError.
    applianceDict = {}
    for requestTypeGroup in requestTypeGroupDict.keys():
        if houseNumber <= requestTypeGroupDict.get(requestTypeGroup):
            # Pre-compute, for every group, the houseNumber / group-size ratio.
            # Inner loop uses its own name instead of shadowing requestTypeGroup.
            groupMultiplyMap = {}
            for groupName in requestTypeGroupDict.keys():
                groupMultiplyMap[groupName] = float(houseNumber) / requestTypeGroupDict[groupName]
            applianceDict = getRandStartTimeGreaterThanHouse(groupMultiplyMap, requestNameRequestsMap, requestTypeNumberDict, unit, houseNumber)
        else:
            applianceDict = getRandStartTimeLessThanHouse(requestNameRequestsMap, unit)
    return applianceDict
def writeYamlFileWithLargeNumOfHouses(folderName, inputType, unit, approachName, houseNumber): """ This method used to create a yaml file with the large number of houses. In order to save code time, several methods will import from 'yamlCalcMethods' and 'yamlWriteMethods'. Consider of the tiny different requirement, it is better to write a new python to construct yaml file. str, str, int, str, int, float => file """ print "START read request type file" RequestTypeDict = CsvToLoad.readExperiment(folderName) for RequestType in RequestTypeDict.values(): load_list = CsvToLoad.extractApplianceProfile(RequestType, unit) CsvToLoad.generateLoadTxt(load_list, RequestType.request_name, folderName, unit) print "START read requests file" Result = ReadCSV(folderName, inputType) # read requests information from input csv file requests = list(Result[0]) houseLayout = yamlCalcMethods.getHouseLayout(houseNumber, requests) requestNameMinMap = yamlCalcMethods.getRequestNameMinMap(requests) # get the requestNameMinMap ideal_shiftable_load = modify_ideal_load(Result[1][-1], unit) # get ideal shift-able load signal_load = list(ideal_shiftable_load) # make ideal shift-able load equal to signal load requests = updateProfile(requests, unit) # update profile requests = getProbDistribution(requests, signal_load, unit, approachName) # update probabilities fileCount = 0 totalRequestNumber = getTotalRequestNumber(requests) restRequestNumber = int(totalRequestNumber) while restRequestNumber != 0: if restRequestNumber < houseNumber: repeatNumber = restRequestNumber else: repeatNumber = houseNumber fileName = "".join(("CSVdata/", folderName, "/resident/" "resident_", folderName, "_", str(fileCount), ".yaml")) applianceMap = getApplianceMap(repeatNumber, requests, unit) # get the start time and request name dict requests = setNewRequests(requests) # delete the request which quantity is zero writeFile = open(fileName, "w") yamlWriteMethods.writeConstParameters(writeFile) 
yamlWriteMethods.writeHouseLayout(writeFile, houseLayout) yamlWriteMethods.writeVariableList(writeFile, houseLayout) yamlWriteMethods.writeConstSimulation(writeFile) yamlWriteMethods.writeConstNetwork(writeFile) yamlWriteMethods.writeConstHeartbeat(writeFile) yamlWriteMethods.writeTimeSerialLoop(writeFile, applianceMap, folderName) # write load / time_series content yamlWriteMethods.writeConstBus(writeFile) yamlWriteMethods.writeConstGenericgen(writeFile) yamlWriteMethods.writeConstLoop(writeFile) applianceCount = 0 yamlWriteMethods.writeAllAppliance(houseLayout, writeFile, applianceMap, applianceCount, requestNameMinMap) writeFile.close() print "No.", print fileCount, print "yaml file had constructed" fileCount = fileCount + 1 restRequestNumber = restRequestNumber - repeatNumber print "CONSTRUCT ", print fileCount, print "yaml files"
def performLearningProcess(stop_approach, ideal_type_load, observed_load, convergence_range, signal_load, epsilon, learn_speed, total_iter , requests, unit, approach_name, learn_appraoch, MSE_list, signal_loads, observed_loads, non_shiftable_load, input_type): """ This method used to convergence the learning processes based on given stop approach, then return the MSE_list to represent to the MSE development situation during learning process. """ if stop_approach == 'byMSE': past_MSE = 0 iteration = 0 while not convergenceByMSE(ideal_type_load, observed_load, past_MSE, convergence_range): # when it does not meet the stop situation if iteration == total_iter or iteration > total_iter: break iteration = iteration + 1 result = learn(signal_load, observed_load, ideal_type_load, epsilon, learn_speed, learn_appraoch, non_shiftable_load) signal_load = result[0] # learn signal_load past_MSE = float(result[1]) # save the signal_load as the past_signal_load MSE_list.append(float(result[1])) print 'process ', iteration , ' learning' requests = getProbDistribution(requests, signal_load, unit, approach_name) #update probabilities observed_load = calculateExpectedLoad(requests, unit) if input_type == 'ideal_load': # if we would like output ideal load situation, add non_shiftable_load for index in range(len(observed_load)): observed_load[index] = observed_load[index] + non_shiftable_load[index] signal_loads.append(list(signal_load)) # save them into signal load list observed_loads.append(list(observed_load)) # save them into observed load list if stop_approach == 'bySignal': past_signal_load = [] iteration = 0 while not convergenceBySignal(past_signal_load, signal_load, convergence_range): # when it does not meet the stop situation if iteration == total_iter or iteration > total_iter: break iteration = iteration + 1 past_signal_load = list(signal_load) result = learn(signal_load, observed_load, ideal_type_load, epsilon, learn_speed, learn_appraoch, non_shiftable_load) signal_load 
= result[0] # learn signal_load MSE_list.append(float(result[1])) print 'process ', iteration , ' learning, MSE is ', result[1] requests = getProbDistribution(requests, signal_load, unit, approach_name) #update probabilities observed_load = calculateExpectedLoad(requests, unit) if input_type == 'ideal_load': # if we would like output ideal load situation, add non_shiftable_load for index in range(len(observed_load)): observed_load[index] = observed_load[index] + non_shiftable_load[index] signal_loads.append(list(signal_load)) # save them into signal load list observed_loads.append(list(observed_load)) # save them into observed load list return MSE_list
def calcRandLoad(Result, unit, approach_name): """ This method used to get the load information based on random number for each request from each 'house'. Then get a load result. [request,...], int, string => [float, float,...] """ ideal_shiftable_load = modify_ideal_load(Result[1][-1], unit) # get ideal shift-able load signal_load = list(ideal_shiftable_load) # make ideal shift-able load equal to signal load ## first of all, get requests from reading result requests = Result[0] requests = updateProfile(requests, unit) # update profile requests = getProbDistribution(requests, signal_load, unit, approach_name) # update probabilities for request in requests: #print 'before: ' #print request.request_type.request_name, ' : ', request.probabilities getPureProbabilities(request) # get the new probabilities quantitiy = request.quantity newProbabilities = [0] * len(request.pure_probabilities) for count in range(quantitiy): randomNum = random.random() if randomNum < request.pure_probabilities[0] or randomNum == request.pure_probabilities[0] : newProbabilities[0] = newProbabilities[0] + 1 continue elif randomNum > request.pure_probabilities[-1] or randomNum == request.pure_probabilities[-1]: newProbabilities[-1] = newProbabilities[-1] + 1 continue else: for probCount in range(len(request.pure_probabilities)): if randomNum > request.pure_probabilities[probCount]: if randomNum < request.pure_probabilities[probCount + 1] or randomNum == request.pure_probabilities[probCount + 1]: newProbabilities[probCount + 1] = newProbabilities[probCount + 1] + 1 break else: continue for count in range(len(newProbabilities)): newProbabilities[count] = newProbabilities[count] / (quantitiy * 1.0) request.update_pure_probabilities(newProbabilities) ## then, modify the actual probabilities modifyActualProbabilities(request) print 'Finish Request: ', request.request_type.request_name #print 'after: ' #print request.request_type.request_name, ' : ', request.probabilities observed_load = 
calculateExpectedLoad(requests, unit) return observed_load