def doCompile():
    """Compile the source files named on the command line (Python 2).

    Expands a single glob argument into a file list, optionally reorders it
    via the compilation-order analysis, then either prints what would be
    compiled (--whatif) or compiles each file, aborting on errors.
    """
    args = Options.args
    # A single argument is treated as a glob pattern; several arguments are
    # taken literally as the file list.
    if len(args) == 1:
        fileList = glob.glob(args[0])
    else:
        fileList = args
    if fileList and (Options.options.closure or Options.options.forceCompile):
        #import pdb; pdb.set_trace()
        # Let the dependency analysis decide which files actually need
        # compiling and in what order.
        fileList = CompOrder.getCompileCandidates(fileList)
    if Options.options.whatif:
        # Dry run: just show the compile commands that would be issued.
        for file in fileList:
            print "m3 %s" % file
    elif fileList:
        for file in fileList:
            # Reset global caches between compilation units.
            import M3TypeLib
            M3TypeLib.flush()
            import UnitCache
            UnitCache.flush()
            # TBD : This is wasteful but it allows us to carry out destructive operations on types (I think)
            import Scaling
            Scaling.flush()
            compile(fileName=file, mainProg=True)
            if Message.errors:
                sys.exit("Errors during Compilation")
            Message.reset()
    else:
        # No file list (empty glob expansion): compile the raw first argument.
        compile(fileName=args[0], mainProg=True)
        if Message.errors:
            sys.exit("Errors during Compilation")
def image(self):
    """Return a printable rendering of this value.

    Uninitialised values render as "???"; scaled types delegate to the
    Scaling module; everything else falls back to plain %s formatting.
    """
    if self.val == Uninitialised:
        return "???"
    scaling = self.tipe.scaling
    if scaling:
        return Scaling.image(scaling, self.val)
    return "%s" % self.val
def find_action_images(action):
    """Load and resize every image in original_images/ matching *action*.

    Returns a pair of parallel lists: (scaled images, their file names).
    """
    src_dir = 'original_images/'
    images = []
    names = []
    # Walk the original directory, keeping only files for this action.
    for fname in os.listdir(src_dir):
        if not is_action_image(fname, action):
            continue
        scaled = Scaling.scale(Image.open(src_dir + fname), 40, 45)
        images.append(scaled)
        names.append(fname)
    return images, names
def Run(): print 'running the inserter' [width, height] = Template.getTemplateData() for sprite in custom_utils.get_working_sprites(): sprite.image = finalImage = Scaling.scale(sprite.image, width, height) sprite.image = finalImage = Bitify.bitify(sprite.image) SaveToPlayerFile.saveSprite(sprite) custom_utils.save_working_sprite(sprite) print 'save to player file' SaveToPlayerFile.savePlayerFile()
# Coerce columns read as object/str into floats.
dataset['ca'] = dataset['ca'].astype(float)
dataset['thal'] = dataset['thal'].astype(float)
# Map the raw diagnosis codes to the target label via `func`
# (defined earlier in the file — presumably binarises num; confirm).
dataset['num'] = dataset['num'].apply(func)
target=dataset['num']
# Drop the label column so it cannot leak into the features.
del dataset['num']
'''-------------------------Feature Selection-----------------------------'''
# Correlation Matriz
#FeatureSelection.CorrelationMatrizWithHeatMap(dataset, target)
# Keep the 9 strongest features.
n_features = 9
dataset_select_Uni = FeatureSelection.Univariate_Selection(dataset, target, n_features)
# NOTE(review): only the Feature_Importance selection is used below;
# dataset_select_Uni appears unused in this chunk.
dataset_select = FeatureSelection.Feature_Importance(dataset, target, n_features)
'''-------------------------Divide Dataset into Train and Test-----------------------------'''
# 80/20 split with a fixed seed for reproducibility.
data_train, data_test, target_train, target_test = train_test_split(dataset_select, target, test_size=0.2, random_state=0)
data_train = data_train.reset_index(drop=True)
target_train = target_train.reset_index(drop=True)
'''-------------------------Scaling-----------------------------'''
colunms = list(data_train)
# data_train_scaled, data_test_scaled = Scaling.Scaling_StandardScaler(data_train, data_test, colunms)
# Min-max scaling was chosen over the standard/robust alternatives above.
data_train_scaled, data_test_scaled = Scaling.Scaling_MinMaxScaler(data_train, data_test, colunms)
#data_train_scaled, data_test_scaled = Scaling.Scaling_RobustScaler(data_train, data_test, colunms)
Scaling.ScalingComparationScaling(data_train,data_train_scaled)
# Report the fitted linear-regression parameters (reg fitted earlier in the file).
print("intercept:", reg.intercept_)
print("coefficient for previous box office:", reg.coef_[0])
print("-------------------------------------------------------")
# Use a CJK-capable font so the Chinese axis labels render correctly.
plt.rcParams['font.sans-serif'] = ['Microsoft JhengHei']
plt.rcParams['axes.unicode_minus'] = False
# Scatter the raw data, then overlay the fitted regression line.
plt.plot(x, y, 'o', markersize=15, alpha=0.3)
plt.plot(x, reg.intercept_ + reg.coef_ * x, linewidth=5)
plt.xlabel('前場票房')
plt.ylabel('當場票房')
plt.savefig('前場票房的預測')
plt.show()
#-----------------------------------------------------------------------
#feature scaling for x,y
x = Scaling.scale_x(x)
# x = x/1000
y = Scaling.scale_y(y)
# x = pd.DataFrame(preprocessing.scale(x))
# y = pd.DataFrame(preprocessing.scale(y))
print(x)
print(y)
# Pause so the scaled values can be inspected before continuing.
input("continue")
# Set up the train/test split date.
split_date = datetime.datetime.strptime("2019-03-23", "%Y-%m-%d")
# First row whose DATE equals the split date marks the train/test boundary.
split_index = analysis_data[analysis_data.DATE == split_date].index.tolist()[0]
print("split_index:", split_index, "\n")
# x_train = x[0:split_index]
# x_test = x[split_index:]
import Quantization
import Scaling
import os


def mkdir(path):
    """Create *path* if it does not already exist."""
    if not os.path.exists(path):
        os.makedirs(path)


mkdir('./Quantization')
mkdir('./Scale')

# Quantize the sample image at progressively coarser grey levels.
for levels in (128, 32, 8, 4, 2):
    Quantization.quantize('10.png', levels,
                          './Quantization/10-level-' + str(levels) + '.png')

# Rescale the sample image to a range of target sizes.
_scale_jobs = [
    ((192, 128), 'down-scale(192x128).png'),
    ((96, 64), 'down-scale(96x64).png'),
    ((48, 32), 'down-scale(48x32).png'),
    ((24, 16), 'down-scale(24x16).png'),
    ((12, 8), 'down-scale(12x8).png'),
    ((300, 200), 'down-scale(300x200).png'),
    ((450, 300), 'up-scale(450x300).png'),
    ((500, 200), 'scale(500x200).png'),
]
for size, out_name in _scale_jobs:
    Scaling.scale('10.png', size, './Scale/' + out_name)