def __sort(algs, data):
    if algs == 'insertion':
        Insertion.sort(data)
    elif algs == 'insertion_advance':
        Insertion.sort_advance(data)
    elif algs == 'selection':
        Selection.sort(data)
    elif algs == 'shell':
        Shell.sort(data)
    elif algs == 'merge':
        Merge.sort(data)
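# --- Hedged usage sketch (not from the original source) ---
# Assumes __sort is a module-level helper and that Insertion, Selection,
# Shell, and Merge each expose a sort() that orders the list in place;
# the sample data below is illustrative only.
if __name__ == '__main__':
    data = [5, 2, 9, 1, 7]
    __sort('merge', data)   # dispatches to Merge.sort(data)
    print(data)             # expected: [1, 2, 5, 7, 9] if sorting is in place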
def __init__(self, input_size, batch_size, num_units_merge, rnn_layers,
             grad_clip_merge, num_units_split, split_layers, grad_clip_split,
             beta=1.0):
    super(DivideAndConquerNetwork, self).__init__()
    # General
    self.input_size = input_size
    self.batch_size = batch_size
    # Merge
    self.num_units_merge = num_units_merge
    self.rnn_layers = rnn_layers
    self.merge = Merge(input_size, num_units_merge, batch_size)
    # Split
    self.num_units_split = num_units_split
    self.split_layers = split_layers
    self.beta = beta
    self.split = Split(input_size, num_units_split, batch_size, split_layers)
    # Training
    self.grad_clip_split = grad_clip_split
    self.optim_split = optim.RMSprop(self.split.parameters())
    self.grad_clip_merge = grad_clip_merge
    self.optim_merge = optim.Adam(self.merge.parameters())
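# --- Hedged construction sketch (not from the original source) ---
# Shows how the constructor arguments map onto the Merge/Split sub-modules
# and their two optimizers; every hyperparameter value below is an assumption
# chosen for illustration, not a value from the original project.
net = DivideAndConquerNetwork(
    input_size=2, batch_size=32,
    num_units_merge=128, rnn_layers=1, grad_clip_merge=5.0,
    num_units_split=128, split_layers=1, grad_clip_split=5.0,
    beta=1.0)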
def onRadianButton(self, event):
    self.spreadsheet = 0
    # send spreadsheet path to Merge class
    myMerge = Merge(self.tc_files)
    #myMerge.hello()
    for i, v in enumerate(self.dropped_files):
        # endswith accepts a tuple; the original ('.xlsx' or '.XLSX') only
        # ever tested '.xlsx'
        if v.endswith(('.xlsx', '.XLSX')):
            self.spreadsheet = v
    self.tc_files.WriteText("\n Processing Spreadsheet...")
    propertyList = myMerge.openFile(self.spreadsheet)
    #self.tc_files.WriteText("\n Creating PDFs...")
    myMerge.rename_files()
    self.tc_files.WriteText("\n Creating Emails...")
    myEmail = Emailer()
    myEmail.create_mail(propertyList)
    self.tc_files.WriteText("\n You're done! Don't forget to hit the clear button!")
def doMerge(iFile1, iFile2, oFile):
    pc1 = InOut.read(iFile1)
    if pc1 is None:
        return False
    pc2 = InOut.read(iFile2)
    if pc2 is None:
        return False
    pcMerged = Merge.merge(pc1, pc2)
    InOut.write(oFile, pcMerged)
    return True  # signal success so callers can distinguish it from the failure paths
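# --- Hedged usage sketch (not from the original source) ---
# The file names are placeholders; doMerge returns False when either input
# cannot be read and True once the merged result has been written.
if doMerge('input_a.dat', 'input_b.dat', 'merged_out.dat'):
    print('merged result written to merged_out.dat')
else:
    print('could not read one of the input files')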
def __init__(self):
    AlgorithmProvider.__init__(self)
    self.alglist = [SumLines(), PointsInPolygon(),
                    PointsInPolygonWeighted(), PointsInPolygonUnique(),
                    BasicStatisticsStrings(), BasicStatisticsNumbers(),
                    NearestNeighbourAnalysis(), MeanCoords(),
                    LinesIntersection(), UniqueValues(), PointDistance(),
                    ReprojectLayer(), ExportGeometryInfo(), Centroids(),
                    Delaunay(), VoronoiPolygons(), SimplifyGeometries(),
                    DensifyGeometries(), DensifyGeometriesInterval(),
                    MultipartToSingleparts(), SinglePartsToMultiparts(),
                    PolygonsToLines(), LinesToPolygons(), ExtractNodes(),
                    Eliminate(), ConvexHull(), FixedDistanceBuffer(),
                    VariableDistanceBuffer(), Dissolve(), Difference(),
                    Intersection(), Union(), Clip(), ExtentFromLayer(),
                    RandomSelection(), RandomSelectionWithinSubsets(),
                    SelectByLocation(), RandomExtract(),
                    RandomExtractWithinSubsets(), ExtractByLocation(),
                    SpatialJoin(), RegularPoints(), SymetricalDifference(),
                    VectorSplit(), VectorGrid(), DeleteColumn(),
                    DeleteDuplicateGeometries(), TextToFloat(),
                    ExtractByAttribute(), SelectByAttribute(), Grid(),
                    Gridify(), HubDistance(), HubLines(), Merge(),
                    GeometryConvert(), AddTableField(), FieldsCalculator(),
                    SaveSelectedFeatures(), JoinAttributes(),
                    AutoincrementalField(), Explode(), FieldsPyculator(),
                    EquivalentNumField(), PointsLayerFromTable(),
                    StatisticsByCategories(), ConcaveHull(), Polygonize(),
                    RasterLayerStatistics(), PointsDisplacement(),
                    ZonalStatistics(), PointsFromPolygons(),
                    PointsFromLines(), RandomPointsExtent(),
                    RandomPointsLayer(), RandomPointsPolygonsFixed(),
                    RandomPointsPolygonsVariable(),
                    RandomPointsAlongLines(), PointsToPaths(),
                    PostGISExecuteSQL(), ImportIntoPostGIS(),
                    SetVectorStyle(), SetRasterStyle(),
                    SelectByExpression(), HypsometricCurves(),
                    # ------ raster ------
                    # CreateConstantRaster(),
                    # ------ graphics ------
                    # VectorLayerHistogram(), VectorLayerScatterplot(),
                    # RasterLayerHistogram(), MeanAndStdDevPlot(),
                    # BarPlot(), PolarPlot()
                    ]

    folder = os.path.join(os.path.dirname(__file__), 'scripts')
    scripts = ScriptUtils.loadFromFolder(folder)
    for script in scripts:
        script.allowEdit = False
    self.alglist.extend(scripts)

    for alg in self.alglist:
        alg._icon = self._icon
def main():
    ticker = raw_input("\n\n\n----------------------------------------------\nWelcome. Ready to trade? Pick a stock ticker: ")
    reuterObj = ReutersQuery()
    reuterVector = reuterObj.getQuery(ticker)
    sentimentObj = Sentiment()
    sentiments = sentimentObj.sentimentVectorize(reuterVector)
    yahooObj = YahooQuery()
    yahooVector = yahooObj.doYahooQuery(ticker, reuterVector)
    reuterDates = DateFormat()
    dates = reuterDates.fixDates(reuterVector)
    mergeObj = Merge()
    merged = mergeObj.mergeEverything(sentiments, yahooVector, dates)
    strategyObj = Strategy()
    metrics = strategyObj.runStrategy(ticker, merged)
    outputObj = Output()
    outputObj.putOutput(ticker, metrics, yahooVector, merged)
    print '\nThanks for trading with Vivek! Get money, get paid!'
class Driver:
    if __name__ == '__main__':
        # Extracting data from 5 excel files
        extract = Extract()
        ds, d, os, sa, ea = extract.getAllData()

        # Transforming data to obtain additional columns
        transform = Transform()
        ds, d = transform.transformAllData(ds, d)

        # Dropping unnecessary columns
        dropColumns = DropColumns()
        drivingSearch, delivery, orders, startAddresses, endAddresses = dropColumns.dropAllColumns(ds, d, os, sa, ea)

        # joining the 5 tables
        merge = Merge()
        finalData = merge.mergeAllTables(drivingSearch, delivery, orders, startAddresses, endAddresses)

        # Converting the date columns from Object type to DateTime
        finalData = transform.transformdate(finalData)

        sqlload = SQL_Load()
        sqlload.loadDataToStaging(finalData)
def __init__(self):
    AlgorithmProvider.__init__(self)
    self._icon = QIcon(os.path.join(pluginPath, 'images', 'qgis.png'))
    self.alglist = [SumLines(), PointsInPolygon(),
                    PointsInPolygonWeighted(), PointsInPolygonUnique(),
                    BasicStatisticsStrings(), BasicStatisticsNumbers(),
                    NearestNeighbourAnalysis(), MeanCoords(),
                    LinesIntersection(), UniqueValues(), PointDistance(),
                    ReprojectLayer(), ExportGeometryInfo(), Centroids(),
                    Delaunay(), VoronoiPolygons(), SimplifyGeometries(),
                    DensifyGeometries(), DensifyGeometriesInterval(),
                    MultipartToSingleparts(), SinglePartsToMultiparts(),
                    PolygonsToLines(), LinesToPolygons(), ExtractNodes(),
                    Eliminate(), ConvexHull(), FixedDistanceBuffer(),
                    VariableDistanceBuffer(), Dissolve(), Difference(),
                    Intersection(), Union(), Clip(), ExtentFromLayer(),
                    RandomSelection(), RandomSelectionWithinSubsets(),
                    SelectByLocation(), RandomExtract(), DeleteHoles(),
                    RandomExtractWithinSubsets(), ExtractByLocation(),
                    SpatialJoin(), RegularPoints(), SymmetricalDifference(),
                    VectorSplit(), VectorGrid(), DeleteColumn(),
                    DeleteDuplicateGeometries(), TextToFloat(),
                    ExtractByAttribute(), SelectByAttribute(), Grid(),
                    Gridify(), HubDistance(), HubLines(), Merge(),
                    GeometryConvert(), AddTableField(), FieldsCalculator(),
                    SaveSelectedFeatures(), JoinAttributes(),
                    AutoincrementalField(), Explode(), FieldsPyculator(),
                    EquivalentNumField(), PointsLayerFromTable(),
                    StatisticsByCategories(), ConcaveHull(),
                    RasterLayerStatistics(), PointsDisplacement(),
                    ZonalStatistics(), PointsFromPolygons(),
                    PointsFromLines(), RandomPointsExtent(),
                    RandomPointsLayer(), RandomPointsPolygonsFixed(),
                    RandomPointsPolygonsVariable(),
                    RandomPointsAlongLines(), PointsToPaths(),
                    PostGISExecuteSQL(), ImportIntoPostGIS(),
                    SetVectorStyle(), SetRasterStyle(),
                    SelectByExpression(), HypsometricCurves(),
                    SplitLinesWithLines(), CreateConstantRaster(),
                    FieldsMapper(), SelectByAttributeSum(),
                    Datasources2Vrt(), CheckValidity(),
                    OrientedMinimumBoundingBox(), Smooth(),
                    ReverseLineDirection()
                    ]

    if hasMatplotlib:
        from VectorLayerHistogram import VectorLayerHistogram
        from RasterLayerHistogram import RasterLayerHistogram
        from VectorLayerScatterplot import VectorLayerScatterplot
        from MeanAndStdDevPlot import MeanAndStdDevPlot
        from BarPlot import BarPlot
        from PolarPlot import PolarPlot

        self.alglist.extend([
            VectorLayerHistogram(), RasterLayerHistogram(),
            VectorLayerScatterplot(), MeanAndStdDevPlot(), BarPlot(),
            PolarPlot(),
        ])

    if hasShapely:
        from Polygonize import Polygonize
        self.alglist.extend([Polygonize()])

    if QGis.QGIS_VERSION_INT >= 21400:
        from ExecuteSQL import ExecuteSQL
        self.alglist.extend([ExecuteSQL()])

    folder = os.path.join(os.path.dirname(__file__), 'scripts')
    scripts = ScriptUtils.loadFromFolder(folder)
    for script in scripts:
        script.allowEdit = False
    self.alglist.extend(scripts)

    for alg in self.alglist:
        alg._icon = self._icon
        'all', 'any', 'both', 'each', 'few', 'more', 'most', 'other', 'some',
        'such', 'no', 'nor', 'not', 'only', 'own', 'same', 'so', 'than',
        'too', 'very', 's', 't', 'can', 'will', 'just', 'don', "dont",
        'should', "shouldve", 'now', 'd', 'll', 'm', 'o', 're', 've', 'y',
        'ain', 'aren', "arent", 'couldn', "couldnt", 'didn', "didnt",
        'doesn', "doesnt", 'hadn', "hadnt", 'hasn', "hasnt", 'haven',
        "havent", 'isn', "isn't", 'ma', 'mightn', "mightnt", 'mustn',
        "mustnt", 'needn', "neednt", 'shan', "shant", 'shouldn', "shouldnt",
        'wasn', "wasn't", 'weren', "weren't", 'won', "won't", 'wouldn',
        "wouldnt"
]

files = []
secondFile = []
dictPost = {}
fileLists = []
mg = Merge()
fr = FileRetrival()
dic = Dictionary()
stopArr = []
fr.retrivalSMGFile(files)
index = 0
max = 0
weight = 0
blockNumber = 0
articleCount = 0
dictionary = {}
tf_dictionary = {}
idf = {}
df = {}
N = 0
def begin_tpu_calc(self):
    surface_ind = self.waterSurfaceRadio.selection.get()
    surface_selection = self.water_surface_options[surface_ind]

    wind_ind = self.windRadio.selection.get()
    wind_selection = self.windOptions[wind_ind]

    kd_ind = self.turbidityRadio.selection.get()
    kd_selection = self.turbidity_options[kd_ind]

    # CREATE OBSERVATION EQUATIONS
    S = SensorModel(
        self.controller.controller_configuration['sensor_model'])

    # GENERATE JACOBIAN FOR SENSOR MODEL OBSERVATION EQUATIONS
    J = Jacobian(S)

    # CREATE OBJECT THAT PROVIDES FUNCTIONALITY TO MERGE LAS AND TRAJECTORY DATA
    M = Merge()

    multiprocess = self.controller.controller_configuration['multiprocess']
    if multiprocess:
        num_cores = self.controller.controller_configuration['number_cores']
        cpu_process_info = ('multiprocess', num_cores)
    else:
        cpu_process_info = ('singleprocess', )

    tpu = Tpu(
        surface_selection,
        surface_ind,
        wind_selection,
        self.wind_vals[wind_ind][1],
        kd_selection,
        self.kd_vals[kd_ind][1],
        self.vdatum_region.get(),
        self.mcu,
        self.tpuOutput.directoryName,
        self.controller.controller_configuration['cBLUE_version'],
        self.controller.controller_configuration['sensor_model'],
        cpu_process_info,
        self.controller.controller_configuration['subaqueous_LUTs'],
        self.controller.controller_configuration['water_surface_ellipsoid_height'])

    las_files = [os.path.join(self.lasInput.directoryName, l)
                 for l in os.listdir(self.lasInput.directoryName)
                 if l.endswith('.las')]

    num_las = len(las_files)

    def signal_completion():
        self.tpu_btn_text.set('TPU Calculated')
        self.tpuProcess.config(fg='darkgreen')
        print('DONE!! (close cBLUE before running again)')

    def sbet_las_tiles_generator():
        """This generator is the 2nd argument for the run_tpu_multiprocessing
        method, to avoid passing the entire sbet or the list of tiled sbets
        to the calc_tpu() method.
        """
        for las_file in las_files:
            logging.debug('({}) generating SBET tile...'.format(
                las_file.split('\\')[-1]))

            inFile = laspy.file.File(las_file, mode='r')
            west = inFile.reader.get_header_property('x_min')
            east = inFile.reader.get_header_property('x_max')
            north = inFile.reader.get_header_property('y_max')
            south = inFile.reader.get_header_property('y_min')

            yield self.sbet.get_tile_data(north, south, east, west), las_file, J, M

    logging.info('processing {} las file(s) ({})...'.format(
        num_las, cpu_process_info[0]))

    if multiprocess:
        p = tpu.run_tpu_multiprocess(num_las, sbet_las_tiles_generator())
        signal_completion()
        p.close()
        p.join()
    else:
        tpu.run_tpu_singleprocess(num_las, sbet_las_tiles_generator())
        signal_completion()