def conjugate_gradient(A, b, x0=None, imax=10000, tol=1.0e-8, atol=1.0e-14,
                       iprint=None, output_stats=False, use_c_cg=False,
                       precon='None'):
    """Solve the linear system Ax = b using the conjugate gradient method.

    If b is a 2D array it is treated as a set of column vectors and each
    column is solved independently.

    A: matrix, or object applying a matrix via __mul__; assumed symmetric.
       Must be a Sparse_CSR instance when use_c_cg is True.
    b: right hand side; 1D vector or 2D array of column vectors.
    x0: initial guess (default: the zero vector).
    imax: maximum number of iterations.
    tol: relative residual tolerance.
    atol: absolute residual tolerance.
    iprint: log progress every iprint iterations (None or 0 disables).
    output_stats: if True also return a Stats object (None on the C path,
        which reports no per-iteration statistics).
    use_c_cg: use the C implementation instead of the pure Python one.
    precon: 'Jacobi' selects Jacobi preconditioning; anything else, none.

    Returns x (or (x, stats) when output_stats is True).
    Raises ConvergenceError if the solver fails to converge.
    """
    if use_c_cg:
        from anuga.utilities.sparse import Sparse_CSR
        msg = ('c implementation of conjugate gradient requires that matrix A '
               'be of type %s') % (str(Sparse_CSR))
        assert isinstance(A, Sparse_CSR), msg

    if x0 is None:
        x0 = num.zeros(b.shape, dtype=float)
    else:
        x0 = num.array(x0, dtype=float)

    b = num.array(b, dtype=float)

    err = 0

    # The C routines signal failure only through their return code; keep
    # stats bound so output_stats=True cannot raise NameError on that path.
    stats = None

    # preconditioner
    # Padarn Note: currently a fairly lazy implementation, needs fixing
    M = None
    if precon == 'Jacobi':
        M = num.zeros(b.shape[0])
        jacobi_precon_c(A, M)
        x0 = b.copy()

        if len(b.shape) != 1:
            # Multiple right hand sides: solve each column independently.
            for i in range(b.shape[1]):
                if not use_c_cg:
                    x0[:, i], stats = _conjugate_gradient_preconditioned(
                        A, b[:, i], x0[:, i], M,
                        imax, tol, atol, iprint, Type="Jacobi")
                else:
                    # need to copy into new array to ensure contiguous access
                    xnew = x0[:, i].copy()
                    err = cg_solve_c_precon(A, xnew, b[:, i].copy(),
                                            imax, tol, atol, b.shape[1], M)
                    x0[:, i] = xnew
        else:
            if not use_c_cg:
                x0, stats = _conjugate_gradient_preconditioned(
                    A, b, x0, M, imax, tol, atol, iprint, Type="Jacobi")
            else:
                err = cg_solve_c_precon(A, x0, b, imax, tol, atol, 1, M)
    else:
        if len(b.shape) != 1:
            # Multiple right hand sides: solve each column independently.
            for i in range(b.shape[1]):
                if not use_c_cg:
                    x0[:, i], stats = _conjugate_gradient(
                        A, b[:, i], x0[:, i], imax, tol, atol, iprint)
                else:
                    # need to copy into new array to ensure contiguous access
                    xnew = x0[:, i].copy()
                    err = cg_solve_c(A, xnew, b[:, i].copy(),
                                     imax, tol, atol, b.shape[1])
                    x0[:, i] = xnew
        else:
            if not use_c_cg:
                x0, stats = _conjugate_gradient(A, b, x0,
                                                imax, tol, atol, iprint)
            else:
                x0 = b.copy()
                err = cg_solve_c(A, x0, b, imax, tol, atol, 1)

    if err == -1:
        log.warning('max number of iterations attained from c cg')
        msg = 'Conjugate gradient solver did not converge'
        # Python 2 'raise X, msg' replaced with the version-portable call form.
        raise ConvergenceError(msg)

    if output_stats:
        return x0, stats
    else:
        return x0
def _conjugate_gradient_preconditioned(A, b, x0, M,
                                       imax=10000, tol=1.0e-8, atol=1.0e-10,
                                       iprint=None, Type='None'):
    """Solve Ax = b using the preconditioned conjugate gradient method.

    A: matrix, or object applying a matrix via __mul__; assumed symmetric.
       A can be dense, sparse, or a function.
    b: right hand side (must be a single 1D vector).
    x0: initial guess (default: the zero vector).
    M: diagonal entries for the Jacobi preconditioner.
    imax: maximum number of iterations.
    tol: relative residual tolerance.
    atol: absolute residual tolerance.
    iprint: log progress every iprint iterations (None or 0 disables).
    Type: must be 'Jacobi'; only the Jacobi preconditioner is implemented.

    Returns (x, stats) where x is the approximate solution.
    Raises PreconditionerError, VectorShapeError or ConvergenceError.
    """
    # Padarn note: This is temporary while the Jacobi preconditioner is the
    # only one available.
    if not Type == 'Jacobi':
        log.warning('Only the Jacobi Preconditioner is implemented in cg_solve python')
        msg = 'Only the Jacobi Preconditioner is implemented in cg_solve python'
        raise PreconditionerError(msg)
    else:
        # Build D = inv(diag(M)) as a sparse matrix so that z = D * r
        # applies the Jacobi preconditioner.
        D = Sparse(A.M, A.M)
        for i in range(A.M):
            D[i, i] = 1.0 / M[i]
        D = Sparse_CSR(D)

    stats = Stats()

    b = num.array(b, dtype=float)
    if len(b.shape) != 1:
        raise VectorShapeError('input vector should consist of only one column')

    if x0 is None:
        x0 = num.zeros(b.shape, dtype=float)
    else:
        x0 = num.array(x0, dtype=float)

    stats.x0 = num.linalg.norm(x0)

    if iprint is None or iprint == 0:
        iprint = imax

    dx = 0.0

    i = 1
    x = x0
    r = b - A * x
    z = D * r
    d = r
    rTr = num.dot(r, z)
    rTr0 = rTr

    stats.rTr0 = rTr0

    # FIXME Let the iterations stop if starting with a small residual
    while (i < imax and rTr > tol ** 2 * rTr0 and rTr > atol ** 2):
        q = A * d
        alpha = rTr / num.dot(d, q)
        xold = x
        x = x + alpha * d

        dx = num.linalg.norm(x - xold)

        # Padarn Note 26/11/12: a periodic explicit residual refresh
        # (r = b - A * x) used to live here, but its guard was wrong
        # (it fired on 49 of every 50 iterations) and it was later disabled
        # with 'if False'; the dead branch has been removed entirely.
        r = r - alpha * q
        rTrOld = rTr
        z = D * r
        rTr = num.dot(r, z)
        bt = rTr / rTrOld

        d = z + bt * d
        i = i + 1
        if i % iprint == 0:
            log.info('i = %g rTr = %15.8e dx = %15.8e' % (i, rTr, dx))

        if i == imax:
            log.warning('max number of iterations attained')
            msg = 'Conjugate gradient solver did not converge: rTr==%20.15e' % rTr
            raise ConvergenceError(msg)

    stats.x = num.linalg.norm(x)
    stats.iter = i
    stats.rTr = rTr
    stats.dx = dx

    return x, stats
if is_opened[point_i] == False: points_writer = writer(file(dir_name + sep + gauge_file + point_name[point_i] + '.csv', "wb")) points_writer.writerow(heading) is_opened[point_i] = True else: points_writer = writer(file(dir_name + sep + gauge_file + point_name[point_i] + '.csv', "ab")) points_list = [quake_time, quake_time/3600.] + _quantities2csv(quantities, point_quantities, callable_sww.centroids, point_i) points_writer.writerow(points_list) else: if verbose: msg = 'gauge' + point_name[point_i] + 'falls off the mesh in file ' + sww_file + '.' log.warning(msg) def sww2timeseries(swwfiles, gauge_filename, production_dirs, report=None, reportname=None, plot_quantity=None, generate_fig=False, surface=None, time_min=None, time_max=None, time_thinning=1, time_unit=None, title_on=None, use_cache=False,
def sww2csv_gauges(sww_file, gauge_file, out_name='gauge_',
                   quantities=['stage', 'depth', 'elevation',
                               'xmomentum', 'ymomentum'],
                   verbose=False, use_cache=True, output_centroids=False):
    """Interpolate quantities from sww file(s) at gauge locations and write
    one csv time series per gauge.

    NOTE: if using csv2timeseries_graphs after creating csv file,
    it is essential to export quantities 'depth' and 'elevation'.
    'depth' is good to analyse gauges on land and elevation is used
    automatically by csv2timeseries_graphs in the legend.

    sww_file: path to any sww file

    gauge_file: csv file of gauge locations, e.g.
                name, easting, northing, elevation
                point1, 100.3, 50.2, 10.0
                point2, 10.3, 70.3, 78.0
        NOTE: column order may change, but the header names, e.g.
        'easting', 'elevation', must match exactly - ALL lowercase!

    out_name: prefix for output file name (default is 'gauge_')

    Outputs: one csv file per gauge/point in the gauge file, written to the
        same directory as sww_file and named <out_name><name>.csv, e.g.
        gauge_point1.csv if <out_name> not supplied,
        myfile_2_point1.csv if <out_name>='myfile_2_'.
        Each file carries a header: time, hours, <quantities...>.

    Usage:
        sww2csv_gauges(sww_file='test1.sww',
                       quantities=['stage', 'elevation', 'depth', 'bearing'],
                       gauge_file='gauge.txt')

    This is really returning speed, not velocity.
    """
    from csv import reader, writer
    from anuga.utilities.numerical_tools import ensure_numeric, mean, NAN
    from anuga.utilities.file_utils import get_all_swwfiles
    from anuga.abstract_2d_finite_volumes.util import file_function

    assert isinstance(gauge_file, string_types) or isinstance(gauge_file, str), \
        'Gauge filename must be a string or unicode'
    assert isinstance(out_name, string_types) or isinstance(out_name, str), \
        'Output filename prefix must be a string'

    # Fail early with a clear message if the gauge file is unreadable.
    try:
        gid = open(gauge_file)
        point_reader = reader(gid)
        gid.close()
    except Exception as e:
        msg = 'File "%s" could not be opened: Error="%s"' % (gauge_file, e)
        raise Exception(msg)

    if verbose:
        log.critical('Gauges obtained from: %s' % gauge_file)

    gid = open(gauge_file)
    point_reader = reader(gid)

    points = []
    point_name = []

    # read point info from file
    for i, row in enumerate(point_reader):
        # read header and determine the column numbers to read correctly.
        if i == 0:
            for j, value in enumerate(row):
                if value.strip() == 'easting':
                    easting = j
                if value.strip() == 'northing':
                    northing = j
                if value.strip() == 'name':
                    name = j
                if value.strip() == 'elevation':
                    elevation = j
        else:
            points.append([float(row[easting]), float(row[northing])])
            point_name.append(row[name])

    gid.close()

    # convert to array for file_function
    points_array = num.array(points, float)

    points_array = ensure_absolute(points_array)

    dir_name, base = os.path.split(sww_file)

    # need to get current directory so when path and file
    # are "joined" below the directory is correct
    if dir_name == '':
        dir_name = getcwd()

    if access(sww_file, R_OK):
        if verbose:
            log.critical('File %s exists' % sww_file)
    else:
        msg = 'File "%s" could not be opened: no read permission' % sww_file
        raise Exception(msg)

    sww_files = get_all_swwfiles(look_in_dir=dir_name,
                                 base_name=base,
                                 verbose=verbose)

    # fudge to get SWW files in 'correct' order, oldest on the left
    sww_files.sort()

    if verbose:
        log.critical('sww files=%s' % sww_files)

    # to make all the quantities lower case for file_function
    quantities = [quantity.lower() for quantity in quantities]

    # quantities needed from the sww file to calculate the output quantities
    core_quantities = ['stage', 'elevation', 'xmomentum', 'ymomentum']

    gauge_file = out_name

    heading = [quantity for quantity in quantities]
    heading.insert(0, 'time')
    heading.insert(1, 'hours')

    if verbose:
        log.critical('Writing csv files')

    quake_offset_time = None

    # One flag per gauge: first write truncates ('w' + header), later
    # writes append ('a').
    is_opened = [False] * len(points_array)
    for sww_file in sww_files:
        sww_file = join(dir_name, sww_file + '.sww')
        callable_sww = file_function(sww_file,
                                     quantities=core_quantities,
                                     interpolation_points=points_array,
                                     verbose=verbose,
                                     use_cache=use_cache,
                                     output_centroids=output_centroids)

        if quake_offset_time is None:
            quake_offset_time = callable_sww.starttime

        for point_i, point in enumerate(points_array):
            for time in callable_sww.get_time():
                # add domain starttime to relative time.
                quake_time = time + quake_offset_time
                # __call__ is overridden
                point_quantities = callable_sww(time, point_i)

                # NOTE(review): if NAN is an IEEE float('nan') this
                # comparison is always True (nan != nan); presumably ANUGA's
                # NAN is a finite sentinel value - confirm against
                # anuga.utilities.numerical_tools.
                if point_quantities[0] != NAN:
                    if not is_opened[point_i]:
                        points_handle = open(dir_name + sep + gauge_file
                                             + point_name[point_i] + '.csv',
                                             'w')
                        points_writer = writer(points_handle)
                        points_writer.writerow(heading)
                        is_opened[point_i] = True
                    else:
                        points_handle = open(dir_name + sep + gauge_file
                                             + point_name[point_i] + '.csv',
                                             'a')
                        points_writer = writer(points_handle)

                    points_list = ([quake_time, quake_time / 3600.]
                                   + _quantities2csv(quantities,
                                                     point_quantities,
                                                     callable_sww.centroids,
                                                     point_i))
                    points_writer.writerow(points_list)
                    points_handle.close()
                else:
                    if verbose:
                        msg = ('gauge ' + point_name[point_i]
                               + ' falls off the mesh in file '
                               + sww_file + '.')
                        log.warning(msg)
else: points_writer = writer( file( dir_name + sep + gauge_file + point_name[point_i] + '.csv', "ab")) points_list = [quake_time, quake_time / 3600.] + _quantities2csv( quantities, point_quantities, callable_sww.centroids, point_i) points_writer.writerow(points_list) else: if verbose: msg = 'gauge' + point_name[ point_i] + 'falls off the mesh in file ' + sww_file + '.' log.warning(msg) def sww2timeseries(swwfiles, gauge_filename, production_dirs, report=None, reportname=None, plot_quantity=None, generate_fig=False, surface=None, time_min=None, time_max=None, time_thinning=1, time_unit=None, title_on=None,