def test_great_circle_numpy(self):
    # One decimal degree is 111000m
    latitude = np.asarray([40.0, 50.0, 60.0])
    longitude = np.asarray([-76.0, -86.0, -96.0])

    azimuth = 90
    new_gc = great_circle(distance=111000, azimuth=azimuth, latitude=latitude, longitude=longitude)
    # We should have gone to the right (east)
    assert (new_gc["longitude"] > longitude + 0.9).all()

    azimuth = 270
    new_gc = great_circle(distance=111000, azimuth=azimuth, latitude=latitude, longitude=longitude)
    # We should have gone to the left (west)
    assert (new_gc["longitude"] < longitude - 0.9).all()

    azimuth = 180
    new_gc = great_circle(distance=111000, azimuth=azimuth, latitude=latitude, longitude=longitude)
    # We should have gone down (south)
    assert (new_gc["latitude"] < latitude - 0.9).all()

    azimuth = 0
    new_gc = great_circle(distance=111000, azimuth=azimuth, latitude=latitude, longitude=longitude)
    # We should have gone up (north)
    assert (new_gc["latitude"] > latitude + 0.9).all()

    azimuth = 315
    new_gc = great_circle(distance=111000, azimuth=azimuth, latitude=latitude, longitude=longitude)
    # We should have gone up and to the left (northwest)
    assert (new_gc["latitude"] > latitude + 0.45).all()
    assert (new_gc["longitude"] < longitude - 0.45).all()
def wedge(distance, angle, theta, lat, lon, arc_points=None):
    try:
        arc_points = float(arc_points)
    except TypeError:
        arc_points = 50.

    pts = [[lon, lat]]

    starting = great_circle(distance=distance, azimuth=math_angle_to_azimuth(angle), latitude=lat, longitude=lon)
    pts.append([starting['longitude'], starting['latitude']])

    ending = great_circle(distance=distance, azimuth=math_angle_to_azimuth(angle + theta), latitude=lat, longitude=lon)

    # Calculate N points along the circumference between the starting and ending points
    alpha = theta / arc_points
    for j in range(int(arc_points)):
        beta = alpha * j
        pt = great_circle(distance=distance, azimuth=math_angle_to_azimuth(angle + beta), latitude=lat, longitude=lon)
        pts.append([pt['longitude'], pt['latitude']])

    # Add the end point
    pts.append([ending['longitude'], ending['latitude']])
    # Close the ring by adding the station point again
    pts.append([lon, lat])

    return geojson.Polygon([pts])
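# Hedged usage sketch for wedge(), assuming math_angle_to_azimuth is the module's
# own math-angle-to-compass-azimuth helper and that geojson and pygc's great_circle
# are importable as used above.
station_lat, station_lon = 38.9, -77.0
poly = wedge(distance=5000, angle=30, theta=90, lat=station_lat, lon=station_lon)
print(geojson.dumps(poly))  # a closed GeoJSON Polygon sweeping 90 degrees of arc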
def test_great_circle_scalars(self):
    # One decimal degree is 111000m
    latitude = 40.0
    longitude = -76.0

    azimuth = 90
    new_gc = great_circle(distance=111000, azimuth=azimuth, latitude=latitude, longitude=longitude)
    # We should have gone to the right (east)
    assert new_gc["longitude"] > longitude + 0.9

    azimuth = 270
    new_gc = great_circle(distance=111000, azimuth=azimuth, latitude=latitude, longitude=longitude)
    # We should have gone to the left (west)
    assert new_gc["longitude"] < longitude - 0.9

    azimuth = 180
    new_gc = great_circle(distance=111000, azimuth=azimuth, latitude=latitude, longitude=longitude)
    # We should have gone down (south)
    assert new_gc["latitude"] < latitude - 0.9

    azimuth = 0
    new_gc = great_circle(distance=111000, azimuth=azimuth, latitude=latitude, longitude=longitude)
    # We should have gone up (north)
    assert new_gc["latitude"] > latitude + 0.9

    azimuth = 315
    new_gc = great_circle(distance=111000, azimuth=azimuth, latitude=latitude, longitude=longitude)
    # We should have gone up and to the left (northwest)
    assert new_gc["latitude"] > latitude + 0.45
    assert new_gc["longitude"] < longitude - 0.45
def get_coordinates(self):
    y, x, z = self.dis.get_node_coordinates()

    # Compute grid from top left corner, so switch Y back around
    y = y[::-1]
    # Compute grid from top left corner, so switch X back around
    x = x[::-1]

    # Make Z negative (down)
    z = z * -1.
    # Convert Z to cartesian
    z = z[:, :, ::-1]

    # Convert distances (from origin to grid cell centers) to meters
    if self.grid_units == 'feet':
        x = x * 0.3048
        y = y * 0.3048

    # Transform to a known CRS
    wgs84_crs = Proj(init='epsg:4326')
    known_x, known_y = transform(self.grid_crs, wgs84_crs, self.grid_x, self.grid_y)
    logger.debug("Input origin point: {!s}, {!s}".format(self.grid_x, self.grid_y))
    logger.debug("Lat/Lon origin point: {!s}, {!s} (EPSG:4326)".format(known_x, known_y))

    notrotated_xs = np.ndarray(0)
    notrotated_ys = np.ndarray(0)
    with LoggingTimer("Computing unrotated output grid", logger.info):
        upper = great_circle(distance=x, latitude=known_y, longitude=known_x, azimuth=90)
        for top_x, top_y in zip(upper["longitude"], upper["latitude"]):
            # Compute the column points for each point in the upper row.
            # Because this is a regular grid (rectangles), we can just rotate
            # by 180 degrees plus the rotation angle.
            row = great_circle(distance=y, latitude=top_y, longitude=top_x, azimuth=180)
            notrotated_xs = np.append(notrotated_xs, row["longitude"])
            notrotated_ys = np.append(notrotated_ys, row["latitude"])

    if self.grid_rotation != 0:
        with LoggingTimer("Computing rotated output grid", logger.info):
            rotated_xs = np.ndarray(0)
            rotated_ys = np.ndarray(0)
            upper_rotated = great_circle(distance=x, latitude=known_y, longitude=known_x, azimuth=90 + self.grid_rotation)
            for top_x, top_y in zip(upper_rotated["longitude"], upper_rotated["latitude"]):
                # Compute the column points for each point in the upper row.
                # Because this is a regular grid (rectangles), we can just rotate
                # by 180 degrees plus the rotation angle.
                row = great_circle(distance=y, latitude=top_y, longitude=top_x, azimuth=180 + self.grid_rotation)
                rotated_xs = np.append(rotated_xs, row["longitude"])
                rotated_ys = np.append(rotated_ys, row["latitude"])
    else:
        rotated_ys = notrotated_ys
        rotated_xs = notrotated_xs

    self.origin_x = known_x
    self.origin_y = known_y
    self.no_rotation_xs = notrotated_xs.reshape(self.dis.ncol, self.dis.nrow).T
    self.no_rotation_ys = notrotated_ys.reshape(self.dis.ncol, self.dis.nrow).T
    self.xs = rotated_xs.reshape(self.dis.ncol, self.dis.nrow).T
    self.ys = rotated_ys.reshape(self.dis.ncol, self.dis.nrow).T
    self.zs = z
def _update(self):
    """Update the current position and heading"""
    # Update current position based on speed
    distance = self.speed * self.update_period
    result = great_circle(distance=distance,
                          azimuth=self._ahrs.heading,
                          latitude=self._current_location.lat,
                          longitude=self._current_location.lng)
    self._current_location = Point(result['latitude'], result['longitude'])
    self._gps.lat = self._current_location.lat
    self._gps.lng = self._current_location.lng

    if self.target_waypoint and not self.arrived:
        # Update compass heading if we have a target waypoint
        self._ahrs.heading = heading_to_point(self._current_location, self.target_waypoint)

        # Check if we have hit our target
        if self.distance_to_target <= self.TARGET_DISTANCE:
            try:
                # If there are waypoints queued up, keep going
                self.move_to_waypoint(self.waypoints.popleft())
            except IndexError:
                # Otherwise we have arrived
                self.arrived = True
                self.speed = 0
                logger.info('Arrived at Waypoint({}, {})'.format(
                    self.target_waypoint.lat, self.target_waypoint.lng))
    else:
        # Update heading and speed based on motor speeds
        self.speed = (self._left_motor.speed + self._right_motor.speed) // 2
        self._ahrs.heading += (
            (self._left_motor.speed - self._right_motor.speed) / 10)
        self._ahrs.heading = abs(self._ahrs.heading % 360)
def great_circle(start_point, end_point, n_points):
    # Check that the inputs are Points
    if start_point is None or end_point is None:
        raise Exception('geom is required')
    if start_point.type != 'Point' or end_point.type != 'Point':
        raise Exception('start_point and end_point should be a Point')

    start_coords = list(geojson.utils.coords(start_point))
    end_coords = list(geojson.utils.coords(end_point))

    distance_dict = pygc.great_distance(
        start_latitude=start_coords[0][1],
        start_longitude=start_coords[0][0],
        end_latitude=end_coords[0][1],
        end_longitude=end_coords[0][0],
    )

    segments = linspace(start=0, stop=distance_dict['distance'], num=n_points)

    points_dict = pygc.great_circle(
        distance=segments,
        azimuth=distance_dict['azimuth'],
        latitude=start_coords[0][1],
        longitude=start_coords[0][0],
    )

    coords = []
    for i in range(n_points):
        coords.append([points_dict['longitude'][i], points_dict['latitude'][i]])

    return geojson.LineString(coords, precision=PRECISION)
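# Hedged usage sketch for the great-circle LineString helper above, assuming
# PRECISION is defined in the same module (6 decimal places is a common choice).
import geojson

start = geojson.Point((-76.0, 40.0))   # (lon, lat)
end = geojson.Point((-86.0, 50.0))
line = great_circle(start, end, n_points=50)
print(len(line['coordinates']))        # 50 interpolated points along the path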
def point_at_distance(distance, heading, current_point):
    result = great_circle(
        distance=distance,
        azimuth=heading,
        latitude=current_point.lat,
        longitude=current_point.lng
    )
    return Point(result['latitude'], result['longitude'])
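# Hedged usage sketch for point_at_distance, assuming the surrounding module's
# Point type exposes lat/lng attributes as used in the function above.
start = Point(40.0, -76.0)                    # lat, lng
north = point_at_distance(111000, 0, start)   # roughly 111 km due north
# Expect north.lat to be close to 41.0 and north.lng essentially unchanged.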
def suggest_another_location(i_location, i_hour_from, i_hour_to, i_result_current_location, i_date):
    distance_meters = Constant.DISTANCE_FROM_CURRENT_LOCATION
    collect_res = []
    # For testing - No suggestion found!
    # i_result_current_location = 1
    for key, value in List_Useful.azimuth_direction.items():
        get_location = great_circle(distance=distance_meters,
                                    azimuth=value,
                                    latitude=i_location[1],
                                    longitude=i_location[0])
        new_location = [get_location['longitude'], get_location['latitude']]
        relevant_collection = choose_relevant_collection(new_location)
        if relevant_collection is not None:
            # relevant_collection = 'test'
            valid_location_report_DB = find_closer_valid_coordinate(
                relevant_collection, new_location, Constant.CLOSE_AREA)
            valid_location_DB = find_closer_valid_coordinate(
                Constant.VALID_LOCATIONS_DATA_BASE, new_location, Constant.CLOSE_AREA)
            if valid_location_report_DB and valid_location_DB:
                # Take the closest point
                if valid_location_report_DB[1] < valid_location_DB[1]:
                    valid_location = valid_location_report_DB
                else:
                    valid_location = valid_location_DB
            else:
                # At least one result is 'None'
                valid_location = valid_location_DB or valid_location_report_DB
            print(new_location, valid_location)
            if valid_location:
                res = Calculate_Percentage.calculate_percentage(
                    valid_location[0], i_hour_from, i_hour_to, i_date)
                if isinstance(res, list):
                    collect_res.append({
                        "Direction": key,
                        "Point": valid_location[0],
                        "Result": float(res[0])
                    })
                else:
                    collect_res.append({
                        "Direction": key,
                        "Point": valid_location[0],
                        "Result": float(res)
                    })

    minimum_value = None
    print(collect_res)
    if collect_res:
        minimum_value = min(collect_res, key=lambda x: x['Result'])
        if minimum_value['Result'] < Constant.MINIMUM_PERCENTAGE:
            minimum_value['Result'] = Constant.MINIMUM_PERCENTAGE
        if i_result_current_location <= minimum_value['Result']:
            minimum_value = None
    return minimum_value
assert math.fabs(d * 1000.0 - p['distance']) < .1
assert math.fabs(a_initial - p['azimuth']) < 1e-6
assert math.fabs(a_final - p['reverse_azimuth']) < 1e-6

print('direct')
for i in range(1000):
    lat = random.uniform(-80, 80)
    lng = random.uniform(-180, 180)
    dist = random.uniform(10, 1000000)
    bearing = random.uniform(-180, 180)
    latd, lngd, az = vincenty.to_dist_bear_vincenty(lat, lng, dist / 1000.0, bearing)
    assert az >= 0, "az=%f" % az
    # print(latd, lngd, az)
    p = pygc.great_circle(latitude=lat, longitude=lng, distance=dist, azimuth=bearing)
    # print(p)
    az = (az + 180.0) % 360.0
    assert math.fabs(latd - p['latitude']) < 1e-7
    assert math.fabs(lngd - p['longitude']) < 1e-7
    assert math.fabs(az - p['reverse_azimuth']) < 1e-7, "%f and %f" % (az, p['reverse_azimuth'])

print('PASS')
def get_wio_data():
    if request.method == 'GET':
        wio_lat = float(request.args.get('lat'))
        wio_lon = float(request.args.get('lon'))
        flag = int(request.args.get('flag'))
        parent_ID = request.args.get('parent_ID')
        buzzer_num = request.args.get('buzzer_num')
    if request.method == 'POST':
        wio_lat = float(request.form['lat'])
        wio_lon = float(request.form['lon'])
        flag = int(request.form['flag'])
        parent_ID = request.form['parent_ID']
        buzzer_num = request.form['buzzer_num']

    if wio_lat == 0:
        wio_lat = 31
    if wio_lon == 0:
        wio_lon = 131

    # Convert lat/lon to an address
    occur_address = cj.iktoaddress(wio_lat, wio_lon)
    if occur_address is None:
        occur_address = 'address'

    # Get the current time
    jtz = pytz.timezone('Asia/Tokyo')
    nowtime = datetime.now(jtz).strftime('%Y-%m-%d %H-%M-%S')

    mail = My_Mail(app)
    db = DB()

    # Get the buzzer owner's (guardian's) data for mail notifications
    sql = ('select parent_name,parent_mail_address from parent'
           ' where parent_ID=%s')
    pdata = db.select(sql, (parent_ID, ))

    # Periodic report
    if flag == 0:
        # Hazardous-area bookkeeping
        area_flag = 0
        # Check whether the point is inside a hazardous area
        sql = ('select occur_ID,area_concentration from Hazardous_area'
               ' where area_a_lat>=%s and area_b_lat<=%s and area_a_lon<=%s'
               ' and area_b_lon>=%s and miss_flag!=1')
        data = (
            wio_lat,
            wio_lat,
            wio_lon,
            wio_lon,
        )
        area_data = db.select(sql, data)
        # If inside a hazardous area
        if area_data:
            for ac in area_data:
                acon_dict = json.loads(ac[1])
                for i in range(1, 6):
                    for j in range(1, 6):
                        key = 'co' + str(i) + str(j)
                        # Find which cell of the mesh contains the point
                        if acon_dict[key][0][0] <= wio_lon and \
                           acon_dict[key][0][1] >= wio_lon and \
                           acon_dict[key][1][0] >= wio_lat and \
                           acon_dict[key][1][1] <= wio_lat:
                            # Decrease the concentration
                            acon_dict[key][2] = acon_dict[key][2] - 1
                            area_flag = 1
                # Update the concentration info in the DB
                sql = ('update Hazardous_area set area_concentration=%s'
                       ' where occur_ID=%s')
                data = (
                    json.dumps(acon_dict),
                    ac[0],
                )
                db.insert_update(sql, data)

        # Get home and school information
        school_house_sql = ('select parent_lat,parent_lon,'
                            'school_lat,school_lon from parent'
                            ' where parent_ID=%s')
        school_house_data = db.select(school_house_sql, (parent_ID, ))
        # Calculate the distance from school to the current location
        cd = CalcDistance([[
            school_house_data[0][0], school_house_data[0][1], 'name', 'address'
        ]])
        school_dis = cd.cal_rho(wio_lat, wio_lon)
        # Calculate the distance from home to the current location
        cd = CalcDistance([[
            school_house_data[0][2], school_house_data[0][3], 'name', 'address'
        ]])
        home_dis = cd.cal_rho(wio_lat, wio_lon)
        if float(school_dis[0]) >= 100 or float(home_dis[0]) >= 50:
            if area_flag == 1:
                area_flag = 4
            else:
                area_flag = 3

        # Anomaly detection
        knn = KNN2d(buzzer_num)
        result = knn.main(wio_lat, wio_lon)
        if result:
            # Send mail
            mail.ab_send_mail(pdata, nowtime, occur_address, buzzer_num)

        # Store the report in the anomaly-detection DB
        sql = "insert into regular_data value (%s,%s,%s,%s)"
        data = (
            buzzer_num,
            wio_lat,
            wio_lon,
            nowtime,
        )
        db.insert_update(sql, data)
        db.end_DB()
        return str(area_flag)

    # The buzzer button was pressed
    elif flag:
        sql = 'select * from occur'
        lendata = len(db.select(sql))
        # Save the incident in occur
        sql = ('insert into occur(occur_ID,parent_ID,buzzer_num,occur_lat,'
               'occur_lon,occur_time,occur_address)'
               ' value (%s,%s,%s,%s,%s,%s,%s)')
        data = (
            lendata + 1,
            parent_ID,
            buzzer_num,
            wio_lat,
            wio_lon,
            nowtime,
            occur_address,
        )
        db.insert_update(sql, data)
        # Get the occur_ID of the saved incident
        sql = ('select occur_ID from occur'
               ' where parent_ID=%s and occur_lat=%s and occur_lon=%s')
        data = (
            parent_ID,
            wio_lat,
            wio_lon,
        )
        occur_ID = db.select(sql, data)[-1][0]
        # Send mail to the guardian
        mail.pbz_send_mail(pdata, nowtime, occur_address, occur_ID,
                           parent_ID, buzzer_num, wio_lat, wio_lon)

        # Get safeguard information
        sql = ('select'
               ' safeguard_lat,safeguard_lon,'
               'safeguard_name,safeguard_mail_address'
               ' from safeguard')
        sdata = db.select(sql)
        cd = CalcDistance(sdata)
        # Get the list of houses within 500m of the occurrence point and notify them
        s_namail_list = cd.choice_senduser(cd.cal_rho(wio_lat, wio_lon))
        if s_namail_list:
            mail.sbz_send_mail(s_namail_list, nowtime, occur_address, wio_lat, wio_lon)

        # Get the diagonal corner coordinates (centered on the occurrence point)
        edge_ab = great_circle(distance=100 * math.sqrt(2),
                               azimuth=[135, -45],
                               latitude=wio_lat,
                               longitude=wio_lon)
        # Create the JSON that holds the concentration values
        create_json = cj.My_Json()
        coo_dict = create_json.concentration_json(edge_ab)
        # Save the JSON to Hazardous_area
        sql = ('insert into Hazardous_area'
               '(occur_ID,area_a_lat,area_a_lon,'
               'area_b_lat,area_b_lon,area_concentration)'
               'value (%s,%s,%s,%s,%s,%s)')
        data = (
            occur_ID,
            coo_dict['a'][0],
            coo_dict['a'][1],
            coo_dict['b'][0],
            coo_dict['b'][1],
            json.dumps(coo_dict['latlon']),
        )
        db.insert_update(sql, data)
        db.end_DB()
        return '2'

    return '0'
def make_i_j_grid(self, nc):
    # Compute lat/lon values
    """
    Iterates over the first row of points and calculates each column of
    lon, lat values. This matches the numpy axis order.

    upper_row (basis)
    o o o o o o
    . . . . . .
    . . . . . .
    . . . . . .
    . . . . . .
    . . . . . .

    column (first iteration)
    o . . . . .
    o . . . . .
    o . . . . .
    o . . . . .
    o . . . . .
    o . . . . .

    column (second iteration)
    . o . . . .
    . o . . . .
    . o . . . .
    . o . . . .
    . o . . . .
    . o . . . .
    """
    xs = np.ndarray(0)
    ys = np.ndarray(0)

    upper_row = great_circle(distance=[x * self.grid_spacing for x in range(self.size_x)],
                             azimuth=90.,
                             longitude=self.origin_x,
                             latitude=self.origin_y)
    for i, (upper_x, upper_y) in enumerate(zip(upper_row['longitude'], upper_row['latitude'])):
        column = great_circle(distance=[x * self.grid_spacing for x in range(self.size_y)],
                              azimuth=180.,
                              longitude=upper_x,
                              latitude=upper_y)
        xs = np.append(xs, column['longitude'])
        ys = np.append(ys, column['latitude'])

    lon_values = xs.reshape(self.size_x, self.size_y)
    lat_values = ys.reshape(self.size_x, self.size_y)

    nc.createDimension('x', self.size_x)
    nc.createDimension('y', self.size_y)

    lat = nc.createVariable('lat', 'f8', ('x', 'y',), zlib=True)
    lat.setncatts({
        'units': 'degrees_north',
        'standard_name': 'latitude',
        'long_name': 'latitude',
        'axis': 'Y'
    })
    lat[:] = lat_values

    lon = nc.createVariable('lon', 'f8', ('x', 'y',), zlib=True)
    lon.setncatts({
        'units': 'degrees_east',
        'standard_name': 'longitude',
        'long_name': 'longitude',
        'axis': 'X'
    })
    lon[:] = lon_values

    nc.setncatts({
        'geospatial_lat_min': lat_values.min(),
        'geospatial_lat_max': lat_values.max(),
        'geospatial_lon_min': lon_values.min(),
        'geospatial_lon_max': lon_values.max(),
    })
    nc.sync()
def new_point_given_distance_and_bearing(from_point, distance_NM, bearing_Deg):
    result = pygc.great_circle(distance=distance_NM * nm_to_metres,
                               azimuth=bearing_Deg,
                               latitude=from_point.y,
                               longitude=from_point.x)
    return Point(result['longitude'], result['latitude'])
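# Hedged usage sketch for new_point_given_distance_and_bearing, assuming
# shapely.geometry.Point (x=lon, y=lat) and that the module already defines
# nm_to_metres (1 nautical mile = 1852 m).
from shapely.geometry import Point

start = Point(-76.0, 40.0)                                      # lon, lat
dest = new_point_given_distance_and_bearing(start, 60.0, 90.0)  # 60 NM due east
print(dest.x, dest.y)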
def read_write_hd5(win):
    import struct

    def trig_area_par_read(nwin_p_proc, t0, q, iproc, hd5, win, L):
        import json
        time_win = [t0, t0 + timedelta(seconds=nwin_p_proc)]
        qbeg = '%d%02d%02d %02d:%02d:00' % (
            time_win[0].year, time_win[0].month, time_win[0].day,
            time_win[0].hour, time_win[0].minute)
        qend = '%d%02d%02d %02d:%02d:00' % (
            time_win[1].year, time_win[1].month, time_win[1].day,
            time_win[1].hour, time_win[1].minute)
        store = '../%s/Mdata.h5' % (win['reg'])
        print(qbeg, qend)

        sub_l = []
        offset_iproc = iproc * nbin_day * nbin_hr
        for j in range(nbin_day * nbin_hr):
            sub_l.append(manager.list(L[offset_iproc + j]))

        query_small = 'index>\"%s\" & index<\"%s\"' % (qbeg, qend)
        hdf_trig = read_hdf(hd5, key='Trigger', where=query_small)
        mask = ((hdf_trig.index >= time_win[0]) &
                (hdf_trig.index < time_win[1]) &
                (hdf_trig['latitude'] > win['lat'][0]) &
                (hdf_trig['latitude'] < win['lat'][1]) &
                (hdf_trig['longitude'] > win['lon'][0]) &
                (hdf_trig['longitude'] < win['lon'][1]))
        # NOTE
        tot = len(hdf_trig[mask].index)
        count = 0
        for item in zip(hdf_trig[mask].index, hdf_trig[mask].tt, hdf_trig[mask].deviceid):
            tt = item[0]
            tstamp = item[1]
            dev = item[2]
            t = (tt.weekday(), tt.hour)
            iday = t[0]
            ihour = t[1]
            year = tt.year
            month = tt.month
            day = tt.day
            hour = tt.hour
            path = '%s/%d/%02d/%02d/%02d:00:00/%s_%s.json.gz' % (
                dir_data, year, month, day, hour, dev, tstamp)
            try:
                inF = gzip.open(path, "rb")
            except:
                continue
            obj = json.loads(inF.read().decode('utf-8'))
            inF.close()
            # check=check_in_hd5(obj)
            # if check==1:
            #     continue
            # arr_dayHour[iproc*(nbin_day*nbin_hr)+iday*nbin_hr+ihour]+=1
            # if int(((count*100)/tot))>0 and count%100==0:
            #     print (iproc,count,tot,int(((count*100)/tot)),'%',iy,ix)
            stream0 = MakeData(obj)
            toffset = (iday * nbin_hr + ihour)
            sub_l[toffset].append([stream0[0], stream0[1]])
            count += 1
        # NOTE
        for j in range(nbin_day * nbin_hr):
            L[offset_iproc + j] = sub_l[j]
        del sub_l
        del hdf_trig, mask
        print(iproc, 'end', tot)

    # WRITE NAME OF HD5
    hd5 = "myshakeMeta.h5"
    if len(hd5) == 0:
        print('error: please enter path to new hd5 file... exit')
        exit()
    dir_data = "/data/sensordata/output"
    # poly_file='../aux/LA_metro.lonlat'
    # fp=open(poly_file,'r')

    # Number of threads and components
    nproc = 8
    ncomp = 3
    # Time window to process
    time_win = [datetime(2017, 9, 20), datetime(2017, 9, 23)]
    print('working on ' +
          '%d/%d/%d' % (time_win[0].year, time_win[0].month, time_win[0].day) +
          ' < time win. < ' +
          '%d/%d/%d' % (time_win[1].year, time_win[1].month, time_win[1].day))
    # Time window for each thread
    nsec = (time_win[1] - time_win[0]).days * 86400
    ave = int(nsec / nproc)
    extra = nsec % nproc

    manager = Manager()

    lon_org = win['lon'][0]
    lat_org = win['lat'][0]
    xmin = 0
    xmax = (vincenty((lat_org, lon_org), (lat_org, win['lon'][1])).kilometers)
    ymin = 0
    ymax = (vincenty((lat_org, lon_org), (win['lat'][1], lon_org)).kilometers)

    # Sub-window in spatial domain
    DX = 20
    reg_name = win['reg']
    nbinx_km = int((xmax - xmin) / DX) + 1
    nbiny_km = int((ymax - ymin) / DX) + 1
    nbin_day = 7
    nbin_hr = 24
    # ystart=25
    # xstart=45
    # print (nbinx_km,nbiny_km)

    fsize_old = 0
    st = Stream()
    # inv = Inventory(networks=[],source="MyShake01")
    # net = Network(code="BM",stations=[],description="smartphone array")
    sign_write_file = 0

    # Open and close files to remove old content
    fdata = open('test_data.bin', 'wb')
    fp_mdata = open('test_data.pyc', 'wb')
    fdata.close()
    fp_mdata.close()

    ystart = 0
    xstart = 0
    for iy in range(0, nbiny_km):
        for ix in range(0, nbinx_km):
            L = manager.list([[]] * nproc * nbin_day * nbin_hr)
            offset = time_win[0]
            lon0 = great_circle(distance=ix * DX * 1e3, azimuth=90,
                                latitude=lat_org, longitude=lon_org)['longitude']
            lon1 = great_circle(distance=(ix + 1) * DX * 1e3, azimuth=90,
                                latitude=lat_org, longitude=lon_org)['longitude']
            lat0 = great_circle(distance=iy * DX * 1e3, azimuth=0,
                                latitude=lat_org, longitude=lon_org)['latitude']
            lat1 = great_circle(distance=(iy + 1) * DX * 1e3, azimuth=0,
                                latitude=lat_org, longitude=lon_org)['latitude']
            clon = great_circle(distance=DX * (ix + 0.5) * 1e3, azimuth=90,
                                latitude=lat_org, longitude=lon_org)['longitude']
            clat = great_circle(distance=DX * (iy + 0.5) * 1e3, azimuth=0,
                                latitude=lat_org, longitude=lon_org)['latitude']
            # Spatial window to process
            win_search = {
                'lon': [lon0, lon1],
                'lat': [lat0, lat1],
                'reg': reg_name
            }
            print(iy, ix, win, lon0, lon1, lat0, lat1)
            # arr_dayHour=Array(c_double , nproc*nbin_day*nbin_hr , lock=multiprocessing.Lock())
            jobs = []
            for iproc in range(0, nproc):
                if iproc < extra:
                    nwin_p_proc = ave + 1
                else:
                    nwin_p_proc = ave
                q = multiprocessing.Queue()
                p = multiprocessing.Process(target=trig_area_par_read,
                                            args=(nwin_p_proc, offset, q, iproc,
                                                  hd5, win_search, L))
                jobs.append([p, q])
                p.start()
                # print(offset,offset+timedelta(seconds=nwin_p_proc))
                offset += timedelta(seconds=nwin_p_proc)
            # NOTE
            for ijob, j in enumerate(jobs):
                j[0].join()

            print('start loop on L')
            starttime = time.time()
            # NOTE
            Mdata = []
            p = []
            fdata = open('test_data.bin', 'ab')
            fp_mdata = open('test_data.pyc', 'ab')
            for icount, item in enumerate(L):
                for obj in item:
                    Data = np.asarray([])
                    nbytes = 0
                    for k in range(0, 4):
                        Data = np.concatenate((Data, obj[1][k]))
                    # NOTE
                    p = struct.pack(len(Data) * 'f', *(Data.astype('f4')))
                    fdata.write(p)
                    nbytes = len(p)
                    obj[0]['nbytes_offset'] = nbytes
                    Mdata.append(obj[0])
            print('end loop on L time=%s sec.' % (time.time() - starttime))
            print(len(Mdata))
            fdata.close()
            if len(Mdata) > 0:
                pickle.dump(Mdata, fp_mdata)
            fp_mdata.close()
            del L, Mdata, p