def regenerated_original_images(galaxy_name, run_id, galaxy_id, s3_helper, connection):
    """
    Regenerate the PNG images for a galaxy from its FITS file stored in S3.

    :param galaxy_name: the galaxy name
    :param run_id: the run id the galaxy belongs to
    :param galaxy_id: the galaxy id
    :param s3_helper: the S3 helper used to reach the buckets
    :param connection: the database connection
    :return: True if we succeed, False otherwise
    """
    # Fetch the FITS file key from the saved-files bucket
    bucket = s3_helper.get_bucket(get_saved_files_bucket())
    fits_key = bucket.get_key('{0}/{0}.fits'.format(galaxy_name))
    if fits_key is None:
        LOG.error('The fits file does not seem to exists')
        return False

    # Download to a temporary file so the image builder can read it
    path_name = get_temp_file('fits')
    fits_key.get_contents_to_filename(path_name)

    galaxy_file_name = get_galaxy_file_name(galaxy_name, run_id, galaxy_id)
    succeeded = False
    try:
        FitsImage(connection).build_image(path_name, galaxy_file_name, galaxy_id, get_galaxy_image_bucket())
        succeeded = True
    except Exception:
        LOG.exception('Major error')
        succeeded = False
    finally:
        # Always clean up the temporary download
        os.remove(path_name)
    return succeeded
def regenerated_original_images(galaxy_name, run_id, galaxy_id, s3Helper, connection):
    """
    Regenerate the PNG images for a galaxy from its FITS file stored in S3.

    :param galaxy_name: the galaxy name
    :param run_id: the run id the galaxy belongs to
    :param galaxy_id: the galaxy id
    :param s3Helper: the S3 helper used to reach the buckets
    :param connection: the database connection
    :return: True if we succeed, False otherwise
    """
    all_ok = False

    # Locate the FITS file in the files bucket
    key_name = '{0}/{0}.fits'.format(galaxy_name)
    key = s3Helper.get_bucket(get_files_bucket()).get_key(key_name)
    if key is None:
        LOG.error('The fits file does not seem to exists')
        return all_ok

    # Pull the FITS file down to a temp file for processing
    path_name = get_temp_file('fits')
    key.get_contents_to_filename(path_name)

    try:
        image_builder = FitsImage(connection)
        image_builder.build_image(path_name,
                                  get_galaxy_file_name(galaxy_name, run_id, galaxy_id),
                                  galaxy_id,
                                  get_galaxy_image_bucket())
        all_ok = True
    except Exception:
        LOG.exception('Major error')
        all_ok = False
    finally:
        # Remove the temp file whether or not the build succeeded
        os.remove(path_name)
    return all_ok
def remove_s3_files(galaxy_name, run_id, galaxy_id):
    """
    Remove the galaxy's files from both S3 buckets (images first, then files).

    :param galaxy_name: the galaxy name
    :param run_id: the run id
    :param galaxy_id: the galaxy id
    :return:
    """
    helper = S3Helper()
    for bucket_name in (get_galaxy_image_bucket(), get_files_bucket()):
        remove_files_with_key(helper.get_bucket(bucket_name), galaxy_name, run_id, galaxy_id)
def migrate_files(connection):
    """
    Migrate the image and HDF5 files to S3.

    :param connection: the database connection
    """
    LOG.info('Migrating the files')

    helper = S3Helper()
    files_bucket = get_files_bucket()
    migrate_image_files(connection, get_galaxy_image_bucket(), files_bucket, helper)
    migrate_hdf5_files(connection, files_bucket, helper)
def data_string(connection, user, galaxies):
    """
    Build the Docmosis request body for a detailed user report covering
    every galaxy the user has worked on.

    :param connection: the database connection
    :param user: the user the report is for
    :param galaxies: the galaxies the user has worked on
    :return: the request body as a single string
    """
    s3_connection = get_s3_connection()
    bucket = get_bucket(s3_connection, get_galaxy_image_bucket())
    has_param = 1

    # Header / envelope for the Docmosis call
    parts = ['{\n',
             '"accessKey":"' + DOCMOSIS_KEY + '",\n',
             '"templateName":"' + DOCMOSIS_TEMPLATE + '",\n',
             '"outputName":"DetailedUserReport.pdf",\n',
             '"storeTo":"mailto:' + user.email + '",\n',
             '"mailSubject":"theSkyNet POGS - Detailed User Report",\n',
             '"data":{\n',
             '"user":"******",\n',
             '"date":"' + str(datetime.date.today()) + '",\n',
             '"galaxy":[\n']

    # Loop through galaxies user has worked on.
    for galaxy in galaxies:
        galaxy_key = get_galaxy_file_name(galaxy.name, galaxy.run_id, galaxy.galaxy_id)
        parts.append('{\n')
        parts.append('"galid":"' + galaxy.name + ' (version ' + str(galaxy.version_number) + ')",\n')
        # The four per-filter images followed by their labels
        for number in range(1, 5):
            parts.append('"pic{0}":"image:base64:'.format(number) +
                         user_galaxy_image(bucket, galaxy_key, connection, user.id, galaxy.galaxy_id, number) + '",\n')
        for number in range(1, 5):
            parts.append('"pic{0}_label":"'.format(number) +
                         galaxy_filter_label(connection, galaxy.galaxy_id, number) + '",\n')

        # Only if there is parameter images
        if has_param:
            parts.append('"add":"true",\n')
            for number, parameter in [(5, 'mu'), (6, 'm'), (7, 'ldust'), (8, 'sfr')]:
                parts.append('"pic{0}":"image:base64:'.format(number) +
                             galaxy_parameter_image(bucket, galaxy_key, parameter) + '",\n')

        parts.append('"gatype":"' + galaxy.galaxy_type + '",\n')
        parts.append('"gars":"' + str(galaxy.redshift) + '",\n')
        parts.append('"gades":"' + galaxy.design + '",\n')
        parts.append('"gara_eqj2000":"' + str(galaxy.ra_eqj2000) + '",\n')
        parts.append('"gadec_eqj2000":"' + str(galaxy.dec_eqj2000) + '",\n')
        parts.append('"gara_eqb1950":"' + str(galaxy.ra_eqb1950) + '",\n')
        parts.append('"gadec_eqb1950":"' + str(galaxy.dec_eqb1950) + '",\n')
        parts.append('},\n')

    parts.append(']\n')
    parts.append('}\n')
    parts.append('}\n')
    return ''.join(parts)
def image_files_exist(galaxy_name, run_id, galaxy_id, s3Helper):
    """
    Check whether every expected image file for the galaxy exists in S3.

    :param galaxy_name: the galaxy name
    :param run_id: the run id
    :param galaxy_id: the galaxy id
    :param s3Helper: the S3 helper used to reach the bucket
    :return: True only when all image files are present
    """
    bucket = s3Helper.get_bucket(get_galaxy_image_bucket())
    folder = get_galaxy_file_name(galaxy_name, run_id, galaxy_id)
    # all() short-circuits on the first missing key, like the explicit loop
    return all(bucket.get_key('{0}/{1}'.format(folder, image_file)) is not None
               for image_file in IMAGE_FILES)
def image_files_exist(galaxy_name, run_id, galaxy_id, s3_helper):
    """
    Check whether every expected image file for the galaxy exists in S3.

    :param galaxy_name: the galaxy name
    :param run_id: the run id
    :param galaxy_id: the galaxy id
    :param s3_helper: the S3 helper used to reach the bucket
    :return: True only when all image files are present
    """
    image_bucket = s3_helper.get_bucket(get_galaxy_image_bucket())
    prefix = get_galaxy_file_name(galaxy_name, run_id, galaxy_id)
    for expected_file in IMAGE_FILES:
        # A missing key means the image set is incomplete - bail early
        if image_bucket.get_key('{0}/{1}'.format(prefix, expected_file)) is None:
            return False
    # if we get here we found them all
    return True
def build_png_image_ami():
    """
    Build the four PNG images (mu, m, ldust, sfr) for every galaxy whose
    area data has been updated since its image was last built, and upload
    them to S3.

    :return: None
    """
    # First check the galaxy exists in the database
    engine = create_engine(DB_LOGIN)
    connection = engine.connect()
    # Initialised before the try so the final LOG call can never hit a
    # NameError if something raises before the first galaxy is processed.
    galaxy_count = 0
    try:
        query = select([GALAXY]).distinct().where(and_(AREA.c.galaxy_id == GALAXY.c.galaxy_id, AREA.c.update_time >= GALAXY.c.image_time))
        s3helper = S3Helper()
        bucket_name = get_galaxy_image_bucket()

        # Start the shutdown signal poller to check when this instance must close
        start_poll()

        galaxy_list = list(connection.execute(query))
        total_galaxies = len(galaxy_list)
        processed_galaxies = 0
        processed_print_point = 50

        for galaxy in galaxy_list:
            if processed_galaxies == processed_print_point:
                LOG.info('{0} out of {1} galaxies processed'.format(processed_galaxies, total_galaxies))
                processed_print_point += 50
            processed_galaxies += 1

            LOG.info('Working on galaxy %s', galaxy[GALAXY.c.name])
            array = numpy.empty((galaxy[GALAXY.c.dimension_y], galaxy[GALAXY.c.dimension_x], len(PNG_IMAGE_NAMES)), dtype=numpy.float)
            array.fill(numpy.NaN)

            # Return the rows.
            # BUG FIX: the original combined the two filters with the Python
            # 'and' operator, which does not build a SQL AND between
            # SQLAlchemy clause elements - and_() is required.
            pixel_count = 0
            pixels_processed = 0
            pixel_query = select([PIXEL_RESULT]).where(and_(PIXEL_RESULT.c.galaxy_id == galaxy[GALAXY.c.galaxy_id], PIXEL_RESULT.c.x > -1))
            for row in connection.execute(pixel_query):
                row__x = row[PIXEL_RESULT.c.x]
                row__y = row[PIXEL_RESULT.c.y]
                pixel_count += 1
                if row[PIXEL_RESULT.c.workunit_id] is not None:
                    pixels_processed += 1

                # Defend against bad values
                if row[PIXEL_RESULT.c.mu] is not None:
                    array[row__y, row__x, 0] = row[PIXEL_RESULT.c.mu]
                if row[PIXEL_RESULT.c.m] is not None:
                    array[row__y, row__x, 1] = row[PIXEL_RESULT.c.m]
                if row[PIXEL_RESULT.c.ldust] is not None:
                    array[row__y, row__x, 2] = row[PIXEL_RESULT.c.ldust]
                if row[PIXEL_RESULT.c.sfr] is not None:
                    # the SFR is a log
                    array[row__y, row__x, 3] = math.pow(10, row[PIXEL_RESULT.c.sfr])

            connection.execute(GALAXY.update()
                               .where(GALAXY.c.galaxy_id == galaxy[GALAXY.c.galaxy_id])
                               .values(image_time=datetime.datetime.now(),
                                       pixel_count=pixel_count,
                                       pixels_processed=pixels_processed))
            galaxy_count += 1

            # Now write the files
            black_rgb = (0, 0, 0)
            for name in PNG_IMAGE_NAMES:
                height = galaxy[GALAXY.c.dimension_y]
                width = galaxy[GALAXY.c.dimension_x]
                idx = 0
                if name == 'mu':
                    idx = 0
                elif name == 'm':
                    idx = 1
                elif name == 'ldust':
                    idx = 2
                elif name == 'sfr':
                    idx = 3

                # Collect the positive, non-NaN values to derive the scaling.
                # NOTE(review): the ranges stop at width-1 / height-1, so the
                # last row and column are never scanned or painted - preserved
                # as-is; confirm whether that is intentional.
                values = []
                for x in range(0, width - 1):
                    for y in range(0, height - 1):
                        value = array[y, x, idx]
                        if not math.isnan(value) and value > 0:
                            values.append(value)

                values.sort()
                # Clip at the 99.5th percentile when there is enough data,
                # otherwise fall back to the max (or 1 for an empty set).
                if len(values) > 1000:
                    top_count = int(len(values) * 0.005)
                    top_value = values[len(values) - top_count]
                elif len(values) > 0:
                    top_value = values[len(values) - 1]
                else:
                    top_value = 1
                if len(values) > 1:
                    median_value = values[int(len(values) / 2)]
                elif len(values) > 0:
                    median_value = values[0]
                else:
                    median_value = 1

                sigma = 1 / median_value
                multiplier = 255.0 / math.asinh(top_value * sigma)

                image = Image.new("RGB", (width, height), black_rgb)
                for x in range(0, width - 1):
                    for y in range(0, height - 1):
                        value = array[y, x, idx]
                        if not math.isnan(value) and value > 0:
                            # asinh stretch mapped onto the FIRE colour table
                            value = int(math.asinh(value * sigma) * multiplier)
                            if value > 255:
                                value = 255
                            red = FIRE_R[value]
                            green = FIRE_G[value]
                            blue = FIRE_B[value]
                            image.putpixel((x, height - y - 1), (red, green, blue))

                file_name = '{0}/image.png'.format(POGS_TMP)
                image.save(file_name)
                s3helper.add_file_to_bucket(bucket_name,
                                            get_build_png_name(get_galaxy_file_name(galaxy[GALAXY.c.name], galaxy[GALAXY.c.run_id], galaxy[GALAXY.c.galaxy_id]), name),
                                            file_name)

            if shutdown() is True:
                LOG.info('Spot Instance Terminate Notice received, build_png_image is shutting down')
                break

    # Narrowed from a bare except: so SystemExit/KeyboardInterrupt propagate
    except Exception:
        LOG.exception('An exception occurred.')
    finally:
        connection.close()
    LOG.info('Built images for %d galaxies', galaxy_count)
for extension in ['fits', 'hdf5']: copy_files(old_name, new_name, run_id, galaxy_id, extension, bucket_files) remove_files_folder(old_name, run_id, galaxy_id, bucket_files) for file_name in [ 'colour_1.png', 'colour_2.png', 'colour_3.png', 'colour_4.png', 'ldust.png', 'm.png', 'mu.png', 'sfr.png', 'tn_colour_1.png' ]: copy_galaxy_images(old_name, new_name, run_id, galaxy_id, file_name, bucket_galaxy_image) remove_galaxy_images_folder(old_name, run_id, galaxy_id, bucket_galaxy_image) if DRY_RUN: LOG.info('Updating {0} to {1}'.format(galaxy_id, new_name)) else: connection.execute(GALAXY.update().where( GALAXY.c.galaxy_id == galaxy_id).values(name=new_name)) for galaxy in connection.execute(select([GALAXY])): s3helper = S3Helper() bucket_files = s3helper.get_bucket(get_files_bucket()) bucket_galaxy_image = s3helper.get_bucket(get_galaxy_image_bucket()) if needs_fixing(galaxy[GALAXY.c.name]): fix_galaxy(galaxy, bucket_files, bucket_galaxy_image) connection.close()
def process_file(self, registration):
    """
    Process a registration: load the FITS file, insert the galaxy into the
    database, break it up into work units, build the images and copy the
    FITS files to S3.

    :param registration: a row from the REGISTER table describing the galaxy
    :return: tuple of (work units added, pixel count); (0, 0) when the
        registration cannot be processed
    """
    self._filename = registration[REGISTER.c.filename]
    self._galaxy_name = registration[REGISTER.c.galaxy_name]
    self._galaxy_type = registration[REGISTER.c.galaxy_type]
    self._priority = registration[REGISTER.c.priority]
    self._redshift = registration[REGISTER.c.redshift]
    self._run_id = registration[REGISTER.c.run_id]
    self._sigma = registration[REGISTER.c.sigma]
    self._sigma_filename = registration[REGISTER.c.sigma_filename]

    # Have we files that we can use for this?
    self._rounded_redshift = self._get_rounded_redshift()
    if self._rounded_redshift is None:
        LOG.error('No models matching the redshift of %.4f', self._redshift)
        # BUG FIX: the other failure path and the success path both return a
        # 2-tuple; this path returned a bare 0, which would break callers
        # that unpack the result.
        return 0, 0

    self._hdu_list = pyfits.open(self._filename, memmap=True)
    self._layer_count = len(self._hdu_list)

    # Do we need to open and sort the S/N Ratio file
    if self._sigma_filename is not None:
        self._sigma = 0.0
        self._signal_noise_hdu = pyfits.open(self._sigma_filename, memmap=True)
        if self._layer_count != len(self._signal_noise_hdu):
            LOG.error('The layer counts do not match %d vs %d', self._layer_count, len(self._signal_noise_hdu))
            return 0, 0
    else:
        self._sigma = float(self._sigma)

    self._end_y = self._hdu_list[0].data.shape[0]
    self._end_x = self._hdu_list[0].data.shape[1]

    LOG.info("Image dimensions: %(x)d x %(y)d x %(z)d => %(pix).2f Mpixels" % {'x': self._end_x, 'y': self._end_y, 'z': self._layer_count, 'pix': self._end_x * self._end_y / 1000000.0})

    # Get the flops estimate amd cobblestone factor
    run = self._connection.execute(select([RUN]).where(RUN.c.run_id == self._run_id)).first()
    self._fpops_est_per_pixel = run[RUN.c.fpops_est]
    self._cobblestone_scaling_factor = run[RUN.c.cobblestone_factor]

    # Create and save the object
    datetime_now = datetime.now()
    result = self._connection.execute(GALAXY.insert().values(name=self._galaxy_name,
                                                             dimension_x=self._end_x,
                                                             dimension_y=self._end_y,
                                                             dimension_z=self._layer_count,
                                                             redshift=self._redshift,
                                                             sigma=self._sigma,
                                                             create_time=datetime_now,
                                                             image_time=datetime_now,
                                                             galaxy_type=self._galaxy_type,
                                                             ra_cent=0,
                                                             dec_cent=0,
                                                             pixel_count=0,
                                                             pixels_processed=0,
                                                             run_id=self._run_id))
    self._galaxy_id = result.inserted_primary_key[0]
    LOG.info("Writing %s to database", self._galaxy_name)

    # Store the fits header
    self._store_fits_header()

    # Get the filters we're using for this run and sort the layers
    self._get_filters_sort_layers()

    # Build the template file we need if necessary
    self._build_template_file()

    # Copy the filter and model files we need
    self._copy_important_files()

    # Now break up the galaxy into chunks
    self._break_up_galaxy()
    self._connection.execute(GALAXY.update().where(GALAXY.c.galaxy_id == self._galaxy_id).values(pixel_count=self._pixel_count))

    LOG.info('Building the images')
    galaxy_file_name = get_galaxy_file_name(self._galaxy_name, self._run_id, self._galaxy_id)
    s3helper = S3Helper()
    image = FitsImage(self._connection)
    image.build_image(self._filename, galaxy_file_name, self._galaxy_id, get_galaxy_image_bucket())

    # Copy the fits file to S3 - renamed to make it unique
    bucket_name = get_files_bucket()
    s3helper.add_file_to_bucket(bucket_name, get_key_fits(self._galaxy_name, self._run_id, self._galaxy_id), self._filename)
    if self._sigma_filename is not None:
        s3helper.add_file_to_bucket(bucket_name, get_key_sigma_fits(self._galaxy_name, self._run_id, self._galaxy_id), self._sigma_filename)
    return self._work_units_added, self._pixel_count
82, 87, 91, 96, 100, 104, 108, 113, 117, 121, 125, 130, 134, 138, 143, 147, 151, 156, 160, 165, 168, 171, 175, 178, 181, 185, 188, 192, 195, 199, 202, 206, 209, 213, 216, 220, 220, 221, 222, 223, 224, 225, 226, 227, 224, 222, 220, 218, 216, 214, 212, 210, 206, 202, 199, 195, 191, 188, 184, 181, 177, 173, 169, 166, 162, 158, 154, 151, 147, 143, 140, 136, 132, 129, 125, 122, 118, 114, 111, 107, 103, 100, 96, 93, 89, 85, 82, 78, 74, 71, 67, 64, 60, 56, 53, 49, 45, 42, 38, 35, 31, 27, 23, 20, 16, 12, 8, 5, 4, 3, 3, 2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 8, 13, 17, 21, 26, 30, 35, 42, 50, 58, 66, 74, 82, 90, 98, 105, 113, 121, 129, 136, 144, 152, 160, 167, 175, 183, 191, 199, 207, 215, 223, 227, 231, 235, 239, 243, 247, 251, 255, 255, 255, 255, 255, 255, 255, 255] fits_image = fitsimage.FitsImage(connection) galaxy_count = 0 s3helper = S3Helper() bucket_name = get_galaxy_image_bucket() for galaxy in connection.execute(query): LOG.info('Working on galaxy %s', galaxy[GALAXY.c.name]) array = numpy.empty((galaxy[GALAXY.c.dimension_y], galaxy[GALAXY.c.dimension_x], len(PNG_IMAGE_NAMES)), dtype=numpy.float) array.fill(numpy.NaN) # Return the rows pixel_count = 0 pixels_processed = 0 for row in connection.execute(select([PIXEL_RESULT]).where(PIXEL_RESULT.c.galaxy_id == galaxy[GALAXY.c.galaxy_id])): row__x = row[PIXEL_RESULT.c.x] row__y = row[PIXEL_RESULT.c.y] pixel_count += 1 if row[PIXEL_RESULT.c.mu] is not None and row[PIXEL_RESULT.c.m] is not None and row[PIXEL_RESULT.c.ldust] is not None and row[PIXEL_RESULT.c.sfr] is not None: pixels_processed += 1
def data_string(connection, user, galaxies):
    """
    Build the Docmosis request body for a detailed user report covering
    every galaxy the user has worked on.

    :param connection: the database connection
    :param user: the user the report is for
    :param galaxies: the galaxies the user has worked on
    :return: the request body as a single string
    """
    s3_connection = get_s3_connection()
    bucket = get_bucket(s3_connection, get_galaxy_image_bucket())
    has_param = 1

    # Prep. data for send to docsmosis
    out = []
    add = out.append
    add('{\n')
    add('"accessKey":"' + DOCMOSIS_KEY + '",\n')
    add('"templateName":"' + DOCMOSIS_TEMPLATE + '",\n')
    add('"outputName":"DetailedUserReport.pdf",\n')
    add('"storeTo":"mailto:' + user.email + '",\n')
    add('"mailSubject":"theSkyNet POGS - Detailed User Report",\n')
    add('"data":{\n')
    add('"user":"******",\n')
    add('"date":"' + str(datetime.date.today()) + '",\n')
    add('"galaxy":[\n')

    # Loop through galaxies user has worked on.
    for galaxy in galaxies:
        galaxy_key = get_galaxy_file_name(galaxy.name, galaxy.run_id, galaxy.galaxy_id)
        add('{\n')
        add('"galid":"' + galaxy.name + ' (version ' + str(galaxy.version_number) + ')",\n')
        # Four per-filter images, then their labels
        for pic_number in (1, 2, 3, 4):
            add('"pic{0}":"image:base64:'.format(pic_number) +
                user_galaxy_image(bucket, galaxy_key, connection, user.id, galaxy.galaxy_id, pic_number) + '",\n')
        for pic_number in (1, 2, 3, 4):
            add('"pic{0}_label":"'.format(pic_number) +
                galaxy_filter_label(connection, galaxy.galaxy_id, pic_number) + '",\n')

        # Only if there is parameter images
        if has_param:
            add('"add":"true",\n')
            for pic_number, parameter_name in ((5, 'mu'), (6, 'm'), (7, 'ldust'), (8, 'sfr')):
                add('"pic{0}":"image:base64:'.format(pic_number) +
                    galaxy_parameter_image(bucket, galaxy_key, parameter_name) + '",\n')

        add('"gatype":"' + galaxy.galaxy_type + '",\n')
        add('"gars":"' + str(galaxy.redshift) + '",\n')
        add('"gades":"' + galaxy.design + '",\n')
        add('"gara_eqj2000":"' + str(galaxy.ra_eqj2000) + '",\n')
        add('"gadec_eqj2000":"' + str(galaxy.dec_eqj2000) + '",\n')
        add('"gara_eqb1950":"' + str(galaxy.ra_eqb1950) + '",\n')
        add('"gadec_eqb1950":"' + str(galaxy.dec_eqb1950) + '",\n')
        add('},\n')

    add(']\n')
    add('}\n')
    add('}\n')
    return ''.join(out)
:return: """ old_name = galaxy[GALAXY.c.name] new_name = old_name[:-1] galaxy_id = galaxy[GALAXY.c.galaxy_id] run_id = galaxy[GALAXY.c.run_id] LOG.info('Fixing {0}({1}) t0 {2}'.format(old_name, galaxy_id, new_name)) for extension in ['fits', 'hdf5']: copy_files(old_name, new_name, run_id, galaxy_id, extension, bucket_files) remove_files_folder(old_name, run_id, galaxy_id, bucket_files) for file_name in ['colour_1.png', 'colour_2.png', 'colour_3.png', 'colour_4.png', 'ldust.png', 'm.png', 'mu.png', 'sfr.png', 'tn_colour_1.png']: copy_galaxy_images(old_name, new_name, run_id, galaxy_id, file_name, bucket_galaxy_image) remove_galaxy_images_folder(old_name, run_id, galaxy_id, bucket_galaxy_image) if DRY_RUN: LOG.info('Updating {0} to {1}'.format(galaxy_id, new_name)) else: connection.execute(GALAXY.update().where(GALAXY.c.galaxy_id == galaxy_id).values(name=new_name)) for galaxy in connection.execute(select([GALAXY])): s3helper = S3Helper() bucket_files = s3helper.get_bucket(get_files_bucket()) bucket_galaxy_image = s3helper.get_bucket(get_galaxy_image_bucket()) if needs_fixing(galaxy[GALAXY.c.name]): fix_galaxy(galaxy, bucket_files, bucket_galaxy_image) connection.close()
210, 206, 202, 199, 195, 191, 188, 184, 181, 177, 173, 169, 166, 162, 158, 154, 151, 147, 143, 140, 136, 132, 129, 125, 122, 118, 114, 111, 107, 103, 100, 96, 93, 89, 85, 82, 78, 74, 71, 67, 64, 60, 56, 53, 49, 45, 42, 38, 35, 31, 27, 23, 20, 16, 12, 8, 5, 4, 3, 3, 2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 8, 13, 17, 21, 26, 30, 35, 42, 50, 58, 66, 74, 82, 90, 98, 105, 113, 121, 129, 136, 144, 152, 160, 167, 175, 183, 191, 199, 207, 215, 223, 227, 231, 235, 239, 243, 247, 251, 255, 255, 255, 255, 255, 255, 255, 255 ] fits_image = fitsimage.FitsImage(connection) galaxy_count = 0 s3helper = S3Helper() bucket_name = get_galaxy_image_bucket() for galaxy in connection.execute(query): LOG.info('Working on galaxy %s', galaxy[GALAXY.c.name]) array = numpy.empty( (galaxy[GALAXY.c.dimension_y], galaxy[GALAXY.c.dimension_x], len(PNG_IMAGE_NAMES)), dtype=numpy.float) array.fill(numpy.NaN) # Return the rows pixel_count = 0 pixels_processed = 0 for row in connection.execute( select([ PIXEL_RESULT ]).where(PIXEL_RESULT.c.galaxy_id == galaxy[GALAXY.c.galaxy_id])):