def retrieve(self, filetype, run, camcol, field=None, band=None,
             skipExisting=True, tempsuffix='.tmp', rerun=None):
    '''
    Download one SDSS data product and return its local path.

    filetype: product name key (eg 'frame'), used to look up the path,
        URL, download suffix, and any post-processing command.
    skipExisting: if True, return immediately when the file already exists.
    tempsuffix: suffix appended while downloading, so partial downloads
        never sit at the final filename; pass None to disable.

    Returns the output filename, or None on failure.
    '''
    outfn = self.getPath(filetype, run, camcol, field, band, rerun=rerun)
    print('Checking for file', outfn)
    if outfn is None:
        return None
    if skipExisting and os.path.exists(outfn):
        #print('Exists')
        return outfn
    # Make sure the destination directory exists before downloading.
    outdir = os.path.dirname(outfn)
    if not os.path.exists(outdir):
        try:
            os.makedirs(outdir)
        except OSError:
            # The directory may have been created concurrently; if it is
            # genuinely missing, the download below fails loudly anyway.
            # (Was a bare 'except:', which also hid real errors.)
            pass
    url = self.get_url(filetype, run, camcol, field, band=band, rerun=rerun)
    #print 'Did not find file:', outfn
    print('Retrieving from URL:', url)
    if self.curl:
        cmd = "curl -o '%(outfn)s' '%(url)s'"
    else:
        cmd = "wget --continue -nv -O %(outfn)s '%(url)s'"
    # suffix to add to the downloaded filename
    suff = self.dassuffix.get(filetype, '')
    oo = outfn + suff
    if tempsuffix is not None:
        oo += tempsuffix
    cmd = cmd % dict(outfn=oo, url=url)
    self.logger.debug('cmd: %s' % cmd)
    (rtn, out, err) = run_command(cmd)
    if rtn:
        print('Command failed: command', cmd)
        print('Output:', out)
        print('Error:', err)
        print('Return val:', rtn)
        return None
    if tempsuffix is not None:
        # Download complete: move the temp file into its final place.
        # self.logger.debug('Renaming %s to %s' % (oo, outfn+suff))
        os.rename(oo, outfn + suff)
    if filetype in self.processcmds:
        # Post-process the download (eg decompression) into outfn.
        cmd = self.processcmds[filetype]
        cmd = cmd % dict(input=outfn + suff, output=outfn)
        self.logger.debug('cmd: %s' % cmd)
        (rtn, out, err) = run_command(cmd)
        if rtn:
            print('Command failed: command', cmd)
            print('Output:', out)
            print('Error:', err)
            print('Return val:', rtn)
            return None
    return outfn
def new_fits_file(req, jobid=None):
    # Django view: return the job's original image as a FITS file with the
    # solved WCS headers merged in, served as an HTTP attachment.
    job = get_object_or_404(Job, pk=jobid)
    wcsfn = job.get_wcs_file()
    img = job.user_image.image
    df = img.disk_file
    infn = df.get_path()
    if df.is_fits_image():
        fitsinfn = infn
    else:
        ## FIXME -- could convert other formats to FITS...
        # Go via PNM: image2pnm.py then an-pnmtofits.
        pnmfn = get_temp_file()
        fitsinfn = get_temp_file()
        cmd = 'image2pnm.py -i %s -o %s && an-pnmtofits %s > %s' % (infn, pnmfn, pnmfn, fitsinfn)
        logmsg('Running: ' + cmd)
        (rtn, out, err) = run_command(cmd)
        if rtn:
            logmsg('out: ' + out)
            logmsg('err: ' + err)
            return HttpResponse('image2pnm.py failed: out ' + out + ', err ' + err)
    outfn = get_temp_file()
    # new-wcs merges the WCS cards into the image's FITS header.
    cmd = 'new-wcs -i %s -w %s -o %s -d' % (fitsinfn, wcsfn, outfn)
    logmsg('Running: ' + cmd)
    (rtn, out, err) = run_command(cmd)
    if rtn:
        logmsg('out: ' + out)
        logmsg('err: ' + err)
        return HttpResponse('plot failed: out ' + out + ', err ' + err)
    res = HttpResponse(open(outfn))
    res['Content-Type'] = 'application/fits'
    res['Content-Length'] = file_size(outfn)
    res['Content-Disposition'] = 'attachment; filename=new-image.fits'
    return res
def new_fits_file(req, jobid=None):
    """Django view: serve the job's image as a FITS attachment with the
    solved WCS headers merged in via the 'new-wcs' tool."""
    job = get_object_or_404(Job, pk=jobid)
    wcsfn = job.get_wcs_file()
    df = job.user_image.image.disk_file
    infn = df.get_path()
    if not df.is_fits_image():
        ## FIXME -- could convert other formats to FITS...
        pnmfn = get_temp_file()
        fitsinfn = get_temp_file()
        cmd = 'image2pnm.py -i %s -o %s && an-pnmtofits %s > %s' % (
            infn, pnmfn, pnmfn, fitsinfn)
        logmsg('Running: ' + cmd)
        rtn, out, err = run_command(cmd)
        if rtn:
            logmsg('out: ' + out)
            logmsg('err: ' + err)
            return HttpResponse('image2pnm.py failed: out ' + out +
                                ', err ' + err)
    else:
        fitsinfn = infn
    outfn = get_temp_file()
    cmd = 'new-wcs -i %s -w %s -o %s -d' % (fitsinfn, wcsfn, outfn)
    logmsg('Running: ' + cmd)
    rtn, out, err = run_command(cmd)
    if rtn:
        logmsg('out: ' + out)
        logmsg('err: ' + err)
        return HttpResponse('plot failed: out ' + out + ', err ' + err)
    response = HttpResponse(open(outfn))
    response['Content-Type'] = 'application/fits'
    response['Content-Length'] = file_size(outfn)
    response['Content-Disposition'] = 'attachment; filename=new-image.fits'
    return response
def retrieve(self, filetype, run, camcol, field=None, band=None,
             skipExisting=True, tempsuffix='.tmp'):
    # Download an SDSS DAS data product (Python 2 version).
    # Returns the local filename, or None on failure.
    outfn = self.getPath(filetype, run, camcol, field, band)
    if outfn is None:
        return None
    if skipExisting and os.path.exists(outfn):
        return outfn
    print 'Did not find file:', outfn
    url = self.get_url(filetype, run, camcol, field, band=band)
    #print 'URL:', url
    if self.curl:
        cmd = "curl -o '%(outfn)s' '%(url)s'"
    else:
        cmd = "wget --continue -nv -O %(outfn)s '%(url)s'"
    # suffix to add to the downloaded filename
    suff = self.dassuffix.get(filetype, '')
    oo = outfn + suff
    if tempsuffix is not None:
        oo += tempsuffix
    cmd = cmd % dict(outfn=oo, url=url)
    self.logger.debug('cmd: %s' % cmd)
    (rtn, out, err) = run_command(cmd)
    if rtn:
        print 'Command failed: command', cmd
        print 'Output:', out
        print 'Error:', err
        print 'Return val:', rtn
        return None
    if tempsuffix is not None:
        # Download complete: move the temp file into place.
        os.rename(oo, outfn + suff)
    if filetype in self.processcmds:
        # Post-process (eg decompress) the download into outfn.
        cmd = self.processcmds[filetype]
        cmd = cmd % dict(input=outfn + suff, output=outfn)
        self.logger.debug('cmd: %s' % cmd)
        (rtn, out, err) = run_command(cmd)
        if rtn:
            print 'Command failed: command', cmd
            print 'Output:', out
            print 'Error:', err
            print 'Return val:', rtn
            return None
    return outfn
def retrieve(self, filetype, run, camcol, field=None, band=None,
             skipExisting=True, tempsuffix='.tmp'):
    # Download an SDSS DAS data product (Python 2 version; quieter
    # variant).  Returns the local filename, or None on failure.
    outfn = self.getPath(filetype, run, camcol, field, band)
    if outfn is None:
        return None
    if skipExisting and os.path.exists(outfn):
        return outfn
    url = self.get_url(filetype, run, camcol, field, band=band)
    #print 'URL:', url
    if self.curl:
        cmd = "curl -o '%(outfn)s' '%(url)s'"
    else:
        cmd = "wget --continue -nv -O %(outfn)s '%(url)s'"
    # suffix to add to the downloaded filename
    suff = self.dassuffix.get(filetype, '')
    oo = outfn + suff
    if tempsuffix is not None:
        oo += tempsuffix
    cmd = cmd % dict(outfn=oo, url=url)
    self.logger.debug('cmd: %s' % cmd)
    (rtn,out,err) = run_command(cmd)
    if rtn:
        print 'Command failed: command', cmd
        print 'Output:', out
        print 'Error:', err
        print 'Return val:', rtn
        return None
    if tempsuffix is not None:
        # Download complete: move the temp file into place.
        os.rename(oo, outfn + suff)
    if filetype in self.processcmds:
        # Post-process (eg decompress) the download into outfn.
        cmd = self.processcmds[filetype]
        cmd = cmd % dict(input = outfn + suff, output = outfn)
        self.logger.debug('cmd: %s' % cmd)
        (rtn,out,err) = run_command(cmd)
        if rtn:
            print 'Command failed: command', cmd
            print 'Output:', out
            print 'Error:', err
            print 'Return val:', rtn
            return None
    return outfn
def filetype(fn):
    """Classify a file with the 'file' utility.

    Returns a list of (description, detail) string pairs — one per match
    that 'file -k' reports — or None if the command fails.
    """
    cmd = 'file -b -N -L -k %s' % shell_escape(fn)
    rtn, out, err = run_command(cmd)
    if rtn:
        logverb('"file" command failed. Command: "%s"' % cmd)
        logverb(' ', out)
        logverb(' ', err)
        return None
    out = out.strip()
    logverb('File: "%s"' % out)
    # The "file -r" flag, removed in some Ubuntu versions, used to
    # tell it not to convert non-printable characters to octal. Without -r,
    # some versions print the string r'\012- ' instead of "\n- ". Do that
    # manually here.
    out = out.replace(r'\012- ', '\n- ')
    results = []
    for entry in out.split('\n- '):
        if entry.endswith('\n-'):
            entry = entry[:-2]
        if not entry:
            continue
        head, _, detail = entry.partition(', ')
        results.append((head, detail))
    return results
def get_git_version(dir=None):
    '''
    Runs 'git describe' in the current directory (or given dir) and returns
    the result as a string.

    Parameters
    ----------
    dir : string
        If non-None, "cd" to the given directory before running 'git describe'

    Returns
    -------
    Git version string
    '''
    from astrometry.util.run_command import run_command
    if dir is None:
        cmd = 'git describe'
    else:
        cmd = "cd '%s' && " % dir + 'git describe'
    rtn, version, err = run_command(cmd)
    if rtn:
        raise RuntimeError('Failed to get version string (%s): ' % cmd +
                           version + err)
    return version.strip()
def filetype(fn):
    # Run the 'file' utility on fn and parse its output into a list of
    # (description, detail) string pairs; returns None if 'file' fails.
    filecmd = 'file -b -N -L -k %s'
    cmd = filecmd % shell_escape(fn)
    (rtn, out, err) = run_command(cmd)
    if rtn:
        logverb('"file" command failed. Command: "%s"' % cmd)
        logverb(' ', out)
        logverb(' ', err)
        return None
    out = out.strip()
    logverb('File: "%s"' % out)
    lst = []
    # The "file -r" flag, removed in some Ubuntu versions, used to
    # tell it not to convert non-printable characters to octal. Without -r,
    # some versions print the string r'\012- ' instead of "\n- ". Do that
    # manually here.
    out = out.replace(r'\012- ', '\n- ')
    for line in out.split('\n- '):
        # "file -k" separates multiple matches with "\n- ".
        if line.endswith('\n-'):
            line = line[:-2]
        if len(line) == 0:
            continue
        # Split off the first ", "-delimited detail field, if any.
        p = line.split(', ', 1)
        if len(p) == 2:
            lst.append(tuple(p))
        else:
            lst.append((p[0], ''))
    return lst
def sdss_das_get(filetype, outfn, run, camcol, field, band=None,
                 reruns=None, suffix=None, gunzip=True, curl=False, ):
    # Fetch an SDSS DAS product, trying each rerun in turn, optionally
    # gunzipping .gz downloads.  Returns False on failure; otherwise the
    # output filename.  (Python 2.)
    if reruns is None:
        reruns = [40,41,42,44]
    urls = []
    for rerun in reruns:
        url = sdss_das_get_url(filetype, run, camcol, field, rerun, band, suffix=suffix)
        if url is None:
            return False
        urls.append(url)
    if suffix is None:
        suffix = sdss_das_get_suffix(filetype)
    if outfn:
        # outfn may contain %-substitution patterns for run/camcol/field/band.
        outfn = outfn % { 'run':run, 'camcol':camcol, 'field':field, 'band':band } + suffix
    else:
        outfn = sdss_filename(filetype, run, camcol, field, band) + suffix
    if not get_urls(urls, outfn, curl):
        return False
    if suffix == '.gz' and gunzip:
        print 'gzipped file; outfn=', outfn
        gzipfn = outfn
        outfn = gzipfn.replace('.gz', '')
        if os.path.exists(gzipfn):
            cmd = 'gunzip -cd %s > %s' % (gzipfn, outfn)
            print 'Running:', cmd
            (rtn, out, err) = run_command(cmd)
            if rtn:
                print 'Command failed: command', cmd
                print 'Output:', out
                print 'Error:', err
                print 'Return val:', rtn
    # NOTE(review): original indentation was lost in this file; this return
    # is assumed to be at function level so non-gzipped downloads also
    # return the filename — confirm against upstream.
    return outfn
def render(self, f):
    """Write a browser-displayable rendering of this image to the
    file-like object f: source lists delegate to their own renderer,
    FITS images are converted (and cached) as JPEG, anything else is
    streamed as-is."""
    if hasattr(self, 'sourcelist'):
        # image is a source list
        self.sourcelist.render(f)
        return
    if not self.disk_file.is_fits_image():
        df = self.disk_file
    else:
        # convert fits image to jpg for browser rendering
        key = 'jpg_image%i' % self.id
        df = CachedFile.get(key)
        if df is not None:
            logmsg('Cache hit for key "%s"' % key)
        else:
            imagefn = get_temp_file()
            pnmfn = self.get_pnm_path()
            cmd = 'pnmtojpeg < %s > %s' % (pnmfn, imagefn)
            logmsg("Making resized image: %s" % cmd)
            rtn, out, err = run_command(cmd)
            if rtn:
                logmsg('pnmtojpeg failed: rtn %i' % rtn)
                logmsg('out: ' + out)
                logmsg('err: ' + err)
                raise RuntimeError(
                    'Failed to make jpg image for %s: pnmtojpeg: %s'
                    % (str(self), err))
            # cache
            logmsg('Caching key "%s"' % key)
            df = CachedFile.add(key, imagefn)
    dfile = open(df.get_path())
    f.write(dfile.read())
    dfile.close()
def render(self, f):
    # Stream a browser-displayable rendering of this image into the
    # file-like object f.  FITS inputs are converted to JPEG via
    # pnmtojpeg and cached under a per-image key.
    if hasattr(self, 'sourcelist'):
        # image is a source list
        self.sourcelist.render(f)
    else:
        if self.disk_file.is_fits_image():
            # convert fits image to jpg for browser rendering
            key = 'jpg_image%i' % self.id
            df = CachedFile.get(key)
            if df is None:
                imagefn = get_temp_file()
                pnmfn = self.get_pnm_path()
                cmd = 'pnmtojpeg < %s > %s' % (pnmfn, imagefn)
                logmsg("Making resized image: %s" % cmd)
                rtn,out,err = run_command(cmd)
                if rtn:
                    logmsg('pnmtojpeg failed: rtn %i' % rtn)
                    logmsg('out: ' + out)
                    logmsg('err: ' + err)
                    raise RuntimeError('Failed to make jpg image for %s: pnmtojpeg: %s' % (str(self), err))
                # cache
                logmsg('Caching key "%s"' % key)
                df = CachedFile.add(key, imagefn)
            else:
                logmsg('Cache hit for key "%s"' % key)
        else:
            # Non-FITS: serve the original bytes.
            df = self.disk_file
        dfile = open(df.get_path())
        f.write(dfile.read())
        dfile.close()
def filetype(fn):
    # Run the 'file' utility (with -r: raw, no octal escaping) on fn and
    # parse its output into (description, detail) pairs; None on failure.
    filecmd = 'file -b -N -L -k -r %s'
    cmd = filecmd % shell_escape(fn)
    (rtn, out, err) = run_command(cmd)
    if rtn:
        logverb('"file" command failed. Command: "%s"' % cmd)
        logverb(' ', out)
        logverb(' ', err)
        return None
    out = out.strip()
    logverb('File: "%s"' % out)
    lst = []
    for line in out.split('\n- '):
        # "file -k" separates multiple matches with "\n- ".
        if line.endswith('\n-'):
            line = line[:-2]
        if len(line) == 0:
            continue
        # Split off the first ", "-delimited detail field, if any.
        p = line.split(', ', 1)
        if len(p) == 2:
            lst.append(tuple(p))
        else:
            lst.append((p[0], ''))
    return lst
def filetype(fn):
    """Classify a file with 'file -r' (raw output).

    Returns a list of (description, detail) string pairs, or None when
    the command fails.
    """
    cmd = 'file -b -N -L -k -r %s' % shell_escape(fn)
    rtn, out, err = run_command(cmd)
    if rtn:
        logverb('"file" command failed. Command: "%s"' % cmd)
        logverb(' ', out)
        logverb(' ', err)
        return None
    out = out.strip()
    logverb('File: "%s"' % out)
    results = []
    for entry in out.split('\n- '):
        if entry.endswith('\n-'):
            entry = entry[:-2]
        if not entry:
            continue
        head, _, detail = entry.partition(', ')
        results.append((head, detail))
    return results
def create_resized_image(self, maxsize):
    """Return an Image whose longest side is at most maxsize pixels,
    creating a scaled JPEG (via pnmscale | pnmtojpeg) if this image is
    larger; returns self unchanged when already small enough."""
    longest = max(self.width, self.height)
    if longest <= maxsize:
        return self
    pnmfn = self.get_pnm_path()
    imagefn = get_temp_file()
    # find scale
    scale = float(maxsize) / float(longest)
    W = int(round(scale * self.width))
    H = int(round(scale * self.height))
    cmd = 'pnmscale -width %i -height %i %s | pnmtojpeg > %s' % (
        W, H, pnmfn, imagefn)
    logmsg("Making resized image: %s" % cmd)
    rtn, out, err = run_command(cmd)
    if rtn:
        logmsg('pnmscale failed: rtn %i' % rtn)
        logmsg('out: ' + out)
        logmsg('err: ' + err)
        raise RuntimeError(
            'Failed to make resized image for %s: pnmscale: %s'
            % (str(self), err))
    df = DiskFile.from_file(imagefn, Image.RESIZED_COLLECTION)
    try:
        image, created = Image.objects.get_or_create(disk_file=df,
                                                     width=W, height=H)
    except Image.MultipleObjectsReturned:
        # Duplicate rows already exist; just take the first.
        image = Image.objects.filter(disk_file=df, width=W, height=H)[0]
    return image
def run_convert_command(cmd, deleteonfail=None):
    """Run a shell conversion command, raising FileConversionError on a
    non-zero exit status; optionally unlink deleteonfail (a partial
    output file) before raising."""
    logmsg('Command: ' + cmd)
    rtn, stdout, stderr = run_command(cmd)
    if not rtn:
        return
    errmsg = 'Command failed: ' + cmd + ': ' + stderr
    logmsg(errmsg + '; rtn val %d' % rtn)
    logmsg('out: ' + stdout)
    logmsg('err: ' + stderr)
    if deleteonfail:
        os.unlink(deleteonfail)
    raise FileConversionError(errmsg)
def run_convert_command(cmd, deleteonfail=None):
    # Run a shell conversion command; on failure, optionally delete the
    # (partial) output file deleteonfail and raise FileConversionError.
    logmsg('Command: ' + cmd)
    (rtn, stdout, stderr) = run_command(cmd)
    if rtn:
        errmsg = 'Command failed: ' + cmd + ': ' + stderr
        logmsg(errmsg + '; rtn val %d' % rtn)
        logmsg('out: ' + stdout);
        logmsg('err: ' + stderr);
        if deleteonfail:
            os.unlink(deleteonfail)
        raise FileConversionError(errmsg)
def _unzip_frame(self, fn, run, camcol):
    # Decompress a frame .bz2 file if needed (Python 2).  Returns
    # (tempfn, keep): tempfn is the decompressed filename (or None to use
    # fn as-is); keep says whether the caller should leave it on disk.
    if self.readBz2:
        # Reader can handle .bz2 directly; nothing to do.
        return None, True
    # No, PJM reported that pyfits failed on SDSS frame*.bz2 files
    # if not fitsio:
    #     # pyfits can read .bz2
    #     return None,True
    tempfn = None
    keep = False
    filetype = 'frame'
    if not (filetype in self.postprocesscmds and fn.endswith('.bz2')):
        return None, True
    cmd = self.postprocesscmds[filetype]
    if self.unzip_dir is not None:
        # Persistent unzip cache, keyed by run/camcol.
        udir = os.path.join(self.unzip_dir, '%i' % run, '%i' % camcol)
        if not os.path.exists(udir):
            try:
                os.makedirs(udir)
            except:
                pass
        tempfn = os.path.join(udir, os.path.basename(fn).replace('.bz2', ''))
        #print 'Checking', tempfn
        if os.path.exists(tempfn):
            print 'File exists:', tempfn
            return tempfn, True
        else:
            print 'Saving to', tempfn
            keep = True
    else:
        # No cache dir: decompress into a throwaway temp file.
        fid, tempfn = tempfile.mkstemp()
        os.close(fid)
    cmd = cmd % dict(input=fn, output=tempfn)
    self.logger.debug('cmd: %s' % cmd)
    print 'command:', cmd
    (rtn, out, err) = run_command(cmd)
    if rtn:
        print 'Command failed: command', cmd
        print 'Output:', out
        print 'Error:', err
        print 'Return val:', rtn
        raise RuntimeError('Command failed (return val %i): %s' % (rtn, cmd))
    print out
    print err
    return tempfn, keep
def _unzip_frame(self, fn, run, camcol):
    # Decompress a frame .bz2 file if needed (Python 2; older spacing).
    # Returns (tempfn, keep): tempfn is the decompressed filename (or
    # None to use fn as-is); keep says whether to leave it on disk.
    if self.readBz2:
        # Reader can handle .bz2 directly; nothing to do.
        return None,True
    # No, PJM reported that pyfits failed on SDSS frame*.bz2 files
    # if not fitsio:
    #     # pyfits can read .bz2
    #     return None,True
    tempfn = None
    keep = False
    filetype = 'frame'
    if not(filetype in self.postprocesscmds and fn.endswith('.bz2')):
        return None,True
    cmd = self.postprocesscmds[filetype]
    if self.unzip_dir is not None:
        # Persistent unzip cache, keyed by run/camcol.
        udir = os.path.join(self.unzip_dir, '%i' % run, '%i' % camcol)
        if not os.path.exists(udir):
            try:
                os.makedirs(udir)
            except:
                pass
        tempfn = os.path.join(udir, os.path.basename(fn).replace('.bz2', ''))
        #print 'Checking', tempfn
        if os.path.exists(tempfn):
            print 'File exists:', tempfn
            return tempfn,True
        else:
            print 'Saving to', tempfn
            keep = True
    else:
        # No cache dir: decompress into a throwaway temp file.
        fid,tempfn = tempfile.mkstemp()
        os.close(fid)
    cmd = cmd % dict(input = fn, output = tempfn)
    self.logger.debug('cmd: %s' % cmd)
    print 'command:', cmd
    (rtn,out,err) = run_command(cmd)
    if rtn:
        print 'Command failed: command', cmd
        print 'Output:', out
        print 'Error:', err
        print 'Return val:', rtn
        raise RuntimeError('Command failed (return val %i): %s' % (rtn, cmd))
    print out
    print err
    return tempfn,keep
def get_urls(urls, outfn):
    # Try each URL in turn with wget; return True on the first success,
    # False if all fail.  (Python 2.)
    for url in urls:
        cmd = 'wget --continue -nv '
        if outfn:
            cmd += '-O %s ' % outfn
        cmd += '\"%s\"' % url
        print 'Running:', cmd
        (rtn, out, err) = run_command(cmd)
        if rtn == 0:
            return True
        if rtn:
            print 'Command failed: command', cmd
            print 'Output:', out
            print 'Error:', err
            print 'Return val:', rtn
    return False
def sdss_das_get(filetype, outfn, run, camcol, field, band=None,
                 reruns=None, suffix=None, gunzip=True):
    # Fetch an SDSS DAS product from das.sdss.org, trying several reruns.
    # (Python 2; older variant.)  Returns False on failure.
    if suffix is None:
        suffix = sdss_das_get_suffix(filetype)
    if reruns is None:
        reruns = [40, 41, 42, 44]
    urls = []
    for rerun in reruns:
        path = sdss_path(filetype, run, camcol, field, band, rerun)
        if path is None:
            print 'Unknown SDSS filetype', filetype
            return False
        urls.append('http://das.sdss.org/imaging/' + path + suffix)
    if outfn:
        # outfn may contain %-substitution patterns.
        outfn = outfn % { 'run': run, 'camcol': camcol, 'field': field, 'band': band }
    else:
        outfn = sdss_filename(filetype, run, camcol, field, band) + suffix
    if not get_urls(urls, outfn):
        return False
    if suffix == '.gz' and gunzip:
        print 'gzipped file; outfn=', outfn
        gzipfn = outfn
        outfn = gzipfn.replace('.gz', '')
        if os.path.exists(gzipfn):
            cmd = 'gunzip -cd %s > %s' % (gzipfn, outfn)
            print 'Running:', cmd
            (rtn, out, err) = run_command(cmd)
            if rtn == 0:
                return True
            if rtn:
                print 'Command failed: command', cmd
                print 'Output:', out
                print 'Error:', err
                print 'Return val:', rtn
    # NOTE(review): falls off the end returning None on the non-gzip path
    # — callers appear to test truthiness; confirm against upstream.
def kml_file(req, jobid=None):
    # Django view: build a Google Earth KMZ (warped image + doc.kml) for
    # a solved job using wcs2kml, and serve it as an attachment.
    #return HttpResponse('KMZ requests are off for now. Post at https://groups.google.com/forum/#!forum/astrometry for help.')
    import tempfile
    import PIL.Image
    job = get_object_or_404(Job, pk=jobid)
    wcsfn = job.get_wcs_file()
    img = job.user_image.image
    df = img.disk_file
    pnmfn = img.get_pnm_path(tempfiles=req.tempfiles)
    imgfn = get_temp_file(tempfiles=req.tempfiles)
    # wcs2kml wants a PNG input image.
    image = PIL.Image.open(pnmfn)
    image.save(imgfn, 'PNG')
    dirnm = tempfile.mkdtemp()
    # Track the temp dir on the request so it gets cleaned up later.
    req.tempdirs.append(dirnm)
    warpedimgfn = 'image.png'
    kmlfn = 'doc.kml'
    outfn = get_temp_file(tempfiles=req.tempfiles)
    cmd = ('cd %(dirnm)s'
           '; %(wcs2kml)s '
           '--input_image_origin_is_upper_left '
           '--fitsfile=%(wcsfn)s '
           '--imagefile=%(imgfn)s '
           '--kmlfile=%(kmlfn)s '
           '--outfile=%(warpedimgfn)s '
           '; zip -j - %(warpedimgfn)s %(kmlfn)s > %(outfn)s '
           % dict(dirnm=dirnm, wcsfn=wcsfn, imgfn=imgfn, kmlfn=kmlfn,
                  wcs2kml=settings.WCS2KML,
                  warpedimgfn=warpedimgfn, outfn=outfn))
    logmsg('Running: ' + cmd)
    (rtn, out, err) = run_command(cmd)
    if rtn:
        logmsg('out: ' + out)
        logmsg('err: ' + err)
        return HttpResponse('kml generation failed: ' + err)
    res = HttpResponse(open(outfn, 'rb'))
    res['Content-Type'] = 'application/x-zip-compressed'
    res['Content-Length'] = file_size(outfn)
    res['Content-Disposition'] = 'attachment; filename=image.kmz'
    return res
def create_resized_image(self, maxsize):
    # Return an Image whose longest side is at most maxsize pixels,
    # creating a scaled JPEG via pnmscale | pnmtojpeg if needed.
    if max(self.width, self.height) <= maxsize:
        return self
    pnmfn = self.get_pnm_path()
    imagefn = get_temp_file()
    # find scale
    scale = float(maxsize) / float(max(self.width, self.height))
    W,H = int(round(scale * self.width)), int(round(scale * self.height))
    cmd = 'pnmscale -width %i -height %i %s | pnmtojpeg > %s' % (W, H, pnmfn, imagefn)
    logmsg("Making resized image: %s" % cmd)
    rtn,out,err = run_command(cmd)
    if rtn:
        logmsg('pnmscale failed: rtn %i' % rtn)
        logmsg('out: ' + out)
        logmsg('err: ' + err)
        raise RuntimeError('Failed to make resized image for %s: pnmscale: %s' % (str(self), err))
    df = DiskFile.from_file(imagefn, Image.RESIZED_COLLECTION)
    # NOTE(review): unlike the sibling variant, this version does not
    # handle Image.MultipleObjectsReturned from get_or_create.
    image, created = Image.objects.get_or_create(disk_file=df, width=W, height=H)
    return image
def kml_file(req, jobid=None):
    # Django view: build a Google Earth KMZ for a solved job using a
    # hard-coded wcs2kml binary path, and serve it as an attachment.
    job = get_object_or_404(Job, pk=jobid)
    wcsfn = job.get_wcs_file()
    img = job.user_image.image
    df = img.disk_file
    pnmfn = img.get_pnm_path()
    imgfn = get_temp_file()
    # wcs2kml wants a PNG input image.
    image = PIL.Image.open(pnmfn)
    image.save(imgfn, 'PNG')
    dirnm = tempfile.mkdtemp()
    warpedimgfn = 'image.png'
    kmlfn = 'doc.kml'
    outfn = get_temp_file()
    cmd = ('cd %(dirnm)s'
           '; /usr/local/wcs2kml/bin/wcs2kml '
           '--input_image_origin_is_upper_left '
           '--fitsfile=%(wcsfn)s '
           '--imagefile=%(imgfn)s '
           '--kmlfile=%(kmlfn)s '
           '--outfile=%(warpedimgfn)s '
           '; zip -j - %(warpedimgfn)s %(kmlfn)s > %(outfn)s '
           % dict(dirnm=dirnm, wcsfn=wcsfn, imgfn=imgfn, kmlfn=kmlfn,
                  warpedimgfn=warpedimgfn, outfn=outfn))
    logmsg('Running: ' + cmd)
    (rtn, out, err) = run_command(cmd)
    if rtn:
        logmsg('out: ' + out)
        logmsg('err: ' + err)
        return HttpResponse('kml generation failed: ' + err)
    res = HttpResponse(open(outfn))
    res['Content-Type'] = 'application/x-zip-compressed'
    res['Content-Length'] = file_size(outfn)
    res['Content-Disposition'] = 'attachment; filename=image.kmz'
    return res
def filetype(fn):
    """Classify a file with 'file -r'.

    Returns a list of (description, detail) string pairs — one per
    "\\n- "-separated match — or None when the command fails.
    """
    cmd = 'file -b -N -L -k -r %s' % shell_escape(fn)
    rtn, out, err = run_command(cmd)
    if rtn:
        logverb('"file" command failed. Command: "%s"' % cmd)
        logverb(' ', out)
        logverb(' ', err)
        return None
    out = out.strip()
    logverb('File: "%s"' % out)
    results = []
    for entry in out.split('\n- '):
        head, _, detail = entry.partition(', ')
        results.append((head, detail))
    return results
def kml_file(req, jobid=None):
    # Django view: build a Google Earth KMZ for a solved job using the
    # configured wcs2kml binary, and serve it as an attachment.
    import PIL.Image
    job = get_object_or_404(Job, pk=jobid)
    wcsfn = job.get_wcs_file()
    img = job.user_image.image
    df = img.disk_file
    pnmfn = img.get_pnm_path()
    imgfn = get_temp_file()
    # wcs2kml wants a PNG input image.
    image = PIL.Image.open(pnmfn)
    image.save(imgfn, 'PNG')
    dirnm = tempfile.mkdtemp()
    warpedimgfn = 'image.png'
    kmlfn = 'doc.kml'
    outfn = get_temp_file()
    cmd = ('cd %(dirnm)s'
           '; %(wcs2kml)s '
           '--input_image_origin_is_upper_left '
           '--fitsfile=%(wcsfn)s '
           '--imagefile=%(imgfn)s '
           '--kmlfile=%(kmlfn)s '
           '--outfile=%(warpedimgfn)s '
           '; zip -j - %(warpedimgfn)s %(kmlfn)s > %(outfn)s '
           % dict(dirnm=dirnm, wcsfn=wcsfn, imgfn=imgfn, kmlfn=kmlfn,
                  wcs2kml=settings.WCS2KML,
                  warpedimgfn=warpedimgfn, outfn=outfn))
    logmsg('Running: ' + cmd)
    (rtn, out, err) = run_command(cmd)
    if rtn:
        logmsg('out: ' + out)
        logmsg('err: ' + err)
        return HttpResponse('kml generation failed: ' + err)
    res = HttpResponse(open(outfn))
    res['Content-Type'] = 'application/x-zip-compressed'
    res['Content-Length'] = file_size(outfn)
    res['Content-Disposition'] = 'attachment; filename=image.kmz'
    return res
def get_tarball_files(fn, tempdirs=None):
    """Extract tarball fn into a fresh temporary directory and return the
    list of extracted regular-file paths (symlinks and directories are
    skipped).  Returns None on extraction failure or if nothing regular
    was extracted.  The temp dir is appended to tempdirs if given."""
    # create temp dir to extract tarfile.
    tempdir = tempfile.mkdtemp()
    if tempdirs is not None:
        tempdirs.append(tempdir)
    cmd = 'tar xvf %s -C %s' % (fn, tempdir)
    print('Extracting tarball: %s' % cmd)
    rtn, out, err = run_command(cmd)
    if rtn:
        print('Failed to un-tar file:\n' + err)
        #bailout(submission, 'failed to extract tar file')
        print('failed to extract tar file')
        return None
    validpaths = []
    for name in out.strip('\n').split('\n'):
        path = os.path.join(tempdir, name)
        logmsg('Path "%s"' % path)
        if not os.path.exists(path):
            logmsg('Path "%s" does not exist.' % path)
        elif os.path.islink(path):
            logmsg('Path "%s" is a symlink.' % path)
        elif os.path.isfile(path):
            validpaths.append(path)
        else:
            logmsg('Path "%s" is not a file.' % path)
    if not validpaths:
        #userlog('Tar file contains no regular files.')
        #bailout(submission, "tar file contains no regular files.")
        #return -1
        logmsg('No real files in tar file')
        return None
    logmsg('Got %i paths.' % len(validpaths))
    return validpaths
def get_tarball_files(fn):
    # Extract tarball fn into a new temp dir; return the list of
    # extracted regular files, or None on failure.  (Python 2.)
    # create temp dir to extract tarfile.
    tempdir = tempfile.mkdtemp()
    cmd = 'tar xvf %s -C %s' % (fn, tempdir)
    #userlog('Extracting tarball...')
    (rtn, out, err) = run_command(cmd)
    if rtn:
        #userlog('Failed to un-tar file:\n' + err)
        #bailout(submission, 'failed to extract tar file')
        print 'failed to extract tar file'
        return None
    # tar -v lists one extracted name per line.
    fns = out.strip('\n').split('\n')
    validpaths = []
    # NOTE(review): the loop variable shadows the 'fn' parameter.
    for fn in fns:
        path = os.path.join(tempdir, fn)
        logmsg('Path "%s"' % path)
        if not os.path.exists(path):
            logmsg('Path "%s" does not exist.' % path)
            continue
        if os.path.islink(path):
            logmsg('Path "%s" is a symlink.' % path)
            continue
        if os.path.isfile(path):
            validpaths.append(path)
        else:
            logmsg('Path "%s" is not a file.' % path)
    if len(validpaths) == 0:
        #userlog('Tar file contains no regular files.')
        #bailout(submission, "tar file contains no regular files.")
        #return -1
        logmsg('No real files in tar file')
        return None
    logmsg('Got %i paths.' % len(validpaths))
    return validpaths
def get_urls(urls, outfn, curl=False):
    # Try each URL in turn with curl or wget; return True on the first
    # success, False if all fail.  (Python 2.)
    for url in urls:
        if curl:
            cmd = 'curl '
            if outfn:
                cmd += '-o %s ' % outfn
            else:
                # -O: save under the remote filename.
                cmd += '-O '
        else:
            cmd = 'wget --continue -nv '
            if outfn:
                cmd += '-O %s ' % outfn
        cmd += '\"%s\"' % url
        print 'Running:', cmd
        (rtn, out, err) = run_command(cmd)
        if rtn == 0:
            return True
        if rtn:
            print 'Command failed: command', cmd
            print 'Output:', out
            print 'Error:', err
            print 'Return val:', rtn
    return False
def get_urls(urls, outfn, curl=False):
    """Download the first reachable URL from urls, via curl or wget.

    Returns True as soon as one download succeeds, False if every URL
    fails.  outfn, when given, is the local output filename.
    """
    for url in urls:
        if curl:
            cmd = 'curl ' + (('-o %s ' % outfn) if outfn else '-O ')
        else:
            cmd = 'wget --continue -nv '
            if outfn:
                cmd += '-O %s ' % outfn
        cmd += '\"%s\"' % url
        print('Running:', cmd)
        rtn, out, err = run_command(cmd)
        if rtn == 0:
            return True
        if rtn:
            print('Command failed: command', cmd)
            print('Output:', out)
            print('Error:', err)
            print('Return val:', rtn)
    return False
def read_photoobjs(sdss, wcs, margin, cols=None, pa=None,
                   wfn='window_flist.fits'):
    '''
    Read photoObjs that are inside the given 'wcs', plus 'margin' in degrees.
    If 'pa' is not None, assume it is a PrimaryArea object.

    Returns a merged fits_table of PRIMARY objects within the margin,
    or None if nothing was found or a download failed.  (Python 2.)
    '''
    log = logging.getLogger('read_photoobjs')
    #wfn = os.path.join(resolvedir, 'window_flist.fits')
    ra,dec = wcs.radec_center()
    rad = wcs.radius()
    # Pad by half the SDSS field diagonal (13' x 9'), in degrees.
    rad += np.hypot(13., 9.) / 2 / 60.
    # a little extra margin
    rad += margin
    print 'Searching for run,camcol,fields with radius', rad, 'deg'
    RCF = radec_to_sdss_rcf(ra, dec, radius=rad*60., tablefn=wfn)
    log.debug('Found %i fields possibly in range' % len(RCF))
    RCF = [(run,camcol,field) for (run,camcol,field,r,d) in RCF]
    pixmargin = margin * 3600. / wcs.pixel_scale()
    W,H = wcs.get_width(), wcs.get_height()
    # Keep only rerun-301 fields.
    RR = '301'
    RCF = [(run,camcol,field) for (run,camcol,field) in RCF
           if (sdss.get_rerun(run, field=field) == RR)]
    log.debug('Found %i fields with rerun = %s' % (len(RCF), RR))
    if pa is not None:
        rr = RR
        RCF = [(run,camcol,field) for (run,camcol,field) in RCF
               if (pa.get(rr, run, camcol, field) > 0)]
        log.debug('Found %i fields with positive primaryArea' % (len(RCF)))
    TT = []
    for run,camcol,field in RCF:
        log.debug('RCF %i/%i/%i' % (run, camcol, field))
        rr = sdss.get_rerun(run, field=field)
        fn = get_photoobj_filename(rr, run, camcol, field)
        if not os.path.exists(fn):
            # Download the photoObj file on demand.
            url = sdss.get_url('photoObj', run, camcol, field)
            cmd = "wget --continue -nv -O %(outfn)s '%(url)s'"
            cmd = cmd % dict(outfn=fn, url=url)
            dirnm = os.path.dirname(fn)
            print 'Directory:', dirnm
            if not os.path.exists(dirnm):
                print 'Creating', dirnm
                try:
                    os.makedirs(dirnm)
                except:
                    pass
            log.debug('Retrieving photoObj from %s to %s' % (url, fn))
            (rtn,out,err) = run_command(cmd)
            if rtn:
                print 'Command failed: command', cmd
                print 'Output:', out
                print 'Error:', err
                print 'Return val:', rtn
                return None
        T = fits_table(fn, columns=cols)
        if T is None:
            log.debug('read 0 from %s' % fn)
            continue
        log.debug('read %i from %s' % (len(T), fn))
        # while we're reading it, record its length for later...
        #get_photoobj_length(rr, run, camcol, field)
        ok,x,y = wcs.radec2pixelxy(T.ra, T.dec)
        x -= 1
        y -= 1
        # The trailing '> 0' applies to the whole product: all box
        # conditions true AND the PRIMARY bit (256) set.
        T.cut((x > -pixmargin) * (x < (W + pixmargin)) *
              (y > -pixmargin) * (y < (H + pixmargin)) *
              (T.resolve_status & 256) > 0)
        log.debug('cut to %i within target area and PRIMARY.' % len(T))
        if len(T) == 0:
            continue
        TT.append(T)
    if not len(TT):
        return None
    T = merge_tables(TT)
    return T
def read_photoobjs(sdss, wcs, margin, cols=None, pa=None,
                   wfn='window_flist.fits'):
    '''
    Read photoObjs that are inside the given 'wcs', plus 'margin' in degrees.
    If 'pa' is not None, assume it is a PrimaryArea object.

    Returns a merged fits_table of PRIMARY objects within the margin,
    or None if nothing was found or a download failed.  (Python 2;
    PEP8-spaced variant.)
    '''
    log = logging.getLogger('read_photoobjs')
    #wfn = os.path.join(resolvedir, 'window_flist.fits')
    ra, dec = wcs.radec_center()
    rad = wcs.radius()
    # Pad by half the SDSS field diagonal (13' x 9'), in degrees.
    rad += np.hypot(13., 9.) / 2 / 60.
    # a little extra margin
    rad += margin
    print 'Searching for run,camcol,fields with radius', rad, 'deg'
    RCF = radec_to_sdss_rcf(ra, dec, radius=rad * 60., tablefn=wfn)
    log.debug('Found %i fields possibly in range' % len(RCF))
    RCF = [(run, camcol, field) for (run, camcol, field, r, d) in RCF]
    pixmargin = margin * 3600. / wcs.pixel_scale()
    W, H = wcs.get_width(), wcs.get_height()
    # Keep only rerun-301 fields.
    RR = '301'
    RCF = [(run, camcol, field) for (run, camcol, field) in RCF
           if (sdss.get_rerun(run, field=field) == RR)]
    log.debug('Found %i fields with rerun = %s' % (len(RCF), RR))
    if pa is not None:
        rr = RR
        RCF = [(run, camcol, field) for (run, camcol, field) in RCF
               if (pa.get(rr, run, camcol, field) > 0)]
        log.debug('Found %i fields with positive primaryArea' % (len(RCF)))
    TT = []
    for run, camcol, field in RCF:
        log.debug('RCF %i/%i/%i' % (run, camcol, field))
        rr = sdss.get_rerun(run, field=field)
        fn = get_photoobj_filename(rr, run, camcol, field)
        if not os.path.exists(fn):
            # Download the photoObj file on demand.
            url = sdss.get_url('photoObj', run, camcol, field)
            cmd = "wget --continue -nv -O %(outfn)s '%(url)s'"
            cmd = cmd % dict(outfn=fn, url=url)
            dirnm = os.path.dirname(fn)
            print 'Directory:', dirnm
            if not os.path.exists(dirnm):
                print 'Creating', dirnm
                try:
                    os.makedirs(dirnm)
                except:
                    pass
            log.debug('Retrieving photoObj from %s to %s' % (url, fn))
            (rtn, out, err) = run_command(cmd)
            if rtn:
                print 'Command failed: command', cmd
                print 'Output:', out
                print 'Error:', err
                print 'Return val:', rtn
                return None
        T = fits_table(fn, columns=cols)
        if T is None:
            log.debug('read 0 from %s' % fn)
            continue
        log.debug('read %i from %s' % (len(T), fn))
        # while we're reading it, record its length for later...
        #get_photoobj_length(rr, run, camcol, field)
        ok, x, y = wcs.radec2pixelxy(T.ra, T.dec)
        x -= 1
        y -= 1
        # The trailing '> 0' applies to the whole product: all box
        # conditions true AND the PRIMARY bit (256) set.
        T.cut((x > -pixmargin) * (x < (W + pixmargin)) *
              (y > -pixmargin) * (y < (H + pixmargin)) *
              (T.resolve_status & 256) > 0)
        log.debug('cut to %i within target area and PRIMARY.' % len(T))
        if len(T) == 0:
            continue
        TT.append(T)
    if not len(TT):
        return None
    T = merge_tables(TT)
    return T
cmd = 'augment-xylist ' def join(terms): return ' '.join(terms) cmd = 'augment-xylist {}'.format( join([ join([' '.join([str(y) for y in x]) for x in axyargs.iteritems()]), join(axyflags) ])) logger.info('running: ' + cmd) xylist_start = time.time() (rtn, out, err) = run_command(cmd, tee=True) xylist_end = time.time() if rtn: logger.info('out: ' + out) logger.info('err: ' + err) logger.critical('augment-xylist failed: rtn val: {} err: {}'.format( rtn, err)) sys.exit(-1) logger.info('created axy file: {} in {:0.2f} seconds'.format( axypath, xylist_end - xylist_start)) cmd = 'astrometry-engine {}'.format(axypath) astrometry_engine_start = time.time() (rtn, out, err) = run_command(cmd, tee=True) astrometry_engine_end = time.time() if rtn:
def run_ps_thread(pid, ppid, fn):
    """Background thread: periodically sample 'ps ax' output and append
    rows of interest (high CPU/memory, or children of 'pid') to FITS
    tables, writing the accumulated table to 'fn' roughly once a minute.

    pid: parent process id recorded in the FITS header and used to select
         child processes.
    ppid: unused here (kept for interface compatibility).
    fn: output FITS filename; written atomically via a temp file + rename.

    Runs forever (until 'ps' itself fails); intended to be a daemon thread.
    """
    from astrometry.util.run_command import run_command
    import time
    import re
    import fitsio
    TT = []
    step = 0
    # Matches ps time strings like "1-02:03:04.5", "02:03:04" or "03:04.5"
    # (optional days-, optional hours:).
    trex = re.compile(r'(((?P<days>\d*)-)?(?P<hours>\d*):)?(?P<minutes>\d*):(?P<seconds>[\d\.]*)')

    def parse_time_strings(ss):
        # Convert a list of ps-style elapsed/cputime strings to seconds.
        # Returns (first_unparseable_string_or_None, list_of_seconds).
        etime = []
        any_failed = None
        for s in ss:
            m = trex.match(s)
            if m is None:
                any_failed = s
                break
            days, hours, mins, secs = m.group('days', 'hours', 'minutes',
                                              'seconds')
            days = int(days, 10) if days is not None else 0
            hours = int(hours, 10) if hours is not None else 0
            # FIX: the original text was garbled here; restored to parallel
            # the 'days'/'hours' handling above.
            mins = int(mins, 10) if mins is not None else 0
            # float() dislikes nothing here, but strip a leading zero
            # ("05.1" -> "5.1") as the original did.
            if secs.startswith('0'):
                secs = secs[1:]
            secs = float(secs)
            tt = days * 24 * 3600 + hours * 3600 + mins * 60 + secs
            etime.append(tt)
        return any_failed, etime

    fitshdr = fitsio.FITSHDR()
    fitshdr['PPID'] = pid
    while True:
        time.sleep(5)
        step += 1
        # OSX-compatible column list (no 'psr'/'session').
        cmd = ('ps ax -o "user pcpu pmem state cputime etime pgid pid ppid ' +
               'rss vsize command"')
        rtn, out, err = run_command(cmd)
        if rtn:
            print('FAILED to run ps:', rtn, out, err)
            time.sleep(1)
            break
        if len(err):
            print('Error string from ps:', err)
        lines = out.split('\n')
        hdr = lines.pop(0)
        cols = hdr.split()
        # '%CPU' -> 'PCPU' -> 'pcpu', etc. ('%' is not FITS-column-safe).
        cols = [c.replace('%', 'P') for c in cols]
        cols = [c.lower() for c in cols]
        vals = [[] for c in cols]
        # maximum length kept for the 'command' (command-line args) field
        maxlen = 128
        for line in lines:
            words = line.split()
            # "command" column can contain spaces; it is last.
            if len(words) == 0:
                continue
            words = (words[:len(cols) - 1] +
                     [' '.join(words[len(cols) - 1:])[:maxlen]])
            assert(len(words) == len(cols))
            for v, w in zip(vals, words):
                v.append(w)
        # Columns that should be parsed as numbers rather than strings.
        parsetypes = dict(pcpu=np.float32, pmem=np.float32, pgid=np.int32,
                          pid=np.int32, ppid=np.int32, rs=np.float32,
                          vsz=np.float32)
        T = fits_table()
        for c, v in zip(cols, vals):
            v = np.array(v)
            tt = parsetypes.get(c, None)
            if tt is not None:
                v = v.astype(tt)
            T.set(c, v)
        # Keep rows with significant CPU/MEM use, or children of 'pid'
        # that are not the 'ps ax' sampling command itself.
        T.cut(reduce(np.logical_or, [
            T.pcpu > 5, T.pmem > 5,
            (T.ppid == pid) * [not c.startswith('ps ax')
                               for c in T.command]]))
        if len(T) == 0:
            continue
        T.unixtime = np.zeros(len(T), np.float64) + time.time()
        T.step = np.zeros(len(T), np.int16) + step
        any_failed, etime = parse_time_strings(T.elapsed)
        if any_failed is not None:
            print('Failed to parse elapsed time string:', any_failed)
        else:
            T.elapsed = np.array(etime)
        any_failed, ctime = parse_time_strings(T.time)
        if any_failed is not None:
            print('Failed to parse elapsed time string:', any_failed)
        else:
            T.time = np.array(ctime)
            T.rename('time', 'cputime')
        TT.append(T)
        if step % 12 == 0:
            # Write out results every ~minute, atomically (write temp file,
            # then rename over the target).
            T = merge_tables(TT, columns='fillzero')
            tmpfn = os.path.join(os.path.dirname(fn),
                                 'tmp-' + os.path.basename(fn))
            T.writeto(tmpfn, header=fitshdr)
            os.rename(tmpfn, fn)
            print('Wrote', fn)
            # Keep the merged table so we don't re-merge everything next time.
            TT = [T]
def run_ps_thread(parent_pid, parent_ppid, fn, shutdown, event_queue):
    """Background thread: periodically sample 'ps ax', compute per-process
    instantaneous CPU use, and write accumulated stats (plus optional
    timestamped events) to FITS file 'fn'.

    parent_pid: PID of the process being monitored; recorded in the FITS
        header and used to flag 'main'/'mine' rows.
    parent_ppid: unused here (kept for interface compatibility).
    fn: output FITS filename; written atomically via temp file + rename.
    shutdown: threading.Event-like object; loop exits when it is set.
    event_queue: deque-like of (time, message) pairs, or None.

    Writes results every ~minute and once more just before returning.
    """
    from astrometry.util.run_command import run_command
    from astrometry.util.fits import fits_table, merge_tables
    import time
    import re
    import fitsio
    from functools import reduce
    # my pid = parent pid -- this is a thread.
    print('run_ps_thread starting: parent PID', parent_pid, ', my PID',
          os.getpid(), fn)
    TT = []
    step = 0
    events = []
    # Matches ps time strings like "1-02:03:04.5", "02:03:04" or "03:04.5".
    trex = re.compile(
        r'(((?P<days>\d*)-)?(?P<hours>\d*):)?(?P<minutes>\d*):(?P<seconds>[\d\.]*)'
    )

    def parse_time_strings(ss):
        # Convert a list of ps-style elapsed/cputime strings to seconds.
        # Returns (first_unparseable_string_or_None, list_of_seconds).
        etime = []
        any_failed = None
        for s in ss:
            m = trex.match(s)
            if m is None:
                any_failed = s
                break
            days, hours, mins, secs = m.group('days', 'hours', 'minutes',
                                              'seconds')
            days = int(days, 10) if days is not None else 0
            hours = int(hours, 10) if hours is not None else 0
            # FIX: the original text was garbled here; restored to parallel
            # the 'days'/'hours' handling above.
            mins = int(mins, 10) if mins is not None else 0
            if secs.startswith('0'):
                secs = secs[1:]
            secs = float(secs)
            tt = days * 24 * 3600 + hours * 3600 + mins * 60 + secs
            etime.append(tt)
        return any_failed, etime

    def write_results(fn, T, events, hdr):
        # Tag rows belonging to the monitored process tree, then write the
        # table (and an optional events extension) atomically.
        T.mine = np.logical_or(T.pid == parent_pid, T.ppid == parent_pid)
        T.main = (T.pid == parent_pid)
        tmpfn = os.path.join(os.path.dirname(fn),
                             'tmp-' + os.path.basename(fn))
        T.writeto(tmpfn, header=hdr)
        if len(events):
            E = fits_table()
            E.unixtime = np.array([e[0] for e in events])
            E.event = np.array([e[1] for e in events])
            E.step = np.array([e[2] for e in events])
            E.writeto(tmpfn, append=True)
        os.rename(tmpfn, fn)
        print('Wrote', fn)

    fitshdr = fitsio.FITSHDR()
    fitshdr['PPID'] = parent_pid
    # State for 'instantaneous' CPU estimates, keyed by PID.
    last_time = {}
    last_proc_time = {}
    clock_ticks = os.sysconf('SC_CLK_TCK')
    if clock_ticks == -1:
        # Failed to get clock ticks per second; assume the common default.
        clock_ticks = 100
    while True:
        shutdown.wait(5.0)
        if shutdown.is_set():
            print('ps shutdown flag set. Quitting.')
            break
        if event_queue is not None:
            # Drain all pending events, tagging them with the current step.
            while True:
                try:
                    (t, msg) = event_queue.popleft()
                    events.append((t, msg, step))
                except IndexError:
                    # no events
                    break
        step += 1
        # OSX-compatible column list.
        cmd = ('ps ax -o "user pcpu pmem state cputime etime pgid pid ppid ' +
               'rss vsize wchan command"')
        rtn, out, err = run_command(cmd)
        if rtn:
            print('FAILED to run ps:', rtn, out, err)
            time.sleep(1)
            break
        if len(err):
            print('Error string from ps:', err)
        lines = out.split('\n')
        hdr = lines.pop(0)
        cols = hdr.split()
        # '%CPU' -> 'pcpu', etc.
        cols = [c.replace('%', 'P') for c in cols]
        cols = [c.lower() for c in cols]
        vals = [[] for c in cols]
        # maximum length kept for the 'command' (command-line args) field
        maxlen = 128
        for line in lines:
            words = line.split()
            # "command" column can contain spaces; it is last.
            if len(words) == 0:
                continue
            words = (words[:len(cols) - 1] +
                     [' '.join(words[len(cols) - 1:])[:maxlen]])
            assert (len(words) == len(cols))
            for v, w in zip(vals, words):
                v.append(w)
        # Columns parsed as numbers rather than strings.
        parsetypes = dict(
            pcpu=np.float32,
            pmem=np.float32,
            pgid=np.int32,
            pid=np.int32,
            ppid=np.int32,
            rs=np.float32,
            vsz=np.float32,
        )
        T = fits_table()
        for c, v in zip(cols, vals):
            v = np.array(v)
            tt = parsetypes.get(c, None)
            if tt is not None:
                v = v.astype(tt)
            T.set(c, v)
        any_failed, etime = parse_time_strings(T.elapsed)
        if any_failed is not None:
            print('Failed to parse elapsed time string:', any_failed)
        else:
            T.elapsed = np.array(etime)
        any_failed, ctime = parse_time_strings(T.time)
        if any_failed is not None:
            print('Failed to parse elapsed time string:', any_failed)
        else:
            T.time = np.array(ctime)
            T.rename('time', 'cputime')
        # Compute 'instantaneous' (5-sec averaged) %cpu.
        # BUT this only counts whole seconds in the 'ps' output.
        T.icpu = np.zeros(len(T), np.float32)
        icpu = T.icpu
        for i, (p, etime, ctime) in enumerate(zip(T.pid, T.elapsed,
                                                  T.cputime)):
            try:
                elast, clast = last_time[p]
                # new process with an existing PID?
                if etime > elast:
                    icpu[i] = 100. * (ctime - clast) / (etime - elast)
            except KeyError:
                # first time we see this PID
                pass
            last_time[p] = (etime, ctime)
        # Keep rows with significant CPU/MEM use, the monitored process
        # itself, or its children (excluding our own 'ps ax' command).
        T.cut(
            reduce(np.logical_or, [
                T.pcpu > 5, T.pmem > 5, T.icpu > 5,
                T.pid == parent_pid,
                (T.ppid == parent_pid) *
                np.array([not c.startswith('ps ax') for c in T.command])
            ]))
        if len(T) == 0:
            continue
        timenow = time.time()
        T.unixtime = np.zeros(len(T), np.float64) + timenow
        T.step = np.zeros(len(T), np.int16) + step
        if os.path.exists('/proc'):
            # Try to grab higher-precision CPU timing info from
            # /proc/PID/stat (Linux only).
            T.proc_utime = np.zeros(len(T), np.float32)
            T.proc_stime = np.zeros(len(T), np.float32)
            T.processor = np.zeros(len(T), np.int16)
            T.proc_icpu = np.zeros(len(T), np.float32)
            for i, p in enumerate(T.pid):
                try:
                    # Field indices per
                    # http://man7.org/linux/man-pages/man5/proc.5.html
                    procfn = '/proc/%i/stat' % p
                    txt = open(procfn).read()
                    words = txt.split()
                    utime = int(words[13]) / float(clock_ticks)
                    stime = int(words[14]) / float(clock_ticks)
                    proc = int(words[38])
                    ctime = utime + stime
                    try:
                        tlast, clast = last_proc_time[p]
                        if ctime >= clast:
                            T.proc_icpu[i] = 100. * (
                                ctime - clast) / float(timenow - tlast)
                    except KeyError:
                        # first time we see this PID
                        pass
                    last_proc_time[p] = (timenow, ctime)
                    T.proc_utime[i] = utime
                    T.proc_stime[i] = stime
                    T.processor[i] = proc
                except Exception:
                    # Process may have exited between 'ps' and this read;
                    # best-effort only.
                    pass
        TT.append(T)
        if (step % 12 == 0) and len(TT) > 0:
            # Write out results every ~ minute.
            print('ps -- writing', fn)
            T = merge_tables(TT, columns='fillzero')
            write_results(fn, T, events, fitshdr)
            TT = [T]
    # Just before returning, write out results.
    if len(TT) > 0:
        print('ps -- writing', fn)
        T = merge_tables(TT, columns='fillzero')
        write_results(fn, T, events, fitshdr)
def annotated_image(req, jobid=None, size='full'):
    """Django view: render the solved image for Job 'jobid' with catalog
    annotations overlaid (via plotann.py) and return it as a JPEG response.

    size: 'display' scales annotations to the display-sized image;
          anything else uses the full-size image.
    On plotting failure, returns an HTML error page with the tool's output.
    """
    job = get_object_or_404(Job, pk=jobid)
    ui = job.user_image
    img = ui.image
    if size == 'display':
        scale = float(img.get_display_image().width) / img.width
        img = img.get_display_image()
    else:
        scale = 1.0
    wcsfn = job.get_wcs_file()
    pnmfn = img.get_pnm_path()
    annfn = get_temp_file()
    # Catalog files used for annotation.
    catdir = settings.CAT_DIR
    uzcfn = os.path.join(catdir, 'uzc2000.fits')
    abellfn = os.path.join(catdir, 'abell-all.fits')
    hdfn = settings.HENRY_DRAPER_CAT
    tycho2fn = settings.TYCHO2_KD
    rad = job.calibration.get_radius()
    args = [
        'plotann.py --no-grid --toy -10',
        '--scale %s' % (str(scale)),
    ]
    # Only include dense catalogs for sufficiently zoomed-in fields.
    if rad < 1.:
        args.extend([
            #'--uzccat %s' % uzcfn,
            '--abellcat %s' % abellfn,
            '--hdcat %s' % hdfn
        ])
    if rad < 0.25:
        args.append('--tycho2cat %s' % tycho2fn)
    # Suppress cluttered annotations on wide fields.
    if rad > 10:
        args.append('--no-ngc')
    if rad > 30:
        args.append('--no-bright')
    cmd = ' '.join(args + ['%s %s %s' % (wcsfn, pnmfn, annfn)])
    import sys
    # plotann.py is run as a subprocess; make sure it sees our sys.path.
    os.environ['PYTHONPATH'] = ':'.join(sys.path)
    logmsg('Running: ' + cmd)
    (rtn, out, err) = run_command(cmd)
    if rtn:
        logmsg('out: ' + out)
        logmsg('err: ' + err)
        return HttpResponse('plot failed: ' + err + "<br><pre>" + out +
                            "</pre><br><pre>" + err + "</pre>")
    # FIX: read the JPEG in binary mode and close the file handle
    # (the original opened in text mode and never closed it).
    with open(annfn, 'rb') as f:
        res = HttpResponse(f.read())
    # plotann.py produces jpeg by default
    res['Content-Type'] = 'image/jpeg'
    return res
def readFrame(self, run, camcol, field, band, filename=None):
    '''
    Read an SDSS 'frame' file, returning a Frame object.

    http://data.sdss3.org/datamodel/files/BOSS_PHOTOOBJ/frames/RERUN/RUN/CAMCOL/frame.html

    If 'filename' is None the path is derived from (run, camcol, field,
    band) via self.getPath.  Uses fitsio when available (lazily reading
    the image via a proxy HDU, optionally bunzip2-ing to self.unzip_dir or
    a temp file), otherwise falls back to pyfits.
    Returns None if a decompression command fails.
    '''
    f = Frame(run, camcol, field, band)
    if filename is None:
        fn = self.getPath('frame', run, camcol, field, band)
    else:
        fn = filename
    if fitsio:
        print('Frame filename', fn)
        # eg /clusterfs/riemann/raid006/dr10/boss/photoObj/frames/301/2825/1/frame-u-002825-1-0126.fits.bz2
        tempfn = None
        keep = False
        cmd = None
        # bunzip2 command template, if configured for 'frame' files.
        filetype = 'frame'
        if filetype in self.processcmds:
            cmd = self.processcmds[filetype]
        if cmd is not None and self.unzip_dir is not None:
            # Decompress into a persistent per-run/camcol cache directory.
            udir = os.path.join(self.unzip_dir, '%i' % run, '%i' % camcol)
            if not os.path.exists(udir):
                try:
                    os.makedirs(udir)
                except OSError:
                    # Another process may have created it concurrently.
                    pass
            tempfn = os.path.join(udir,
                                  os.path.basename(fn).replace('.bz2', ''))
            if os.path.exists(tempfn):
                print('File exists:', tempfn)
                fn = tempfn
                cmd = None
            else:
                print('Saving to', tempfn)
                keep = True
        elif cmd is not None and self.unzip_dir is None:
            # Decompress to a throwaway temp file.
            fid, tempfn = tempfile.mkstemp()
            os.close(fid)
        if cmd is not None:
            cmd = cmd % dict(input=fn, output=tempfn)
            self.logger.debug('cmd: %s' % cmd)
            (rtn, out, err) = run_command(cmd)
            if rtn:
                print('Command failed: command', cmd)
                print('Output:', out)
                print('Error:', err)
                print('Return val:', rtn)
                return None
            fn = tempfn
        f.header = fitsio.read_header(fn, 0)
        # Allow later reading of just the ROI slice...
        f.image_proxy = fitsio.FITS(fn)[0]
        f.calib = fitsio.read(fn, ext=1)
        sky = fitsio.read(fn, ext=2,
                          columns=['allsky', 'xinterp', 'yinterp'])
        # ... supposed to be a recarray, but it's not...
        f.sky, f.skyxi, f.skyyi = sky.tolist()[0]
        tab = fits_table(fn, hdu=3)
        if not keep and tempfn is not None:
            os.remove(tempfn)
    else:
        p = pyfits.open(fn)
        # in nanomaggies
        f.image = p[0].data
        f.header = p[0].header
        # converts counts -> nanomaggies
        f.calib = p[1].data
        # table with val,x,y -- binned; use bilinear interpolation to expand
        sky = p[2].data
        # table -- asTrans structure
        tab = fits_table(p[3].data)
        f.sky = sky.field('allsky')[0]
        f.skyxi = sky.field('xinterp')[0]
        f.skyyi = sky.field('yinterp')[0]
    # Sky grid is stored flattened in some files; restore 2-D shape.
    if len(f.sky.shape) != 2:
        f.sky = f.sky.reshape((-1, 256))
    assert (len(tab) == 1)
    tab = tab[0]
    # DR7 has NODE, INCL in radians...
    f.astrans = AsTrans(run, camcol, field, band,
                        node=np.deg2rad(tab.node),
                        incl=np.deg2rad(tab.incl),
                        astrans=tab, cut_to_band=False)
    return f
def dojob(job, userimage, log=None, solve_command=None, solve_locally=None,
          tempfiles=None):
    """Run the full solve pipeline for one Job.

    Builds an augment-xylist command for the submission, runs the solver
    (either locally via 'solve_locally' or remotely via ssh/'solve_command'),
    and, if a WCS solution is produced, stores TanWCS / SkyLocation /
    Calibration records and marks the job solved ('S'), else failed ('F').

    Returns job.id.  Raises Exception on solver or augment-xylist failure.
    """
    print('dojob: tempdir:', tempfile.gettempdir())
    jobdir = job.make_dir()
    if log is None:
        log = create_job_logger(job)
    log.msg('Starting Job processing for', job)
    job.set_start_time()
    job.save()
    #os.chdir(dirnm) - not thread safe (working directory is global)!
    log.msg('Creating directory', jobdir)
    axyfn = 'job.axy'
    axypath = os.path.join(jobdir, axyfn)
    sub = userimage.submission
    log.msg('submission id', sub.id)
    df = userimage.image.disk_file
    img = userimage.image
    # Build command-line arguments for the augment-xylist program, which
    # detects sources in the image and adds processing arguments to the
    # header to produce a "job.axy" file.
    slo, shi = sub.get_scale_bounds()
    # Note, this must match Job.get_wcs_file().
    wcsfile = 'wcs.fits'
    corrfile = 'corr.fits'
    axyflags = []
    axyargs = {
        '--out': axypath,
        '--scale-low': slo,
        '--scale-high': shi,
        '--scale-units': sub.scale_units,
        '--wcs': wcsfile,
        '--corr': corrfile,
        '--rdls': 'rdls.fits',
        '--pixel-error': sub.positional_error,
        '--ra': sub.center_ra,
        '--dec': sub.center_dec,
        '--radius': sub.radius,
        '--downsample': sub.downsample_factor,
        # tuning-up maybe fixed; if not, turn it off with:
        #'--odds-to-tune': 1e9,
    }
    if hasattr(img, 'sourcelist'):
        # image is a source list; use --xylist
        axyargs['--xylist'] = img.sourcelist.get_fits_path(
            tempfiles=tempfiles)
        w, h = img.width, img.height
        if sub.image_width:
            w = sub.image_width
        if sub.image_height:
            h = sub.image_height
        axyargs['--width'] = w
        axyargs['--height'] = h
    else:
        axyargs['--image'] = df.get_path()
    # UGLY
    if sub.parity == 0:
        axyargs['--parity'] = 'pos'
    elif sub.parity == 1:
        axyargs['--parity'] = 'neg'
    if sub.tweak_order == 0:
        axyflags.append('--no-tweak')
    else:
        axyargs['--tweak-order'] = '%i' % sub.tweak_order
    if sub.use_sextractor:
        axyflags.append('--use-source-extractor')
    if sub.crpix_center:
        axyflags.append('--crpix-center')
    if sub.invert:
        axyflags.append('--invert')
    cmd = 'augment-xylist '
    for (k, v) in list(axyargs.items()):
        if v:
            cmd += k + ' ' + str(v) + ' '
    for k in axyflags:
        cmd += k + ' '
    log.msg('running: ' + cmd)
    (rtn, out, err) = run_command(cmd)
    if rtn:
        log.msg('out: ' + out)
        log.msg('err: ' + err)
        logmsg('augment-xylist failed: rtn val', rtn, 'err', err)
        # FIX: give the bare 'raise Exception' a message for debuggability.
        raise Exception('augment-xylist failed for job %s' % job.id)
    log.msg('created axy file', axypath)
    # shell into compute server...
    logfn = job.get_log_file()
    # the "tar" commands both use "-C" to chdir, and the ssh command
    # and redirect uses absolute paths.
    if solve_locally is not None:
        # Run the solver directly on this machine.
        cmd = (('cd %(jobdir)s && %(solvecmd)s %(jobid)s %(axyfile)s >> ' +
                '%(logfile)s') %
               dict(jobid='job-%s-%i' % (settings.sitename, job.id),
                    solvecmd=solve_locally,
                    axyfile=axyfn,
                    jobdir=jobdir,
                    logfile=logfn))
        log.msg('command:', cmd)
        w = os.system(cmd)
        if not os.WIFEXITED(w):
            log.msg('Solver failed (sent signal?)')
            logmsg('Call to solver failed for job', job.id)
            raise Exception('solver killed by signal for job %s' % job.id)
        rtn = os.WEXITSTATUS(w)
        if rtn:
            log.msg('Solver failed with return value %i' % rtn)
            logmsg('Call to solver failed for job', job.id,
                   'with return val', rtn)
            raise Exception('solver failed (rtn %i) for job %s' %
                            (rtn, job.id))
        log.msg('Solver completed successfully.')
    else:
        # Stream the axy file to the compute server over ssh and stream
        # the result files back.
        if solve_command is None:
            solve_command = 'ssh -x -T %(sshconfig)s'
        cmd = ((
            '(echo %(jobid)s; '
            'tar cf - --ignore-failed-read -C %(jobdir)s %(axyfile)s) | ' +
            solve_command + ' 2>>%(logfile)s | '
            'tar xf - --atime-preserve -m --exclude=%(axyfile)s -C %(jobdir)s '
            '>>%(logfile)s 2>&1') %
               dict(jobid='job-%s-%i' % (settings.sitename, job.id),
                    axyfile=axyfn,
                    jobdir=jobdir,
                    sshconfig=settings.ssh_solver_config,
                    logfile=logfn))
        log.msg('command:', cmd)
        w = os.system(cmd)
        if not os.WIFEXITED(w):
            log.msg('Solver failed (sent signal?)')
            logmsg('Call to solver failed for job', job.id)
            raise Exception('solver killed by signal for job %s' % job.id)
        rtn = os.WEXITSTATUS(w)
        if rtn:
            log.msg('Solver failed with return value %i' % rtn)
            logmsg('Call to solver failed for job', job.id,
                   'with return val', rtn)
            raise Exception('solver failed (rtn %i) for job %s' %
                            (rtn, job.id))
        log.msg('Solver completed successfully.')
    # Solved?
    wcsfn = os.path.join(jobdir, wcsfile)
    log.msg('Checking for WCS file', wcsfn)
    if os.path.exists(wcsfn):
        log.msg('WCS file exists')
        # Parse the wcs.fits file
        wcs = Tan(wcsfn, 0)
        # Convert to database model...
        tan = TanWCS(crval1=wcs.crval[0], crval2=wcs.crval[1],
                     crpix1=wcs.crpix[0], crpix2=wcs.crpix[1],
                     cd11=wcs.cd[0], cd12=wcs.cd[1],
                     cd21=wcs.cd[2], cd22=wcs.cd[3],
                     imagew=img.width, imageh=img.height)
        tan.save()
        log.msg('Created TanWCS:', tan)
        # Find field's healpix nside and index
        ra, dec, radius = tan.get_center_radecradius()
        nside = anutil.healpix_nside_for_side_length_arcmin(radius * 60)
        # Round to the nearest power of two, never below 1.
        nside = int(2**round(math.log(nside, 2)))
        nside = max(1, nside)
        healpix = anutil.radecdegtohealpix(ra, dec, nside)
        try:
            sky_location, created = SkyLocation.objects.get_or_create(
                nside=nside, healpix=healpix)
        except MultipleObjectsReturned:
            log.msg('Multiple SkyLocations for nside %i, healpix %i' %
                    (nside, healpix))
            # arbitrarily take the first one.
            sky_location = SkyLocation.objects.filter(nside=nside,
                                                      healpix=healpix)[0]
        log.msg('SkyLocation:', sky_location)
        # Find bounds for the Calibration object.
        r0, r1, d0, d1 = wcs.radec_bounds()
        # Find cartesian coordinates of the field center.
        ra *= math.pi / 180
        dec *= math.pi / 180
        tempr = math.cos(dec)
        x = tempr * math.cos(ra)
        y = tempr * math.sin(ra)
        z = math.sin(dec)
        r = radius / 180 * math.pi
        calib = Calibration(raw_tan=tan,
                            ramin=r0, ramax=r1, decmin=d0, decmax=d1,
                            x=x, y=y, z=z, r=r,
                            sky_location=sky_location)
        calib.save()
        log.msg('Created Calibration', calib)
        job.calibration = calib
        job.save()  # save calib before adding machine tags
        job.status = 'S'
        job.user_image.add_machine_tags(job)
        job.user_image.add_sky_objects(job)
    else:
        job.status = 'F'
    job.set_end_time()
    job.save()
    log.msg('Finished job', job.id)
    logmsg('Finished job', job.id)
    return job.id
def annotated_image(req, jobid=None, size='full'):
    # Django view: run plotann.py over the solved image for Job 'jobid' and
    # return the annotated picture as a JPEG HttpResponse.
    # size='display' scales annotations to the display-sized image;
    # anything else uses the full-size image.
    # NOTE(review): older duplicate of the annotated_image defined earlier
    # in this file (that one adds corr/newer behavior).
    job = get_object_or_404(Job, pk=jobid)
    ui = job.user_image
    img = ui.image
    if size == 'display':
        scale = float(img.get_display_image().width)/img.width
        img = img.get_display_image()
    else:
        scale = 1.0
    wcsfn = job.get_wcs_file()
    pnmfn = img.get_pnm_path()
    annfn = get_temp_file()
    # Catalog files used for annotation.
    #datadir = os.path.join(os.path.dirname(os.path.dirname(settings.WEB_DIR)), 'data')
    catdir = settings.CAT_DIR
    uzcfn = os.path.join(catdir, 'uzc2000.fits')
    abellfn = os.path.join(catdir, 'abell-all.fits')
    #hdfn = os.path.join(os.path.dirname(os.path.dirname(settings.WEB_DIR)),
    #'net', 'hd.fits')
    hdfn = settings.HENRY_DRAPER_CAT
    tycho2fn = settings.TYCHO2_KD
    rad = job.calibration.get_radius()
    #logmsg('pnm file: %s' % pnmfn)
    args = ['plotann.py --no-grid --toy -10',
            '--scale %s' % (str(scale)),]
    # Only include dense catalogs for sufficiently zoomed-in fields.
    #if rad < 10.:
    if rad < 1.:
        args.extend([#'--uzccat %s' % uzcfn,
                     '--abellcat %s' % abellfn,
                     '--hdcat %s' % hdfn
                     ])
    if rad < 0.25:
        args.append('--tycho2cat %s' % tycho2fn)
    # Suppress cluttered annotations on wide fields.
    #if rad > 20:
    if rad > 10:
        args.append('--no-ngc')
    if rad > 30:
        args.append('--no-bright')
    cmd = ' '.join(args + ['%s %s %s' % (wcsfn, pnmfn, annfn)])
    #cmd = 'plot-constellations -w %s -i %s -o %s -s %s -N -C -B -c' % (wcsfn, pnmfn, annfn, str(scale))
    import sys
    # (rtn,out,err) = run_command('which plotann.py; echo pyp $PYTHONPATH; echo path $PATH; echo llp $LD_LIBRARY_PATH; echo "set"; set')
    # return HttpResponse('which: ' + out + err + '<br>sys.path<br>' + '<br>'.join(sys.path) +
    #                     "<br>PATH " + os.environ['PATH'] +
    #                     "<br>LLP " + os.environ['LD_LIBRARY_PATH'] +
    #                     "<br>sys.path " + ':'.join(sys.path) +
    #                     "<br>cmd " + cmd)
    # plotann.py runs as a subprocess; make sure it sees our sys.path.
    os.environ['PYTHONPATH'] = ':'.join(sys.path)
    logmsg('Running: ' + cmd)
    #logmsg('PYTHONPATH: ' + os.environ['PYTHONPATH'])
    #logmsg('PATH: ' + os.environ['PATH'])
    #(rtn,out,err) = run_command('which plotann.py')
    #logmsg('which plotann.py: ' + out)
    (rtn, out, err) = run_command(cmd)
    if rtn:
        logmsg('out: ' + out)
        logmsg('err: ' + err)
        return HttpResponse('plot failed: ' + err + "<br><pre>" + out
                            + "</pre><br><pre>" + err + "</pre>")
    # NOTE(review): file handle is opened in text mode and never explicitly
    # closed (HttpResponse reads it); the newer copy of this view fixes that.
    f = open(annfn)
    res = HttpResponse(f)
    #res['Content-Type'] = 'image/png'
    # plotann.py produces jpeg by default
    res['Content-Type'] = 'image/jpeg'
    return res
def dojob(job, userimage, log=None):
    # Run the full solve pipeline for one Job: build an augment-xylist
    # command for the submission, ship job.axy to the compute server over
    # ssh, and if a WCS solution comes back, store TanWCS / SkyLocation /
    # Calibration records and mark the job solved ('S'), else failed ('F').
    # Returns job.id; raises Exception on augment-xylist or solver failure.
    # NOTE(review): older duplicate of the dojob defined earlier in this
    # file (that one adds solve_locally/solve_command and corr.fits).
    jobdir = job.make_dir()
    #print 'Created job dir', jobdir
    #log = create_job_logger(job)
    #jobdir = job.get_dir()
    if log is None:
        log = create_job_logger(job)
    log.msg('Starting Job processing for', job)
    job.set_start_time()
    job.save()
    #os.chdir(dirnm) - not thread safe (working directory is global)!
    log.msg('Creating directory', jobdir)
    axyfn = 'job.axy'
    axypath = os.path.join(jobdir, axyfn)
    sub = userimage.submission
    log.msg('submission id', sub.id)
    df = userimage.image.disk_file
    img = userimage.image
    # Build command-line arguments for the augment-xylist program, which
    # detects sources in the image and adds processing arguments to the header
    # to produce a "job.axy" file.
    slo,shi = sub.get_scale_bounds()
    # Note, this must match Job.get_wcs_file().
    wcsfile = 'wcs.fits'
    axyflags = []
    axyargs = {
        '--out': axypath,
        '--scale-low': slo,
        '--scale-high': shi,
        '--scale-units': sub.scale_units,
        '--wcs': wcsfile,
        '--rdls': 'rdls.fits',
        '--pixel-error': sub.positional_error,
        '--ra': sub.center_ra,
        '--dec': sub.center_dec,
        '--radius': sub.radius,
        '--downsample': sub.downsample_factor,
        # tuning-up maybe fixed; if not, turn it off with:
        #'--odds-to-tune': 1e9,
        # Other things we might want include...
        # --invert
        # -g / --guess-scale: try to guess the image scale from the FITS headers
        # --crpix-x <pix>: set the WCS reference point to the given position
        # --crpix-y <pix>: set the WCS reference point to the given position
        # -w / --width <pixels>: specify the field width
        # -e / --height <pixels>: specify the field height
        # -X / --x-column <column-name>: the FITS column name
        # -Y / --y-column <column-name>
    }
    if hasattr(img,'sourcelist'):
        # image is a source list; use --xylist
        axyargs['--xylist'] = img.sourcelist.get_fits_path()
        w,h = img.width, img.height
        if sub.image_width:
            w = sub.image_width
        if sub.image_height:
            h = sub.image_height
        axyargs['--width' ] = w
        axyargs['--height'] = h
    else:
        axyargs['--image'] = df.get_path()
    # UGLY
    if sub.parity == 0:
        axyargs['--parity'] = 'pos'
    elif sub.parity == 1:
        axyargs['--parity'] = 'neg'
    if sub.tweak_order == 0:
        axyflags.append('--no-tweak')
    else:
        axyargs['--tweak-order'] = '%i' % sub.tweak_order
    if sub.use_sextractor:
        axyflags.append('--use-sextractor')
    if sub.crpix_center:
        axyflags.append('--crpix-center')
    if sub.invert:
        axyflags.append('--invert')
    # Assemble the final command line from args (skipping empty values)
    # and flags.
    cmd = 'augment-xylist '
    for (k,v) in axyargs.items():
        if v:
            cmd += k + ' ' + str(v) + ' '
    for k in axyflags:
        cmd += k + ' '
    log.msg('running: ' + cmd)
    (rtn, out, err) = run_command(cmd)
    if rtn:
        log.msg('out: ' + out)
        log.msg('err: ' + err)
        logmsg('augment-xylist failed: rtn val', rtn, 'err', err)
        raise Exception
    log.msg('created axy file', axypath)
    # shell into compute server...
    logfn = job.get_log_file()
    # the "tar" commands both use "-C" to chdir, and the ssh command
    # and redirect uses absolute paths.
    cmd = ('(echo %(jobid)s; '
           'tar cf - --ignore-failed-read -C %(jobdir)s %(axyfile)s) | '
           'ssh -x -T %(sshconfig)s 2>>%(logfile)s | '
           'tar xf - --atime-preserve -m --exclude=%(axyfile)s -C %(jobdir)s '
           '>>%(logfile)s 2>&1' %
           dict(jobid='job-%s-%i' % (settings.sitename, job.id),
                axyfile=axyfn, jobdir=jobdir,
                sshconfig=settings.ssh_solver_config,
                logfile=logfn))
    log.msg('command:', cmd)
    w = os.system(cmd)
    if not os.WIFEXITED(w):
        log.msg('Solver failed (sent signal?)')
        logmsg('Call to solver failed for job', job.id)
        raise Exception
    rtn = os.WEXITSTATUS(w)
    if rtn:
        log.msg('Solver failed with return value %i' % rtn)
        logmsg('Call to solver failed for job', job.id, 'with return val', rtn)
        raise Exception
    log.msg('Solver completed successfully.')
    # Solved?
    wcsfn = os.path.join(jobdir, wcsfile)
    log.msg('Checking for WCS file', wcsfn)
    if os.path.exists(wcsfn):
        log.msg('WCS file exists')
        # Parse the wcs.fits file
        wcs = Tan(wcsfn, 0)
        # Convert to database model...
        tan = TanWCS(crval1=wcs.crval[0], crval2=wcs.crval[1],
                     crpix1=wcs.crpix[0], crpix2=wcs.crpix[1],
                     cd11=wcs.cd[0], cd12=wcs.cd[1],
                     cd21=wcs.cd[2], cd22=wcs.cd[3],
                     imagew=img.width, imageh=img.height)
        tan.save()
        log.msg('Created TanWCS:', tan)
        # Find field's healpix nside and index
        ra, dec, radius = tan.get_center_radecradius()
        nside = anutil.healpix_nside_for_side_length_arcmin(radius*60)
        # Round nside to the nearest power of two.
        nside = int(2**round(math.log(nside, 2)))
        healpix = anutil.radecdegtohealpix(ra, dec, nside)
        sky_location, created = SkyLocation.objects.get_or_create(nside=nside, healpix=healpix)
        log.msg('SkyLocation:', sky_location)
        # Find bounds for the Calibration object.
        r0,r1,d0,d1 = wcs.radec_bounds()
        # Find cartesian coordinates of the field center (unit sphere).
        ra *= math.pi/180
        dec *= math.pi/180
        tempr = math.cos(dec)
        x = tempr*math.cos(ra)
        y = tempr*math.sin(ra)
        z = math.sin(dec)
        r = radius/180*math.pi
        calib = Calibration(raw_tan=tan, ramin=r0, ramax=r1, decmin=d0, decmax=d1,
                            x=x,y=y,z=z,r=r,
                            sky_location=sky_location)
        calib.save()
        log.msg('Created Calibration', calib)
        job.calibration = calib
        job.save() # save calib before adding machine tags
        job.status = 'S'
        job.user_image.add_machine_tags(job)
        job.user_image.add_sky_objects(job)
    else:
        job.status = 'F'
    job.set_end_time()
    job.save()
    log.msg('Finished job', job.id)
    logmsg('Finished job',job.id)
    return job.id