Commit 07d04536 authored by skamann

Started translation of INITFIT into proper class.

parent 3a08eade
import logging
import sys
from multiprocessing import cpu_count, Pool
import numpy as np
from .background import BackgroundGrid
from .sources import Sources
from ..instruments import Instrument, Muse, MusePixtable
from ..filters import ALIASES
from ..psf.profiles import *
from ..psf.variables import Variables
from ..utils.io import read_asciifile
from ..utils.image import find_offset
logger = logging.getLogger(__name__)
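# Initializer for the multiprocessing workers: the variance image is stored (flattened) in a
# module-level global so that each worker process can access it without it being passed along
# with every task.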
def init_worker(variance, *args):
global g_variance
g_variance = variance.flatten()
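# Render the flux contributions of a set of sources into a 1-dim. pixel array using the provided
# PSF instance. 'sources_data' must be a dictionary with a PSF instance under the key 'psf' and
# the per-source centroids and magnitudes under the keys 'uc', 'vc', and 'mag'.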
def image_from_psf(sources_data):
psf = sources_data['psf']
image = np.zeros((psf.n_pixel, ), dtype='<f4')
for i in range(len(sources_data['uc'])):
psf.uc = sources_data['uc'][i]
psf.vc = sources_data['vc'][i]
psf.mag = sources_data['mag'][i]
psf.reset()
image[psf.used] += psf.f
logger.debug('Image creation: Processed star {0}/{1}'.format(i+1, len(sources_data['uc'])))
return image
def estimate_snr(sources_data):
"""
Calculate the S/N ratio of all stars in the scene.
Parameters
----------
psf_instances : list
A list containing a PSF-instance for each every source in the
scene. This is required to determine the flux distribution
across the FoV
variance : nd_array
The average uncertainties in the individual spaxels.
Returns
-------
snratios : nd_array
A 1dim. array containing the S/N-ratio for each source
totfluxes : float
The integrated flux of all point sources over the individual
spaxels.
"""
global g_variance
snratios = []
totfluxes = 0.
psf = sources_data['psf']
for i in range(len(sources_data['uc'])):
psf.uc = sources_data['uc'][i]
psf.vc = sources_data['vc'][i]
psf.mag = sources_data['mag'][i]
psf.reset()
if (g_variance[psf.used] <= 0).any():
logger.error("Sigma image contains invalid pixels with values <=0.")
# measure S/N
snratios.append(np.sqrt(((psf.f / np.sqrt(g_variance[psf.used])) ** 2).sum()))
logger.debug("Estimated S/N for source {0}/{1}: {2}.".format(i+1, len(sources_data['uc']), snratios[-1]))
# add the flux contribution of this source in the FoV
totfluxes += psf.f.sum()
return np.array(snratios, dtype=np.float32), totfluxes
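# The SourceSelector class collects the steps needed to prepare an initial fit: reading the
# photometry catalogue, setting up the PSF and background components, creating mock and IFS
# broadband images, cross-correlating them, and calibrating the photometric zeropoint and the
# S/N estimates of the sources.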
class SourceSelector(object):
def __init__(self, photometry_file, ifs_data, passband="f814w"):
logger.info('Starting source selection based on {0}-magnitudes.'.format(passband.upper()))
self.passband = passband
assert isinstance(ifs_data, Instrument), 'Parameter "ifs_data" must be an instance of the Instrument class.'
self.ifs_data = ifs_data
self.use_wcs = self._check_wcs()
# get results from crowded-field photometry, initialize sources
logger.info('Reading photometry results from file "{0}".'.format(photometry_file))
# Note that the routine will first check if the first (uncommented) line in the file is a header line from
# which it can infer the column content. Only if that fails will it fall back on the association provided via
# 'usecols' and 'usenames'. The 'aliases' provide, for each valid passband, a list of other possible names
# the column may have.
self.cfp_results = read_asciifile(photometry_file, usecols=(0, 1, 2, 3, 4, 5),
usenames="id,x,y,{0},ra,dec".format(self.passband),
optional="x,y,ra,dec", aliases=ALIASES)
# make sure IDs are available
if "id" not in self.cfp_results.keys():
logger.critical("Did not find IDs in photometry file.")
raise IOError("Source IDs must be provided with photometry")
# make sure magnitudes are available
if self.passband.lower() not in self.cfp_results.keys():
logger.critical("Did not find magnitudes in photometry file.")
raise IOError("Source magnitudes must be provided with photometry.")
# make sure either RA & Dec or x & y are available; if both are available, RA & Dec are preferred over x & y
if self.use_wcs:
if "ra" not in self.cfp_results.keys() or "dec" not in self.cfp_results.keys():
self.use_wcs = False
if self.use_wcs:
logger.info("Using RA & DEC coordinates as input.")
ra = np.asarray(self.cfp_results["ra"], dtype=np.float64)
dec = np.asarray(self.cfp_results["dec"], dtype=np.float64)
# get IFS spaxel coordinates from reference RA and DEC
if self.ifs_data.wcs.wcs.naxis == 3:
x, y, _ = self.ifs_data.wcs.wcs_world2pix(ra, dec, np.ones(ra.shape), 1)
else:
x, y = self.ifs_data.wcs.wcs_world2pix(ra, dec, 1)
self.cfp_results["x"] = x.tolist()
self.cfp_results["y"] = y.tolist()
else:
if "x" not in self.cfp_results.keys() or "y" not in self.cfp_results.keys():
logger.critical("Cannot find input source coordinates.")
raise IOError("Either x & y or RA & Dec must be provided with photometry.")
# Perform sanity checks on provided source list:
self.valid = ~np.isnan(np.asarray(self.cfp_results[passband.lower()], dtype=np.float32))
if not self.valid.all():
logger.warning("Ignoring {0} source(s) with undefined magnitudes.".format((~self.valid).sum()))
if len(np.unique(self.cfp_results["id"])) < len(self.cfp_results["id"]):
logger.error("Catalog contains duplicated IDs.")
self.sources = None
self.psf_class = None
self.psf_attributes = None
self.identification_image = None
self.data_image = None
self.var_image = None
self.ccxoff = 0.0
self.ccyoff = 0.0
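# Check whether the IFS data carry a usable WCS: at least two axes, reference coordinates
# different from zero, and a CD matrix that differs from the (default) unit matrix.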
def _check_wcs(self):
if self.ifs_data.wcs is None:
return False
elif self.ifs_data.wcs.naxis < 2:
return False
elif (self.ifs_data.wcs.wcs.crval[:2] == 0).any():
return False
elif np.linalg.det(self.ifs_data.wcs.wcs.cd[:2, :2]) == 1.0:
return False
return True
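# Append the background components as pseudo-sources with negative IDs and zero magnitude. For
# skysize <= 0 a single constant background is used, otherwise a grid of components scaled by
# 'skysize' is created and components located more than 'skysize' outside the FoV are skipped.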
def _background_setup(self, shape, skysize=-1):
if skysize <= 0:
background = BackgroundGrid.constant()
else:
background = BackgroundGrid.from_shape(shape=shape, rscale=skysize/2.)
for i, [y, x] in enumerate(background.centroids):
# ignore sky components outside FoV
if self.ifs_data.distance_to_edge([[x, y]])[0] < -skysize and skysize > 0:
continue
self.valid = np.r_[self.valid, True]
self.cfp_results["id"].append(str(-i-1))
self.cfp_results["x"].append(str(x))
self.cfp_results["y"].append(str(y))
self.cfp_results[self.passband.lower()].append("0")
if "ra" in self.cfp_results.keys():
self.cfp_results["ra"].append("0")
if "dec" in self.cfp_results.keys():
self.cfp_results["dec"].append("0")
def _make_sources_instance(self):
self.sources = Sources(ids=np.asarray(self.cfp_results["id"], dtype=np.int32)[self.valid])
self.sources.x = np.asarray(self.cfp_results["x"], dtype=np.float32)[self.valid]
self.sources.y = np.asarray(self.cfp_results["y"], dtype=np.float32)[self.valid]
self.sources.mag = np.asarray(self.cfp_results[self.passband.lower()], dtype=np.float32)[self.valid]
if "ra" in self.cfp_results.keys():
self.sources.ra = np.asarray(self.cfp_results["ra"], dtype=np.float64)[self.valid]
if "dec" in self.cfp_results.keys():
self.sources.dec = np.asarray(self.cfp_results["dec"], dtype=np.float64)[self.valid]
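# Initialize the PSF profile ('moffat', 'gauss', or 'double_gauss') and its parameters. If no
# parameters are provided, the default values beta=2.5 and fwhm=3.0 (both kept fixed) are used.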
def prepare_psf(self, profile='moffat', parameters=None):
if profile == "moffat":
self.psf_class = Moffat
elif profile == "gauss":
self.psf_class = Gaussian
elif profile == "double_gauss":
self.psf_class = DoubleGaussian
else:
logger.error('Unknown PSF type: "{0}"'.format(profile))
return
if parameters is None:
parameters = [{'name': 'beta', 'value': 2.5, 'free': False},
{'name': 'fwhm', 'value': 3.0, 'free': False}]
self.psf_attributes = Variables(profile=profile)
for parameter in parameters:
# include value and fit request in PSF variables
j = self.psf_attributes.names.index(parameter['name'])
self.psf_attributes.data[0, j, 0] = parameter['value']
self.psf_attributes.free[j] = parameter['free']
self.psf_attributes.info()
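# Create a mock broadband image of the catalogue sources brighter than 'reslim' by evaluating
# the PSF for every such source; the work can be spread over multiple CPUs.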
def create_mock_image(self, reslim, zeropoint, n_cpu=1):
logger.info("Creating mock image from photometry catalogue...")
if self.sources is None:
logger.warning('Need to initialize instance of Sources-class first.')
self._make_sources_instance()
if self.psf_class is None:
logger.warning('Need to initialize PSF first.')
self.prepare_psf()
# make sure that at least some sources are resolved according to the 'reslim' parameter.
# If not, log a critical error and abort.
if not (self.sources.mag[~self.sources.is_background] < reslim).any():
logger.critical("All sources near FoV fainter than resolution limit (RESLIM={0}).".format(reslim))
return None
shape = (int(self.sources.y.max()+1), int(self.sources.x.max()+1))
# to get reasonable count rates in mock image, adjust zeropoint unless provided
if zeropoint > 99.:
zeropoint = reslim + 7.5
# initialize single PSF instance per CPU
logger.info(" adding sources, working on {0} CPUs...".format(n_cpu))
if n_cpu is None or n_cpu == -1:
n_cpu = cpu_count() # use all available CPUs
psf_parameters = dict(
(name, self.psf_attributes.data[:, i, 0]) for i, name in enumerate(self.psf_attributes.names))
# only add sources that are likely resolved
include = np.flatnonzero(self.sources.mag[:-self.sources.nbackground] < reslim)
to_cpu = np.arange(include.size) % n_cpu
args = []
for n in range(n_cpu):
used = include[to_cpu == n]
psf = self.psf_class.from_shape(shape=shape, uc=0., vc=0., mag=0., maxrad=10., **psf_parameters)
args.append({'psf': psf, 'uc': self.sources.x[used], 'vc': self.sources.y[used],
'mag': self.sources.mag[used] - zeropoint})
if n_cpu > 1:
pool = Pool(n_cpu)
_results = pool.map_async(image_from_psf, args)
_image = np.sum(_results.get(), axis=0)
else:
_image = image_from_psf(args[0])
# 3. resample image on regular (x, y) grid
logger.info(" resampling image on regular 2D grid...")
self.identification_image = np.zeros(shape, dtype=np.float32)
self.identification_image[psf.y.astype('<i4'), psf.x.astype('<i4')] = _image
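# Create a broadband image (and a matching variance image) by integrating the IFS data over the
# throughput curve of the selected passband.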
def create_image_from_ifs(self, n_bin=10):
logger.info("Creating mock {0}-band image from IFS data...".format(self.passband))
(self.data_image, self.var_image), weights = self.ifs_data.integrate_over_filtercurve(
self.passband, apply_variances=True, nbin=n_bin)
# The sigma values are valid for the broadband image and must be increased to be representative for a typical
# layer. As the image was obtained by integrating over a filter curve, the sigma values are multiplied by
# sqrt(N) where N is the effective width of the filter curve in units of the spectral sampling.
self.var_image *= weights[(np.arange(weights.size) % n_bin) == 0].sum()
mask = np.isnan(self.data_image)
if mask.all():
logger.error("{0}-image created from IFS data contains only NaNs.".format(self.passband))
logger.error("Check overlap between {0} throughput and IFS wavelength coverage.".format(self.passband))
# if a MUSE data cube has been provided, try to determine the edge of the field of view as the smallest
# polygon that includes all valid spaxels.
if mask.any() and isinstance(self.ifs_data, Muse):
self.ifs_data.edge = ~mask
self.var_image[mask] = np.inf
self.data_image[mask] = 0
if self.data_image.sum() <= 0:
logger.error("Flux sum in {0}-image is negative.".format(self.passband))
def cross_correlate_images(self, ccmaxoff, init_guess=(0, 0)):
if not self.use_wcs:
logger.error('Cannot perform cross-correlation unless valid WCS instance is available.')
return
if self.identification_image is None:
logger.warning('Need to create identification image for cross-correlation first.')
self.create_mock_image(reslim=np.median(self.sources.mag))
if self.data_image is None:
logger.warning('Need to create image from IFS data for cross-correlation first.')
self.create_image_from_ifs()
if self.data_image.ndim == 1:
cc_data_image = self.ifs_data.make_image(self.data_image, fill_value=0)
else:
cc_data_image = self.data_image
self.ccxoff, self.ccyoff = find_offset(cc_data_image, self.identification_image, max_off=int(ccmaxoff),
init_guess=init_guess)
# Note that the offsets are applied with negative signs because the cross-correlation routine determines
# the offset from the reference image to the IFS data, while we need it the other way round.
logger.info(" pointing offset from cross-correlation dx={0:.1f}, dy={1:.1f}".format(self.ccxoff, self.ccyoff))
# initialize/update coordinate transformation
self.sources.transformation.data = np.array([1., 0., 0., 1., -self.ccxoff, -self.ccyoff], dtype=np.float32)
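# Open the interactive GUI that allows the user to identify catalogue sources in the IFS data.
# If at least two sources are identified, the coordinate transformation is (re-)fitted from the
# identified positions.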
def identify_footprint(self, reslim=20.):
from PyQt4 import QtGui
from ..graphics import PampelMuseGui
logger.info("Preparing GUI for source identification...")
app = QtGui.QApplication(sys.argv)
identifier = PampelMuseGui(sources=self.sources, ifsData=self.ifs_data, refImage=self.identification_image)
identifier.maxMagSpinBox.setValue(reslim)
if self.use_wcs: # overplot positions of sources on IFS as determined via cross-correlation
identifier.addFovToReference(self.ccxoff + self.ifs_data.shape[1]//2,
self.ccyoff + self.ifs_data.shape[0]//2)
sizes = 10.*(10**(-0.2*(self.sources.mag - reslim - 1.)))
sizes[sizes <= 1] = 0
select = (self.sources.status < 4) & (
self.sources.mag <= reslim) & (
self.ifs_data.infov(np.vstack((self.sources.x - self.ccxoff, self.sources.y - self.ccyoff)).T))
identifier.ifsPlot.addData(self.sources.x[select] - self.ccxoff, self.sources.y[select] - self.ccyoff,
s=sizes[select], marker="o", edgecolor="g", facecolor="None")
identifier.show()
app.exec_()
identified = identifier.identifiedSources
if len(identified) >= 2:
# Determine the coordinate transformation using the identified sources. When providing the IFS coordinates
# for the identified sources, they need to be given in the right order (the sources were not necessarily
# identified in the same order in which they appear in the catalogue).
sorted_indices = np.argsort([list(self.sources.ID).index(entry) for entry in identified.keys()])
self.sources.load_ifs_coordinates(ids=identified.keys())
self.sources.ifs_coordinates[..., 0] = np.asarray(identified.values())[sorted_indices, :2]
self.sources.fit_coordinate_transformation()
elif not self.sources.transformation.valid:
logger.error("At least 2 sources must be identified.")
def calibrate_photometry(self, n_cpu, zeropoint=25.):
# Perform photometric calibration (unless provided) and S/N estimation:
# This is done as follows:
# 1) Based on a hypothetical zeropoint, the flux profiles of all sources are determined and a hypothetical S/N
# estimate is obtained. This step is parallelized and also yields an estimate of the total flux in the image.
# 2) The integrated flux is compared to the sum of all pixel values in the broadband image created from the IFS
# data before. The ratio of these two fluxes can be converted into a magnitude difference between the
# hypothetical and the real zeropoint.
# 3) All S/N estimates are corrected for this magnitude difference
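# In terms of the quantities computed below: fratio = F_model / F_data, dmag = -2.5*log10(fratio),
# the zeropoint is updated to zeropoint + dmag, and the S/N estimates are divided by fratio.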
logger.info("Estimating S/N for all sources...")
if self.var_image is None:
logger.warning('Need to calculate variances before estimating S/N.')
self.create_image_from_ifs()
psf_parameters = dict(
(name, self.psf_attributes.data[:, i, 0]) for i, name in enumerate(self.psf_attributes.names))
# Assign sources to CPUs
to_cpu = np.arange(self.sources.nsrc - self.sources.nbackground) % n_cpu
args = []
for n in range(n_cpu):
used = np.flatnonzero(to_cpu == n)
if isinstance(self.ifs_data, MusePixtable):
psf = self.psf_class(shape=self.var_image.shape, uc=0., vc=0., mag=0., maxrad=10., **psf_parameters)
else:
psf = self.psf_class(transform=self.ifs_data.transform, uc=0., vc=0., mag=0., maxrad=10.,
**psf_parameters)
args.append({'psf': psf, 'uc': self.sources.x[used], 'vc': self.sources.y[used],
'mag': self.sources.mag[used] - zeropoint})
# estimate S/N and integrated flux from sources in image
if n_cpu > 1:
pool = Pool(n_cpu, initializer=init_worker, initargs=(self.var_image,))
_results = pool.map_async(estimate_snr, args)
results = _results.get()
signal_to_noise = np.zeros(to_cpu.size, dtype='<f4')
for n in range(n_cpu):
signal_to_noise[to_cpu == n] = results[n][0]
intflux = np.sum([r[1] for r in results])
else:
init_worker(self.var_image)
signal_to_noise, intflux = estimate_snr(args[0])
logger.info("Estimating the photometric zeropoint...")
# Calculate ratio between expected flux for current zeropoint and flux in image
fratio = intflux/self.data_image.sum()
dmag = -2.5*np.log10(fratio)
# determine zeropoint if requested and update S/N estimates
signal_to_noise /= fratio
zeropoint += dmag
logger.info("Estimated photometric zeropoint ({0}) is {1:.2f}".format(self.passband, float(zeropoint)))
@@ -763,8 +763,9 @@ def initfit(pampelmuse):
continue
if (sources.primary == sources.ID[i]).sum() != 1:
continue
-d_edge = pampelmuse.ifs_data.distance_to_edge(ifs_coordinates[[i]])[0]
+j = sources.ifs_coordinates_loaded[:i].sum()
+d_edge = pampelmuse.ifs_data.distance_to_edge(ifs_coordinates[[j]])[0]
if d_edge < fwhm:
continue
elif d_edge < 2*fwhm and isinstance(pampelmuse.ifs_data, (Muse, MusePixtable)):