diff --git a/.travis.yml b/.travis.yml
index e1506c4..40a85ed 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,5 +1,7 @@
 language: python
-sudo: false
+sudo: true
+dist: xenial
+
 branches:
   only:
     - master
@@ -7,8 +9,10 @@ branches:
 
 python:
 - 2.7
+# - 3.4 Not in travis repositories
+- 3.5
 - 3.6
-# - 3.7
+- 3.7
 
 addons:
   apt:
diff --git a/aotools/__init__.py b/aotools/__init__.py
index 5916ae8..de9fe59 100644
--- a/aotools/__init__.py
+++ b/aotools/__init__.py
@@ -1,4 +1,4 @@
-from . import astronomy, functions, image_processing, wfs, turbulence
+from . import astronomy, functions, image_processing, wfs, turbulence, opticalpropagation
 
 from .astronomy import *
 from .functions import *
diff --git a/aotools/functions/karhunenLoeve.py b/aotools/functions/karhunenLoeve.py
index 30d457a..6d2eb89 100644
--- a/aotools/functions/karhunenLoeve.py
+++ b/aotools/functions/karhunenLoeve.py
@@ -9,27 +9,22 @@
 (wavefront modelling and reconstruction).
 A closely similar implementation can also be find in Yorick in the YAO package.
 
-USAGE
+Usage
 -----
-Main routine is 'make_kl' to generate KL basis of dimension [dim, dim, nmax].
+Main routine is 'make_kl' to generate KL basis of dimension ``[dim, dim, nmax]``.
 
-For Kolmogorov statistics, e.g.:
-    kl, _, _, _ = make_kl(150, 128, ri = 0.2, stf='kolmogorov')
+For Kolmogorov statistics, e.g. ::
 
-REQUIREMENTS
-------------
-numpy
-scipy
-
+    kl, _, _, _ = make_kl(150, 128, ri = 0.2, stf='kolmogorov')
 
-TO FIX
-------
- - make_kl with von Karman stf fails:
-    -> implemented but KL generation failed in 'while loop' of gkl_fcom...
+.. warning::
 
-@author: Gilles Orban de Xivry (ULiege)
-@date: November 2017
+    make_kl with von Karman stf fails. It has been implemented but KL generation failed in 'while loop' of gkl_fcom...
+
+.. codeauthor:: Gilles Orban de Xivry (ULiege)
+:date: November 2017
 '''
+
 import numpy as np
 import scipy
 from scipy.ndimage.interpolation import map_coordinates
@@ -449,8 +444,8 @@ def set_pctr(bas, ncp=None, ncmar=None):
 
     Parameters
     ----------
-    bas : dic 
-        gkl_basis dic built with the gkl_bas routine 
+    bas : dic
+        gkl_basis dic built with the gkl_bas routine
     '''
     if ncmar is None:
         ncmar = 2
@@ -505,11 +500,15 @@ def pcgeom(nr, npp, ncp, ri, ncmar):
     '''
     This routine builds a geom dic.
 
-    px, py : the x, y coordinates of points in the polar arrays.
-    cr, cp : the r, phi coordinates of points in the cartesian grids.
-    ncmar : allows the possibility that there is a margin of
-        ncmar points in the cartesian arrays outside the region of
-        interest.
+    Parameters
+    ----------
+    px, py : int
+        the x, y coordinates of points in the polar arrays.
+    cr, cp :
+        the r, phi coordinates of points in the cartesian grids.
+    ncmar :
+        allows the possibility that there is a margin of ncmar points in the
+        cartesian arrays outside the region of interest.
     '''
     nused = ncp - 2 * ncmar
     ff = 0.5 * nused
@@ -571,13 +570,14 @@ def make_kl(nmax, dim, ri=0.0, nr=40,
     '''
     Main routine to generatre a KL basis of dimension [nmax, dim, dim].
 
-    For Kolmogorov statistics, e.g.:
+    For Kolmogorov statistics, e.g. ::
+
         kl, _, _, _ = make_kl(150, 128, ri = 0.2, stf='kolmogorov')
 
-    As a rule of thumb
-    nr x npp = 50 x 250 is fine up to 500 functions
-        60 x 300 for a thousand
-        80 x 400 for three thousands.
+    | As a rule of thumb
+    | nr x npp = 50 x 250 is fine up to 500 functions
+    | 60 x 300 for a thousand
+    | 80 x 400 for three thousands.
 
     Parameters
     ----------
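The docstring rework above keeps `make_kl` as the entry point for building a Karhunen-Loeve basis. A rough sketch of how that call is used follows, assuming an installed aotools; the 20-mode, 64-pixel values are arbitrary stand-ins for the 150/128 docstring example.

# Illustrative sketch only, not part of this patch.
from aotools.functions import karhunenLoeve

# Build a small Kolmogorov KL basis: 20 modes on a 64 x 64 grid with a
# fractional inner radius of 0.2 (the docstring example uses 150 modes on a
# 128 x 128 grid, which takes noticeably longer to run).
kl, _, _, _ = karhunenLoeve.make_kl(20, 64, ri=0.2, stf='kolmogorov')

# The function docstring describes the basis as [nmax, dim, dim], so each
# kl[i] is expected to be one 64 x 64 mode.
print(kl.shape)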
diff --git a/aotools/image_processing/centroiders.py b/aotools/image_processing/centroiders.py
index 8f152df..5ada4de 100644
--- a/aotools/image_processing/centroiders.py
+++ b/aotools/image_processing/centroiders.py
@@ -9,7 +9,7 @@
 import numpy
 
 
-def correlation_centroid(im, ref, threshold=0.):
+def correlation_centroid(im, ref, threshold=0., padding=1):
     """
     Correlation Centroider, currently only works for 3d im shape.
     Performs a simple thresholded COM on the correlation.
@@ -18,6 +18,7 @@ def correlation_centroid(im, ref, threshold=0.):
         im: sub-aperture images (t, y, x)
         ref: reference image (y, x)
         threshold: fractional threshold for COM (0=all pixels, 1=brightest pixel)
+        padding: factor to zero-pad arrays in Fourier transforms
     Returns:
         ndarray: centroids of im (2, t), given in order x, y
     """
@@ -38,17 +39,20 @@
     centroids = numpy.zeros((2, nt))
     for frame in range(nt):
         # Correlate frame with reference image
-        corr = cross_correlate(im[frame], ref)
+        corr = cross_correlate(im[frame], ref, padding=padding)
 
         cx, cy = centreOfGravity(corr, threshold=threshold)
 
+        cy -= float(ny) / 2. * (float(padding) - 1)
+        cx -= float(nx) / 2. * (float(padding) - 1)
+
         centroids[:, frame] = cx, cy
 
     return centroids
 
 
 def centreOfGravity(img, threshold=0, **kwargs):
-    '''
+    """
     Centroids an image, or an array of images. Centroids over the last 2
     dimensions. Sets all values under "threshold*max_value" to zero before
     centroiding
@@ -61,22 +65,22 @@
 
     Returns:
         ndarray: Array of centroid values (2[, n])
-    '''
-    if threshold!=0:
-        if len(img.shape)==2:
+    """
+    if threshold != 0:
+        if len(img.shape) == 2:
             img = numpy.where(img>threshold*img.max(), img, 0 )
         else:
             img_temp = (img.T - threshold*img.max(-1).max(-1)).T
-            zero_coords = numpy.where(img_temp<0)
+            zero_coords = numpy.where(img_temp < 0)
             img[zero_coords] = 0
 
-    if len(img.shape)==2:
-        y_cent,x_cent = numpy.indices(img.shape)
+    if len(img.shape) == 2:
+        y_cent, x_cent = numpy.indices(img.shape)
         y_centroid = (y_cent*img).sum()/img.sum()
         x_centroid = (x_cent*img).sum()/img.sum()
 
     else:
-        y_cent, x_cent = numpy.indices((img.shape[-2],img.shape[-1]))
+        y_cent, x_cent = numpy.indices((img.shape[-2], img.shape[-1]))
         y_centroid = (y_cent*img).sum(-1).sum(-1)/img.sum(-1).sum(-1)
         x_centroid = (x_cent*img).sum(-1).sum(-1)/img.sum(-1).sum(-1)
 
@@ -116,19 +120,20 @@ def brightestPxl(img, threshold, **kwargs):
     return centreOfGravity(img)
 
 
-def cross_correlate(x, y):
+def cross_correlate(x, y, padding=1):
     """
     2D convolution using FFT, use to generate cross-correlations.
 
     Args:
         x (array): subap image
         y (array): reference image
+        padding (int): Factor to zero-pad arrays in Fourier transforms
 
     Returns:
         ndarray: cross-correlation of x and y
     """
-    reference_image = numpy.conjugate(numpy.fft.fft2(y))
+    reference_image = numpy.conjugate(numpy.fft.fft2(y, s=[y.shape[0] * padding, y.shape[1] * padding]))
+    frame = numpy.fft.fft2(x, s=[x.shape[0] * padding, x.shape[1] * padding])
 
-    frame = numpy.fft.fft2(x)
     cross_correlation = frame * reference_image
     cross_correlation = numpy.fft.fftshift(numpy.abs(numpy.fft.ifft2(cross_correlation)))
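Because `cross_correlate` now zero-pads both FFTs, the correlation array it returns is `padding` times larger along each axis, which is why `correlation_centroid` subtracts `n / 2 * (padding - 1)` from each centroid. A minimal sketch of that bookkeeping, assuming only numpy and the two functions changed above; the array sizes are arbitrary.

# Illustrative sketch only, not part of this patch.
import numpy
from aotools.image_processing import centroiders

ny = nx = 10
ref = numpy.random.random((ny, nx))
im = ref.copy()

for padding in (1, 2):
    corr = centroiders.cross_correlate(im, ref, padding=padding)
    # Zero-padded FFTs give a (ny * padding, nx * padding) correlation array.
    print(corr.shape)

    cx, cy = centroiders.centreOfGravity(corr)
    # Undo the shift introduced by the larger correlation array, exactly as
    # correlation_centroid does in the patch above.
    cy -= float(ny) / 2. * (float(padding) - 1)
    cx -= float(nx) / 2. * (float(padding) - 1)
    print(cx, cy)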
diff --git a/doc/source/zernike.rst b/doc/source/zernike.rst
index 127753f..66ec58b 100644
--- a/doc/source/zernike.rst
+++ b/doc/source/zernike.rst
@@ -1,8 +1,17 @@
+Circular Functions
+==================
+
 Zernike Modes
-=============
++++++++++++++
 
 .. automodule:: aotools.functions.zernike
     :members:
     :undoc-members:
     :show-inheritance:
 
+Karhunen Loeve Modes
+++++++++++++++++++++
+.. automodule:: aotools.functions.karhunenLoeve
+    :members:
+    :undoc-members:
+    :show-inheritance:
diff --git a/setup.py b/setup.py
index c29e16f..9f52ef0 100644
--- a/setup.py
+++ b/setup.py
@@ -24,5 +24,9 @@
     description='A set of useful functions for Adaptive Optics in Python',
     long_description=long_description,
     version=versioneer.get_version(),
-    cmdclass=versioneer.get_cmdclass()
+    cmdclass=versioneer.get_cmdclass(),
+    install_requires=[
+        'numpy',
+        'scipy',
+    ],
 )
diff --git a/test/test_centroiders.py b/test/test_centroiders.py
index ce4447f..0278cab 100644
--- a/test/test_centroiders.py
+++ b/test/test_centroiders.py
@@ -50,7 +50,7 @@ def test_quadCell_many():
 
 
 def test_convolution():
     im = numpy.random.random((10, 10))
     ref = numpy.random.random((10, 10))
-    corr = image_processing.cross_correlate(im, ref)
+    corr = image_processing.cross_correlate(im, ref, padding=1)
     assert(corr.shape == im.shape)
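A companion test for the new keyword could check the doubled correlation size with `padding=2`; the test below is a hypothetical addition, not part of this patch.

# Hypothetical companion test, not part of this patch.
import numpy

from aotools import image_processing


def test_convolution_padded():
    im = numpy.random.random((10, 10))
    ref = numpy.random.random((10, 10))
    corr = image_processing.cross_correlate(im, ref, padding=2)

    # Zero-padding by a factor of 2 doubles the correlation array size.
    assert corr.shape == (im.shape[0] * 2, im.shape[1] * 2)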