Simu: 8.1 Move beacon delay code into lib

Eric Teunis de Boone 2022-08-04 20:41:50 +02:00
parent 95e3af3d94
commit a575eca6b1
3 changed files with 217 additions and 223 deletions

View file

@@ -1,7 +1,6 @@
from . import signals
from . import location
from . import sampling
from .plotting import *
from .util import *

214 lib/beacon.py Normal file
View file

@@ -0,0 +1,214 @@
"""
Routines needed to analyse a beacon signal
"""
import numpy as np
from scipy import signal
# monkey patch correlation_lags into signal if it does not exist
if not hasattr(signal, 'correlation_lags'):
    def correlation_lags(in1_len, in2_len, mode='full'):
        r"""
        Calculates the lag / displacement indices array for 1D cross-correlation.
        Parameters
        ----------
        in1_len : int
            First input size.
        in2_len : int
            Second input size.
        mode : str {'full', 'valid', 'same'}, optional
            A string indicating the size of the output.
            See the documentation `correlate` for more information.
        See Also
        --------
        correlate : Compute the N-dimensional cross-correlation.
        Returns
        -------
        lags : array
            Returns an array containing cross-correlation lag/displacement indices.
            Indices can be indexed with the np.argmax of the correlation to return
            the lag/displacement.
        Notes
        -----
        Cross-correlation for continuous functions :math:`f` and :math:`g` is
        defined as:
        .. math::
            \left ( f\star g \right )\left ( \tau \right )
            \triangleq \int_{t_0}^{t_0 +T}
            \overline{f\left ( t \right )}g\left ( t+\tau \right )dt
        Where :math:`\tau` is defined as the displacement, also known as the lag.
        Cross correlation for discrete functions :math:`f` and :math:`g` is
        defined as:
        .. math::
            \left ( f\star g \right )\left [ n \right ]
            \triangleq \sum_{-\infty}^{\infty}
            \overline{f\left [ m \right ]}g\left [ m+n \right ]
        Where :math:`n` is the lag.
        Examples
        --------
        Cross-correlation of a signal with its time-delayed self.
        >>> from scipy import signal
        >>> from numpy.random import default_rng
        >>> rng = default_rng()
        >>> x = rng.standard_normal(1000)
        >>> y = np.concatenate([rng.standard_normal(100), x])
        >>> correlation = signal.correlate(x, y, mode="full")
        >>> lags = signal.correlation_lags(x.size, y.size, mode="full")
        >>> lag = lags[np.argmax(correlation)]
        """
        # calculate lag ranges in different modes of operation
        if mode == "full":
            # the output is the full discrete linear convolution
            # of the inputs. (Default)
            lags = np.arange(-in2_len + 1, in1_len)
        elif mode == "same":
            # the output is the same size as `in1`, centered
            # with respect to the 'full' output.
            # calculate the full output
            lags = np.arange(-in2_len + 1, in1_len)
            # determine the midpoint in the full output
            mid = lags.size // 2
            # determine lag_bound to be used with respect
            # to the midpoint
            lag_bound = in1_len // 2
            # calculate lag ranges for even and odd scenarios
            if in1_len % 2 == 0:
                lags = lags[(mid-lag_bound):(mid+lag_bound)]
            else:
                lags = lags[(mid-lag_bound):(mid+lag_bound)+1]
        elif mode == "valid":
            # the output consists only of those elements that do not
            # rely on the zero-padding. In 'valid' mode, either `in1` or `in2`
            # must be at least as large as the other in every dimension.

            # the lag_bound will be either negative or positive
            # this lets us infer how to present the lag range
            lag_bound = in1_len - in2_len
            if lag_bound >= 0:
                lags = np.arange(lag_bound + 1)
            else:
                lags = np.arange(lag_bound, 1)
        return lags

    signal.correlation_lags = correlation_lags
##### end of monkey patch correlation_lags
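
# Illustrative check of the fallback above (editor's sketch, toy values assumed):
#
#   >>> x = np.array([0., 1., 0., 0.])                  # pulse at sample 1
#   >>> y = np.array([0., 0., 1., 0.])                  # same pulse, one sample later
#   >>> corr = signal.correlate(x, y, mode='full')
#   >>> lags = signal.correlation_lags(len(x), len(y), mode='full')
#   >>> print(lags[np.argmax(corr)])
#   -1
#
# the peak sits at lag -1, i.e. y trails x by one sample, matching the behaviour
# of SciPy versions that ship correlation_lags natively.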

def beacon_time_delay(samplerate, ref_beacon, beacon):
    grid = correlation_grid(in1_len=len(ref_beacon), in2_len=len(beacon), mode='full')
    time_lag, errs = lag_gridsearch(grid, samplerate, ref_beacon, beacon)

    return time_lag, errs

def beacon_phase_delay(samplerate, f_beacon, ref_beacon, beacon):
    time_delay, errs = beacon_time_delay(samplerate, ref_beacon, beacon)

    phase = 2*np.pi*f_beacon*time_delay
    phase_err = 2*np.pi*f_beacon*errs

    return phase, phase_err
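
# Illustrative use of the two helpers above (editor's sketch; the sample rate and
# beacon frequency are assumed values in mutually consistent units):
#
#   >>> t = np.arange(200)
#   >>> samplerate, f_beacon = 1.0, 0.05              # samples / time unit, cycles / sample
#   >>> ref = np.sin(2*np.pi*f_beacon*t)
#   >>> sig = np.sin(2*np.pi*f_beacon*(t - 3))        # same beacon, three samples late
#   >>> phase, phase_err = beacon_phase_delay(samplerate, f_beacon, ref, sig)
#
# for a pure sine beacon the recovered delay (and hence the phase) is only
# determined up to a whole number of beacon periods, so compare phases modulo 2*pi.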

def lag_gridsearch(grid, sample_rate, reference, signal_data):
    """
    Return the best time shift found when doing a grid search.

    Parameters
    ----------
    grid - ndarray
        The array specifying the grid of lags that is to be searched.
    sample_rate - float
        Sample rate of signal_data to transform index to time.
    reference - ndarray
        Real signal to use as reference to obtain lag.
    signal_data - ndarray
        The real signal to find the time shift for.

    Returns
    -------
    lag : ndarray
        The best time shift obtained.
    err : ndarray
        Difference to the previous and next time shift from lag, resp.
    """
    assert signal_data.shape >= reference.shape, str(signal_data.shape) + " " + str(reference.shape)

    corrs = grid_correlate(grid, reference, signal_data)

    idx = np.argmax(corrs)

    lag = grid[idx]/sample_rate

    err_min = (grid[idx-1]-grid[idx])/(2*sample_rate)
    err_plus = (grid[idx+1]-grid[idx])/(2*sample_rate)

    return lag, np.array([err_min, err_plus])
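
# Editor's sketch of a direct call with a hand-built lag grid (toy values assumed):
#
#   >>> ref  = np.array([0., 1., 0.])
#   >>> data = np.array([0., 0., 0., 1., 0., 0.])   # ref delayed by two samples
#   >>> grid = np.arange(6)                          # candidate lags in samples
#   >>> lag, errs = lag_gridsearch(grid, 1.0, ref, data)
#   >>> print(lag, errs)
#   2.0 [-0.5  0.5]
#
# i.e. the best shift is two samples, with the grid spacing setting the error estimate.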

def grid_correlate(grid, reference, x):
    """
    Determine correlation between x and reference using grid as
    the lags to be used for the correlation.

    Parameters
    ----------
    grid - ndarray
        The array specifying the grid that is to be searched.
    x - ndarray
        The real signal to find the time shift for.
    reference - ndarray
        Real signal to use as reference to obtain lag.

    Returns
    -------
    corrs - ndarray
        The correlations along grid.
    """
    grid = np.asarray(grid)
    x = np.asarray(x)
    reference = np.asarray(reference)

    assert x.shape >= reference.shape, str(x.shape) + " " + str(reference.shape)

    reference = np.pad(reference, (0, len(x)-len(reference)), 'constant', constant_values=0)

    ref_conj = np.conjugate(reference)

    corrs = np.array([np.dot(np.roll(ref_conj, lag), x) for lag in grid], dtype=np.float64)

    return corrs
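
# Editor's note: grid_correlate evaluates a circular correlation (np.roll wraps
# around), restricted to the requested lags.  A toy example with assumed values:
#
#   >>> x = np.array([0., 0., 1., 0.])
#   >>> r = np.array([1., 0., 0., 0.])
#   >>> grid_correlate([0, 1, 2, 3], r, x)
#   array([0., 0., 1., 0.])
#
# the correlation peaks at lag 2, where r rolled right by two samples lines up with x.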

def correlation_grid(grid_size=None, in1_len=None, in2_len=None, end=None, start=None, mode='full'):
    """
    Abuse correlation_lags to determine the endpoints of the grid.
    """
    if in1_len is not None or in2_len is not None:
        if in2_len is None:
            in2_len = in1_len
        elif in1_len is None:
            in1_len = in2_len

        lags = signal.correlation_lags(in1_len, in2_len, mode=mode)

        max_lag = max(lags)
        min_lag = min(lags)
    else:
        max_lag = np.inf
        min_lag = -np.inf

    if end is None:
        end = max_lag
    elif end > max_lag:
        raise ValueError("Grid end is too high")

    if start is None:
        start = min_lag
    elif start < min_lag:
        raise ValueError("Grid start is too low")

    if grid_size is None:
        grid_size = end - start

    return np.linspace(start, end, grid_size, dtype=int, endpoint=False)
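
For reference, a minimal usage sketch of how the new helpers fit together (editor's illustration: the sample rate, beacon frequency, delay and grid bounds below are assumed values, and the import assumes the repository root is on sys.path, as the notebook does):

import numpy as np
from lib.beacon import correlation_grid, lag_gridsearch

samplerate = 1.0                              # samples per unit of time (assumed)
f_beacon = 0.05                               # beacon frequency in cycles per sample (assumed)

t = np.arange(500)
ref = np.sin(2*np.pi*f_beacon*t)              # beacon at the reference antenna
sig = np.sin(2*np.pi*f_beacon*(t - 3))        # same beacon, three samples late

# full grid over every lag allowed by the two signal lengths
# (beacon_time_delay builds this grid internally)
full_grid = correlation_grid(in1_len=len(ref), in2_len=len(sig), mode='full')

# restricted, integer-spaced grid: 41 lags between -20 and +20 samples
coarse_grid = correlation_grid(grid_size=41, start=-20, end=21,
                               in1_len=len(ref), in2_len=len(sig))

lag, errs = lag_gridsearch(coarse_grid, samplerate, ref, sig)

# for a pure sine beacon the recovered lag is only defined up to a whole number
# of beacon periods; the phase 2*pi*f_beacon*lag is meaningful modulo 2*pi

Passing a restricted grid to lag_gridsearch directly keeps the search small when the expected delay range is known; beacon_time_delay always searches the full grid.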

View file

@@ -24,205 +24,7 @@
"sys.path.append(os.path.dirname(os.path.abspath(os.getcwd())))\n",
"from lib.util import *\n",
"\n",
"\n", "from lib.beacon import *"
"# monkey patch correlation_lags into signal if it does not exist\n",
"if not hasattr(signal, 'correlation_lags'):\n",
" def correlation_lags(in1_len, in2_len, mode='full'):\n",
" r\"\"\"\n",
" Calculates the lag / displacement indices array for 1D cross-correlation.\n",
" Parameters\n",
" ----------\n",
" in1_size : int\n",
" First input size.\n",
" in2_size : int\n",
" Second input size.\n",
" mode : str {'full', 'valid', 'same'}, optional\n",
" A string indicating the size of the output.\n",
" See the documentation `correlate` for more information.\n",
" See Also\n",
" --------\n",
" correlate : Compute the N-dimensional cross-correlation.\n",
" Returns\n",
" -------\n",
" lags : array\n",
" Returns an array containing cross-correlation lag/displacement indices.\n",
" Indices can be indexed with the np.argmax of the correlation to return\n",
" the lag/displacement.\n",
" Notes\n",
" -----\n",
" Cross-correlation for continuous functions :math:`f` and :math:`g` is\n",
" defined as:\n",
" .. math::\n",
" \\left ( f\\star g \\right )\\left ( \\tau \\right )\n",
" \\triangleq \\int_{t_0}^{t_0 +T}\n",
" \\overline{f\\left ( t \\right )}g\\left ( t+\\tau \\right )dt\n",
" Where :math:`\\tau` is defined as the displacement, also known as the lag.\n",
" Cross correlation for discrete functions :math:`f` and :math:`g` is\n",
" defined as:\n",
" .. math::\n",
" \\left ( f\\star g \\right )\\left [ n \\right ]\n",
" \\triangleq \\sum_{-\\infty}^{\\infty}\n",
" \\overline{f\\left [ m \\right ]}g\\left [ m+n \\right ]\n",
" Where :math:`n` is the lag.\n",
" Examples\n",
" --------\n",
" Cross-correlation of a signal with its time-delayed self.\n",
" >>> from scipy import signal\n",
" >>> from numpy.random import default_rng\n",
" >>> rng = default_rng()\n",
" >>> x = rng.standard_normal(1000)\n",
" >>> y = np.concatenate([rng.standard_normal(100), x])\n",
" >>> correlation = signal.correlate(x, y, mode=\"full\")\n",
" >>> lags = signal.correlation_lags(x.size, y.size, mode=\"full\")\n",
" >>> lag = lags[np.argmax(correlation)]\n",
" \"\"\"\n",
"\n",
" # calculate lag ranges in different modes of operation\n",
" if mode == \"full\":\n",
" # the output is the full discrete linear convolution\n",
" # of the inputs. (Default)\n",
" lags = np.arange(-in2_len + 1, in1_len)\n",
" elif mode == \"same\":\n",
" # the output is the same size as `in1`, centered\n",
" # with respect to the 'full' output.\n",
" # calculate the full output\n",
" lags = np.arange(-in2_len + 1, in1_len)\n",
" # determine the midpoint in the full output\n",
" mid = lags.size // 2\n",
" # determine lag_bound to be used with respect\n",
" # to the midpoint\n",
" lag_bound = in1_len // 2\n",
" # calculate lag ranges for even and odd scenarios\n",
" if in1_len % 2 == 0:\n",
" lags = lags[(mid-lag_bound):(mid+lag_bound)]\n",
" else:\n",
" lags = lags[(mid-lag_bound):(mid+lag_bound)+1]\n",
" elif mode == \"valid\":\n",
" # the output consists only of those elements that do not\n",
" # rely on the zero-padding. In 'valid' mode, either `in1` or `in2`\n",
" # must be at least as large as the other in every dimension.\n",
"\n",
" # the lag_bound will be either negative or positive\n",
" # this let's us infer how to present the lag range\n",
" lag_bound = in1_len - in2_len\n",
" if lag_bound >= 0:\n",
" lags = np.arange(lag_bound + 1)\n",
" else:\n",
" lags = np.arange(lag_bound, 1)\n",
" return lags\n",
"\n",
" signal.correlation_lags = correlation_lags"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"def lag_gridsearch(grid, sample_rate, reference, signal_data):\n",
" \"\"\"\n",
" Return the best time shift found when doing a grid search.\n",
" \n",
" Parameters\n",
" ----------\n",
" lag_grid - ndarray\n",
" The array specifying the grid that is to be searched.\n",
" sample_rate - float\n",
" Sample rate of signal_data to transform index to time.\n",
" signal_data - ndarray\n",
" The real signal to find the time shift for.\n",
" reference - ndarray\n",
" Real signal to use as reference to obtain lag.\n",
" \n",
" Returns\n",
" -------\n",
" lag : ndarray\n",
" The best time shift obtained\n",
" err : tuple\n",
" Difference to the previous and next time shift from lag, resp.\n",
" \"\"\"\n",
"\n",
" assert signal_data.shape >= reference.shape, str(signal_data.shape) + \" \" + str(reference.shape)\n",
" \n",
" corrs = grid_correlate(grid, reference, signal_data)\n",
" \n",
" idx = np.argmax(corrs)\n",
"\n",
" lag = grid[idx]/sample_rate\n",
" \n",
" err_min = (grid[idx-1]-grid[idx])/(2*sample_rate)\n",
" err_plus = (grid[idx+1]-grid[idx])/(2*sample_rate)\n",
"\n",
" return lag, np.array([err_min, err_plus])\n",
" \n",
"\n",
"def grid_correlate(grid, reference, x):\n",
" \"\"\"\n",
" Determine correlation between x and reference using grid as \n",
" the lags to be used for the correlation.\n",
" \n",
" Parameters\n",
" ----------\n",
" grid - ndarray\n",
" The array specifying the grid that is to be searched.\n",
" x - ndarray\n",
" The real signal to find the time shift for.\n",
" reference - ndarray\n",
" Real signal to use as reference to obtain lag.\n",
" \n",
" Returns\n",
" -------\n",
" corrs - ndarray\n",
" The correlations along grid.\n",
" \"\"\"\n",
" grid = np.asarray(grid)\n",
" x = np.asarray(x)\n",
" reference = np.asarray(reference)\n",
"\n",
" assert x.shape >= reference.shape, str(signal_data.shape) + \" \" + str(reference.shape)\n",
" \n",
" reference = np.pad(reference, (0,len(x)-len(reference)), 'constant', constant_values=0)\n",
" \n",
" ref_conj = np.conjugate(reference)\n",
" \n",
" corrs = np.array([np.dot(np.roll(ref_conj, lag), x) for lag in grid], dtype=np.float64)\n",
" \n",
" return corrs\n",
"\n",
"def correlation_grid(grid_size=None, in1_len=None, in2_len = None, end = None, start=None, mode='full'):\n",
" \"\"\"\n",
" Abuse correlation_lags to determine the endpoints of the grid.\n",
" \"\"\"\n",
" \n",
" if in1_len is not None or in2_len is not None:\n",
" if in2_len is None:\n",
" in2_len = in1_len\n",
" elif in1_len is None:\n",
" in1_len = in2_len\n",
"\n",
" lags = signal.correlation_lags(in1_len, in2_len, mode=mode)\n",
"\n",
" max_lag = max(lags)\n",
" min_lag = min(lags)\n",
" else:\n",
" max_lag = np.inf\n",
" min_lag = -np.inf\n",
"\n",
" if end is None:\n",
" end = max_lag\n",
" elif end > max_lag:\n",
" raise ValueError(\"Grid end is too high\")\n",
"\n",
" if start is None:\n",
" start = min_lag\n",
" elif start < min_lag:\n",
" raise ValueError(\"Grid start is too low\")\n",
" \n",
" if grid_size is None:\n",
" grid_size = end - start\n",
"\n",
" return np.linspace(start, end, grid_size, dtype=int, endpoint=False)"
]
},
{
@@ -230,27 +32,6 @@
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"def beacon_time_delay(samplerate, ref_beacon, beacon):\n",
" grid = correlation_grid(in1_len=len(ref_beacon), in2_len=len(beacon), mode='full')\n",
" time_lag, errs = lag_gridsearch(grid, samplerate, ref_beacon, beacon)\n",
"\n",
" return time_lag, errs\n",
"\n",
"def beacon_phase_delay(samplerate, f_beacon, ref_beacon, beacon):\n",
" time_delay, errs = beacon_time_delay(samplerate, ref_beacon, beacon)\n",
"\n",
" phase = time2phase(time_delay, f_beacon)\n",
" phase_err = time2phase(errs, f_beacon)\n",
" \n",
" return phase, phase_err"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [ "source": [
"us = 1e3 # ns\n", "us = 1e3 # ns\n",
"ns = 1/us # us\n", "ns = 1/us # us\n",
@ -284,7 +65,7 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": 5, "execution_count": 4,
"metadata": {}, "metadata": {},
"outputs": [ "outputs": [
{ {
@ -378,7 +159,7 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": 6, "execution_count": 5,
"metadata": {}, "metadata": {},
"outputs": [ "outputs": [
{ {