shaper_calibrate: Reworked multi-file shaper autocalibration

Signed-off-by: Dmitry Butyugin <dmbutyugin@google.com>
Author: Dmitry Butyugin
Date: 2025-10-19 02:04:02 +02:00
Committed by: KevinOConnor
Parent: b4c7cf4a33
Commit: 2aff840f68
4 changed files with 265 additions and 171 deletions

klippy/extras/resonance_tester.py

@@ -307,7 +307,7 @@ class ResonanceTester:
                     "'%s' is not an accelerometer" % chip_name)
             self.accel_chips.append((chip_axis, chip))
-    def _run_test(self, gcmd, axes, helper, raw_name_suffix=None,
+    def _run_test(self, gcmd, axes, helper, name_suffix, raw_name_suffix=None,
                   accel_chips=None, test_point=None):
         toolhead = self.printer.lookup_object('toolhead')
         calibration_data = {axis: None for axis in axes}
@@ -367,7 +367,12 @@ class ResonanceTester:
                    raise gcmd.error(
                        "accelerometer '%s' measured no data" % (
                            chip_name,))
-                new_data = helper.process_accelerometer_data(aclient)
+                name = self.get_filename(
+                    'resonances', name_suffix, axis,
+                    point if len(test_points) > 1 else None,
+                    chip_name if (accel_chips is not None
+                                  or len(raw_values) > 1) else None)
+                new_data = helper.process_accelerometer_data(name, aclient)
                 if calibration_data[axis] is None:
                     calibration_data[axis] = new_data
                 else:
@@ -427,7 +432,7 @@ class ResonanceTester:
            helper = None
        data = self._run_test(
-            gcmd, [axis], helper,
+            gcmd, [axis], helper, name_suffix,
            raw_name_suffix=name_suffix if raw_output else None,
            accel_chips=accel_chips, test_point=test_point)[axis]
        if csv_output:
@@ -463,7 +468,7 @@ class ResonanceTester:
        helper = shaper_calibrate.ShaperCalibrate(self.printer)
        calibration_data = self._run_test(gcmd, calibrate_axes, helper,
-                                          accel_chips=accel_chips)
+                                          name_suffix, accel_chips=accel_chips)
 
        configfile = self.printer.lookup_object('configfile')
        for axis in calibrate_axes:
@@ -512,7 +517,7 @@ class ResonanceTester:
            raise gcmd.error(
                "%s-axis accelerometer measured no data" % (
                    chip_axis,))
-        data = helper.process_accelerometer_data(aclient)
+        data = helper.process_accelerometer_data(name=None, data=aclient)
        vx = data.psd_x.mean()
        vy = data.psd_y.mean()
        vz = data.psd_z.mean()

klippy/extras/shaper_calibrate.py

@@ -20,7 +20,8 @@ AUTOTUNE_SHAPERS = ['zv', 'mzv', 'ei', '2hump_ei', '3hump_ei']
 ######################################################################
 class CalibrationData:
-    def __init__(self, freq_bins, psd_sum, psd_x, psd_y, psd_z):
+    def __init__(self, name, freq_bins, psd_sum, psd_x, psd_y, psd_z):
+        self.name = name
         self.freq_bins = freq_bins
         self.psd_sum = psd_sum
         self.psd_x = psd_x
@@ -29,35 +30,37 @@ class CalibrationData:
         self._psd_list = [self.psd_sum, self.psd_x, self.psd_y, self.psd_z]
         self._psd_map = {'x': self.psd_x, 'y': self.psd_y, 'z': self.psd_z,
                          'all': self.psd_sum}
-        self.data_sets = 1
+        self._normalized = False
+        self.data_sets = []
     def add_data(self, other):
-        np = self.numpy
-        joined_data_sets = self.data_sets + other.data_sets
-        for psd, other_psd in zip(self._psd_list, other._psd_list):
-            # `other` data may be defined at different frequency bins,
-            # interpolating to fix that.
-            other_normalized = other.data_sets * np.interp(
-                self.freq_bins, other.freq_bins, other_psd)
-            psd *= self.data_sets
-            psd[:] = (psd + other_normalized) * (1. / joined_data_sets)
-        self.data_sets = joined_data_sets
+        self.data_sets.extend(other.get_datasets())
     def set_numpy(self, numpy):
         self.numpy = numpy
     def normalize_to_frequencies(self):
-        for psd in self._psd_list:
-            # Avoid division by zero errors
-            psd /= self.freq_bins + .1
-            # Remove low-frequency noise
-            low_freqs = self.freq_bins < 2. * MIN_FREQ
-            psd[low_freqs] *= self.numpy.exp(
-                -(2. * MIN_FREQ / (self.freq_bins[low_freqs] + .1))**2 + 1.)
+        if not self._normalized:
+            for psd in self._psd_list:
+                if psd is None:
+                    continue
+                # Avoid division by zero errors
+                psd /= self.freq_bins + .1
+                # Remove low-frequency noise
+                low_freqs = self.freq_bins < 2. * MIN_FREQ
+                psd[low_freqs] *= self.numpy.exp(
+                    -(2. * MIN_FREQ / (
+                        self.freq_bins[low_freqs] + .1))**2 + 1.)
+            self._normalized = True
+        for other in self.data_sets:
+            other.normalize_to_frequencies()
     def get_psd(self, axis='all'):
         return self._psd_map[axis]
+    def get_datasets(self):
+        return [self] + self.data_sets
 
 CalibrationResult = collections.namedtuple(
     'CalibrationResult',
-    ('name', 'freq', 'vals', 'vibrs', 'smoothing', 'score', 'max_accel'))
+    ('name', 'freq', 'freq_bins', 'vals', 'vibrs',
+     'smoothing', 'score', 'max_accel'))
 
 class ShaperCalibrate:
     def __init__(self, printer):
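Note on the reworked CalibrationData above: add_data() no longer interpolates and averages the other PSD into self; it simply keeps the other datasets, and get_datasets() returns them together with self. A minimal usage sketch of that behavior (the file names are made up, and the import assumes klippy/ is on sys.path the way the calibration scripts arrange it):

import numpy as np
from extras import shaper_calibrate  # assumed: klippy/ directory on sys.path

freqs = np.arange(5., 200., 0.2)
a = shaper_calibrate.CalibrationData('toolhead.csv', freqs,
                                     np.ones_like(freqs), None, None, None)
b = shaper_calibrate.CalibrationData('bed.csv', freqs,
                                     2. * np.ones_like(freqs), None, None, None)
a.set_numpy(np)
b.set_numpy(np)
a.add_data(b)
# Both measurements are kept instead of being blended into one PSD
print([d.name for d in a.get_datasets()])   # ['toolhead.csv', 'bed.csv']
a.normalize_to_frequencies()                # also normalizes every added dataset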
@@ -147,7 +150,7 @@ class ShaperCalibrate:
        freqs = np.fft.rfftfreq(nfft, 1. / fs)
        return freqs, psd
 
-    def calc_freq_response(self, raw_values):
+    def calc_freq_response(self, name, raw_values):
        np = self.numpy
        if raw_values is None:
            return None
@@ -172,11 +175,11 @@ class ShaperCalibrate:
        fx, px = self._psd(data[:,1], SAMPLING_FREQ, M)
        fy, py = self._psd(data[:,2], SAMPLING_FREQ, M)
        fz, pz = self._psd(data[:,3], SAMPLING_FREQ, M)
-        return CalibrationData(fx, px+py+pz, px, py, pz)
+        return CalibrationData(name, fx, px+py+pz, px, py, pz)
 
-    def process_accelerometer_data(self, data):
+    def process_accelerometer_data(self, name, data):
        calibration_data = self.background_process_exec(
-            self.calc_freq_response, (data,))
+            self.calc_freq_response, (name, data))
        if calibration_data is None:
            raise self.error(
                "Internal error processing accelerometer data %s" % (data,))
@@ -250,19 +253,26 @@ class ShaperCalibrate:
            max_freq = max(max_freq or MAX_FREQ, test_freqs.max())
 
-        freq_bins = calibration_data.freq_bins
-        psd = calibration_data.psd_sum[freq_bins <= max_freq]
-        freq_bins = freq_bins[freq_bins <= max_freq]
-
        best_res = None
        results = []
+        min_freq = max_freq
+        for data in calibration_data.get_datasets():
+            min_freq = min(min_freq, data.freq_bins.min())
        for test_freq in test_freqs[::-1]:
            shaper_vibrations = 0.
-            shaper_vals = np.zeros(shape=freq_bins.shape)
            shaper = shaper_cfg.init_func(test_freq, damping_ratio)
            shaper_smoothing = self._get_shaper_smoothing(shaper, scv=scv)
            if max_smoothing and shaper_smoothing > max_smoothing and best_res:
                return best_res
-            # Exact damping ratio of the printer is unknown, pessimizing
-            # remaining vibrations over possible damping values
-            for dr in test_damping_ratios:
+            max_accel = self.find_shaper_max_accel(shaper, scv)
+            all_shaper_vals = []
+            for data in calibration_data.get_datasets():
+                freq_bins = data.freq_bins
+                psd = data.psd_sum[freq_bins <= max_freq]
+                freq_bins = freq_bins[freq_bins <= max_freq]
+                shaper_vals = np.zeros(shape=freq_bins.shape)
+                # Exact damping ratio of the printer is unknown, pessimizing
+                # remaining vibrations over possible damping values
+                for dr in test_damping_ratios:
@@ -271,15 +281,22 @@ class ShaperCalibrate:
-                shaper_vals = np.maximum(shaper_vals, vals)
-                if vibrations > shaper_vibrations:
-                    shaper_vibrations = vibrations
-            max_accel = self.find_shaper_max_accel(shaper, scv)
+                    shaper_vals = np.maximum(shaper_vals, vals)
+                    if vibrations > shaper_vibrations:
+                        shaper_vibrations = vibrations
+                all_shaper_vals.append((freq_bins, shaper_vals))
            # The score trying to minimize vibrations, but also accounting
            # the growth of smoothing. The formula itself does not have any
            # special meaning, it simply shows good results on real user data
            shaper_score = shaper_smoothing * (shaper_vibrations**1.5 +
                                               shaper_vibrations * .2 + .01)
+            shaper_freq_bins = np.arange(min_freq, max_freq, 0.2)
+            shaper_vals = np.zeros(shape=shaper_freq_bins.shape)
+            for freq_bins, vals in all_shaper_vals:
+                shaper_vals = np.maximum(
+                    shaper_vals, np.interp(shaper_freq_bins,
+                                           freq_bins, vals))
            results.append(
                CalibrationResult(
-                    name=shaper_cfg.name, freq=test_freq, vals=shaper_vals,
+                    name=shaper_cfg.name, freq=test_freq,
+                    freq_bins=shaper_freq_bins, vals=shaper_vals,
                    vibrs=shaper_vibrations, smoothing=shaper_smoothing,
                    score=shaper_score, max_accel=max_accel))
            if best_res is None or best_res.vibrs > results[-1].vibrs:
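The core of the reworked find_best_shaper() loop above: every dataset keeps its own frequency bins, so the per-dataset shaper results are resampled onto a shared grid with np.interp() and merged with np.maximum(), i.e. the worst case across inputs wins. A standalone sketch of that combination step, with toy curves instead of real accelerometer data:

import numpy as np

freq_a = np.arange(5., 200., 0.25)            # dataset A bins
freq_b = np.arange(5., 200., 0.4)             # dataset B bins (coarser)
vals_a = np.exp(-((freq_a - 45.) / 8.) ** 2)  # toy residual-vibration curves
vals_b = np.exp(-((freq_b - 60.) / 6.) ** 2)

common = np.arange(5., 200., 0.2)             # shared grid, like shaper_freq_bins
combined = np.maximum(np.interp(common, freq_a, vals_a),
                      np.interp(common, freq_b, vals_b))
print("worst-case peak at %.1f Hz" % common[combined.argmax()])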
@@ -370,27 +387,56 @@ class ShaperCalibrate:
                "SHAPER_TYPE_" + axis: shaper_name,
                "SHAPER_FREQ_" + axis: shaper_freq}))
 
+    def _escape_for_csv(self, name):
+        name = name.replace('"', '""')
+        if ',' in name:
+            return '"' + name + '"'
+        return name
    def save_calibration_data(self, output, calibration_data, shapers=None,
                              max_freq=None):
        try:
+            np = calibration_data.numpy
+            datasets = calibration_data.get_datasets()
            max_freq = max_freq or MAX_FREQ
-            with open(output, "w") as csvfile:
-                csvfile.write("freq,psd_x,psd_y,psd_z,psd_xyz")
+            if len(datasets) > 1:
                if shapers:
+                    freq_bins = shapers[0].freq_bins
+                else:
+                    min_freq = max_freq
+                    for data in datasets:
+                        min_freq = min(min_freq, data.freq_bins.min())
+                    freq_bins = np.arange(min_freq, max_freq, 0.2)
+                psd_data_to_write = []
+                for data in datasets:
+                    psd_data_to_write.append(np.interp(
+                        freq_bins, data.freq_bins, data.psd_sum))
+            else:
+                freq_bins = calibration_data.freq_bins
+                psd_data_to_write = [
+                    calibration_data.psd_x, calibration_data.psd_y,
+                    calibration_data.psd_z, calibration_data.psd_sum]
+            with open(output, "w") as csvfile:
+                csvfile.write("freq,")
+                if len(datasets) > 1:
+                    csvfile.write(','.join([self._escape_for_csv(d.name)
+                                            for d in datasets]))
+                else:
+                    csvfile.write("psd_x,psd_y,psd_z,psd_xyz")
+                if shapers:
+                    csvfile.write(',shapers:')
                    for shaper in shapers:
                        csvfile.write(",%s(%.1f)" % (shaper.name, shaper.freq))
                csvfile.write("\n")
-                num_freqs = calibration_data.freq_bins.shape[0]
+                num_freqs = freq_bins.shape[0]
                for i in range(num_freqs):
-                    if calibration_data.freq_bins[i] >= max_freq:
+                    if freq_bins[i] >= max_freq:
                        break
-                    csvfile.write("%.1f,%.3e,%.3e,%.3e,%.3e" % (
-                        calibration_data.freq_bins[i],
-                        calibration_data.psd_x[i],
-                        calibration_data.psd_y[i],
-                        calibration_data.psd_z[i],
-                        calibration_data.psd_sum[i]))
+                    csvfile.write("%.1f" % freq_bins[i])
+                    for psd in psd_data_to_write:
+                        csvfile.write(",%.3e" % psd[i])
                    if shapers:
+                        csvfile.write(',')
                        for shaper in shapers:
                            csvfile.write(",%.3f" % (shaper.vals[i],))
                    csvfile.write("\n")
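For reference, the multi-dataset CSV written above starts with "freq," followed by one escaped dataset name per column and an optional "shapers:" marker before the shaper columns. A standalone re-implementation of the quoting rule from _escape_for_csv(), for illustration only:

def escape_for_csv(name):
    # Double embedded quotes, then quote the whole name if it contains a comma
    name = name.replace('"', '""')
    if ',' in name:
        return '"' + name + '"'
    return name

names = ['toolhead.csv', 'bed, front-left']   # example dataset names
print("freq," + ",".join(escape_for_csv(n) for n in names) + ",shapers:")
# -> freq,toolhead.csv,"bed, front-left",shapers: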

scripts/calibrate_shaper.py

@@ -1,12 +1,12 @@
 #!/usr/bin/env python3
 # Shaper auto-calibration script
 #
-# Copyright (C) 2020-2024 Dmitry Butyugin <dmbutyugin@google.com>
+# Copyright (C) 2020-2025 Dmitry Butyugin <dmbutyugin@google.com>
 # Copyright (C) 2020 Kevin O'Connor <kevin@koconnor.net>
 #
 # This file may be distributed under the terms of the GNU GPLv3 license.
 from __future__ import print_function
-import importlib, optparse, os, sys
+import csv, importlib, optparse, os, sys
 from textwrap import wrap
 import numpy as np, matplotlib
 sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)),
@@ -20,18 +20,39 @@ def parse_log(logname):
        for header in f:
            if not header.startswith('#'):
                break
-        if not header.startswith('freq,psd_x,psd_y,psd_z,psd_xyz'):
-            # Raw accelerometer data
-            return np.loadtxt(logname, comments='#', delimiter=',')
+        if not header.startswith('freq,'):
+            # Process raw accelerometer data
+            data = np.loadtxt(logname, comments='#', delimiter=',')
+            helper = shaper_calibrate.ShaperCalibrate(printer=None)
+            calibration_data = helper.process_accelerometer_data(logname, data)
+            calibration_data.normalize_to_frequencies()
+            return calibration_data
    # Parse power spectral density data
-    data = np.loadtxt(logname, skiprows=1, comments='#', delimiter=',')
-    calibration_data = shaper_calibrate.CalibrationData(
-        freq_bins=data[:,0], psd_sum=data[:,4],
-        psd_x=data[:,1], psd_y=data[:,2], psd_z=data[:,3])
-    calibration_data.set_numpy(np)
+    data = np.genfromtxt(logname, dtype=np.float64, skip_header=1,
+                         comments='#', delimiter=',', filling_values=0.)
+    if header.startswith('freq,psd_x,psd_y,psd_z,psd_xyz'):
+        calibration_data = shaper_calibrate.CalibrationData(
+            name=logname, freq_bins=data[:,0], psd_sum=data[:,4],
+            psd_x=data[:,1], psd_y=data[:,2], psd_z=data[:,3])
+        calibration_data.set_numpy(np)
+    else:
+        parsed_header = next(csv.reader([header], delimiter=','))
+        calibration_data = None
+        for i, dataset_name in enumerate(parsed_header[1:]):
+            if dataset_name == 'shapers:':
+                break
+            cdata = shaper_calibrate.CalibrationData(
+                name=dataset_name, freq_bins=data[:,0], psd_sum=data[:,i+1],
+                # Individual per-axis data is not stored
+                psd_x=None, psd_y=None, psd_z=None)
+            cdata.set_numpy(np)
+            if calibration_data is None:
+                calibration_data = cdata
+            else:
+                calibration_data.add_data(cdata)
    # If input shapers are present in the CSV file, the frequency
    # response is already normalized to input frequencies
-    if 'mzv' not in header:
+    if ',shapers:' not in header:
        calibration_data.normalize_to_frequencies()
    return calibration_data
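The reader side mirrors that format: the else branch above splits the header with csv.reader, so quoted dataset names containing commas survive the round trip, and column collection stops at the "shapers:" marker. A small illustration with a hand-written header string:

import csv

header = 'freq,toolhead.csv,"bed, front-left",shapers:,mzv(48.6),ei(61.4)'
names = []
for dataset_name in next(csv.reader([header], delimiter=','))[1:]:
    if dataset_name == 'shapers:':
        break
    names.append(dataset_name)
print(names)  # ['toolhead.csv', 'bed, front-left']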
@@ -43,19 +64,14 @@ def parse_log(logname):
 def calibrate_shaper(datas, csv_output, *, shapers, damping_ratio, scv,
                      shaper_freqs, max_smoothing, test_damping_ratios,
                      max_freq):
-    helper = shaper_calibrate.ShaperCalibrate(printer=None)
-    if isinstance(datas[0], shaper_calibrate.CalibrationData):
-        calibration_data = datas[0]
-        for data in datas[1:]:
-            calibration_data.add_data(data)
-    else:
-        # Process accelerometer data
-        calibration_data = helper.process_accelerometer_data(datas[0])
-        for data in datas[1:]:
-            calibration_data.add_data(helper.process_accelerometer_data(data))
-        calibration_data.normalize_to_frequencies()
+    # Combine accelerometer data
+    calibration_data = datas[0]
+    for data in datas[1:]:
+        calibration_data.add_data(data)
+    print("Processing resonances from %s"
+          % ",".join(d.name for d in calibration_data.get_datasets()))
+    helper = shaper_calibrate.ShaperCalibrate(printer=None)
    shaper, all_shapers = helper.find_best_shaper(
        calibration_data, shapers=shapers, damping_ratio=damping_ratio,
        scv=scv, shaper_freqs=shaper_freqs, max_smoothing=max_smoothing,
@@ -75,32 +91,48 @@ def calibrate_shaper(datas, csv_output, *, shapers, damping_ratio, scv,
 # Plot frequency response and suggested input shapers
 ######################################################################
 
-def plot_freq_response(lognames, calibration_data, shapers,
+def plot_freq_response(calibration_data, shapers,
                        selected_shaper, max_freq):
-    max_freq_bin = calibration_data.freq_bins.max()
+    selected_shaper_data = [s for s in shapers if s.name == selected_shaper][0]
+    max_freq_bin = selected_shaper_data.freq_bins.max()
    if max_freq > max_freq_bin:
        max_freq = max_freq_bin
-    freqs = calibration_data.freq_bins
-    psd = calibration_data.psd_sum[freqs <= max_freq]
-    px = calibration_data.psd_x[freqs <= max_freq]
-    py = calibration_data.psd_y[freqs <= max_freq]
-    pz = calibration_data.psd_z[freqs <= max_freq]
-    freqs = freqs[freqs <= max_freq]
-
    fontP = matplotlib.font_manager.FontProperties()
    fontP.set_size('x-small')
 
-    fig, ax = matplotlib.pyplot.subplots()
+    fig, ax = matplotlib.pyplot.subplots(figsize=(8, 5))
    ax.set_xlabel('Frequency, Hz')
    ax.set_xlim([0, max_freq])
    ax.set_ylabel('Power spectral density')
 
-    ax.plot(freqs, psd, label='X+Y+Z', color='purple')
-    ax.plot(freqs, px, label='X', color='red')
-    ax.plot(freqs, py, label='Y', color='green')
-    ax.plot(freqs, pz, label='Z', color='blue')
-
-    title = "Frequency response and shapers (%s)" % (', '.join(lognames))
+    datasets = calibration_data.get_datasets()
+    if len(datasets) == 1:
+        freqs = calibration_data.freq_bins
+        psd = calibration_data.psd_sum[freqs <= max_freq]
+        px = calibration_data.psd_x[freqs <= max_freq]
+        py = calibration_data.psd_y[freqs <= max_freq]
+        pz = calibration_data.psd_z[freqs <= max_freq]
+        freqs = freqs[freqs <= max_freq]
+        after_shaper = np.interp(selected_shaper_data.freq_bins, freqs, psd)
+        ax.plot(freqs, psd, label='X+Y+Z', color='purple')
+        ax.plot(freqs, px, label='X', color='red')
+        ax.plot(freqs, py, label='Y', color='green')
+        ax.plot(freqs, pz, label='Z', color='blue')
+        title = "Frequency response and shapers (%s)" % calibration_data.name
+    else:
+        after_shaper = np.zeros(shape=selected_shaper_data.freq_bins.shape)
+        for data in datasets:
+            freqs = data.freq_bins
+            psd = data.psd_sum[freqs <= max_freq]
+            freqs = freqs[freqs <= max_freq]
+            after_shaper = np.maximum(
+                after_shaper, np.interp(selected_shaper_data.freq_bins,
+                                        freqs, psd))
+            ax.plot(freqs, psd, label=data.name)
+        title = "Frequency responses and shapers"
+    after_shaper *= selected_shaper_data.vals
    ax.set_title("\n".join(wrap(title, MAX_TITLE_LENGTH)))
    ax.xaxis.set_minor_locator(matplotlib.ticker.MultipleLocator(5))
    ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
@@ -119,10 +151,11 @@ def plot_freq_response(lognames, calibration_data, shapers,
        linestyle = 'dotted'
        if shaper.name == selected_shaper:
            linestyle = 'dashdot'
-            best_shaper_vals = shaper.vals
-        ax2.plot(freqs, shaper.vals, label=label, linestyle=linestyle)
-    ax.plot(freqs, psd * best_shaper_vals,
-            label='After\nshaper', color='cyan')
+        ax2.plot(shaper.freq_bins, shaper.vals,
+                 label=label, linestyle=linestyle)
+    ax.plot(selected_shaper_data.freq_bins, after_shaper,
+            label='After%sshaper' % ('\n' if len(datasets) == 1 else ' '),
+            color='cyan')
    # A hack to add a human-readable shaper recommendation to legend
    ax2.plot([], [], ' ',
             label="Recommended shaper: %s" % (selected_shaper.upper()))
@@ -153,7 +186,7 @@
                    default=None, help="filename of output graph")
    opts.add_option("-c", "--csv", type="string", dest="csv",
                    default=None, help="filename of output csv file")
-    opts.add_option("-f", "--max_freq", type="float", default=200.,
+    opts.add_option("-f", "--max_freq", type="float", default=None,
                    help="maximum frequency to plot")
    opts.add_option("-s", "--max_smoothing", type="float", dest="max_smoothing",
                    default=None, help="maximum shaper smoothing to allow")
@@ -201,12 +234,14 @@
                opts.error("--shaper_freq param does not specify correct range " +
                           "in the format [start]:end[:step]")
            shaper_freqs = (freq_start, freq_end, freq_step)
-            max_freq = max(max_freq, freq_end * 4./3.)
+            if max_freq is not None:
+                max_freq = max(max_freq, freq_end * 4./3.)
        else:
            try:
                shaper_freqs = [float(s) for s in options.shaper_freq.split(',')]
            except ValueError:
                opts.error("invalid floating point value in --shaper_freq param")
-            max_freq = max(max_freq, max(shaper_freqs) * 4./3.)
+            if max_freq is not None:
+                max_freq = max(max_freq, max(shaper_freqs) * 4./3.)
    if options.test_damping_ratios:
        try:
@@ -235,12 +270,15 @@
        max_freq=max_freq)
    if selected_shaper is None:
        return
+    if max_freq is None:
+        max_freq = 0.
+        for data in calibration_data.get_datasets():
+            max_freq = max(max_freq, data.freq_bins.max())
    if not options.csv or options.output:
        # Draw graph
        setup_matplotlib(options.output is not None)
-        fig = plot_freq_response(args, calibration_data, shapers,
+        fig = plot_freq_response(calibration_data, shapers,
                                 selected_shaper, max_freq)
        # Show graph

scripts/graph_accelerometer.py

@@ -2,10 +2,10 @@
 # Generate adxl345 accelerometer graphs
 #
 # Copyright (C) 2020 Kevin O'Connor <kevin@koconnor.net>
-# Copyright (C) 2020 Dmitry Butyugin <dmbutyugin@google.com>
+# Copyright (C) 2020-2025 Dmitry Butyugin <dmbutyugin@google.com>
 #
 # This file may be distributed under the terms of the GNU GPLv3 license.
-import importlib, optparse, os, sys
+import csv, importlib, optparse, os, sys
 from textwrap import wrap
 import numpy as np, matplotlib
 sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)),
@@ -19,31 +19,48 @@ def parse_log(logname, opts):
        for header in f:
            if header.startswith('#'):
                continue
-            if header.startswith('freq,psd_x,psd_y,psd_z,psd_xyz'):
+            if header.startswith('freq,'):
                # Processed power spectral density file
                break
            # Raw accelerometer data
            return np.loadtxt(logname, comments='#', delimiter=',')
    # Parse power spectral density data
-    data = np.loadtxt(logname, skiprows=1, comments='#', delimiter=',')
-    calibration_data = shaper_calibrate.CalibrationData(
-        freq_bins=data[:,0], psd_sum=data[:,4],
-        psd_x=data[:,1], psd_y=data[:,2], psd_z=data[:,3])
-    calibration_data.set_numpy(np)
+    data = np.genfromtxt(logname, dtype=np.float64, skip_header=1,
+                         comments='#', delimiter=',', filling_values=0.)
+    if header.startswith('freq,psd_x,psd_y,psd_z,psd_xyz'):
+        calibration_data = shaper_calibrate.CalibrationData(
+            name=logname, freq_bins=data[:,0], psd_sum=data[:,4],
+            psd_x=data[:,1], psd_y=data[:,2], psd_z=data[:,3])
+        calibration_data.set_numpy(np)
+    else:
+        parsed_header = next(csv.reader([header], delimiter=','))
+        calibration_data = None
+        for i, dataset_name in enumerate(parsed_header[1:]):
+            if dataset_name == 'shapers:':
+                break
+            cdata = shaper_calibrate.CalibrationData(
+                name=dataset_name, freq_bins=data[:,0], psd_sum=data[:,i+1],
+                # Individual axes data is not stored
+                psd_x=None, psd_y=None, psd_z=None)
+            cdata.set_numpy(np)
+            if calibration_data is None:
+                calibration_data = cdata
+            else:
+                calibration_data.add_data(cdata)
    return calibration_data
 
 ######################################################################
 # Raw accelerometer graphing
 ######################################################################
 
-def plot_accel(datas, lognames):
+def plot_accel(opts, datas, lognames):
    fig, axes = matplotlib.pyplot.subplots(nrows=3, sharex=True)
    axes[0].set_title("\n".join(wrap(
        "Accelerometer data (%s)" % (', '.join(lognames)), MAX_TITLE_LENGTH)))
    axis_names = ['x', 'y', 'z']
    for data, logname in zip(datas, lognames):
        if isinstance(data, shaper_calibrate.CalibrationData):
-            raise error("Cannot plot raw accelerometer data using the processed"
-                        " resonances, raw_data input is required")
+            opts.error("Cannot plot raw accelerometer data using the processed"
+                       " resonances, raw_data input is required")
        first_time = data[0, 0]
        times = data[:,0] - first_time
@@ -70,16 +87,13 @@ def plot_accel(datas, lognames):
 ######################################################################
 # Calculate estimated "power spectral density"
-def calc_freq_response(data, max_freq):
+def calc_freq_response(name, data, max_freq):
    if isinstance(data, shaper_calibrate.CalibrationData):
        return data
    helper = shaper_calibrate.ShaperCalibrate(printer=None)
-    return helper.process_accelerometer_data(data)
+    return helper.process_accelerometer_data(name, data)
 
 def calc_specgram(data, axis):
-    if isinstance(data, shaper_calibrate.CalibrationData):
-        raise error("Cannot calculate the spectrogram using the processed"
-                    " resonances, raw_data input is required")
    N = data.shape[0]
    Fs = N / (data[-1,0] - data[0,0])
    # Round up to a power of 2 for faster FFT
@@ -99,28 +113,45 @@ def calc_specgram(data, axis):
        pdata += _specgram(d[ax])[0]
    return pdata, bins, t
 
-def plot_frequency(datas, lognames, max_freq):
-    calibration_data = calc_freq_response(datas[0], max_freq)
-    for data in datas[1:]:
-        calibration_data.add_data(calc_freq_response(data, max_freq))
-    freqs = calibration_data.freq_bins
-    psd = calibration_data.psd_sum[freqs <= max_freq]
-    px = calibration_data.psd_x[freqs <= max_freq]
-    py = calibration_data.psd_y[freqs <= max_freq]
-    pz = calibration_data.psd_z[freqs <= max_freq]
-    freqs = freqs[freqs <= max_freq]
-
+def plot_frequency(opts, datas, lognames, max_freq, axis):
+    calibration_data = calc_freq_response(lognames[0], datas[0], max_freq)
+    for logname, data in zip(lognames[1:], datas[1:]):
+        calibration_data.add_data(calc_freq_response(logname, data, max_freq))
    fig, ax = matplotlib.pyplot.subplots()
-    ax.set_title("\n".join(wrap(
-        "Frequency response (%s)" % (', '.join(lognames)), MAX_TITLE_LENGTH)))
    ax.set_xlabel('Frequency (Hz)')
    ax.set_ylabel('Power spectral density')
-
-    ax.plot(freqs, psd, label='X+Y+Z', alpha=0.6)
-    ax.plot(freqs, px, label='X', alpha=0.6)
-    ax.plot(freqs, py, label='Y', alpha=0.6)
-    ax.plot(freqs, pz, label='Z', alpha=0.6)
-
+    datasets = calibration_data.get_datasets()
+    if len(datasets) == 1:
+        freqs = calibration_data.freq_bins
+        if axis == 'all':
+            psd = calibration_data.psd_sum[freqs <= max_freq]
+            px = calibration_data.psd_x[freqs <= max_freq]
+            py = calibration_data.psd_y[freqs <= max_freq]
+            pz = calibration_data.psd_z[freqs <= max_freq]
+            freqs = freqs[freqs <= max_freq]
+            ax.plot(freqs, psd, label='X+Y+Z', alpha=0.6)
+            ax.plot(freqs, px, label='X', alpha=0.6)
+            ax.plot(freqs, py, label='Y', alpha=0.6)
+            ax.plot(freqs, pz, label='Z', alpha=0.6)
+        else:
+            psd = calibration_data.get_psd(axis)[freqs <= max_freq]
+            freqs = freqs[freqs <= max_freq]
+            ax.plot(freqs, psd, label=axis.upper(), alpha=0.6)
+        title = "Frequency response (%s)" % calibration_data.name
+    else:
+        for data in datasets:
+            freqs = data.freq_bins
+            psd = data.get_psd(axis)
+            if psd is None:
+                opts.error("Per-axis data is not present in the input file(s)")
+            psd = psd[freqs <= max_freq]
+            freqs = freqs[freqs <= max_freq]
+            ax.plot(freqs, psd, label=data.name)
+        title = "Frequency responses comparison"
+    ax.set_title("\n".join(wrap(title, MAX_TITLE_LENGTH)))
    ax.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
    ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
    ax.grid(which='major', color='grey')
@@ -133,31 +164,11 @@ def plot_frequency(datas, lognames, max_freq):
    fig.tight_layout()
    return fig
 
-def plot_compare_frequency(datas, lognames, max_freq, axis):
-    fig, ax = matplotlib.pyplot.subplots()
-    ax.set_title('Frequency responses comparison')
-    ax.set_xlabel('Frequency (Hz)')
-    ax.set_ylabel('Power spectral density')
-    for data, logname in zip(datas, lognames):
-        calibration_data = calc_freq_response(data, max_freq)
-        freqs = calibration_data.freq_bins
-        psd = calibration_data.get_psd(axis)[freqs <= max_freq]
-        freqs = freqs[freqs <= max_freq]
-        ax.plot(freqs, psd, label="\n".join(wrap(logname, 60)), alpha=0.6)
-    ax.xaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
-    ax.yaxis.set_minor_locator(matplotlib.ticker.AutoMinorLocator())
-    ax.grid(which='major', color='grey')
-    ax.grid(which='minor', color='lightgrey')
-    fontP = matplotlib.font_manager.FontProperties()
-    fontP.set_size('x-small')
-    ax.legend(loc='best', prop=fontP)
-    fig.tight_layout()
-    return fig
-
 # Plot data in a "spectrogram colormap"
-def plot_specgram(data, logname, max_freq, axis):
+def plot_specgram(opts, data, logname, max_freq, axis):
+    if isinstance(data, shaper_calibrate.CalibrationData):
+        opts.error("Cannot calculate the spectrogram using the processed"
+                   " resonances, raw_data input is required")
    pdata, bins, t = calc_specgram(data, axis)
    fig, ax = matplotlib.pyplot.subplots()
@@ -174,11 +185,12 @@ def plot_specgram(data, logname, max_freq, axis):
 # CSV output
 ######################################################################
 
-def write_frequency_response(datas, output):
+def write_frequency_response(lognames, datas, output):
    helper = shaper_calibrate.ShaperCalibrate(printer=None)
-    calibration_data = helper.process_accelerometer_data(datas[0])
-    for data in datas[1:]:
-        calibration_data.add_data(helper.process_accelerometer_data(data))
+    calibration_data = helper.process_accelerometer_data(lognames[0], datas[0])
+    for logname, data in zip(lognames[1:], datas[1:]):
+        calibration_data.add_data(
+            helper.process_accelerometer_data(logname, data))
    helper.save_calibration_data(output, calibration_data)
 
 def write_specgram(psd, freq_bins, time, output):
@@ -223,9 +235,6 @@ def main():
                    help="maximum frequency to graph")
    opts.add_option("-r", "--raw", action="store_true",
                    help="graph raw accelerometer data")
-    opts.add_option("-c", "--compare", action="store_true",
-                    help="graph comparison of power spectral density "
-                         "between different accelerometer data files")
    opts.add_option("-s", "--specgram", action="store_true",
                    help="graph spectrogram of accelerometer data")
    opts.add_option("-a", type="string", dest="axis", default="all",
@@ -242,29 +251,25 @@ def main():
    if is_csv_output(options.output):
        if options.raw:
            opts.error("raw mode is not supported with csv output")
-        if options.compare:
-            opts.error("comparison mode is not supported with csv output")
        if options.specgram:
            if len(args) > 1:
                opts.error("Only 1 input is supported in specgram mode")
            pdata, bins, t = calc_specgram(datas[0], options.axis)
            write_specgram(pdata, bins, t, options.output)
        else:
-            write_frequency_response(datas, options.output)
+            write_frequency_response(args, datas, options.output)
        return
 
    # Draw graph
    if options.raw:
-        fig = plot_accel(datas, args)
+        fig = plot_accel(opts, datas, args)
    elif options.specgram:
        if len(args) > 1:
            opts.error("Only 1 input is supported in specgram mode")
-        fig = plot_specgram(datas[0], args[0], options.max_freq, options.axis)
-    elif options.compare:
-        fig = plot_compare_frequency(datas, args, options.max_freq,
-                                     options.axis)
+        fig = plot_specgram(opts, datas[0], args[0],
+                            options.max_freq, options.axis)
    else:
-        fig = plot_frequency(datas, args, options.max_freq)
+        fig = plot_frequency(opts, datas, args, options.max_freq, options.axis)
 
    # Show graph
    if options.output is None: