A 2D Range SLAM example using batch optimization (Levenberg-Marquardt) on the Plaza2 dataset.
This notebook mirrors the functionality of the MATLAB batch SLAM script, adapted to Python and GTSAM, and borrows structural elements from the iSAM example.
Author: Frank Dellaert (adapted for batch by AI)
The data is the second UWB ranging dataset, B2 or “Plaza 2”, from
“Navigating with Ranging Radios: Five Data Sets with Ground Truth” by Joseph Djugash, Bradley Hamner, and Stephan Roth, available at
https://www.ri.cmu.edu/pub_files/2009/9/Final_5datasetsRangingRadios
# pylint: disable=invalid-name, E1101
import gtsam
from gtsam import Point2, Pose2, symbol
import numpy as np
import math
import time
import plotly.express as px
from gtsam.utils import plot # Needs matplotlib
from numpy.random import default_rng
rng = default_rng()
NM = gtsam.noiseModel
Load Data¶
# load the odometry
# DR: Odometry Input (delta distance traveled and delta heading change)
# Time (sec) Delta Distance Traveled (m) Delta Heading (rad)
odometry_data = {}
data_file_dr = gtsam.findExampleDataFile("Plaza2_DR.txt")
for row in np.loadtxt(data_file_dr):
    t, distance_traveled, delta_heading = row
    odometry_data[t] = Pose2(distance_traveled, 0, delta_heading)
odo_times_list = sorted(odometry_data.keys())  # unique timestamps, in time order
M = len(odometry_data)
print(f"Read {M} odometry entries.")
Read 4090 odometry entries.
# load the ranges from TD
# Time (sec) Sender / Antenna ID Receiver Node ID Range (m)
triples = []
landmark_ids = set()
data_file_td = gtsam.findExampleDataFile("Plaza2_TD.txt")
for row in np.loadtxt(data_file_td):
t, sender, receiver, _range = row
landmark_id = int(receiver)
triples.append((t, landmark_id, _range))
landmark_ids.add(landmark_id)
K = len(triples)
sorted_landmark_ids = sorted(list(landmark_ids))
print(f"Read {K} range triples for {len(landmark_ids)} unique landmarks.")
Read 1816 range triples for 4 unique landmarks.
Set Parameters and Noise Models¶
# parameters
sigmaR = 50 # range standard deviation (matches MATLAB)
sigmaInitialLandmark = 1.0 # Stddev for random landmark init (matches MATLAB)
robust = True
# Set Noise parameters (mirrors MATLAB script)
priorSigmas = gtsam.Point3(1, 1, math.pi) # Prior on Pose 0
pointPriorSigmas = gtsam.Point2(1, 1) # Loose prior on landmarks
odoSigmas = gtsam.Point3(0.05, 0.01, 0.2) # Odometry noise
priorNoise = NM.Diagonal.Sigmas(priorSigmas) # prior on pose 0
pointPriorNoise = NM.Diagonal.Sigmas(pointPriorSigmas) # loose LM prior
odoNoise = NM.Diagonal.Sigmas(odoSigmas) # odometry
# Range noise
gaussian = NM.Isotropic.Sigma(1, sigmaR) # non-robust
base = NM.mEstimator.Tukey(15) # Tukey M-estimator like MATLAB
tukey = NM.Robust.Create(base, gaussian) # robust
rangeNoise = tukey if robust else gaussian
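For intuition, here is a minimal NumPy-only sketch of the Tukey biweight that the robust model applies to the whitened range residual (the residual divided by sigmaR); it evaluates the standard formula directly rather than calling into GTSAM.
# Illustration only: Tukey biweight w(e) = (1 - (e/c)^2)^2 for |e| <= c, else 0.
# GTSAM applies this weight to the *whitened* residual, so with sigmaR = 50 a raw
# range error of 100 m corresponds to a whitened error of 2.
c = 15.0
whitened_errors = np.linspace(0, 20, 5)
weights = np.where(np.abs(whitened_errors) <= c, (1 - (whitened_errors / c) ** 2) ** 2, 0.0)
for e, w in zip(whitened_errors, weights):
    print(f"whitened error {e:5.1f} -> Tukey weight {w:.3f}")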
Build Factor Graph and Initial Estimate¶
# Initialize Factor Graph and Initial Values
graph = gtsam.NonlinearFactorGraph()
initial = gtsam.Values()
# Add prior on first pose
# pose0 is a fixed, reasonable guess near the start of the trajectory;
# we use the same value as the iSAM example for consistency.
pose0 = Pose2(-34.2086489999201, 45.3007639991120, math.pi - 2.021089)
graph.add(gtsam.PriorFactorPose2(0, pose0, priorNoise))
initial.insert(0, pose0)
# Initialize Landmarks (similar to MATLAB approach)
print("Initializing Landmarks...")
for j in sorted_landmark_ids:
landmark_key = symbol('L', j)
# Initialize randomly around origin (adjust scale if needed)
initial_point = Point2(sigmaInitialLandmark * rng.normal(), sigmaInitialLandmark * rng.normal())
initial.insert(landmark_key, initial_point)
# Optional: Add a loose prior to help localization if landmarks are poorly constrained
# graph.add(gtsam.PriorFactorPoint2(landmark_key, Point2(0,0), pointPriorNoise))
print(f"Initialized {len(sorted_landmark_ids)} landmarks.")
Initializing Landmarks...
Initialized 4 landmarks.
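Random initialization near the origin is what the MATLAB script does; a common alternative, sketched below purely as an illustration (the helper name and approach are not part of the original script), is to seed each landmark at its first measured range from pose0 in a random direction.
# Hypothetical alternative: place each landmark at its first observed range from pose0,
# at a random bearing. Not used above; shown only as an initialization variant.
def range_based_initialization(triples, pose0, rng):
    init = {}
    for _, landmark_id, measured_range in triples:
        if landmark_id not in init:
            bearing = rng.uniform(0, 2 * math.pi)
            offset = Point2(measured_range * math.cos(bearing), measured_range * math.sin(bearing))
            init[landmark_id] = pose0.translation() + offset
    return init

# Example usage (does not modify the graph or `initial` built above):
# alternative_init = range_based_initialization(triples, pose0, rng)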
print("\nBuilding Pose Chain and Odometry Factors...")
lastPose = pose0
pose_times = [odo_times_list[0]] # Approximate time for pose 0
# Loop over odometry measurements
for i, t in enumerate(odo_times_list, 1):
# get odometry measurement for this time step
odometry = odometry_data[t]
# add odometry factor
graph.add(gtsam.BetweenFactorPose2(i - 1, i, odometry, odoNoise))
# predict pose and add as initial estimate
predictedPose = lastPose.compose(odometry)
lastPose = predictedPose
initial.insert(i, predictedPose)
pose_times.append(t)
print(f"Added {M} odometry factors and initial pose estimates.")
Building Pose Chain and Odometry Factors...
Added 4090 odometry factors and initial pose estimates.
print("\nAdding Range Factors...")
# Convert pose_times to numpy array for efficient search
pose_times_np = np.array(pose_times)
# Add range factors
range_factors_added = 0
for t_range, landmark_id, measured_range in triples:
# Find the pose index active at the time of the range measurement
# Get the index of the *last* pose time that is <= t_range
pose_index = np.searchsorted(pose_times_np, t_range, side='right') - 1
# Ensure pose_index is valid (it might be -1 if t_range is before the first odometry time)
if pose_index < 0:
# print(f"Warning: Range measurement at time {t_range} is before the first pose time {pose_times_np[0]}. Skipping.")
pose_index = 0 # Or skip the measurement
landmark_key = symbol('L', landmark_id)
graph.add(gtsam.RangeFactor2D(pose_index, landmark_key, measured_range, rangeNoise))
range_factors_added += 1
print(f"Added {range_factors_added} range factors.")
Adding Range Factors...
Added 1816 range factors.
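The index arithmetic above, np.searchsorted with side='right' minus one, picks the last pose whose time is at or before the range measurement time; a tiny worked example with toy timestamps (not dataset values) makes that explicit.
# Illustrative check of the index arithmetic (toy timestamps, not dataset values):
toy_pose_times = np.array([0.0, 1.0, 2.0, 3.0])
for t_query in [0.5, 2.0, 3.5]:
    idx = np.searchsorted(toy_pose_times, t_query, side='right') - 1
    print(f"range at t={t_query} -> pose index {idx} (pose time {toy_pose_times[idx]})")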
Optimize the Factor Graph¶
# Graph built, optimize!
print("\nOptimizing the factor graph...")
start_time = time.time()
params = gtsam.LevenbergMarquardtParams()
params.setRelativeErrorTol(1e-3) # Convergence tolerance
params.setVerbosityLM("SUMMARY") # Print LM progress
optimizer = gtsam.LevenbergMarquardtOptimizer(graph, initial, params)
result = optimizer.optimize()
end_time = time.time()
print(f"Optimization complete in {end_time - start_time:.2f} seconds.")
print(f"Initial Error: {graph.error(initial)}")
print(f"Final Error: {graph.error(result)}")
Optimizing the factor graph...
Initial error: 2.9e+02, values: 4095
iter cost cost_change lambda success iter_time
0 1.4e+05 -1.4e+05 1e-05 1 0.03
iter cost cost_change lambda success iter_time
0 9.1e+03 -8.8e+03 0.0001 1 0.03
iter cost cost_change lambda success iter_time
0 1.9e+02 1e+02 0.001 1 0.03
1 2.7e+03 -2.5e+03 0.0001 1 0.03
1 1.1e+02 86 0.001 1 0.02
2 1.7e+03 -1.6e+03 0.0001 1 0.02
2 85 22 0.001 1 0.03
3 6.3e+02 -5.5e+02 0.0001 1 0.03
3 76 8.4 0.001 1 0.02
4 3.1e+02 -2.3e+02 0.0001 1 0.02
4 68 8.6 0.001 1 0.03
5 1.9e+02 -1.2e+02 0.0001 1 0.03
5 55 13 0.001 1 0.03
6 2.6e+02 -2e+02 0.0001 1 0.02
6 39 16 0.001 1 0.02
7 1.4e+02 -99 0.0001 1 0.04
7 26 13 0.001 1 0.03
8 94 -68 0.0001 1 0.03
8 21 5.5 0.001 1 0.03
9 63 -43 0.0001 1 0.02
9 19 2.1 0.001 1 0.02
10 46 -27 0.0001 1 0.02
10 17 1.8 0.001 1 0.03
11 35 -18 0.0001 1 0.03
11 15 1.5 0.001 1 0.03
12 27 -12 0.0001 1 0.03
12 14 1.3 0.001 1 0.02
13 22 -7.9 0.0001 1 0.02
13 13 1.2 0.001 1 0.03
14 18 -5.2 0.0001 1 0.03
14 12 1 0.001 1 0.03
15 15 -3.3 0.0001 1 0.03
15 11 0.9 0.001 1 0.02
16 13 -2 0.0001 1 0.02
16 10 0.79 0.001 1 0.04
17 11 -1.1 0.0001 1 0.03
17 9.3 0.7 0.001 1 0.03
18 9.8 -0.44 0.0001 1 0.03
18 8.7 0.62 0.001 1 0.02
19 8.7 0.023 0.0001 1 0.02
20 67 -58 1e-05 1 0.02
20 5 3.7 0.0001 1 0.03
21 23 -18 1e-05 1 0.03
21 3.5 1.4 0.0001 1 0.03
22 9.4 -5.9 1e-05 1 0.03
22 3 0.57 0.0001 1 0.03
23 5 -2 1e-05 1 0.03
23 2.6 0.34 0.0001 1 0.02
24 3.3 -0.71 1e-05 1 0.02
24 2.4 0.26 0.0001 1 0.03
25 2.6 -0.19 1e-05 1 0.03
25 2.2 0.21 0.0001 1 0.03
26 2.1 0.034 1e-05 1 0.03
27 3.7 -1.5 1e-06 1 0.03
27 1.1 1 1e-05 1 0.02
28 1.4 -0.24 1e-06 1 0.04
28 0.93 0.2 1e-05 1 0.03
29 0.96 -0.031 1e-06 1 0.04
29 0.88 0.048 1e-05 1 0.09
30 0.88 0.00063 1e-06 1 0.03
Optimization complete in 1.93 seconds.
Initial Error: 294.2927831483431
Final Error: 0.8771120075818397
Print and Visualize Results¶
# Print optimized landmarks:
print("\nOptimized Landmark Locations:")
for j in [0, 1, 5, 6]: # Print specific landmarks like MATLAB example
if j in sorted_landmark_ids:
landmark_key = gtsam.symbol('L', j)
try:
p = result.atPoint2(landmark_key)
print(f" L{j}: {p}")
except Exception as e:
print(f" L{j}: Not found in result ({e})")
else:
print(f" L{j}: Not present in TD data.")
Optimized Landmark Locations:
L0: [-48.11741249 31.92467992]
L1: [-80.92811295 53.95372591]
L5: [-47.60339748 -19.97294509]
L6: [-17.70157972 66.35356579]
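Optionally, marginal covariances for the optimized landmarks can be recovered with gtsam.Marginals; a short sketch follows (it assumes the graph constrains every landmark well enough for the computation to succeed).
# Optional: recover marginal covariances for the optimized landmarks.
marginals = gtsam.Marginals(graph, result)
for j in sorted_landmark_ids:
    key = symbol('L', j)
    cov = marginals.marginalCovariance(key)
    print(f"L{j} marginal covariance:\n{cov}")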
# Extract poses and landmarks for plotting
poses_result = gtsam.utilities.allPose2s(result)
landmarks_result = gtsam.utilities.extractPoint2(result)
positions_result = np.array([poses_result.atPose2(key).translation()
for key in range(M + 1)]) # Poses 0 to M
# Also extract initial estimate poses (dead reckoning)
poses_initial = gtsam.utilities.allPose2s(initial)
positions_initial = np.array([poses_initial.atPose2(key).translation()
for key in range(M + 1)])
print(f"\nExtracted {positions_result.shape[0]} final poses and {landmarks_result.shape[0]} landmarks.")
Extracted 4091 final poses and 4 landmarks.
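As an alternative to looping over integer keys, gtsam.utilities.extractPose2 returns an N x 3 array of [x, y, theta] rows (assuming a recent GTSAM Python build where this utility is wrapped); its first two columns give the same positions as the loop above.
# Alternative extraction: rows are [x, y, theta], in key order (pose keys 0..M here).
positions_result_alt = gtsam.utilities.extractPose2(result)[:, :2]
assert positions_result_alt.shape == positions_result.shape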
# Plot using Plotly (similar to iSAM example)
fig = px.line(x=positions_initial[:,0], y=positions_initial[:,1], title='Plaza2 Batch SLAM Result')
fig.data[0].name = 'Initial (Odometry)'
fig.data[0].showlegend = True
fig.data[0].line.color = 'orange'
fig.data[0].line.dash = 'dash'
fig.add_scatter(x=positions_result[:,0], y=positions_result[:,1], mode='lines',
line=dict(color='black'), name='Optimized Path')
fig.add_scatter(x=landmarks_result[:,0], y=landmarks_result[:,1], mode='markers',
marker=dict(color='red', symbol='star', size=8), name='Landmarks')
# Configure layout
fig.update_layout(margin=dict(l=20, r=20, t=40, b=20),
legend=dict(yanchor="top", y=0.99, xanchor="left", x=0.01),
xaxis_title="X (m)", yaxis_title="Y (m)")
fig.update_yaxes(scaleanchor="x", scaleratio=1) # Ensure aspect ratio is 1:1
fig.show()
[Interactive Plotly figure: dead-reckoned odometry path (dashed orange), optimized path (black), and estimated landmarks (red stars)]