~j-james/infinite-recharge

e7b1144f149cd3b96fd270a2c3648b23c390ba20 — Declan Freeman-Gleason 8 months ago da709e4
Make things actually compile
M src/main/java/com/spartronics4915/frc2020/Constants.java => src/main/java/com/spartronics4915/frc2020/Constants.java +6 -8
@@ 15,6 15,7 @@ import com.spartronics4915.lib.math.twodim.geometry.Rotation2d;
import com.spartronics4915.lib.util.Logger;
import com.spartronics4915.lib.util.TriFunction;

import com.spartronics4915.lib.util.VecBuilder;
import edu.wpi.first.wpilibj.Joystick;
import edu.wpi.first.wpilibj.util.Color;
import edu.wpi.first.wpilibj.util.Units;


@@ 24,6 25,7 @@ import edu.wpi.first.wpiutil.math.Matrix;
import edu.wpi.first.wpiutil.math.Nat;
import edu.wpi.first.wpiutil.math.numbers.N1;
import edu.wpi.first.wpiutil.math.numbers.N3;
import edu.wpi.first.wpiutil.math.numbers.N5;
import edu.wpi.first.wpiutil.math.numbers.N6;

public final class Constants


@@ 309,18 311,14 @@ public final class Constants
        //  have competing implementations.
        // If new measurements for Vision or Turret mounting are obtained,
        // please also update CoordSysMgr2020.java.
        public static final double kT265InternalMeasurementCovariance = 0.1;
        public static final double kT265InternalMeasurementCovariance = 0.5;
        public static final Pose2d kSlamraToRobot = new Pose2d(Units.inchesToMeters(-11.75),
                                                            Units.inchesToMeters(-4.75),
                                                            new Rotation2d());

        public static final Matrix<N3, N1> kStateStdDevs = new MatBuilder<>(Nat.N3(), Nat.N1())
            .fill(0.02, 0.02, 0.01);
        public static final Matrix<N6, N1> measurementStdDevs = new MatBuilder<>(Nat.N6(), Nat.N1())
            .fill(0.1, 0.1, 0.1, 0.005, 0.005, 0.002);
        public static final double kSlamStdDevsPerMeter = 3;
        public static final Pose2d kApproximateStartingPose = new Pose2d(Units.inchesToMeters(508), 0,
            Rotation2d.fromDegrees(180));
        public static final Matrix<N5, N1> kStateStdDevs = VecBuilder.fill(0.2, 0.2, 0.01, 0.2, 0.2);
        public static final Matrix<N6, N1> kLocalMeasurementStdDevs = VecBuilder.fill(0.1, 0.1, 0.1, 0.5, 0.5, 0.02);
        public static final Matrix<N3, N1> kVisionMeasurementStdDevs = VecBuilder.fill(0.2, 0.2, 0.2);
    }

    public static final class Vision

M src/main/java/com/spartronics4915/frc2020/CoordSysMgr2020.java => src/main/java/com/spartronics4915/frc2020/CoordSysMgr2020.java +1 -1
@@ 65,7 65,7 @@ public class CoordSysMgr2020 extends CoordSysMgr
    // camera with a mark. Measure the distance from height of the mark
    // to the camera origin. Measure the distance from the wall. Use
    // degrees(atan2(mark, dist)) to compute the tilt angle.
    public static final double kCamTilt = 20;
    public static final double kCamTilt = 20; // TODO: This is the only number that Declan is unsure about and hasn't rigorously verified.

    // camPos represents offset of camera's origin relative to turret origin.
    // (measured in inches). Camera origin is the center of the focal point.

M src/main/java/com/spartronics4915/frc2020/RobotContainer.java => src/main/java/com/spartronics4915/frc2020/RobotContainer.java +9 -4
@@ 25,6 25,7 @@ import com.spartronics4915.lib.hardware.sensors.T265Camera;
import com.spartronics4915.lib.hardware.sensors.T265Camera.CameraJNIException;
import com.spartronics4915.lib.math.twodim.control.RamseteTracker;
import com.spartronics4915.lib.math.twodim.control.TrajectoryTracker;
import com.spartronics4915.lib.math.twodim.geometry.Pose2d;
import com.spartronics4915.lib.subsystems.estimator.DrivetrainEstimator;
import com.spartronics4915.lib.subsystems.estimator.RobotStateEstimator;
import com.spartronics4915.lib.subsystems.estimator.RobotStateEstimator.EstimatorSource;


@@ 107,14 108,18 @@ public class RobotContainer
        mLED = LED.getInstance();
        mDrive = new Drive(mLauncher);

        var ekf = new DrivetrainEstimator(Constants.Estimator.kStateStdDevs,
            Constants.Estimator.measurementStdDevs, Constants.Estimator.kSlamStdDevsPerMeter,
            Constants.Estimator.kApproximateStartingPose);
        var ekf = new DrivetrainEstimator(
            mDrive.getIMUHeading(),
            Constants.Trajectory.kStartPointRight,
            Constants.Estimator.kStateStdDevs,
            Constants.Estimator.kLocalMeasurementStdDevs,
            Constants.Estimator.kVisionMeasurementStdDevs
        );
        mStateEstimator = new RobotStateEstimator(mDrive,
            new Kinematics(Constants.Drive.kTrackWidthMeters, Constants.Drive.kScrubFactor),
            slamra,
            ekf,
            slamra == null ? EstimatorSource.EncoderOdometry : EstimatorSource.VisualSLAM);
            slamra == null ? EstimatorSource.EncoderOdometry : EstimatorSource.Fused);
        StartEndCommand slamraCmd = new StartEndCommand(
            () -> mStateEstimator.enable(),
            () -> mStateEstimator.stop(),

M src/main/java/com/spartronics4915/frc2020/commands/LauncherCommands.java => src/main/java/com/spartronics4915/frc2020/commands/LauncherCommands.java +2 -2
@@ 49,8 49,8 @@ public class LauncherCommands
            Units.inchesToMeters(Constants.Vision.kAllianceGoalCoords[1]),
            Rotation2d.fromDegrees(180));

        mLauncher.setDefaultCommand(new TargetAndShoot());
        // mLauncher.setDefaultCommand(new ShootBallTest());
//        mLauncher.setDefaultCommand(new TargetAndShoot());
         mLauncher.setDefaultCommand(new ShootBallTest());
    }

    public Launcher getLauncher()

M src/main/java/com/spartronics4915/frc2020/subsystems/Vision.java => src/main/java/com/spartronics4915/frc2020/subsystems/Vision.java +2 -1
@@ 160,6 160,7 @@ public class Vision extends SpartronicsSubsystem
            }
            Vec3 tgtInCam = new Vec3(camx, camy, camz);
            Vec3 tgtInRobot = this.mCoordSysMgr.camPointToRobot(tgtInCam);
            tgtInRobot = new Vec3(tgtInRobot.getX() * -1, tgtInRobot.getY(), tgtInRobot.getZ()); // TODO: uh oh...

            dashboardPutString("Target In Robot ", "" + tgtInRobot.asPointString());
            // Logger.debug("Target in Robot:  " + tgtInRobot);


@@ 203,7 204,7 @@ public class Vision extends SpartronicsSubsystem
            this.mCoordSysMgr.updateRobotPose(robotHeading, tgtInRobot, fieldTarget);
            Vec3 robotPos = mCoordSysMgr.robotPointToField(Vec3.ZeroPt);
            // Use robot's heading in our poseEstimate - remember to convert
            // from inches to meters before commiting to RSM.
            // from inches to meters before committing to RSM.
            Pose2d poseEstimate = new Pose2d(Units.inchesToMeters(robotPos.getX()),
                Units.inchesToMeters(robotPos.getY()), r2d);
            Iterator<VisionEvent> it = this.mListeners.iterator();

M src/main/java/com/spartronics4915/lib/hardware/sensors/T265Camera.java => src/main/java/com/spartronics4915/lib/hardware/sensors/T265Camera.java +13 -12
@@ 6,6 6,7 @@ import java.util.function.Consumer;
import com.spartronics4915.lib.math.twodim.geometry.Pose2d;
import com.spartronics4915.lib.math.twodim.geometry.Rotation2d;
import com.spartronics4915.lib.math.twodim.geometry.Twist2d;
import edu.wpi.first.wpilibj.kinematics.ChassisSpeeds;

/**
 * Provides a convenient Java interface to the Intel RealSense


@@ 63,10 64,10 @@ public class T265Camera
        /**
         * The robot's velocity in meters/sec and radians/sec.
         */
        public final Twist2d velocity;
        public final ChassisSpeeds velocity;
        public final PoseConfidence confidence;

        public CameraUpdate(Pose2d pose, Twist2d velocity, PoseConfidence confidence)
        public CameraUpdate(Pose2d pose, ChassisSpeeds velocity, PoseConfidence confidence)
        {
            this.pose = pose;
            this.velocity = velocity;


@@ 83,7 84,7 @@ public class T265Camera

    /**
     * This method constructs a T265 camera and sets it up with the right info.
     * {@link T265Camera#start() start} will not be called, you must call it
     * {@link T265Camera#start start} will not be called, you must call it
     * yourself.
     *
     * @param robotOffset        Offset of the center of the robot from the center


@@ 98,7 99,7 @@ public class T265Camera

    /**
     * This method constructs a T265 camera and sets it up with the right info.
     * {@link T265Camera#start() start} will not be called, you must call it
     * {@link T265Camera#start start} will not be called, you must call it
     * yourself.
     *
     * @param robotOffsetMeters        Offset of the center of the robot from the center


@@ 165,14 166,14 @@ public class T265Camera
    /**
     * Sends robot velocity as computed from wheel encoders.
     *
     * @param velocity    The robot's translational velocity in meters/sec.
     * @param velocityXMetersPerSecond The robot-relative velocity along the X axis in meters/sec.
     * @param velocityYMetersPerSecond The robot-relative velocity along the Y axis in meters/sec.
     */
    public void sendOdometry(Twist2d velocity)
    public void sendOdometry(double velocityXMetersPerSecond, double velocityYMetersPerSecond)
    {
        Pose2d transVel = velocity.exp();
        // Only 1 odometry sensor is supported for now (index 0)
        sendOdometryRaw(0, (float) transVel.getTranslation().getX(),
            (float) transVel.getTranslation().getY());
        sendOdometryRaw(0, (float) velocityXMetersPerSecond,
            (float) velocityYMetersPerSecond);
    }

    /**


@@ 201,8 202,8 @@ public class T265Camera

    private static native void cleanup();

    private synchronized void consumePoseUpdate(float x, float y, float radians, float dx,
        float dtheta, int confOrdinal)
    private synchronized void consumePoseUpdate(float x, float y, float radians, float vx, float vy,
        float omega, int confOrdinal)
    {
        // First we apply an offset to go from the camera coordinate system to the
        // robot coordinate system with an origin at the center of the robot. This


@@ 243,7 244,7 @@ public class T265Camera

        final Pose2d transformedPose = mOrigin.transformBy(currentPose);
        mPoseConsumer.accept(new CameraUpdate(transformedPose,
            new Twist2d(dx, 0.0, Rotation2d.fromRadians(dtheta)), confidence));
            new ChassisSpeeds(vx, vy, omega), confidence));
    }

    /**

M src/main/java/com/spartronics4915/lib/subsystems/estimator/DrivetrainEstimator.java => src/main/java/com/spartronics4915/lib/subsystems/estimator/DrivetrainEstimator.java +320 -172
@@ 1,214 1,362 @@
package com.spartronics4915.lib.subsystems.estimator;

import java.util.ArrayDeque;
import java.util.Map;
import java.util.Queue;
import java.util.TreeMap;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.PrintWriter;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.function.BiConsumer;

import com.spartronics4915.lib.math.twodim.geometry.Pose2d;
import com.spartronics4915.lib.math.twodim.geometry.Rotation2d;
import com.spartronics4915.lib.math.twodim.geometry.Twist2d;
import com.spartronics4915.lib.util.Logger;

import com.spartronics4915.lib.util.VecBuilder;
import edu.wpi.first.wpilibj.Timer;
import edu.wpi.first.wpilibj.estimator.ExtendedKalmanFilter;
import edu.wpi.first.wpilibj.math.StateSpaceUtils;
import edu.wpi.first.wpilibj.kinematics.DifferentialDriveWheelSpeeds;
import edu.wpi.first.wpilibj.estimator.KalmanFilterLatencyCompensator;
import edu.wpi.first.wpilibj.math.Discretization;
import edu.wpi.first.wpilibj.math.StateSpaceUtil;
import edu.wpi.first.wpiutil.math.MatBuilder;
import edu.wpi.first.wpiutil.math.Matrix;
import edu.wpi.first.wpiutil.math.MatrixUtils;
import edu.wpi.first.wpiutil.math.Nat;
import edu.wpi.first.wpiutil.math.numbers.N1;
import edu.wpi.first.wpiutil.math.numbers.N3;
import edu.wpi.first.wpiutil.math.numbers.N5;
import edu.wpi.first.wpiutil.math.numbers.N6;

/**
 * Wraps the Extended Kalman Filter I wrote and contributed to the prerelease version of WPILibStateSpace to fuse encoder odometry, VSLAM, and PnP to localize the robot.
 * 
 * We also do latency compensation for the vision.
 * 
 * Our state-space system is:
 * This class wraps an Extended Kalman Filter to fuse latency-compensated vision
 * measurements with differential drive encoder measurements. It will correct
 * for noisy vision measurements and encoder drift. It is intended to be an easy
 * drop-in for
 * {@link edu.wpi.first.wpilibj.kinematics.DifferentialDriveOdometry}; in fact,
 * if you never call {@link DrivetrainEstimator#addVisionMeasurement}
 * and only call {@link DrivetrainEstimator#update} then this will
 * behave exactly the same as DifferentialDriveOdometry.
 *
 * <p>{@link DrivetrainEstimator#update} should be called every robot
 * loop (if your robot loops are faster than the default then you should change
 * the {@link DrivetrainEstimator#DrivetrainEstimator(Rotation2d, Pose2d,
 * Matrix, Matrix, Matrix, double) nominal delta time}.)
 * {@link DrivetrainEstimator#addVisionMeasurement} can be called as
 * infrequently as you want; if you never call it then this class will behave
 * exactly like regular encoder odometry.
 *
 * <p>Our state-space system is:
 *
 * x = [[x, y, dtheta]]^T in the field coordinate system
 * <p>x = [[x, y, theta, dist_l, dist_r]]^T in the field coordinate system (dist_* are wheel
 * distances.)
 *
 * u = [[v_l, v_r, theta]]^T Not actual inputs, but the dynamics math required
 * to use actual inputs like voltage is gross.
 * <p>u = [[vx, vy, omega]]^T (robot-relative velocities) -- NB: using velocities make things
 * considerably easier, because it means that teams don't have to worry about getting an accurate
 * model. Basically, we suspect that it's easier for teams to get good encoder data than it is for
 * them to perform system identification well enough to get a good model.
 *
 * y = [[x_s, y_s, theta_s, x_r, y_r, theta_r]]^T All the subscript s ones are
 * measurements from vSLAM, while the sub r ones are from retroreflective tape
 * vision.
 * <p>y = [[x, y, theta]]^T from vision, or y = [[x_s, y_s, theta_s, dist_l, dist_r, theta_g]] from
 * encoders, VSLAM, and gyro (subscript s indicates VSLAM as the source)
 */
public class DrivetrainEstimator
{

    private static final double kNominalDt = 0.01;
    private static final int kMaxPastObserverStates = 200;

    private final ExtendedKalmanFilter<N3, N3, N3> mObserver;
    private final ExtendedKalmanFilter<N3, N3, N6> mVisionObserver;
    private final Matrix<N6, N1> mMeasurementStdDevs;
public class DrivetrainEstimator {
    private final ExtendedKalmanFilter<N5, N3, N6> m_observer;
    private final BiConsumer<Matrix<N3, N1>, Matrix<N3, N1>> m_visionCorrect;
    private final KalmanFilterLatencyCompensator<N5, N3, N6> m_latencyCompensator;

    private final Pose2d mBeginningRobotPose;
    private final double mSlamStdDevsPerMeter;
    private final double m_nominalDt; // Seconds
    private double m_prevTimeSeconds = -1.0;

    private final TreeMap<Double, ObserverState> mPastObserverStates;
    private Rotation2d m_gyroOffset;
    private Rotation2d m_previousAngle;

    public DrivetrainEstimator(Matrix<N3, N1> stateStdDevs, Matrix<N6, N1> measurementStdDevs,
        double slamStdDevsPerMeter, Pose2d beginningRobotPose)
    {
        mObserver = new ExtendedKalmanFilter<N3, N3, N3>(Nat.N3(), Nat.N3(), Nat.N3(), this::f,
            (x, u) -> x, stateStdDevs,
            new MatBuilder<>(Nat.N3(), Nat.N1()).fill(measurementStdDevs.get(0, 0),
                measurementStdDevs.get(1, 0), measurementStdDevs.get(2, 0)),
            false, kNominalDt);
        mVisionObserver = new ExtendedKalmanFilter<>(Nat.N3(), Nat.N3(), Nat.N6(), this::f, this::h,
            stateStdDevs, measurementStdDevs, false, kNominalDt);
        mMeasurementStdDevs = measurementStdDevs;

        mBeginningRobotPose = beginningRobotPose;
        mSlamStdDevsPerMeter = slamStdDevsPerMeter;

        mPastObserverStates = new TreeMap<>();
    /**
     * Constructs a DrivetrainEstimator.
     *
     * @param gyroAngle                The current gyro angle.
     * @param initialPoseMeters        The starting pose estimate.
     * @param stateStdDevs             Standard deviations of model states. Increase these numbers to
     *                                 trust your wheel and gyro velocities less.
     * @param localMeasurementStdDevs  Standard deviations of the encoder and gyro measurements.
     *                                 Increase these numbers to trust encoder distances and gyro
     *                                 angle less.
     * @param visionMeasurementStdDevs Standard deviations of the encoder measurements. Increase
     *                                 these numbers to trust vision less.
     */
    public DrivetrainEstimator(
            Rotation2d gyroAngle, Pose2d initialPoseMeters,
            Matrix<N5, N1> stateStdDevs,
            Matrix<N6, N1> localMeasurementStdDevs, Matrix<N3, N1> visionMeasurementStdDevs
    ) {
        this(gyroAngle, initialPoseMeters,
                stateStdDevs, localMeasurementStdDevs, visionMeasurementStdDevs, 0.02);
    }

    private final Matrix<N3, N1> f(Matrix<N3, N1> x, Matrix<N3, N1> u)
    {
        // Diff drive forward kinematics:
        // v_c = (v_l + v_r) / 2
        var exp = new Twist2d((u.get(0, 0) + u.get(1, 0)) / 2, 0.0, Rotation2d.fromRadians(u.get(2, 0))).exp();
        var newPose = new Pose2d(x.get(0, 0), x.get(1, 0), Rotation2d.fromRadians(x.get(2, 0))).transformBy(exp);

        return new MatBuilder<>(Nat.N3(), Nat.N1()).fill(newPose.getTranslation().getX(),
            newPose.getTranslation().getY(), x.get(2, 0) + u.get(2, 0));
    /**
     * Constructs a DrivetrainEstimator.
     *
     * @param gyroAngle                The current gyro angle.
     * @param initialPoseMeters        The starting pose estimate.
     * @param stateStdDevs             Standard deviations of model states. Increase these numbers to
     *                                 trust your wheel and gyro velocities less.
     * @param localMeasurementStdDevs  Standard deviations of the encoder and gyro measurements.
     *                                 Increase these numbers to trust encoder distances and gyro
     *                                 angle less.
     * @param visionMeasurementStdDevs Standard deviations of the encoder measurements. Increase
     *                                 these numbers to trust vision less.
     * @param nominalDtSeconds         The time in seconds between each robot loop.
     */
    @SuppressWarnings("ParameterName")
    public DrivetrainEstimator(
            Rotation2d gyroAngle, Pose2d initialPoseMeters,
            Matrix<N5, N1> stateStdDevs,
            Matrix<N6, N1> localMeasurementStdDevs, Matrix<N3, N1> visionMeasurementStdDevs,
            double nominalDtSeconds
    ) {
        m_nominalDt = nominalDtSeconds;

        m_observer = new ExtendedKalmanFilter<>(
                Nat.N5(), Nat.N3(), Nat.N6(),
                this::f,
                (x, u) -> VecBuilder.fill(
                        x.get(0, 0), x.get(1, 0), x.get(2, 0),
                        x.get(3, 0), x.get(4, 0),
                        x.get(2, 0)
                ),
                stateStdDevs, localMeasurementStdDevs,
                m_nominalDt
        );
        m_latencyCompensator = new KalmanFilterLatencyCompensator<>();

        var visionContR = StateSpaceUtil.makeCovarianceMatrix(Nat.N3(), visionMeasurementStdDevs);
        var visionDiscR = Discretization.discretizeR(visionContR, m_nominalDt);
        m_visionCorrect = (u, y) -> m_observer.correct(
                Nat.N3(), u, y,
                (x, u_) -> new Matrix<N3, N1>(x.getStorage().extractMatrix(0, 3, 0, 1)),
                visionDiscR
        );

        m_gyroOffset = initialPoseMeters.getRotation().rotateBy(gyroAngle.inverse());
        m_previousAngle = initialPoseMeters.getRotation();
        m_observer.setXhat(fillStateVector(initialPoseMeters, 0.0, 0.0));
    }

    private final Matrix<N6, N1> h(Matrix<N3, N1> x, Matrix<N3, N1> u)
    {
        return new MatBuilder<>(Nat.N6(), Nat.N1()).fill(x.get(0, 0), x.get(1, 0), x.get(2, 0),
            x.get(0, 0), x.get(1, 0), x.get(2, 0));
    @SuppressWarnings({"ParameterName", "MethodName"})
    private Matrix<N5, N1> f(Matrix<N5, N1> x, Matrix<N3, N1> u) {
        // Apply a rotation matrix. Note that we do *not* add x--Runge-Kutta does that for us.
        var theta = x.get(2, 0);
        var toFieldRotation = new MatBuilder<>(Nat.N5(), Nat.N5()).fill(
                Math.cos(theta), -Math.sin(theta), 0, 0, 0,
                Math.sin(theta), Math.cos(theta), 0, 0, 0,
                0, 0, 1, 0, 0,
                0, 0, 0, 1, 0,
                0, 0, 0, 0, 1
        );
        return toFieldRotation.times(VecBuilder.fill(
                u.get(0, 0), u.get(1, 0), u.get(2, 0), u.get(0, 0), u.get(1, 0)
        ));
    }

    public synchronized void addVisionMeasurement(Pose2d visionRobotPose, double timestampSeconds)
    {
        var low = mPastObserverStates.floorEntry(timestampSeconds);
        var high = mPastObserverStates.ceilingEntry(timestampSeconds);

        Map.Entry<Double, ObserverState> closestEntry = null;
        if (low != null && high != null)
        {
            closestEntry = Math.abs(timestampSeconds - low.getKey()) < Math
                .abs(timestampSeconds - high.getKey())
                    ? low
                    : high;
        }
        else
        {
            closestEntry = low != null ? low : high;
        }
    /**
     * Resets the robot's position on the field.
     *
     * <p>You NEED to reset your encoders (to zero) when calling this method.
     *
     * <p>The gyroscope angle does not need to be reset here on the user's robot code.
     * The library automatically takes care of offsetting the gyro angle.
     *
     * @param poseMeters The position on the field that your robot is at.
     * @param gyroAngle  The angle reported by the gyroscope.
     */
    public void resetPosition(Pose2d poseMeters, Rotation2d gyroAngle) {
        m_previousAngle = poseMeters.getRotation();
        m_gyroOffset = getEstimatedPosition().getRotation().rotateBy(gyroAngle.inverse());
        m_observer.setXhat(fillStateVector(poseMeters, 0.0, 0.0));
    }

        if (closestEntry == null)
        {
            Logger.warning("Observer state map was empty; ignorning vision update");
            return;
        }
    /**
     * Gets the pose of the robot at the current time as estimated by the Extended Kalman Filter.
     *
     * @return The estimated robot pose in meters.
     */
    public Pose2d getEstimatedPosition() {
        return new Pose2d(
                m_observer.getXhat(0),
                m_observer.getXhat(1),
                Rotation2d.fromRadians(m_observer.getXhat(2))
        );
    }

        var tailMap = mPastObserverStates.tailMap(closestEntry.getKey(), true);
        for (var st : tailMap.values())
        {
            if (visionRobotPose != null)
            {
                mObserver.setP(st.errorCovariances);
                mObserver.setXhat(st.xHat);
    /**
     * Add a vision measurement to the Extended Kalman Filter. This will correct the
     * odometry pose estimate while still accounting for measurement noise.
     *
     * <p>This method can be called as infrequently as you want, as long as you are
     * calling {@link DrivetrainEstimator#update} every loop.
     *
     * @param visionRobotPoseMeters The pose of the robot as measured by the vision
     *                              camera.
     * @param timestampSeconds      The timestamp of the vision measurement in seconds. Note that if
     *                              you don't use your own time source by calling
     *                              {@link DrivetrainEstimator#updateWithTime} then you
     *                              must use a timestamp with an epoch since FPGA startup
     *                              (i.e. the epoch of this timestamp is the same epoch as
     *                              {@link edu.wpi.first.wpilibj.Timer#getFPGATimestamp
     *                              Timer.getFPGATimestamp}.) This means that you should
     *                              use Timer.getFPGATimestamp as your time source in
     *                              this case.
     */
    public void addVisionMeasurement(Pose2d visionRobotPoseMeters, double timestampSeconds) {
        var oldXHat = m_observer.getXhat();
        var oldP = m_observer.getP();
        var oldSnapshots = new ArrayList<>(m_latencyCompensator.m_pastObserverSnapshots);

	// You're probably looking at this thinking "WTF is in that catch block"
	// Well... There was a bug that I couldn't track down so I used all this code to dump the EKF state
	// Hopefully I will have found the bug and fixed it by the time you're reading this, and then you can remove the stuff in the catch block
        try {
            m_latencyCompensator.applyPastGlobalMeasurement(
                    Nat.N3(),
                    m_observer, m_nominalDt,
                    VecBuilder.fill(
                            visionRobotPoseMeters.getTranslation().getX(),
                            visionRobotPoseMeters.getTranslation().getY(),
                            visionRobotPoseMeters.getRotation().getRadians()
                    ),
                    m_visionCorrect,
                    timestampSeconds
            );
        } catch (Exception e) {
            try (PrintWriter writer = new PrintWriter(new File("/home/lvuser/out.txt"))) {
                writer.println("y:");
                writer.println(VecBuilder.fill(
                        visionRobotPoseMeters.getTranslation().getX(),
                        visionRobotPoseMeters.getTranslation().getY(),
                        visionRobotPoseMeters.getRotation().getRadians()
                ).toString());

                writer.println("Timestamp:");
                writer.println(timestampSeconds);

                writer.println("xHat old:");
                writer.println(oldXHat);

                writer.println("P old:");
                writer.println(oldP);

                writer.println("xHat:");
                writer.println(m_observer.getXhat());

                writer.println("P:");
                writer.println(m_observer.getP());

                writer.println("Snapshots old:");
                oldSnapshots.forEach((it) -> {
                    var snap = it.getValue();
                    writer.println(snap.xHat);
                    writer.println(snap.errorCovariances);
                    writer.println(snap.inputs);
                    writer.println(snap.localMeasurements);
                    writer.println("=======");
                });

                writer.println("Snapshots:");
                writer.println(m_latencyCompensator.m_pastObserverSnapshots.size());
                m_latencyCompensator.m_pastObserverSnapshots.forEach((it) -> {
                    var snap = it.getValue();
                    writer.println(snap.xHat);
                    writer.println(snap.errorCovariances);
                    writer.println(snap.inputs);
                    writer.println(snap.localMeasurements);
                    writer.println("=======");
                });

                writer.println("done");
                writer.flush();
            } catch (FileNotFoundException ex) {
                ex.printStackTrace();
            } finally {
                System.exit(1);
            }
            update(st.slamMeasurements, visionRobotPose, st.inputs);

            visionRobotPose = null;
        }
    }

    /**
     * @param slamRobotPose The robot pose just given by the vSLAM camera.
     * @param 
     * 
     * @return The estimated pose of the robot.
     * Updates the Extended Kalman Filter using only wheel encoder information.
     * Note that this should be called every loop.
     *
     * @param gyroAngle                      The current gyro angle.
     * @param wheelVelocitiesMetersPerSecond The velocities of the wheels in meters per second.
     * @param distanceLeftMeters             The total distance travelled by the left wheel in meters
     *                                       since the last time you called
     *                                       {@link DrivetrainEstimator#resetPosition}.
     * @param distanceRightMeters            The total distance travelled by the right wheel in meters
     *                                       since the last time you called
     *                                       {@link DrivetrainEstimator#resetPosition}.
     * @return The estimated pose of the robot in meters.
     */
    public synchronized Pose2d update(Pose2d slamRobotPose, double dleftMeters, double drightMeters,
        double dthetaRadians, double timeSeconds)
    {
        var u = new MatBuilder<>(Nat.N3(), Nat.N1()).fill(dleftMeters, drightMeters, dthetaRadians);
        mPastObserverStates.put(timeSeconds, new ObserverState(mObserver, u, slamRobotPose));

        if (mPastObserverStates.size() > kMaxPastObserverStates)
        {
            mPastObserverStates.remove(mPastObserverStates.firstKey());
        }

        return update(slamRobotPose, null, u);
    public Pose2d update(
            Rotation2d gyroAngle,
            DifferentialDriveWheelSpeeds wheelVelocitiesMetersPerSecond,
            Pose2d slamraPose,
            double distanceLeftMeters, double distanceRightMeters
    ) {
        return updateWithTime(
                Timer.getFPGATimestamp(), gyroAngle, wheelVelocitiesMetersPerSecond, slamraPose,
                distanceLeftMeters, distanceRightMeters
        );
    }

    private Pose2d update(Pose2d slamRobotPose, Pose2d visionRobotPose, Matrix<N3, N1> u)
    {
        double distFromBeginningMeters = slamRobotPose.getTranslation().getDistance(mBeginningRobotPose.getTranslation());

        if (visionRobotPose != null)
        {
            mVisionObserver.setP(mObserver.getP());
            mVisionObserver.setXhat(mObserver.getXhat());

            var measStdDevs = new MatBuilder<>(Nat.N6(), Nat.N1()).fill(
                mSlamStdDevsPerMeter * distFromBeginningMeters,
                mSlamStdDevsPerMeter * distFromBeginningMeters,
                mSlamStdDevsPerMeter * distFromBeginningMeters,
                mMeasurementStdDevs.get(3, 0),
                mMeasurementStdDevs.get(4, 0),
                mMeasurementStdDevs.get(5, 0));
            var contR = StateSpaceUtils.makeCovMatrix(Nat.N6(), measStdDevs);
            var discR = StateSpaceUtils.discretizeR(contR, kNominalDt);

            var y = new MatBuilder<>(Nat.N6(), Nat.N1()).fill(slamRobotPose.getTranslation().getX(),
                slamRobotPose.getTranslation().getY(), slamRobotPose.getRotation().getRadians(),
                visionRobotPose.getTranslation().getX(), visionRobotPose.getTranslation().getY(),
                visionRobotPose.getRotation().getRadians());

            mVisionObserver.correct(Nat.N6(), u, y, this::h, discR);
            mVisionObserver.predict(u, kNominalDt);

            mObserver.setP(mVisionObserver.getP());
            mObserver.setXhat(mVisionObserver.getXhat());
        }
        else
        {
            var y = new MatBuilder<>(Nat.N3(), Nat.N1()).fill(slamRobotPose.getTranslation().getX(),
                slamRobotPose.getTranslation().getY(), slamRobotPose.getRotation().getRadians());

            var measStdDevs = new MatBuilder<>(Nat.N3(), Nat.N1()).fill(
                mSlamStdDevsPerMeter * distFromBeginningMeters,
                mSlamStdDevsPerMeter * distFromBeginningMeters,
                mSlamStdDevsPerMeter * distFromBeginningMeters);
            var contR = StateSpaceUtils.makeCovMatrix(Nat.N3(), measStdDevs);
            var discR = StateSpaceUtils.discretizeR(contR, kNominalDt);

            mObserver.correct(Nat.N3(), u, y, (x, _u) -> x, discR);
            mObserver.predict(u, kNominalDt);
        }

        return new Pose2d(mObserver.getXhat(0), mObserver.getXhat(1),
            Rotation2d.fromRadians(mObserver.getXhat(2)));
    /**
     * Updates the Extended Kalman Filter using only wheel encoder information.
     * Note that this should be called every loop.
     *
     * @param currentTimeSeconds             Time at which this method was called, in seconds.
     * @param gyroAngle                      The current gyro angle.
     * @param wheelVelocitiesMetersPerSecond The velocities of the wheels in meters per second.
     * @param distanceLeftMeters             The total distance travelled by the left wheel in meters
     *                                       since the last time you called
     *                                       {@link DrivetrainEstimator#resetPosition}.
     * @param distanceRightMeters            The total distance travelled by the right wheel in meters
     *                                       since the last time you called
     *                                       {@link DrivetrainEstimator#resetPosition}.
     * @return The estimated pose of the robot in meters.
     */
    @SuppressWarnings({"LocalVariableName", "ParameterName"})
    public Pose2d updateWithTime(
            double currentTimeSeconds, Rotation2d gyroAngle,
            DifferentialDriveWheelSpeeds wheelVelocitiesMetersPerSecond,
            Pose2d slamraPose,
            double distanceLeftMeters, double distanceRightMeters
    ) {
        double dt = m_prevTimeSeconds >= 0 ? currentTimeSeconds - m_prevTimeSeconds : m_nominalDt;
        m_prevTimeSeconds = currentTimeSeconds;

        var angle = gyroAngle.rotateBy(m_gyroOffset);
        // Diff drive forward kinematics:
        // v_c = (v_l + v_r) / 2
        var wheelVels = wheelVelocitiesMetersPerSecond;
        var u = VecBuilder.fill(
                (wheelVels.leftMetersPerSecond + wheelVels.rightMetersPerSecond) / 2, 0,
                angle.rotateBy(m_previousAngle.inverse()).getRadians() / dt
        );
        m_previousAngle = angle;

        var localY = VecBuilder.fill(
                slamraPose.getTranslation().getX(), slamraPose.getTranslation().getY(), slamraPose.getRotation().getRadians(),
                distanceLeftMeters, distanceRightMeters, angle.getRadians()
        );
        m_latencyCompensator.addObserverState(m_observer, u, localY, currentTimeSeconds);
        m_observer.predict(u, dt);
        m_observer.correct(u, localY);

        return getEstimatedPosition();
    }

    private static class ObserverState
    {
        public final Matrix<N3, N1> xHat;
        public final Matrix<N3, N3> errorCovariances;
        public final Matrix<N3, N1> inputs;
        public final Pose2d slamMeasurements;

        public ObserverState(ExtendedKalmanFilter<N3, N3, N3> observer, Matrix<N3, N1> u,
            Pose2d y)
        {
            xHat = observer.getXhat();
            errorCovariances = observer.getP();

            inputs = u;
            slamMeasurements = y;
        }
    private static Matrix<N5, N1> fillStateVector(Pose2d pose, double leftDist, double rightDist) {
        return VecBuilder.fill(
                pose.getTranslation().getX(),
                pose.getTranslation().getY(),
                pose.getRotation().getRadians(),
                leftDist,
                rightDist
        );
    }
}

M src/main/java/com/spartronics4915/lib/subsystems/estimator/RobotStateEstimator.java => src/main/java/com/spartronics4915/lib/subsystems/estimator/RobotStateEstimator.java +33 -8
@@ 3,6 3,8 @@ package com.spartronics4915.lib.subsystems.estimator;
import edu.wpi.first.wpilibj.DriverStation;
import edu.wpi.first.wpilibj.Notifier;
import edu.wpi.first.wpilibj.Timer;
import edu.wpi.first.wpilibj.kinematics.ChassisSpeeds;
import edu.wpi.first.wpilibj.kinematics.DifferentialDriveWheelSpeeds;
import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;

import com.spartronics4915.lib.math.twodim.geometry.Pose2d;


@@ 81,6 83,11 @@ public class RobotStateEstimator extends SpartronicsSubsystem
                }
                else if (mBestEstimatorSource == EstimatorSource.Fused)
                {
                    SmartDashboard.putString("RobotState/visionPose",
                            Units.metersToInches(mVisionEstimate.getTranslation().getX()) + " "
                                    + Units.metersToInches(mVisionEstimate.getTranslation().getY()) + " "
                                    + mVisionEstimate.getRotation().getDegrees());
                    dashboardPutString("lastUpdate", "then: " + mTimestamp + ", now: " + Timer.getFPGATimestamp());
                    mEKF.addVisionMeasurement(this.mVisionEstimate, this.mTimestamp);
                }
            }


@@ 144,6 151,8 @@ public class RobotStateEstimator extends SpartronicsSubsystem
        mVisionResetEncoderStateMap.reset(time, pose);
        mFusedStateMap.reset(time, pose);

        mEKF.resetPosition(pose, new Rotation2d());

        mLeftPrevDist = mDrive.getLeftMotor().getEncoder().getPosition();
        mRightPrevDist = mDrive.getRightMotor().getEncoder().getPosition();



@@ 170,6 179,12 @@ public class RobotStateEstimator extends SpartronicsSubsystem
                + epose.getRotation().getDegrees());
        SmartDashboard.putNumber("RobotState/encoderVelocity", estate.predictedVelocity.dx);

        Pose2d vpose = mCameraStateMap.getLatestState().pose;
        SmartDashboard.putString("RobotState/vslamPose",
                Units.metersToInches(vpose.getTranslation().getX()) + " "
                        + Units.metersToInches(vpose.getTranslation().getY()) + " "
                        + vpose.getRotation().getDegrees());

        final RobotStateMap.State bestState = getBestRobotStateMap().getLatestState();
        Pose2d cpose = bestState.pose;



@@ 215,8 230,11 @@ public class RobotStateEstimator extends SpartronicsSubsystem
        final Twist2d iVal;
        synchronized (this)
        {
            final double leftDist = mDrive.getLeftMotor().getEncoder().getPosition();
            final double rightDist = mDrive.getRightMotor().getEncoder().getPosition();
            var leftEncoder = mDrive.getLeftMotor().getEncoder();
            var rightEncoder = mDrive.getRightMotor().getEncoder();

            final double leftDist = leftEncoder.getPosition();
            final double rightDist = rightEncoder.getPosition();
            final double leftDelta = leftDist - mLeftPrevDist;
            final double rightDelta = rightDist - mRightPrevDist;
            final Rotation2d heading = mDrive.getIMUHeading();


@@ 224,10 242,12 @@ public class RobotStateEstimator extends SpartronicsSubsystem
            mRightPrevDist = rightDist;
            iVal = mKinematics.forwardKinematics(last.pose.getRotation(), leftDelta, rightDelta,
                heading);
            

            SmartDashboard.putString("RobotState/distances", leftDist + "," + rightDist);

            if (mBestEstimatorSource == EstimatorSource.Fused)
            {
                var ekfPose = mEKF.update(mCameraStateMap.getLatestFieldToVehicle(), leftDist, rightDist, heading.getRadians() - mPrevHeading, ts);
                var ekfPose = mEKF.updateWithTime(ts, heading, new DifferentialDriveWheelSpeeds(leftEncoder.getVelocity(), rightEncoder.getVelocity()), mCameraStateMap.getLatestFieldToVehicle(), leftDist, rightDist);
                
                mPrevHeading = heading.getRadians();
                mFusedStateMap.addObservations(ts, ekfPose, new Twist2d(), new Twist2d(), 0.0);


@@ 270,8 290,10 @@ public class RobotStateEstimator extends SpartronicsSubsystem

        // We convert meters/loopinterval and radians/loopinterval to meters/sec and
        // radians/sec
        final double loopintervalToSeconds = 1 / (ts - last.timestamp);
        final Twist2d normalizedIVal = iVal.scaled(loopintervalToSeconds);
//        final double loopintervalToSeconds = 1 / (ts - last.timestamp);
//        final Twist2d normalizedIVal = iVal.scaled(loopintervalToSeconds);

        SmartDashboard.putNumber("RobotState/fusedVelocity", pVal.dx);

        if (mSLAMCamera != null)
        {


@@ 280,7 302,7 @@ public class RobotStateEstimator extends SpartronicsSubsystem
                // Sometimes (for unknown reasons) the native code can't send odometry info.
                // We throw a Java exception when this happens, but we'd like to ignore that
                // exception in this situation.
                mSLAMCamera.sendOdometry(normalizedIVal);
                mSLAMCamera.sendOdometry(pVal.dx, 0);
            }
            catch (CameraJNIException e)
            {


@@ 296,11 318,14 @@ public class RobotStateEstimator extends SpartronicsSubsystem
            return;
        }

        Logger.info("here?");

        // Callback is called from a different thread... We avoid data races
        // because RobotStateMap is thread-safe
        mSLAMCamera.stop();
        mSLAMCamera.start((CameraUpdate update) -> {
            mCameraStateMap.addObservations(Timer.getFPGATimestamp(), update.pose, update.velocity,
            // We pass two empty Twist2ds because velocity is unused
            mCameraStateMap.addObservations(Timer.getFPGATimestamp(), update.pose, new Twist2d(),
                new Twist2d(), mDrive.getTurretAngle());
            SmartDashboard.putString("RobotState/cameraConfidence", update.confidence.toString());
        });

A src/main/java/com/spartronics4915/lib/util/VecBuilder.java => src/main/java/com/spartronics4915/lib/util/VecBuilder.java +175 -0
@@ 0,0 1,175 @@
/*----------------------------------------------------------------------------*/
/* Copyright (c) 2019-2020 FIRST. All Rights Reserved.                        */
/* Open Source Software - may be modified and shared by FRC teams. The code   */
/* must be accompanied by the FIRST BSD license file in the root directory of */
/* the project.                                                               */
/*----------------------------------------------------------------------------*/

package com.spartronics4915.lib.util;

import edu.wpi.first.wpiutil.math.MatBuilder;
import edu.wpi.first.wpiutil.math.Matrix;
import edu.wpi.first.wpiutil.math.Nat;
import edu.wpi.first.wpiutil.math.Num;
import edu.wpi.first.wpiutil.math.numbers.*;

/**
 * This class is here so that we can use the new VecBuilder.fill static method prior to its release
 * in 2021; although the 2021 state space prerelease vendordep has this new method, there is already
 * a VecBuilder in WPILib 2020, which conflicts and makes it so that we need to copy this class into
 * our code.
 *
 * DELETE ME WHEN WPILib 2021 IS RELEASED!!
 */

/**
 * A specialization of {@link MatBuilder} for constructing vectors (Nx1 matrices).
 *
 * @param <N> The dimension of the vector to be constructed.
 */
public class VecBuilder<N extends Num> extends MatBuilder<N, N1> {
  /**
   * Creates a builder for an Nx1 (column) vector.
   *
   * @param rows the number of rows of the vector to build.
   */
  public VecBuilder(Nat<N> rows) {
    super(rows, Nat.N1());
  }

  /**
   * Returns a 1x1 vector containing the given elements.
   *
   * @param n1 the first element.
   * @return the filled 1x1 vector.
   */
  public static Matrix<N1, N1> fill(double n1) {
    return new MatBuilder<>(Nat.N1(), Nat.N1()).fill(n1);
  }

  /**
   * Returns a 2x1 vector containing the given elements.
   *
   * @param n1 the first element.
   * @param n2 the second element.
   * @return the filled 2x1 vector.
   */
  public static Matrix<N2, N1> fill(double n1, double n2) {
    return new MatBuilder<>(Nat.N2(), Nat.N1()).fill(n1, n2);
  }

  /**
   * Returns a 3x1 vector containing the given elements.
   *
   * @param n1 the first element.
   * @param n2 the second element.
   * @param n3 the third element.
   * @return the filled 3x1 vector.
   */
  public static Matrix<N3, N1> fill(double n1, double n2, double n3) {
    return new MatBuilder<>(Nat.N3(), Nat.N1()).fill(n1, n2, n3);
  }

  /**
   * Returns a 4x1 vector containing the given elements.
   *
   * @param n1 the first element.
   * @param n2 the second element.
   * @param n3 the third element.
   * @param n4 the fourth element.
   * @return the filled 4x1 vector.
   */
  public static Matrix<N4, N1> fill(double n1, double n2, double n3, double n4) {
    return new MatBuilder<>(Nat.N4(), Nat.N1()).fill(n1, n2, n3, n4);
  }

  /**
   * Returns a 5x1 vector containing the given elements.
   *
   * @param n1 the first element.
   * @param n2 the second element.
   * @param n3 the third element.
   * @param n4 the fourth element.
   * @param n5 the fifth element.
   * @return the filled 5x1 vector.
   */
  public static Matrix<N5, N1> fill(double n1, double n2, double n3, double n4, double n5) {
    return new MatBuilder<>(Nat.N5(), Nat.N1()).fill(n1, n2, n3, n4, n5);
  }

  /**
   * Returns a 6x1 vector containing the given elements.
   *
   * @param n1 the first element.
   * @param n2 the second element.
   * @param n3 the third element.
   * @param n4 the fourth element.
   * @param n5 the fifth element.
   * @param n6 the sixth element.
   * @return the filled 6x1 vector.
   */
  public static Matrix<N6, N1> fill(double n1, double n2, double n3, double n4, double n5,
                                    double n6) {
    return new MatBuilder<>(Nat.N6(), Nat.N1()).fill(n1, n2, n3, n4, n5, n6);
  }

  /**
   * Returns a 7x1 vector containing the given elements.
   *
   * @param n1 the first element.
   * @param n2 the second element.
   * @param n3 the third element.
   * @param n4 the fourth element.
   * @param n5 the fifth element.
   * @param n6 the sixth element.
   * @param n7 the seventh element.
   * @return the filled 7x1 vector.
   */
  public static Matrix<N7, N1> fill(double n1, double n2, double n3, double n4, double n5,
                                    double n6, double n7) {
    return new MatBuilder<>(Nat.N7(), Nat.N1()).fill(n1, n2, n3, n4, n5, n6, n7);
  }

  /**
   * Returns an 8x1 vector containing the given elements.
   *
   * @param n1 the first element.
   * @param n2 the second element.
   * @param n3 the third element.
   * @param n4 the fourth element.
   * @param n5 the fifth element.
   * @param n6 the sixth element.
   * @param n7 the seventh element.
   * @param n8 the eighth element.
   * @return the filled 8x1 vector.
   */
  public static Matrix<N8, N1> fill(double n1, double n2, double n3, double n4, double n5,
                                    double n6, double n7, double n8) {
    return new MatBuilder<>(Nat.N8(), Nat.N1()).fill(n1, n2, n3, n4, n5, n6, n7, n8);
  }

  /**
   * Returns a 9x1 vector containing the given elements.
   *
   * @param n1 the first element.
   * @param n2 the second element.
   * @param n3 the third element.
   * @param n4 the fourth element.
   * @param n5 the fifth element.
   * @param n6 the sixth element.
   * @param n7 the seventh element.
   * @param n8 the eighth element.
   * @param n9 the ninth element.
   * @return the filled 9x1 vector.
   */
  public static Matrix<N9, N1> fill(double n1, double n2, double n3, double n4, double n5,
                                    double n6, double n7, double n8, double n9) {
    return new MatBuilder<>(Nat.N9(), Nat.N1()).fill(n1, n2, n3, n4, n5, n6, n7, n8, n9);
  }

  /**
   * Returns a 10x1 vector containing the given elements.
   *
   * @param n1 the first element.
   * @param n2 the second element.
   * @param n3 the third element.
   * @param n4 the fourth element.
   * @param n5 the fifth element.
   * @param n6 the sixth element.
   * @param n7 the seventh element.
   * @param n8 the eighth element.
   * @param n9 the ninth element.
   * @param n10 the tenth element.
   * @return the filled 10x1 vector.
   */
  @SuppressWarnings("PMD.ExcessiveParameterList")
  public static Matrix<N10, N1> fill(double n1, double n2, double n3, double n4, double n5,
                                     double n6, double n7, double n8, double n9, double n10) {
    return new MatBuilder<>(Nat.N10(), Nat.N1()).fill(n1, n2, n3, n4, n5, n6, n7, n8, n9, n10);
  }
}

M src/test/java/com/spartronics4915/lib/hardware/sensors/TestT265Camera.java => src/test/java/com/spartronics4915/lib/hardware/sensors/TestT265Camera.java +1 -16
@@ 21,21 21,6 @@ public class TestT265Camera
    private boolean mDataRecieved = false;
    private final Object mLock = new Object();

    @Test
    public void testGeometry()
    {
        // final Pose2d origin = new Pose2d(5, -5, Rotation2d.fromDegrees(180));
        
        // final Pose2d currentPose = new Pose2d(1, 1, Rotation2d.fromDegrees(180));
        // System.out.println(origin.transformBy(currentPose));

        final Pose2d a = new Pose2d(0, 0, new Rotation2d());
        final Pose2d b = new Pose2d(1, 0, Rotation2d.fromDegrees(20));

        System.out.println(a.getTranslation().getDistance(b.getTranslation()));
        System.out.println(a.distance(b));
    }

    @Tag("hardwareDependant")
    @Test
    public void testNewCamera() throws InterruptedException


@@ 52,7 37,7 @@ public class TestT265Camera
            cam = new T265Camera(new Pose2d(), 0);

            // Just make sure this doesn't throw
            cam.sendOdometry(new Twist2d(0, 0, new Rotation2d()));
            cam.sendOdometry(0, 0);

            cam.start((CameraUpdate update) -> {
                synchronized (mLock)

M src/test/java/com/spartronics4915/lib/subsystems/estimator/DrivetrainEstimatorTest.java => src/test/java/com/spartronics4915/lib/subsystems/estimator/DrivetrainEstimatorTest.java +64 -66
@@ 5,54 5,48 @@ import java.util.List;
import java.util.Random;

import com.spartronics4915.lib.math.twodim.geometry.Pose2d;
import com.spartronics4915.lib.math.twodim.geometry.Pose2dWithCurvature;
import com.spartronics4915.lib.math.twodim.geometry.Rotation2d;
import com.spartronics4915.lib.math.twodim.geometry.Translation2d;
import com.spartronics4915.lib.math.twodim.trajectory.TrajectoryGenerator;
import com.spartronics4915.lib.util.Kinematics;
import com.spartronics4915.lib.util.VecBuilder;

import org.junit.jupiter.api.Test;
import org.knowm.xchart.SwingWrapper;
import org.knowm.xchart.XYChartBuilder;

import edu.wpi.first.wpilibj.Timer;
import edu.wpi.first.wpilibj.kinematics.ChassisSpeeds;
import edu.wpi.first.wpilibj.kinematics.DifferentialDriveKinematics;
import edu.wpi.first.wpilibj.math.StateSpaceUtils;
import edu.wpi.first.wpiutil.math.MatBuilder;
import edu.wpi.first.wpiutil.math.Matrix;
import edu.wpi.first.wpiutil.math.Nat;
import edu.wpi.first.wpiutil.math.numbers.N1;
import edu.wpi.first.wpiutil.math.numbers.N3;

public class DrivetrainEstimatorTest
{
    @Test
    public void testEstimator()
    {
        var stateStdDevs = new MatBuilder<>(Nat.N3(), Nat.N1()).fill(0.02, 0.02, 0.01);
        var measurementStdDevs = new MatBuilder<>(Nat.N6(), Nat.N1()).fill(0.1, 0.1, 0.1, 0.005,
            0.005, 0.002);
        var est = new DrivetrainEstimator(stateStdDevs, measurementStdDevs, 3, new Pose2d());

        final double dt = 0.01;
        final double dt = 0.02;
        final double visionUpdateRate = 0.2;

        var stateStdDevs = VecBuilder.fill(0.02, 0.02, 0.01, 0.1, 0.1);
        var localMeasurementStdDevs = VecBuilder.fill(0.5, 0.5, 0.5, 0.5, 0.5, 0.05);
        var globalMeasurementStdDevs = VecBuilder.fill(5, 5, 1);
        var est = new DrivetrainEstimator(
                new Rotation2d(), new Pose2d(),
                stateStdDevs, localMeasurementStdDevs, globalMeasurementStdDevs,
                dt
        );

        var traj = TrajectoryGenerator.defaultTrajectoryGenerator.generateTrajectory(
            List.of(
                new Pose2d(),
                new Pose2d(3, 3, new Rotation2d())
                // new Pose2d(), new Pose2d(20, 20, Rotation2d.fromDegrees(0)),
                // new Pose2d(23, 23, Rotation2d.fromDegrees(173)),
                // new Pose2d(54, 54, new Rotation2d())
            ),
            List.of(),
            0, 0,
            1, 1,
            false);
                List.of(
                        new Pose2d(),
                        new Pose2d(3, 3, new Rotation2d())
                        // new Pose2d(), new Pose2d(20, 20, Rotation2d.fromDegrees(0)),
                        // new Pose2d(23, 23, Rotation2d.fromDegrees(173)),
                        // new Pose2d(54, 54, new Rotation2d())
                ),
                List.of(),
                0, 0,
                1, 1,
                false);

        var kinematics = new DifferentialDriveKinematics(1);
        Pose2d lastPose = null;

        List<Double> trajXs = new ArrayList<>();
        List<Double> trajYs = new ArrayList<>();


@@ 63,66 57,68 @@ public class DrivetrainEstimatorTest
        List<Double> visionXs = new ArrayList<>();
        List<Double> visionYs = new ArrayList<>();

        var rand = new Random();
        final double steadyStateErrorX = 1.0;
        final double steadyStateErrorY = 1.0;
        var rand = new Random(4915);

        double distanceLeft = 0.0;
        double distanceRight = 0.0;

        double t = 0.0;
        Pose2d lastVisionUpdate = null;
        double lastVisionUpdateT = Double.NEGATIVE_INFINITY;
        double lastVisionUpdateTime = Double.NEGATIVE_INFINITY;

        double maxError = Double.NEGATIVE_INFINITY;
        double errorSum = 0;
        while (t <= traj.getTotalTime())
        {
            t += dt;

            var groundtruthState = traj.sample(t);
            var input = kinematics
                .toWheelSpeeds(new ChassisSpeeds(groundtruthState.state.velocity, 0.0,
                    // ds/dt * dtheta/ds = dtheta/dt
                    groundtruthState.state.velocity * groundtruthState.state.state.getCurvature()));

            Matrix<N3, N1> u = new MatBuilder<>(Nat.N3(), Nat.N1()).fill(
                input.leftMetersPerSecond * dt,
                input.rightMetersPerSecond * dt, 0.0);
            if (lastPose != null)
            {
                u.set(2, 0,
                    groundtruthState.state.state.getRotation().getRadians()
                        - lastPose.getRotation().getRadians());
            }
            u = u.plus(StateSpaceUtils.makeWhiteNoiseVector(Nat.N3(),
                new MatBuilder<>(Nat.N3(), Nat.N1()).fill(0.002, 0.002, 0.001)));
            lastPose = groundtruthState.state.state.getPose();

            Pose2d realPose = groundtruthState.state.state.getPose();

            if (lastVisionUpdateT + visionUpdateRate < t)
            if (lastVisionUpdateTime + visionUpdateRate < t)
            {
                if (lastVisionUpdate != null)
                {
                    est.addVisionMeasurement(lastVisionUpdate, lastVisionUpdateT);
                    est.addVisionMeasurement(lastVisionUpdate, lastVisionUpdateTime);
                }

                lastVisionUpdateT = t;
                lastVisionUpdate = realPose.getPose()
                    .transformBy(new Pose2d(rand.nextGaussian() * 0.05, rand.nextGaussian() * 0.05,
                        Rotation2d.fromRadians(rand.nextGaussian() * 0.002)));
                lastVisionUpdateTime = t;
                lastVisionUpdate = new Pose2d(
                    realPose.getTranslation().getX() + rand.nextGaussian() * 0.5,
                    realPose.getTranslation().getY() + rand.nextGaussian() * 0.5,
                    realPose.getRotation().getRadians() + rand.nextGaussian() * 0.01
                );

                visionXs.add(lastVisionUpdate.getTranslation().getX());
                visionYs.add(lastVisionUpdate.getTranslation().getY());
            }

            double dist = realPose.getTranslation().getDistance(new Translation2d());
            Pose2d measurementVSlam = realPose.getPose()
                .transformBy(new Pose2d(new Translation2d(steadyStateErrorX * (dist / 76.0),
                    steadyStateErrorY * (dist / 76.0)), new Rotation2d()))
                .transformBy(new Pose2d(rand.nextGaussian() * 0.05, rand.nextGaussian() * 0.05,
                    Rotation2d.fromRadians(rand.nextGaussian() * 0.001)));
            var xHat = est.update(measurementVSlam, u.get(0, 0), u.get(1, 0), u.get(2, 0), t);
            Pose2d measurementVSlam = new Pose2d(
                realPose.getTranslation().getX() + rand.nextGaussian() * 0.05,
                realPose.getTranslation().getY() + rand.nextGaussian() * 0.05,
                realPose.getRotation().getRadians() + rand.nextGaussian() * 0.5
            );

            var input = kinematics
                    .toWheelSpeeds(new ChassisSpeeds(groundtruthState.state.velocity, 0.0,
                            // ds/dt * dtheta/ds = dtheta/dt
                            groundtruthState.state.velocity * groundtruthState.state.state.getCurvature()));

            input.leftMetersPerSecond += rand.nextGaussian() * 0.02;
            input.rightMetersPerSecond += rand.nextGaussian() * 0.02;

            distanceLeft += input.leftMetersPerSecond * dt;
            distanceRight += input.rightMetersPerSecond * dt;

            var xHat = est.updateWithTime(
                    t,
                    realPose.getRotation(),//.rotateBy(Rotation2d.fromRadians(rand.nextGaussian() * 0.05)),
                    input,
                    measurementVSlam,
                    distanceLeft,
                    distanceRight
            );

            double error = groundtruthState.state.state.getTranslation()
                .getDistance(xHat.getTranslation());
                    .getDistance(xHat.getTranslation());
            if (error > maxError)
            {
                maxError = error;


@@ 135,6 131,8 @@ public class DrivetrainEstimatorTest
            observerYs.add(xHat.getTranslation().getY());
            slamXs.add(measurementVSlam.getTranslation().getX());
            slamYs.add(measurementVSlam.getTranslation().getY());

            t += dt;
        }

        System.out.println("Mean error (meters): " + errorSum / (traj.getTotalTime() / dt));

M vendordeps/wpilibStateSpace.json => vendordeps/wpilibStateSpace.json +2 -2
@@ 4,9 4,9 @@
    "version": "2020.2.5",
    "uuid": "82b203c8-9ae1-4031-914c-20460fa41cb1",
    "mavenUrls": [
        "https://maven.meshnet0.com/releases/"
        "https://maven.0x778.tk/"
    ],
    "jsonUrl": "https://maven.meshnet0.com/wpilibStateSpace-latest.json",
    "jsonUrl": "https://maven.0x778.tk/edu/wpi/first/wpilibStateSpace/wpilibStateSpace.json",
    "cppDependencies": [
        {
            "groupId": "edu.wpi.first.wpilibStateSpace",